diff --git a/.github/Dockerfile b/.github/Dockerfile index d56ec6a59f2d..59d46fd169d9 100644 --- a/.github/Dockerfile +++ b/.github/Dockerfile @@ -15,7 +15,8 @@ RUN apt-get update && \ openjdk-17-jdk-headless \ openjdk-21-jdk-headless && \ (curl -fsSL https://deb.nodesource.com/setup_18.x | bash -) && \ - apt-get install -y nodejs + apt-get install -y nodejs && \ + apt-get install -y zip unzip # Install sbt diff --git a/.github/dependabot.yml b/.github/dependabot.yml index f9cb18a0ad00..cce85f675a12 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -8,3 +8,12 @@ updates: - hamzaremmal reviewers: - hamzaremmal + - package-ecosystem: bundler + directory: '/docs/_spec' + schedule: + interval: weekly + assignees: + - hamzaremmal + reviewers: + - hamzaremmal + diff --git a/.github/workflows/build-chocolatey.yml b/.github/workflows/build-chocolatey.yml new file mode 100644 index 000000000000..9de87d8e5ad6 --- /dev/null +++ b/.github/workflows/build-chocolatey.yml @@ -0,0 +1,57 @@ +################################################################################################### +### THIS IS A REUSABLE WORKFLOW TO BUILD SCALA WITH CHOCOLATEY ### +### HOW TO USE: ### +### ### +### NOTE: ### +### ### +################################################################################################### + + +name: Build 'scala' Chocolatey Package +run-name: Build 'scala' (${{ inputs.version }}) Chocolatey Package + +on: + workflow_call: + inputs: + version: + required: true + type : string + url: + required: true + type : string + digest: + required: true + type : string + +jobs: + build: + runs-on: windows-latest + steps: + - uses: actions/checkout@v4 + - name: Replace the version placeholder + uses: richardrigutins/replace-in-files@v2 + with: + files: ./pkgs/chocolatey/scala.nuspec + search-text: '@LAUNCHER_VERSION@' + replacement-text: ${{ inputs.version }} + - name: Replace the URL placeholder + uses: richardrigutins/replace-in-files@v2 + with: + files: ./pkgs/chocolatey/tools/chocolateyInstall.ps1 + search-text: '@LAUNCHER_URL@' + replacement-text: ${{ inputs.url }} + - name: Replace the CHECKSUM placeholder + uses: richardrigutins/replace-in-files@v2 + with: + files: ./pkgs/chocolatey/tools/chocolateyInstall.ps1 + search-text: '@LAUNCHER_SHA256@' + replacement-text: ${{ inputs.digest }} + - name: Build the Chocolatey package (.nupkg) + run: choco pack ./pkgs/chocolatey/scala.nuspec --out ./pkgs/chocolatey + - name: Upload the Chocolatey package to GitHub + uses: actions/upload-artifact@v4 + with: + name: scala.nupkg + path: ./pkgs/chocolatey/scala.${{ inputs.version }}.nupkg + if-no-files-found: error + \ No newline at end of file diff --git a/.github/workflows/build-msi.yml b/.github/workflows/build-msi.yml new file mode 100644 index 000000000000..14838c589d6a --- /dev/null +++ b/.github/workflows/build-msi.yml @@ -0,0 +1,38 @@ +################################################################################################### +### THIS IS A REUSABLE WORKFLOW TO BUILD SCALA MSI ### +### HOW TO USE: ### +### - THE RELEASE WORKFLOW SHOULD CALL THIS WORKFLOW ### +### - IT WILL UPLOAD TO GITHUB THE MSI FILE FOR SCALA UNDER THE 'scala.msi' NAME ### +### ### +### NOTE: ### +### - WE SHOULD BUILD SCALA USING JAVA 8 ### +################################################################################################### + +name: Build the MSI Package + +on: + workflow_call: + +env: + # Release only happends when triggering CI by pushing tag + RELEASEBUILD: ${{ 
startsWith(github.event.ref, 'refs/tags/') && 'yes' || 'no' }} + +jobs: + build: + runs-on: windows-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-java@v4 + with: + distribution: 'adopt' + java-version: '8' + cache: 'sbt' + - name: Build MSI package + run: sbt 'dist-win-x86_64/Windows/packageBin' + env: + DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }} + - name: Upload MSI Artifact + uses: actions/upload-artifact@v4 + with: + name: scala.msi + path: ./dist/win-x86_64/target/windows/scala.msi diff --git a/.github/workflows/build-sdk.yml b/.github/workflows/build-sdk.yml new file mode 100644 index 000000000000..cd111df1a083 --- /dev/null +++ b/.github/workflows/build-sdk.yml @@ -0,0 +1,117 @@ +################################################################################################### +### THIS IS A REUSABLE WORKFLOW TO BUILD THE SCALA LAUNCHERS ### +### HOW TO USE: ### +### - THSI WORKFLOW WILL PACKAGE THE ALL THE LAUNCHERS AND UPLOAD THEM TO GITHUB ARTIFACTS ### +### ### +### NOTE: ### +### - SEE THE WORFLOW FOR THE NAMES OF THE ARTIFACTS ### +################################################################################################### + + +name: Build Scala Launchers +run-name: Build Scala Launchers + +on: + workflow_call: + inputs: + java-version: + type : string + required : true + outputs: + universal-id: + description: ID of the `universal` package from GitHub Artifacts (Authentication Required) + value : ${{ jobs.build.outputs.universal-id }} + linux-x86_64-id: + description: ID of the `linux x86-64` package from GitHub Artifacts (Authentication Required) + value : ${{ jobs.build.outputs.linux-x86_64-id }} + linux-aarch64-id: + description: ID of the `linux aarch64` package from GitHub Artifacts (Authentication Required) + value : ${{ jobs.build.outputs.linux-aarch64-id }} + mac-x86_64-id: + description: ID of the `mac x86-64` package from GitHub Artifacts (Authentication Required) + value : ${{ jobs.build.outputs.mac-x86_64-id }} + mac-aarch64-id: + description: ID of the `mac aarch64` package from GitHub Artifacts (Authentication Required) + value : ${{ jobs.build.outputs.mac-aarch64-id }} + win-x86_64-id: + description: ID of the `win x86-64` package from GitHub Artifacts (Authentication Required) + value : ${{ jobs.build.outputs.win-x86_64-id }} + win-x86_64-digest: + description: The SHA256 of the uploaded artifact (`win x86-64`) + value : ${{ jobs.build.outputs.win-x86_64-digest }} + + +jobs: + build: + runs-on: ubuntu-latest + outputs: + universal-id : ${{ steps.universal.outputs.artifact-id }} + linux-x86_64-id : ${{ steps.linux-x86_64.outputs.artifact-id }} + linux-aarch64-id : ${{ steps.linux-aarch64.outputs.artifact-id }} + mac-x86_64-id : ${{ steps.mac-x86_64.outputs.artifact-id }} + mac-aarch64-id : ${{ steps.mac-aarch64.outputs.artifact-id }} + win-x86_64-id : ${{ steps.win-x86_64.outputs.artifact-id }} + win-x86_64-digest: ${{ steps.win-x86_64-digest.outputs.digest }} + env: + DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }} + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-java@v4 + with: + distribution: temurin + java-version: ${{ inputs.java-version }} + cache : sbt + - uses: sbt/setup-sbt@v1 + - name: Build and pack the SDK (universal) + run : ./project/scripts/sbt dist/Universal/stage + - name: Build and pack the SDK (linux x86-64) + run : ./project/scripts/sbt dist-linux-x86_64/Universal/stage + - name: Build and pack the SDK (linux aarch64) + run : ./project/scripts/sbt 
dist-linux-aarch64/Universal/stage + - name: Build and pack the SDK (mac x86-64) + run : ./project/scripts/sbt dist-mac-x86_64/Universal/stage + - name: Build and pack the SDK (mac aarch64) + run : ./project/scripts/sbt dist-mac-aarch64/Universal/stage + - name: Build and pack the SDK (win x86-64) + run : ./project/scripts/sbt dist-win-x86_64/Universal/stage + - name: Upload zip archive to GitHub Artifact (universal) + uses: actions/upload-artifact@v4 + id : universal + with: + path: ./dist/target/universal/stage + name: scala3-universal + - name: Upload zip archive to GitHub Artifact (linux x86-64) + uses: actions/upload-artifact@v4 + id : linux-x86_64 + with: + path: ./dist/linux-x86_64/target/universal/stage + name: scala3-x86_64-pc-linux + - name: Upload zip archive to GitHub Artifact (linux aarch64) + uses: actions/upload-artifact@v4 + id : linux-aarch64 + with: + path: ./dist/linux-aarch64/target/universal/stage + name: scala3-aarch64-pc-linux + - name: Upload zip archive to GitHub Artifact (mac x86-64) + uses: actions/upload-artifact@v4 + id : mac-x86_64 + with: + path: ./dist/mac-x86_64/target/universal/stage + name: scala3-x86_64-apple-darwin + - name: Upload zip archive to GitHub Artifact (mac aarch64) + uses: actions/upload-artifact@v4 + id : mac-aarch64 + with: + path: ./dist/mac-aarch64/target/universal/stage + name: scala3-aarch64-apple-darwin + - name: Upload zip archive to GitHub Artifact (win x86-64) + uses: actions/upload-artifact@v4 + id : win-x86_64 + with: + path: ./dist/win-x86_64/target/universal/stage + name: scala3-x86_64-pc-win32 + - name: Compute SHA256 of the uploaded artifact (win x86-64) + id : win-x86_64-digest + run : | + curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -o artifact.zip -L https://api.github.com/repos/scala/scala3/actions/artifacts/${{ steps.win-x86_64.outputs.artifact-id }}/zip + echo "digest=$(sha256sum artifact.zip | cut -d " " -f 1)" >> "$GITHUB_OUTPUT" diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 2747830fb7d6..5931219f472a 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -28,6 +28,7 @@ on: env: DOTTY_CI_RUN: true + DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }} # In this file, we set `--cpu-shares 4096` on every job. This might seem useless # since it means that every container has the same weight which should be @@ -47,7 +48,7 @@ jobs: test_non_bootstrapped: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -87,8 +88,8 @@ jobs: run: cp -vf .github/workflows/repositories /root/.sbt/ ; true - name: Test - # DON'T add dist/pack! - # Adding dist/pack bootstraps the compiler + # DON'T add dist/Universal/stage! 
+ # Adding dist/Universal/stage bootstraps the compiler # which undermines the point of these tests: # to quickly run the tests without the cost of bootstrapping # and also to run tests when the compiler doesn't bootstrap @@ -99,7 +100,7 @@ jobs: test: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -159,7 +160,7 @@ jobs: test_scala2_library_tasty: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -227,11 +228,11 @@ jobs: uses: actions/checkout@v4 - name: Test - run: sbt ";scala3-bootstrapped/compile; scala3-bootstrapped/testCompilation" + run: sbt ";scala3-bootstrapped/compile; scala3-bootstrapped/testCompilation; scala3-presentation-compiler/test; scala3-language-server/test" shell: cmd - name: build binary - run: sbt "dist-win-x86_64/pack" & bash -version + run: sbt "dist-win-x86_64/Universal/stage" & bash -version shell: cmd - name: cygwin tests @@ -271,7 +272,7 @@ jobs: uses: actions/checkout@v4 - name: build binary - run: sbt "dist-win-x86_64/pack" + run: sbt "dist-win-x86_64/Universal/stage" shell: cmd - name: Test @@ -286,7 +287,7 @@ jobs: name: MiMa runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -337,7 +338,7 @@ jobs: community_build_a: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -394,7 +395,7 @@ jobs: community_build_b: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -451,7 +452,7 @@ jobs: community_build_c: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -508,7 +509,7 @@ jobs: test_sbt: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -553,7 +554,7 @@ jobs: test_java8: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -613,7 +614,7 @@ jobs: publish_nightly: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -662,6 +663,14 @@ jobs: echo "This build version: $ver" echo "THISBUILD_VERSION=$ver" >> $GITHUB_ENV + - name: Check is version matching pattern + shell: bash + run: | + if ! 
grep -Eo "3\.[0-9]+\.[0-9]+-RC[0-9]+-bin-[0-9]{8}-[a-zA-Z0-9]{7}-NIGHTLY" <<< "${{ env.THISBUILD_VERSION }}"; then + echo "Version used by compiler to publish nightly release does not match expected pattern" + exit 1 + fi + - name: Check whether not yet published id: not_yet_published continue-on-error: true @@ -676,7 +685,7 @@ jobs: nightly_documentation: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -686,14 +695,6 @@ jobs: if: "(github.event_name == 'schedule' || github.event_name == 'workflow_dispatch') && github.repository == 'scala/scala3'" env: NIGHTLYBUILD: yes - DOTTY_WEBSITE_BOT_TOKEN: ${{ secrets.BOT_TOKEN }} # If you need to change this: - # Generate one at https://github.com/settings/tokens - # Make sure you have the write permissions to the repo: https://github.com/lampepfl/dotty-website - # Currently unused token, no need to deploy anything to docs.scala-lang - # DOCS_SCALALANG_BOT_TOKEN: ${{ secrets.DOCS_SCALALANG_BOT_TOKEN }} # If you need to change this: - # Generate one at https://github.com/settings/tokens - # Make sure you have the write permissions to the repo: https://github.com/scala/docs.scala-lang - steps: - name: Reset existing repo run: | @@ -717,26 +718,26 @@ jobs: git config --global --add safe.directory /__w/scala3/scala3 ./project/scripts/genDocs -doc-snapshot - - name: Deploy Website to dotty-website + - name: Deploy Website to https://dotty.epfl.ch uses: peaceiris/actions-gh-pages@v4 with: - personal_token: ${{ env.DOTTY_WEBSITE_BOT_TOKEN }} + personal_token: ${{ secrets.DOTTYBOT_TOKEN }} publish_dir: docs/_site - external_repository: lampepfl/dotty-website - publish_branch: gh-pages + external_repository: scala/dotty.epfl.ch + publish_branch: main publish_release: permissions: - contents: write # for actions/create-release to create a release + contents: write # for GH CLI to create a release runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt - ${{ github.workspace }}/../../cache/ivy:/root/.ivy2/cache - ${{ github.workspace }}/../../cache/general:/root/.cache - needs: [test_non_bootstrapped, test, mima, community_build_a, community_build_b, community_build_c, test_sbt, test_java8] + needs: [test_non_bootstrapped, test, mima, community_build_a, community_build_b, community_build_c, test_sbt, test_java8, build-sdk-package, build-msi-package] if: "github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/')" @@ -769,230 +770,91 @@ jobs: - name: Add SBT proxy repositories run: cp -vf .github/workflows/repositories /root/.sbt/ ; true + # Extract the release tag - name: Extract the release tag run : echo "RELEASE_TAG=${GITHUB_REF#*refs/tags/}" >> $GITHUB_ENV - # BUILD THE SDKs - - name: Build and pack the SDK (universal) - run : | - ./project/scripts/sbt dist/packArchive - sha256sum dist/target/scala3-* > dist/target/sha256sum.txt - - name: Build and pack the SDK (linux x86-64) - run : | - ./project/scripts/sbt dist-linux-x86_64/packArchive - sha256sum dist/linux-x86_64/target/scala3-* > dist/linux-x86_64/target/sha256sum.txt - - name: Build and pack the SDK (linux aarch64) - run : | - ./project/scripts/sbt dist-linux-aarch64/packArchive - sha256sum dist/linux-aarch64/target/scala3-* > dist/linux-aarch64/target/sha256sum.txt - - name: Build and pack 
the SDK (mac x86-64) - run : | - ./project/scripts/sbt dist-mac-x86_64/packArchive - sha256sum dist/mac-x86_64/target/scala3-* > dist/mac-x86_64/target/sha256sum.txt - - name: Build and pack the SDK (mac aarch64) + + - name: Check compiler version + shell: bash run : | - ./project/scripts/sbt dist-mac-aarch64/packArchive - sha256sum dist/mac-aarch64/target/scala3-* > dist/mac-aarch64/target/sha256sum.txt - - name: Build and pack the SDK (win x86-64) + version=$(./project/scripts/sbt "print scala3-compiler-bootstrapped/version" | tail -n1) + echo "This build version: ${version}" + if [ "${version}" != "${{ env.RELEASE_TAG }}" ]; then + echo "Compiler version for this build '${version}', does not match tag: ${{ env.RELEASE_TAG }}" + exit 1 + fi + + - name: Prepare the SDKs + shell: bash run : | - ./project/scripts/sbt dist-win-x86_64/packArchive - sha256sum dist/win-x86_64/target/scala3-* > dist/win-x86_64/target/sha256sum.txt + prepareSDK() { + distroSuffix="$1" + sbtProject="$2" + distDir="$3" + + # Build binaries + ./project/scripts/sbt "all ${sbtProject}/Universal/packageBin ${sbtProject}/Universal/packageZipTarball" + + artifactName="scala3-${{ env.RELEASE_TAG }}${distroSuffix}" + + # Caluclate SHA for each of archive files + for file in "${artifactName}.zip" "${artifactName}.tar.gz"; do + mv ${distDir}/target/universal/$file $file + sha256sum "${file}" > "${file}.sha256" + done + } + prepareSDK "" "dist" "./dist/" + prepareSDK "-aarch64-pc-linux" "dist-linux-aarch64" "./dist/linux-aarch64/" + prepareSDK "-x86_64-pc-linux" "dist-linux-x86_64" "./dist/linux-x86_64/" + prepareSDK "-aarch64-apple-darwin" "dist-mac-aarch64" "./dist/mac-aarch64/" + prepareSDK "-x86_64-apple-darwin" "dist-mac-x86_64" "./dist/mac-x86_64/" + prepareSDK "-x86_64-pc-win32" "dist-win-x86_64" "./dist/win-x86_64/" + + - name: Download MSI package + uses: actions/download-artifact@v4 + with: + name: scala.msi + path: . 
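Both the `Prepare the SDKs` step above and the `Prepare MSI package` step that follows write a `<artifact>.sha256` file next to each release asset, using the `sha256sum` output convention (hex digest, two spaces, file name). As a rough, hedged illustration of that format only, here is a minimal Scala sketch; it is not part of the workflow, and the entry-point name is made up:

```scala
// Sketch only: reproduces the `sha256sum "${file}" > "${file}.sha256"` convention
// used by the release steps (hex digest, two spaces, file name, trailing newline).
import java.nio.file.{Files, Path}
import java.security.MessageDigest

@main def writeSha256(fileName: String): Unit =
  val bytes  = Files.readAllBytes(Path.of(fileName))
  val digest = MessageDigest.getInstance("SHA-256").digest(bytes)
  val hex    = digest.map(b => f"${b & 0xff}%02x").mkString
  Files.writeString(Path.of(s"$fileName.sha256"), s"$hex  $fileName\n")
```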
+ - name: Prepare MSI package + shell: bash + run: | + msiInstaller="scala3-${{ env.RELEASE_TAG }}.msi" + mv scala.msi "${msiInstaller}" + sha256sum "${msiInstaller}" > "${msiInstaller}.sha256" + + - name: Install GH CLI + uses: dev-hanz-ops/install-gh-cli-action@v0.2.0 + with: + gh-cli-version: 2.59.0 + # Create the GitHub release - name: Create GitHub Release - id: create_gh_release - uses: actions/create-release@latest env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # This token is provided by Actions, you do not need to create your own token - with: - tag_name: ${{ github.ref }} - release_name: ${{ github.ref }} - body_path: ./changelogs/${{ env.RELEASE_TAG }}.md - draft: true - prerelease: ${{ contains(env.RELEASE_TAG, '-') }} - - - name: Upload zip archive to GitHub Release (universal) - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/target/scala3-${{ env.RELEASE_TAG }}.zip - asset_name: scala3-${{ env.RELEASE_TAG }}.zip - asset_content_type: application/zip - - name: Upload tar.gz archive to GitHub Release (universal) - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/target/scala3-${{ env.RELEASE_TAG }}.tar.gz - asset_name: scala3-${{ env.RELEASE_TAG }}.tar.gz - asset_content_type: application/gzip - - - name: Upload zip archive to GitHub Release (linux x86-64) - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/linux-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.zip - asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.zip - asset_content_type: application/zip - - name: Upload tar.gz archive to GitHub Release (linux x86-64) - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/linux-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.tar.gz - asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.tar.gz - asset_content_type: application/gzip - - - name: Upload zip archive to GitHub Release (linux aarch64) - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/linux-aarch64/target/scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.zip - asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.zip - asset_content_type: application/zip - - name: Upload tar.gz archive to GitHub Release (linux aarch64) - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/linux-aarch64/target/scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.tar.gz - asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.tar.gz - asset_content_type: application/gzip - - - name: Upload zip archive to GitHub Release (mac x86-64) - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/mac-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.zip - asset_name: scala3-${{ 
env.RELEASE_TAG }}-x86_64-apple-darwin.zip - asset_content_type: application/zip - - name: Upload tar.gz archive to GitHub Release (mac x86-64) - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/mac-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.tar.gz - asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.tar.gz - asset_content_type: application/gzip - - - name: Upload zip archive to GitHub Release (mac aarch64) - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/mac-aarch64/target/scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.zip - asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.zip - asset_content_type: application/zip - - name: Upload tar.gz archive to GitHub Release (mac aarch64) - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/mac-aarch64/target/scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.tar.gz - asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.tar.gz - asset_content_type: application/gzip - - - name: Upload zip archive to GitHub Release (win x86-64) - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/win-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.zip - asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.zip - asset_content_type: application/zip - - name: Upload tar.gz archive to GitHub Release (win x86-64) - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/win-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.tar.gz - asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.tar.gz - asset_content_type: application/gzip - - - - name: Upload SHA256 sum of the release artefacts to GitHub Release (universal) - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/target/sha256sum.txt - asset_name: sha256sum.txt - asset_content_type: text/plain - - - name: Upload SHA256 sum of the release artefacts to GitHub Release (linux x86-64) - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/linux-x86_64/target/sha256sum.txt - asset_name: sha256sum-x86_64-pc-linux.txt - asset_content_type: text/plain - - - name: Upload SHA256 sum of the release artefacts to GitHub Release (linux aarch64) - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/linux-aarch64/target/sha256sum.txt - asset_name: sha256sum-aarch64-pc-linux.txt - asset_content_type: text/plain - - - name: Upload SHA256 sum of the release artefacts to GitHub Release (mac x86-64) - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ 
steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/mac-x86_64/target/sha256sum.txt - asset_name: sha256sum-x86_64-apple-darwin.txt - asset_content_type: text/plain - - - name: Upload SHA256 sum of the release artefacts to GitHub Release (mac aarch64) - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/mac-aarch64/target/sha256sum.txt - asset_name: sha256sum-aarch64-apple-darwin.txt - asset_content_type: text/plain - - - name: Upload SHA256 sum of the release artefacts to GitHub Release (win x86-64) - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/win-x86_64/target/sha256sum.txt - asset_name: sha256sum-x86_64-pc-win32.txt - asset_content_type: text/plain + shell: bash + run: | + git config --global --add safe.directory /__w/scala3/scala3 + gh release create \ + --draft \ + --title "${{ env.RELEASE_TAG }}" \ + --notes-file ./changelogs/${{ env.RELEASE_TAG }}.md \ + --latest=${{ !contains(env.RELEASE_TAG, '-RC') }} \ + --prerelease=${{ contains(env.RELEASE_TAG, '-RC') }} \ + --verify-tag ${{ env.RELEASE_TAG }} \ + scala3-${{ env.RELEASE_TAG }}*.zip \ + scala3-${{ env.RELEASE_TAG }}*.tar.gz \ + scala3-${{ env.RELEASE_TAG }}*.sha256 \ + scala3-${{ env.RELEASE_TAG }}.msi - name: Publish Release - run: ./project/scripts/sbtPublish ";project scala3-bootstrapped ;publishSigned ;sonatypeBundleRelease" + run: ./project/scripts/sbtPublish ";project scala3-bootstrapped ;publishSigned ;sonatypeBundleUpload" open_issue_on_failure: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 needs: [nightly_documentation, test_windows_full] # The `failure()` expression is true iff at least one of the dependencies # of this job (including transitive dependencies) has failed. @@ -1008,3 +870,45 @@ jobs: WORKFLOW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} with: filename: .github/workflows/issue_nightly_failed.md + + build-msi-package: + uses: ./.github/workflows/build-msi.yml + if : + (github.event_name == 'pull_request' && contains(github.event.pull_request.body, '[test_msi]')) || + (github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/')) + + test-msi-package: + uses: ./.github/workflows/test-msi.yml + needs: [build-msi-package] + with: + # Ensure that version starts with prefix 3. + # In the future it can be adapted to compare with git tag or version set in the project/Build.scala + version: "3." 
+ java-version: 8 + + build-sdk-package: + uses: ./.github/workflows/build-sdk.yml + if: + (github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]')) || + (github.event_name == 'workflow_dispatch' && github.repository == 'scala/scala3') || + (github.event_name == 'schedule' && github.repository == 'scala/scala3') || + github.event_name == 'push' || + github.event_name == 'merge_group' + with: + java-version: 8 + + build-chocolatey-package: + uses: ./.github/workflows/build-chocolatey.yml + needs: [ build-sdk-package ] + with: + version: 3.6.0-local # TODO: FIX THIS + url : https://api.github.com/repos/scala/scala3/actions/artifacts/${{ needs.build-sdk-package.outputs.win-x86_64-id }}/zip + digest : ${{ needs.build-sdk-package.outputs.win-x86_64-digest }} + + test-chocolatey-package: + uses: ./.github/workflows/test-chocolatey.yml + with: + version : 3.6.0-local # TODO: FIX THIS + java-version: 8 + if: github.event_name == 'pull_request' && contains(github.event.pull_request.body, '[test_chocolatey]') + needs: [ build-chocolatey-package ] diff --git a/.github/workflows/dependency-graph.yml b/.github/workflows/dependency-graph.yml index e96c3efbc8aa..6a3f8174b2d7 100644 --- a/.github/workflows/dependency-graph.yml +++ b/.github/workflows/dependency-graph.yml @@ -9,4 +9,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - uses: scalacenter/sbt-dependency-submission@v2 + - uses: sbt/setup-sbt@v1 + - uses: scalacenter/sbt-dependency-submission@v3 + env: + DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }} diff --git a/.github/workflows/language-reference.yaml b/.github/workflows/language-reference.yaml index 786785eaa4a2..d79f4d029a77 100644 --- a/.github/workflows/language-reference.yaml +++ b/.github/workflows/language-reference.yaml @@ -36,6 +36,7 @@ jobs: distribution: 'temurin' java-version: 17 cache: 'sbt' + - uses: sbt/setup-sbt@v1 - name: Generate reference documentation and test links run: | @@ -43,6 +44,8 @@ jobs: ./project/scripts/sbt "scaladoc/generateReferenceDocumentation --no-regenerate-expected-links" ./project/scripts/docsLinksStability ./scaladoc/output/reference ./project/scripts/expected-links/reference-expected-links.txt cd .. 
+ env: + DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }} - name: Push changes to scala3-reference-docs if: github.event_name == 'push' diff --git a/.github/workflows/launchers.yml b/.github/workflows/launchers.yml index 818e3b72b06b..4ee07e4bfcc9 100644 --- a/.github/workflows/launchers.yml +++ b/.github/workflows/launchers.yml @@ -3,10 +3,15 @@ on: pull_request: workflow_dispatch: +env: + DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }} + jobs: linux-x86_64: name: Deploy and Test on Linux x64 architecture runs-on: ubuntu-latest + if: (github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') ) || + (github.event_name == 'workflow_dispatch' && github.repository == 'scala/scala3' ) steps: - uses: actions/checkout@v4 - name: Set up JDK 17 @@ -15,6 +20,7 @@ jobs: java-version: '17' distribution: 'temurin' cache: 'sbt' + - uses: sbt/setup-sbt@v1 - name: Build and test launcher command run: ./project/scripts/native-integration/bashTests env: @@ -32,9 +38,7 @@ jobs: java-version: '17' distribution: 'temurin' cache: 'sbt' - # https://github.com/actions/runner-images/issues/9369 - - name: Install sbt - run: brew install sbt + - uses: sbt/setup-sbt@v1 - name: Build and test launcher command run: ./project/scripts/native-integration/bashTests env: @@ -43,6 +47,8 @@ jobs: mac-x86_64: name: Deploy and Test on Mac x64 architecture runs-on: macos-13 + if: (github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') ) || + (github.event_name == 'workflow_dispatch' && github.repository == 'scala/scala3' ) steps: - uses: actions/checkout@v4 - name: Set up JDK 17 @@ -51,9 +57,7 @@ jobs: java-version: '17' distribution: 'temurin' cache: 'sbt' - # https://github.com/actions/runner-images/issues/9369 - - name: Install sbt - run: brew install sbt + - uses: sbt/setup-sbt@v1 - name: Build and test launcher command run: ./project/scripts/native-integration/bashTests env: @@ -62,6 +66,8 @@ jobs: mac-aarch64: name: Deploy and Test on Mac ARM64 architecture runs-on: macos-latest + if: (github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') ) || + (github.event_name == 'workflow_dispatch' && github.repository == 'scala/scala3' ) steps: - uses: actions/checkout@v4 - name: Set up JDK 17 @@ -70,9 +76,7 @@ jobs: java-version: '17' distribution: 'temurin' cache: 'sbt' - # https://github.com/actions/runner-images/issues/9369 - - name: Install sbt - run: brew install sbt + - uses: sbt/setup-sbt@v1 - name: Build and test launcher command run: ./project/scripts/native-integration/bashTests env: @@ -81,6 +85,8 @@ jobs: win-x86_64: name: Deploy and Test on Windows x64 architecture runs-on: windows-latest + if: (github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') ) || + (github.event_name == 'workflow_dispatch' && github.repository == 'scala/scala3' ) steps: - uses: actions/checkout@v4 - name: Set up JDK 17 @@ -89,8 +95,9 @@ jobs: java-version: '17' distribution: 'temurin' cache: 'sbt' + - uses: sbt/setup-sbt@v1 - name: Build the launcher command - run: sbt "dist-win-x86_64/pack" + run: sbt "dist-win-x86_64/Universal/stage" - name: Run the launcher command tests run: './project/scripts/native-integration/winTests.bat' shell: cmd diff --git a/.github/workflows/lts-backport.yaml b/.github/workflows/lts-backport.yaml index 9c3405235b31..24d2329ed9da 100644 --- a/.github/workflows/lts-backport.yaml +++ b/.github/workflows/lts-backport.yaml @@ -15,7 +15,7 @@ jobs: 
with: fetch-depth: 0 - uses: coursier/cache-action@v6 - - uses: VirtusLab/scala-cli-setup@v1.3.0 + - uses: VirtusLab/scala-cli-setup@v1.5.3 - run: scala-cli ./project/scripts/addToBackportingProject.scala -- ${{ github.sha }} env: GRAPHQL_API_TOKEN: ${{ secrets.GRAPHQL_API_TOKEN }} diff --git a/.github/workflows/publish-chocolatey.yml b/.github/workflows/publish-chocolatey.yml new file mode 100644 index 000000000000..3b31728a50ba --- /dev/null +++ b/.github/workflows/publish-chocolatey.yml @@ -0,0 +1,39 @@ +################################################################################################### +### THIS IS A REUSABLE WORKFLOW TO PUBLISH SCALA TO CHOCOLATEY ### +### HOW TO USE: ### +### - THE RELEASE WORKFLOW SHOULD CALL THIS WORKFLOW ### +### - IT WILL PUBLISH TO CHOCOLATEY THE MSI ### +### ### +### NOTE: ### +### - WE SHOULD KEEP IN SYNC THE NAME OF THE MSI WITH THE ACTUAL BUILD ### +### - WE SHOULD KEEP IN SYNC THE URL OF THE RELEASE ### +### - IT ASSUMES THAT THE `build-chocolatey` WORKFLOW WAS EXECUTED BEFORE ### +################################################################################################### + + +name: Publish Scala to Chocolatey +run-name: Publish Scala ${{ inputs.version }} to Chocolatey + +on: + workflow_call: + inputs: + version: + required: true + type: string + secrets: + # Connect to https://community.chocolatey.org/profiles/scala + # Accessible via https://community.chocolatey.org/account + API-KEY: + required: true + +jobs: + publish: + runs-on: windows-latest + steps: + - name: Fetch the Chocolatey package from GitHub + uses: actions/download-artifact@v4 + with: + name: scala.nupkg + - name: Publish the package to Chocolatey + run: choco push scala.nupkg --source https://push.chocolatey.org/ --api-key ${{ secrets.API-KEY }} + \ No newline at end of file diff --git a/.github/workflows/publish-sdkman.yml b/.github/workflows/publish-sdkman.yml index 6f10ac128b6e..e47c95d01f19 100644 --- a/.github/workflows/publish-sdkman.yml +++ b/.github/workflows/publish-sdkman.yml @@ -46,7 +46,7 @@ jobs: - platform: WINDOWS_64 archive : 'scala3-${{ inputs.version }}-x86_64-pc-win32.zip' steps: - - uses: hamzaremmal/sdkman-release-action@4cb6c8cf99cfdf0ed5de586d6b38500558737e65 + - uses: sdkman/sdkman-release-action@1f2d4209b4f5a38721d4ae20014ea8e1689d869e with: CONSUMER-KEY : ${{ secrets.CONSUMER-KEY }} CONSUMER-TOKEN : ${{ secrets.CONSUMER-TOKEN }} @@ -59,7 +59,7 @@ jobs: runs-on: ubuntu-latest needs: publish steps: - - uses: hamzaremmal/sdkman-default-action@f312ff69dec7c4f83b060c3df90df7ed19e2d70e + - uses: sdkman/sdkman-default-action@b3f991bd109e40155af1b13a4c6fc8e8ccada65e with: CONSUMER-KEY : ${{ secrets.CONSUMER-KEY }} CONSUMER-TOKEN : ${{ secrets.CONSUMER-TOKEN }} diff --git a/.github/workflows/publish-winget.yml b/.github/workflows/publish-winget.yml new file mode 100644 index 000000000000..03ebc5d0fa7d --- /dev/null +++ b/.github/workflows/publish-winget.yml @@ -0,0 +1,36 @@ +################################################################################################### +### THIS IS A REUSABLE WORKFLOW TO PUBLISH SCALA TO WINGET ### +### HOW TO USE: ### +### - THE RELEASE WORKFLOW SHOULD CALL THIS WORKFLOW ### +### - IT WILL PUBLISH THE MSI TO WINGET ### +### ### +### NOTE: ### +### - WE SHOULD KEEP IN SYNC THE https://github.com/dottybot/winget-pkgs REPOSITORY ### +################################################################################################### + + +name: Publish Scala to winget +run-name: Publish Scala ${{ inputs.version }} 
to winget + +on: + workflow_call: + inputs: + version: + required: true + type: string + secrets: + DOTTYBOT-TOKEN: + required: true + +jobs: + publish: + runs-on: windows-latest + steps: + - uses: vedantmgoyal9/winget-releaser@b87a066d9e624db1394edcd947f8c4e5a7e30cd7 + with: + identifier : Scala.Scala.3 + version : ${{ inputs.version }} + installers-regex: '\.msi$' + release-tag : ${{ inputs.version }} + fork-user : dottybot + token : ${{ secrets.DOTTYBOT-WINGET-TOKEN }} \ No newline at end of file diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index 4b75dd1b737d..ab921ec588d2 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -29,5 +29,44 @@ jobs: secrets: CONSUMER-KEY: ${{ secrets.SDKMAN_KEY }} CONSUMER-TOKEN: ${{ secrets.SDKMAN_TOKEN }} - + + publish-winget: + uses: ./.github/workflows/publish-winget.yml + with: + version: ${{ inputs.version }} + secrets: + DOTTYBOT-TOKEN: ${{ secrets.DOTTYBOT_WINGET_TOKEN }} + + compute-digest: + runs-on: ubuntu-latest + outputs: + digest: ${{ steps.digest.outputs.digest }} + steps: + - name: Compute the SHA256 of scala3-${{ inputs.version }}-x86_64-pc-win32.zip in GitHub Release + id: digest + run: | + curl -o artifact.zip -L https://github.com/scala/scala3/releases/download/${{ inputs.version }}/scala3-${{ inputs.version }}-x86_64-pc-win32.zip + echo "digest=$(sha256sum artifact.zip | cut -d " " -f 1)" >> "$GITHUB_OUTPUT" + + build-chocolatey: + uses: ./.github/workflows/build-chocolatey.yml + needs: compute-digest + with: + version: ${{ inputs.version }} + url : 'https://github.com/scala/scala3/releases/download/${{ inputs.version }}/scala3-${{ inputs.version }}-x86_64-pc-win32.zip' + digest : ${{ needs.compute-digest.outputs.digest }} + test-chocolatey: + uses: ./.github/workflows/test-chocolatey.yml + needs: build-chocolatey + with: + version : ${{ inputs.version }} + java-version: 8 + publish-chocolatey: + uses: ./.github/workflows/publish-chocolatey.yml + needs: [ build-chocolatey, test-chocolatey ] + with: + version: ${{ inputs.version }} + secrets: + API-KEY: ${{ secrets.CHOCOLATEY_KEY }} + # TODO: ADD RELEASE WORKFLOW TO CHOCOLATEY AND OTHER PACKAGE MANAGERS HERE \ No newline at end of file diff --git a/.github/workflows/scaladoc.yaml b/.github/workflows/scaladoc.yaml index 98ce94718fe5..d2e3071e765b 100644 --- a/.github/workflows/scaladoc.yaml +++ b/.github/workflows/scaladoc.yaml @@ -16,6 +16,7 @@ jobs: build: env: AZURE_STORAGE_SAS_TOKEN: ${{ secrets.AZURE_STORAGE_SAS_TOKEN }} + DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }} runs-on: ubuntu-latest if: "github.event_name == 'merge_group' || ( github.event_name == 'pull_request' @@ -36,6 +37,7 @@ jobs: java-version: 17 cache: 'sbt' + - uses: sbt/setup-sbt@v1 - name: Compile and test scala3doc-js run: ./project/scripts/sbt scaladoc-js-main/test diff --git a/.github/workflows/spec.yml b/.github/workflows/spec.yml index a639c80bbda9..ab5f2b3d2fe1 100644 --- a/.github/workflows/spec.yml +++ b/.github/workflows/spec.yml @@ -16,6 +16,10 @@ env: jobs: specification: runs-on: ubuntu-latest + if: (github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]')) || + (github.event_name == 'workflow_dispatch' && github.repository == 'scala/scala3') || + github.event_name == 'push' || + github.event_name == 'merge_group' defaults: run: working-directory: ./docs/_spec diff --git a/.github/workflows/test-chocolatey.yml b/.github/workflows/test-chocolatey.yml new file mode 100644 index 
000000000000..b6ca9bf74b12 --- /dev/null +++ b/.github/workflows/test-chocolatey.yml @@ -0,0 +1,51 @@ +################################################################################################### +### THIS IS A REUSABLE WORKFLOW TO TEST SCALA WITH CHOCOLATEY ### +### HOW TO USE: ### +### ### +### NOTE: ### +### ### +################################################################################################### + +name: Test 'scala' Chocolatey Package +run-name: Test 'scala' (${{ inputs.version }}) Chocolatey Package + +on: + workflow_call: + inputs: + version: + required: true + type: string + java-version: + required: true + type : string + +env: + CHOCOLATEY-REPOSITORY: chocolatey-pkgs + DOTTY_CI_INSTALLATION: ${{ secrets.GITHUB_TOKEN }} + +jobs: + test: + runs-on: windows-latest + steps: + - uses: actions/setup-java@v4 + with: + distribution: temurin + java-version: ${{ inputs.java-version }} + - name: Download the 'nupkg' from GitHub Artifacts + uses: actions/download-artifact@v4 + with: + name: scala.nupkg + path: ${{ env.CHOCOLATEY-REPOSITORY }} + - name : Install the `scala` package with Chocolatey + run : choco install scala --source "${{ env.CHOCOLATEY-REPOSITORY }}" --pre # --pre since we might be testing non-stable releases + shell: pwsh + - name : Test the `scala` command + run : scala --version + shell: pwsh + - name : Test the `scalac` command + run : scalac --version + - name : Test the `scaladoc` command + run : scaladoc --version + - name : Uninstall the `scala` package + run : choco uninstall scala + \ No newline at end of file diff --git a/.github/workflows/test-msi.yml b/.github/workflows/test-msi.yml new file mode 100644 index 000000000000..1299c3d55061 --- /dev/null +++ b/.github/workflows/test-msi.yml @@ -0,0 +1,77 @@ +################################################################################################### +### THIS IS A REUSABLE WORKFLOW TO TEST SCALA WITH MSI RUNNER ### +### HOW TO USE: ### +### Provide optional `version` to test if installed binaries are installed with ### +### correct Scala version. ### +### NOTE: Requires `scala.msi` artifact uploaded within the same run ### +### ### +################################################################################################### + +name: Test 'scala' MSI Package +run-name: Test 'scala' (${{ inputs.version }}) MSI Package + +on: + workflow_call: + inputs: + version: + required: true + type: string + java-version: + required: true + type : string + +jobs: + test: + runs-on: windows-latest + steps: + - uses: actions/setup-java@v4 + with: + distribution: temurin + java-version: ${{ inputs.java-version }} + - name: Download MSI artifact + uses: actions/download-artifact@v4 + with: + name: scala.msi + path: . + + # Run the MSI installer + # During normal installation msiexec would modify the PATH automatically. + # However, it seems not to work in GH Actions. Append the PATH manually instead. 
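The installation and test steps that follow verify each launcher by searching its `--version` output for the expected version string; in `ci.yaml` the expected value is only the prefix `3.`, while the release workflow passes the full version. A hedged Scala sketch of the same check (illustrative only, the helper names are made up and the actual workflow uses PowerShell's `Select-String`):

```scala
// Sketch of the version check performed by the `Test Scala Runner` steps below:
// run a launcher with --version and look for the expected (possibly partial) version.
import scala.sys.process.*

def versionMatches(command: String, expected: String): Boolean =
  val output = command.!!   // e.g. output of `scala --version`
  output.linesIterator.exists(_.contains(s"Scala version (default): $expected"))

@main def checkInstalledVersion(expected: String): Unit =
  if !versionMatches("scala --version", expected) then
    System.err.println(s"Invalid Scala version of installed runner, expected $expected")
    sys.exit(1)
```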
+ - name: Install Scala Runner + shell: pwsh + run: | + Start-Process 'msiexec.exe' -ArgumentList '/I "scala.msi" /L*V "install.log" /qb' -Wait + Get-Content 'install.log' + Add-Content $env:GITHUB_PATH "C:\Program Files (x86)\scala\bin" + + # Run tests to ensure the Scala Runner was installed and works + - name: Test Scala Runner + shell: pwsh + run: | + scala --version + if (-not (scala --version | Select-String "Scala version \(default\): ${{ inputs.version }}")) { + Write-Host "Invalid Scala version of MSI installed runner, expected ${{ inputs.version }}" + Exit 1 + } + - name : Test the `scalac` command + shell: pwsh + run: | + scalac --version + if (-not (scalac --version | Select-String "Scala compiler version ${{ inputs.version }}")) { + Write-Host "Invalid scalac version of MSI installed runner, expected ${{ inputs.version }}" + Exit 1 + } + - name : Test the `scaladoc` command + shell: pwsh + run: | + scaladoc --version + if (-not (scaladoc --version | Select-String "Scaladoc version ${{ inputs.version }}")) { + Write-Host "Invalid scaladoc version of MSI installed runner, expected ${{ inputs.version }}" + Exit 1 + } + - name : Uninstall the `scala` package + shell: pwsh + run: | + Start-Process 'msiexec.exe' -ArgumentList '/X "scala.msi" /L*V "uninstall.log" /qb' -Wait + Get-Content 'uninstall.log' + \ No newline at end of file diff --git a/.gitignore b/.gitignore index 0fc39ecbae5b..7ee4342439be 100644 --- a/.gitignore +++ b/.gitignore @@ -64,6 +64,8 @@ testlogs/ local/ compiler/test/debug/Gen.jar +/bin/.cp + before-pickling.txt after-pickling.txt bench/compile.txt diff --git a/.idea/icon.png b/.idea/icon.png new file mode 100644 index 000000000000..8280fd4bfc3f Binary files /dev/null and b/.idea/icon.png differ diff --git a/NOTICE.md b/NOTICE.md index fd931397a500..b3f97913df2f 100644 --- a/NOTICE.md +++ b/NOTICE.md @@ -1,6 +1,6 @@ -Dotty (https://dotty.epfl.ch) -Copyright 2012-2024 EPFL -Copyright 2012-2024 Lightbend, Inc. +Scala 3 (https://www.scala-lang.org) +Copyright 2012-2025 EPFL +Copyright 2012-2025 Lightbend, Inc. dba Akka Licensed under the Apache License, Version 2.0 (the "License"): http://www.apache.org/licenses/LICENSE-2.0 @@ -11,12 +11,13 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. -The dotty compiler frontend has been developed since November 2012 by Martin -Odersky. It is expected and hoped for that the list of contributors to the -codebase will grow quickly. Dotty draws inspiration and code from the original -Scala compiler "nsc", which is developed at scala/scala [1]. +The Scala 3 compiler is also known as Dotty. The Dotty compiler +frontend has been developed since November 2012 by Martin Odersky. It +is expected and hoped for that the list of contributors to the +codebase will grow quickly. Dotty draws inspiration and code from the +original Scala 2 compiler "nsc", which is still developed at scala/scala [1]. -The majority of the dotty codebase is new code, with the exception of the +The majority of the Dotty codebase is new code, with the exception of the components mentioned below. We have for each component tried to come up with a list of the original authors in the scala/scala [1] codebase. Apologies if some major authors were omitted by oversight. @@ -28,7 +29,7 @@ major authors were omitted by oversight. * dotty.tools.dotc.classpath: The classpath handling is taken mostly as is from scala/scala [1]. 
The original authors were Grzegorz Kossakowski, - Michał Pociecha, Lukas Rytz, Jason Zaugg and others. + Michał Pociecha, Lukas Rytz, Jason Zaugg and others. * dotty.tools.dotc.config: The configuration components were adapted and extended from scala/scala [1]. The original sources were authored by Paul diff --git a/README.md b/README.md index 7a2bda3f8073..2146cda7be23 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,7 @@ Dotty ===== [![Dotty CI](https://github.com/scala/scala3/workflows/Dotty/badge.svg?branch=main)](https://github.com/scala/scala3/actions?query=branch%3Amain) [![Join the chat at https://discord.com/invite/scala](https://img.shields.io/discord/632150470000902164)](https://discord.com/invite/scala) +[![Revved up by Develocity](https://img.shields.io/badge/Revved%20up%20by-Develocity-06A0CE?logo=Gradle&labelColor=02303A)](https://develocity.scala-lang.org) * [Documentation](https://docs.scala-lang.org/scala3/) @@ -11,7 +12,7 @@ To try it in your project see also the [Getting Started User Guide](https://docs Building a Local Distribution ============================= -1. `sbt dist/packArchive` +1. `sbt dist/Universal/packageBin` 2. Find the newly-built distributions in `dist/target/` Code of Conduct diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 000000000000..ddc7be95bf71 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,5 @@ +# Security Policy + +The details about the security policy of the Scala Programming Language organisation can be found in [https://scala-lang.org/security](https://scala-lang.org/security). + +For any additional information related to our security policy, please contact [security@scala-lang.org](mailto:security@scala-lang.org). diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/AnnotationsMappingBenchmark.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/AnnotationsMappingBenchmark.scala new file mode 100644 index 000000000000..310a1745171f --- /dev/null +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/AnnotationsMappingBenchmark.scala @@ -0,0 +1,71 @@ +package dotty.tools.benchmarks + +import org.openjdk.jmh.annotations.{Benchmark, BenchmarkMode, Fork, Level, Measurement, Mode as JMHMode, Param, Scope, Setup, State, Warmup} +import java.util.concurrent.TimeUnit.SECONDS + +import dotty.tools.dotc.{Driver, Run, Compiler} +import dotty.tools.dotc.ast.{tpd, TreeTypeMap}, tpd.{Apply, Block, Tree, TreeAccumulator, TypeApply} +import dotty.tools.dotc.core.Annotations.{Annotation, ConcreteAnnotation, EmptyAnnotation} +import dotty.tools.dotc.core.Contexts.{ContextBase, Context, ctx, withMode} +import dotty.tools.dotc.core.Mode +import dotty.tools.dotc.core.Phases.Phase +import dotty.tools.dotc.core.Symbols.{defn, mapSymbols, Symbol} +import dotty.tools.dotc.core.Types.{AnnotatedType, NoType, SkolemType, TermRef, Type, TypeMap} +import dotty.tools.dotc.parsing.Parser +import dotty.tools.dotc.typer.TyperPhase + +/** Measures the performance of mapping over annotated types. 
+ * + * Run with: scala3-bench-micro / Jmh / run AnnotationsMappingBenchmark + */ +@Fork(value = 4) +@Warmup(iterations = 4, time = 1, timeUnit = SECONDS) +@Measurement(iterations = 4, time = 1, timeUnit = SECONDS) +@BenchmarkMode(Array(JMHMode.Throughput)) +@State(Scope.Thread) +class AnnotationsMappingBenchmark: + var tp: Type = null + var specialIntTp: Type = null + var context: Context = null + var typeFunction: Context ?=> Type => Type = null + var typeMap: TypeMap = null + + @Param(Array("v1", "v2", "v3", "v4")) + var valName: String = null + + @Param(Array("id", "mapInts")) + var typeFunctionName: String = null + + @Setup(Level.Iteration) + def setup(): Unit = + val testPhase = + new Phase: + final override def phaseName = "testPhase" + final override def run(using ctx: Context): Unit = + val pkg = ctx.compilationUnit.tpdTree.symbol + tp = pkg.requiredClass("Test").requiredValueRef(valName).underlying + specialIntTp = pkg.requiredClass("Test").requiredType("SpecialInt").typeRef + context = ctx + + val compiler = + new Compiler: + private final val baseCompiler = new Compiler() + final override def phases = List(List(Parser()), List(TyperPhase()), List(testPhase)) + + val driver = + new Driver: + final override def newCompiler(using Context): Compiler = compiler + + driver.process(Array("-classpath", System.getProperty("BENCH_CLASS_PATH"), "tests/someAnnotatedTypes.scala")) + + typeFunction = + typeFunctionName match + case "id" => tp => tp + case "mapInts" => tp => (if tp frozen_=:= defn.IntType then specialIntTp else tp) + case _ => throw new IllegalArgumentException(s"Unknown type function: $typeFunctionName") + + typeMap = + new TypeMap(using context): + final override def apply(tp: Type): Type = typeFunction(mapOver(tp)) + + @Benchmark def applyTypeMap() = typeMap.apply(tp) diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/ContendedInitialization.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/ContendedInitialization.scala index fb2cedbb7d41..12713b297759 100644 --- a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/ContendedInitialization.scala +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/ContendedInitialization.scala @@ -1,5 +1,6 @@ package dotty.tools.benchmarks.lazyvals +import compiletime.uninitialized import org.openjdk.jmh.annotations._ import LazyVals.LazyHolder import org.openjdk.jmh.infra.Blackhole @@ -16,12 +17,12 @@ import java.util.concurrent.{Executors, ExecutorService} class ContendedInitialization { @Param(Array("2000000", "5000000")) - var size: Int = _ + var size: Int = uninitialized @Param(Array("2", "4", "8")) - var nThreads: Int = _ + var nThreads: Int = uninitialized - var executor: ExecutorService = _ + var executor: ExecutorService = uninitialized @Setup def prepare: Unit = { diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccess.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccess.scala index d413458d0049..34bd652cbd2d 100644 --- a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccess.scala +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccess.scala @@ -1,5 +1,6 @@ package dotty.tools.benchmarks.lazyvals +import compiletime.uninitialized import org.openjdk.jmh.annotations._ import LazyVals.LazyHolder import org.openjdk.jmh.infra.Blackhole @@ -14,7 +15,7 @@ import java.util.concurrent.TimeUnit @State(Scope.Benchmark) class InitializedAccess { - var holder: LazyHolder = 
_ + var holder: LazyHolder = uninitialized @Setup def prepare: Unit = { diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessAny.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessAny.scala index 8c75f6bb11a2..4e044dcaee52 100644 --- a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessAny.scala +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessAny.scala @@ -1,5 +1,6 @@ package dotty.tools.benchmarks.lazyvals +import compiletime.uninitialized import org.openjdk.jmh.annotations._ import LazyVals.LazyAnyHolder import org.openjdk.jmh.infra.Blackhole @@ -14,7 +15,7 @@ import java.util.concurrent.TimeUnit @State(Scope.Benchmark) class InitializedAccessAny { - var holder: LazyAnyHolder = _ + var holder: LazyAnyHolder = uninitialized @Setup def prepare: Unit = { diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessGeneric.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessGeneric.scala index a9fecae6281e..4c1a0c6d7417 100644 --- a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessGeneric.scala +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessGeneric.scala @@ -1,5 +1,6 @@ package dotty.tools.benchmarks.lazyvals +import compiletime.uninitialized import org.openjdk.jmh.annotations._ import LazyVals.LazyGenericHolder import org.openjdk.jmh.infra.Blackhole @@ -14,7 +15,7 @@ import java.util.concurrent.TimeUnit @State(Scope.Benchmark) class InitializedAccessGeneric { - var holder: LazyGenericHolder[String] = _ + var holder: LazyGenericHolder[String] = uninitialized @Setup def prepare: Unit = { diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessInt.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessInt.scala index 2a115ad63496..6ff8622a82e8 100644 --- a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessInt.scala +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessInt.scala @@ -1,5 +1,6 @@ package dotty.tools.benchmarks.lazyvals +import compiletime.uninitialized import org.openjdk.jmh.annotations.* import org.openjdk.jmh.infra.Blackhole import LazyVals.LazyIntHolder @@ -14,7 +15,7 @@ import java.util.concurrent.TimeUnit @State(Scope.Benchmark) class InitializedAccessInt { - var holder: LazyIntHolder = _ + var holder: LazyIntHolder = uninitialized @Setup def prepare: Unit = { diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessMultiple.scala b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessMultiple.scala index 4f3c75fd920b..9416bac36c33 100644 --- a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessMultiple.scala +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessMultiple.scala @@ -1,5 +1,6 @@ package dotty.tools.benchmarks.lazyvals +import compiletime.uninitialized import org.openjdk.jmh.annotations._ import LazyVals.LazyHolder import org.openjdk.jmh.infra.Blackhole @@ -14,7 +15,7 @@ import java.util.concurrent.TimeUnit @State(Scope.Benchmark) class InitializedAccessMultiple { - var holders: Array[LazyHolder] = _ + var holders: Array[LazyHolder] = uninitialized @Setup def prepare: Unit = { diff --git a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessString.scala 
b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessString.scala index e6c6cd5eb2e3..af751d782010 100644 --- a/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessString.scala +++ b/bench-micro/src/main/scala/dotty/tools/benchmarks/lazyvals/InitializedAccessString.scala @@ -1,5 +1,6 @@ package dotty.tools.benchmarks.lazyvals +import compiletime.uninitialized import org.openjdk.jmh.annotations._ import LazyVals.LazyStringHolder import org.openjdk.jmh.infra.Blackhole @@ -14,7 +15,7 @@ import java.util.concurrent.TimeUnit @State(Scope.Benchmark) class InitializedAccessString { - var holder: LazyStringHolder = _ + var holder: LazyStringHolder = uninitialized @Setup def prepare: Unit = { diff --git a/bench-micro/tests/someAnnotatedTypes.scala b/bench-micro/tests/someAnnotatedTypes.scala new file mode 100644 index 000000000000..8b12d4f7c2c6 --- /dev/null +++ b/bench-micro/tests/someAnnotatedTypes.scala @@ -0,0 +1,28 @@ +class Test: + class FlagAnnot extends annotation.StaticAnnotation + class StringAnnot(val s: String) extends annotation.StaticAnnotation + class LambdaAnnot(val f: Int => Boolean) extends annotation.StaticAnnotation + + type SpecialInt <: Int + + val v1: Int @FlagAnnot = 42 + + val v2: Int @StringAnnot("hello") = 42 + + val v3: Int @LambdaAnnot(it => it == 42) = 42 + + val v4: Int @LambdaAnnot(it => { + def g(x: Int, y: Int) = x - y + 5 + g(it, 7) * 2 == 80 + }) = 42 + + /*val v5: Int @LambdaAnnot(it => { + class Foo(x: Int): + def xPlus10 = x + 10 + def xPlus20 = x + 20 + def xPlus(y: Int) = x + y + val foo = Foo(it) + foo.xPlus10 - foo.xPlus20 + foo.xPlus(30) == 62 + }) = 42*/ + + def main(args: Array[String]): Unit = ??? diff --git a/bin/common b/bin/common index 37b2ebd1ff93..ad179412f590 100755 --- a/bin/common +++ b/bin/common @@ -13,14 +13,14 @@ shift # Mutates $@ by deleting the first element ($1) source "$ROOT/bin/common-platform" # Marker file used to obtain the date of latest call to sbt-back -version="$ROOT/$DIST_DIR/target/pack/VERSION" +version="$ROOT/$DIST_DIR/target/universal/stage/VERSION" # Create the target if absent or if file changed in ROOT/compiler new_files="$(find "$ROOT/compiler" \( -iname "*.scala" -o -iname "*.java" \) -newer "$version" 2> /dev/null)" if [ ! -f "$version" ] || [ ! -z "$new_files" ]; then echo "Building Dotty..." - (cd $ROOT && sbt "$DIST_PROJECT/pack") + (cd $ROOT && sbt "$DIST_PROJECT/Universal/stage") fi -"$ROOT/$DIST_DIR/target/pack/bin/$target" "$@" +"$ROOT/$DIST_DIR/target/universal/stage/bin/$target" "$@" diff --git a/bin/commonQ b/bin/commonQ new file mode 100755 index 000000000000..a25d52db3d90 --- /dev/null +++ b/bin/commonQ @@ -0,0 +1,6 @@ +cp=$(cat $ROOT/bin/.cp) 2> /dev/null + +if [[ "$cp" == "" ]]; then + echo "run 'sbt buildQuick' first" + exit 1 +fi diff --git a/bin/scalaQ b/bin/scalaQ new file mode 100755 index 000000000000..c14a2f0372ff --- /dev/null +++ b/bin/scalaQ @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" >& /dev/null && pwd)/.." +. $ROOT/bin/commonQ + +java -cp $cp dotty.tools.MainGenericRunner -usejavacp "$@" diff --git a/bin/scalacQ b/bin/scalacQ new file mode 100755 index 000000000000..f3dafba9fe27 --- /dev/null +++ b/bin/scalacQ @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" >& /dev/null && pwd)/.." +. 
$ROOT/bin/commonQ + +java -cp $cp dotty.tools.MainGenericCompiler -usejavacp "$@" diff --git a/build.sbt b/build.sbt index f357044c91ca..9d29bfcb6d6a 100644 --- a/build.sbt +++ b/build.sbt @@ -36,7 +36,6 @@ val `dist-linux-aarch64` = Build.`dist-linux-aarch64` val `community-build` = Build.`community-build` val `sbt-community-build` = Build.`sbt-community-build` val `scala3-presentation-compiler` = Build.`scala3-presentation-compiler` -val `scala3-presentation-compiler-bootstrapped` = Build.`scala3-presentation-compiler-bootstrapped` val sjsSandbox = Build.sjsSandbox val sjsJUnitTests = Build.sjsJUnitTests diff --git a/changelogs/3.6.3-RC1.md b/changelogs/3.6.3-RC1.md new file mode 100644 index 000000000000..201201fbf1bc --- /dev/null +++ b/changelogs/3.6.3-RC1.md @@ -0,0 +1,179 @@ +# Highlights of the release + +- Scala 2 forwardport: `-Yprofile-trace` [#19897](https://github.com/scala/scala3/pull/19897) + +# Other changes and fixes + +## Annotations + +- Fix Java parsing of annotations on qualified types [#21867](https://github.com/scala/scala3/pull/21867) +- Consider all arguments in Annotations.refersToParamOf [#22001](https://github.com/scala/scala3/pull/22001) + +## Backend + +- Flag class file collision as error [#19332](https://github.com/scala/scala3/pull/19332) + +## Compiler Phases + +- Fix #21939: Update names and descriptions for cc and setup phases [#21942](https://github.com/scala/scala3/pull/21942) + +## Experimental: Explicit Nulls + +- Improve warning for wildcard matching only null under the explicit nulls flag (scala#21577) [#21623](https://github.com/scala/scala3/pull/21623) +- Fix warning message for matching on redundant nulls [#21850](https://github.com/scala/scala3/pull/21850) + +## Experimental: Capture Checking + +- Fix #21868, #21869, and #21870: handle CapsOf in more places [#21875](https://github.com/scala/scala3/pull/21875) +- Consolidate CC [#21863](https://github.com/scala/scala3/pull/21863) +- Add path support for capture checking [#21445](https://github.com/scala/scala3/pull/21445) + +## Experimentals + +- Replace symbol traversal with tree traversal when finding top level experimentals [#21827](https://github.com/scala/scala3/pull/21827) + +## Extension Methods + +- Nowarn extension matching nonpublic member [#21825](https://github.com/scala/scala3/pull/21825) + +## Implicits + +- Apply implicit conversion from derived Conversion instance defined as implicit rather than given [#21785](https://github.com/scala/scala3/pull/21785) + +## Imports + +- Allow imports nested in packagings to shadow [#21539](https://github.com/scala/scala3/pull/21539) + +## Inline + +- Avoid using the current denotation in NamedType.disambiguate [#21414](https://github.com/scala/scala3/pull/21414) +- Drop phase.isTyper use in isLegalPrefix/asf [#21954](https://github.com/scala/scala3/pull/21954) +- Fix for macro annotation that resolves macro-based implicit crashing the compiler [#20353](https://github.com/scala/scala3/pull/20353) +- Allow macro annotations to recover from suspension [#21969](https://github.com/scala/scala3/pull/21969) + +## Linting + +- Disallow open modifier on objects [#21922](https://github.com/scala/scala3/pull/21922) +- Allow discarding "Discarded non-Unit" warnings with `: Unit` [#21927](https://github.com/scala/scala3/pull/21927) + +## Opaque Types + +- Fix pkg obj prefix of opaque tp ext meth [#21527](https://github.com/scala/scala3/pull/21527) + +## Parser + +- Fix: don't consider `into` as a soft-modifier 
[#21924](https://github.com/scala/scala3/pull/21924) + +## Pattern Matching + +- Drop inaccessible subclasses from refineUsingParent [#21799](https://github.com/scala/scala3/pull/21799) +- (Re-)Drop inaccessible subclasses from refineUsingParent [#21930](https://github.com/scala/scala3/pull/21930) +- Fix use of class terms in match analysis [#21848](https://github.com/scala/scala3/pull/21848) +- Don't project nested wildcard patterns to nullable [#21934](https://github.com/scala/scala3/pull/21934) +- Fix provablyDisjoint handling enum constants with mixins [#21876](https://github.com/scala/scala3/pull/21876) +- Do not consider uninhabited constructors when performing exhaustive match checking [#21750](https://github.com/scala/scala3/pull/21750) + +## Presentation Compiler + +- Update mtags to 1.4.1 and backport remaining changes [#21859](https://github.com/scala/scala3/pull/21859) +- Backport changes for the presentation compiler from Metals [#21756](https://github.com/scala/scala3/pull/21756) + +## Pickling + +- Avoid orphan param from default arg [#21824](https://github.com/scala/scala3/pull/21824) +- Make sure definition tree has the defined symbol [#21851](https://github.com/scala/scala3/pull/21851) + +## REPL + +- Allow top-level opaque type definitions in REPL [#21753](https://github.com/scala/scala3/pull/21753) + +## Scaladoc + +- Fix scaladoc TastyInspector regressions [#21716](https://github.com/scala/scala3/pull/21716) +- Bring back the fix for scaladoc TastyInspector regressions [#21929](https://github.com/scala/scala3/pull/21929) + +## Standard Library + +- Combine cases of `Tuple.Zip` disjoint from `(h1 *: t1, h2 *: t2)` [#21287](https://github.com/scala/scala3/pull/21287) + +## Quotes + +- Fix #20471: owners of top-level symbols in cached quoted code being incorrect [#21945](https://github.com/scala/scala3/pull/21945) + +## Reporting + +- Do not warn about expected missing positions in quotes.reflect.Symbol [#21677](https://github.com/scala/scala3/pull/21677) +- Add missing error messages to asserts in QuotesImpl [#21852](https://github.com/scala/scala3/pull/21852) +- Don't point to the compiler backlog when a compiler plugin phase crashes [#21887](https://github.com/scala/scala3/pull/21887) +- Better error message for polytypes wrapping capturing types [#21843](https://github.com/scala/scala3/pull/21843) +- Pretty-print lambdas [#21846](https://github.com/scala/scala3/pull/21846) + +## Scala.js + +- Shade scalajs.ir under dotty.tools [#21765](https://github.com/scala/scala3/pull/21765) + +## Scaladoc + +- Fix scaladoc graph highlight background color in dark mode [#21814](https://github.com/scala/scala3/pull/21814) + +## SemanticDB + +- Extract semanticDB for lifted definitions [#21856](https://github.com/scala/scala3/pull/21856) + +## Transform + +- Fix enclosingClass from returning refinement classes [#21411](https://github.com/scala/scala3/pull/21411) +- Attempt to beta reduce only if parameters and arguments have same shape [#21970](https://github.com/scala/scala3/pull/21970) +- Drop copied parent refinements before generating bytecode [#21733](https://github.com/scala/scala3/pull/21733) + +## Tooling + +- Ensure to escape characters before constructing JSON profile trace [#21872](https://github.com/scala/scala3/pull/21872) + +## Tuples + +- Fix tupleTypeFromSeq for XXL tuples [#21782](https://github.com/scala/scala3/pull/21782) + +## Typer + +- Do not crash when typing a closure with unknown type, since it can occur for erroneous input 
[#21178](https://github.com/scala/scala3/pull/21178) +- Revert SAM condition to what it was before [#21684](https://github.com/scala/scala3/pull/21684) +- Fix ctx implicits under case unapplySeq [#21748](https://github.com/scala/scala3/pull/21748) +- Avoid erasure/preErasure issues around Any in transformIsInstanceOf [#21647](https://github.com/scala/scala3/pull/21647) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.6.2..3.6.3-RC1` these are: + +``` + 30 Dale Wijnand + 30 Kacper Korban + 27 Wojciech Mazur + 14 noti0na1 + 10 Eugene Flesselle + 10 Hamza Remmal + 10 HarrisL2 + 9 Martin Odersky + 8 Matt Bovel + 7 Jan Chyb + 6 Tomasz Godzik + 4 Jamie Thompson + 2 Friendseeker + 2 Pascal Weisenburger + 2 Seth Tisue + 2 Sébastien Doeraene + 1 Adrien Piquerez + 1 Alden Torres + 1 Alexander + 1 Fengyun Liu + 1 Georgi Krastev + 1 Jentsch + 1 Lunfu Zhong + 1 Michał Pałka + 1 Natsu Kagami + 1 dependabot[bot] + 1 friendseeker + 1 tgodzik +``` diff --git a/changelogs/3.6.3-RC2.md b/changelogs/3.6.3-RC2.md new file mode 100644 index 000000000000..0da2783bd6fe --- /dev/null +++ b/changelogs/3.6.3-RC2.md @@ -0,0 +1,24 @@ +# Backported fixes + +- Fix: update `scala-cli.jar` path [#22274](http://github.com/scala/scala3/pull/22274) +- Nowarn extension matching nonpublic member [#22276](http://github.com/scala/scala3/pull/22276) +- Limit exposure to ConcurrentModificationException when sys props are replaced or mutated [#22275](http://github.com/scala/scala3/pull/22275) +- Refactor: Improve Given search preference warning [#22273](http://github.com/scala/scala3/pull/22273) +- Fix layout of released SDK archives, restore intermediate top-level directory [#22272](http://github.com/scala/scala3/pull/22272) +- REPL: JLine: follow recommendation to use JNI, not JNA; also JLine 3.27.1 (was 3.27.0) [#22271](http://github.com/scala/scala3/pull/22271) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.6.3-RC1..3.6.3-RC2` these are: + +``` + 4 Wojciech Mazur + 2 João Ferreira + 2 Seth Tisue + 2 Som Snytt + 1 Eugene Yokota + 1 Hamza Remmal + 1 Rui Chen +``` diff --git a/changelogs/3.6.3.md b/changelogs/3.6.3.md new file mode 100644 index 000000000000..2b3d23b75222 --- /dev/null +++ b/changelogs/3.6.3.md @@ -0,0 +1,192 @@ +# Highlights of the release + +- Scala 2 forwardport: `-Yprofile-trace` [#19897](https://github.com/scala/scala3/pull/19897) + +# Other changes and fixes + +## Annotations + +- Fix Java parsing of annotations on qualified types [#21867](https://github.com/scala/scala3/pull/21867) +- Consider all arguments in Annotations.refersToParamOf [#22001](https://github.com/scala/scala3/pull/22001) + +## Backend + +- Flag class file collision as error [#19332](https://github.com/scala/scala3/pull/19332) + +## Compiler Phases + +- Fix #21939: Update names and descriptions for cc and setup phases [#21942](https://github.com/scala/scala3/pull/21942) +- Limit exposure to ConcurrentModificationException when sys props are replaced or mutated [#22275](http://github.com/scala/scala3/pull/22275) + +## Experimental: Explicit Nulls + +- Improve warning for wildcard matching only null under the explicit nulls flag (scala#21577) [#21623](https://github.com/scala/scala3/pull/21623) +- Fix warning message for matching on redundant nulls [#21850](https://github.com/scala/scala3/pull/21850) + +## Experimental: Capture Checking + +- Fix #21868, #21869, and 
#21870: handle CapsOf in more places [#21875](https://github.com/scala/scala3/pull/21875) +- Consolidate CC [#21863](https://github.com/scala/scala3/pull/21863) +- Add path support for capture checking [#21445](https://github.com/scala/scala3/pull/21445) + +## Experimentals + +- Replace symbol traversal with tree traversal when finding top level experimentals [#21827](https://github.com/scala/scala3/pull/21827) + +## Extension Methods + +- Nowarn extension matching nonpublic member [#21825](https://github.com/scala/scala3/pull/21825) + +## Implicits + +- Apply implicit conversion from derived Conversion instance defined as implicit rather than given [#21785](https://github.com/scala/scala3/pull/21785) + +## Imports + +- Allow imports nested in packagings to shadow [#21539](https://github.com/scala/scala3/pull/21539) + +## Inline + +- Avoid using the current denotation in NamedType.disambiguate [#21414](https://github.com/scala/scala3/pull/21414) +- Drop phase.isTyper use in isLegalPrefix/asf [#21954](https://github.com/scala/scala3/pull/21954) +- Fix for macro annotation that resolves macro-based implicit crashing the compiler [#20353](https://github.com/scala/scala3/pull/20353) +- Allow macro annotations to recover from suspension [#21969](https://github.com/scala/scala3/pull/21969) + +## Linting + +- Disallow open modifier on objects [#21922](https://github.com/scala/scala3/pull/21922) +- Allow discarding "Discarded non-Unit" warnings with `: Unit` [#21927](https://github.com/scala/scala3/pull/21927) + +## Opaque Types + +- Fix pkg obj prefix of opaque tp ext meth [#21527](https://github.com/scala/scala3/pull/21527) + +## Parser + +- Fix: don't consider `into` as a soft-modifier [#21924](https://github.com/scala/scala3/pull/21924) + +## Pattern Matching + +- Drop inaccessible subclasses from refineUsingParent [#21799](https://github.com/scala/scala3/pull/21799) +- (Re-)Drop inaccessible subclasses from refineUsingParent [#21930](https://github.com/scala/scala3/pull/21930) +- Fix use of class terms in match analysis [#21848](https://github.com/scala/scala3/pull/21848) +- Don't project nested wildcard patterns to nullable [#21934](https://github.com/scala/scala3/pull/21934) +- Fix provablyDisjoint handling enum constants with mixins [#21876](https://github.com/scala/scala3/pull/21876) +- Do not consider uninhabited constructors when performing exhaustive match checking [#21750](https://github.com/scala/scala3/pull/21750) + +## Presentation Compiler + +- Update mtags to 1.4.1 and backport remaining changes [#21859](https://github.com/scala/scala3/pull/21859) +- Backport changes for the presentation compiler from Metals [#21756](https://github.com/scala/scala3/pull/21756) + +## Pickling + +- Avoid orphan param from default arg [#21824](https://github.com/scala/scala3/pull/21824) +- Make sure definition tree has the defined symbol [#21851](https://github.com/scala/scala3/pull/21851) + +## REPL + +- Allow top-level opaque type definitions in REPL [#21753](https://github.com/scala/scala3/pull/21753) +- JLine: follow recommendation to use JNI, not JNA; also JLine 3.27.1 (was 3.27.0) [#22271](http://github.com/scala/scala3/pull/22271) + +## Scaladoc + +- Fix scaladoc TastyInspector regressions [#21716](https://github.com/scala/scala3/pull/21716) +- Bring back the fix for scaladoc TastyInspector regressions [#21929](https://github.com/scala/scala3/pull/21929) +- Fix scaladoc graph highlight background color in dark mode [#21814](https://github.com/scala/scala3/pull/21814) + +## Standard Library + 
+- Combine cases of `Tuple.Zip` disjoint from `(h1 *: t1, h2 *: t2)` [#21287](https://github.com/scala/scala3/pull/21287) + +## Quotes + +- Fix #20471: owners of top-level symbols in cached quoted code being incorrect [#21945](https://github.com/scala/scala3/pull/21945) + +## Reporting + +- Do not warn about expected missing positions in quotes.reflect.Symbol [#21677](https://github.com/scala/scala3/pull/21677) +- Add missing error messages to asserts in QuotesImpl [#21852](https://github.com/scala/scala3/pull/21852) +- Don't point to the compiler backlog when a compiler plugin phase crashes [#21887](https://github.com/scala/scala3/pull/21887) +- Better error message for polytypes wrapping capturing types [#21843](https://github.com/scala/scala3/pull/21843) +- Pretty-print lambdas [#21846](https://github.com/scala/scala3/pull/21846) +- Nowarn extension matching nonpublic member [#22276](http://github.com/scala/scala3/pull/22276) +- Refactor: Improve Given search preference warning [#22273](http://github.com/scala/scala3/pull/22273) + +## Runner + +- Fix: update `scala-cli.jar` path [#22274](http://github.com/scala/scala3/pull/22274) + +## Releases + +- Fix layout of released SDK archives, restore intermediate top-level directory [#22272](http://github.com/scala/scala3/pull/22272) + +## Scala.js + +- Shade scalajs.ir under dotty.tools [#21765](https://github.com/scala/scala3/pull/21765) + +## SemanticDB + +- Extract semanticDB for lifted definitions [#21856](https://github.com/scala/scala3/pull/21856) + +## Transform + +- Fix enclosingClass from returning refinement classes [#21411](https://github.com/scala/scala3/pull/21411) +- Attempt to beta reduce only if parameters and arguments have same shape [#21970](https://github.com/scala/scala3/pull/21970) +- Drop copied parent refinements before generating bytecode [#21733](https://github.com/scala/scala3/pull/21733) + +## Tooling + +- Ensure to escape characters before constructing JSON profile trace [#21872](https://github.com/scala/scala3/pull/21872) + +## Tuples + +- Fix tupleTypeFromSeq for XXL tuples [#21782](https://github.com/scala/scala3/pull/21782) + +## Typer + +- Do not crash when typing a closure with unknown type, since it can occur for erroneous input [#21178](https://github.com/scala/scala3/pull/21178) + +- Revert SAM condition to what it was before [#21684](https://github.com/scala/scala3/pull/21684) +- Fix ctx implicits under case unapplySeq [#21748](https://github.com/scala/scala3/pull/21748) +- Avoid erasure/preErasure issues around Any in transformIsInstanceOf [#21647](https://github.com/scala/scala3/pull/21647) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.6.2..3.6.3` these are: + +``` + 33 Wojciech Mazur + 30 Dale Wijnand + 30 Kacper Korban + 14 noti0na1 + 11 Hamza Remmal + 10 Eugene Flesselle + 10 HarrisL2 + 9 Martin Odersky + 8 Matt Bovel + 7 Jan Chyb + 6 Tomasz Godzik + 5 Seth Tisue + 4 Jamie Thompson + 2 Friendseeker + 2 João Ferreira + 2 Pascal Weisenburger + 2 Som Snytt + 2 Sébastien Doeraene + 1 Adrien Piquerez + 1 Alden Torres + 1 Alexander + 1 Eugene Yokota + 1 Fengyun Liu + 1 Georgi Krastev + 1 Jentsch + 1 Lunfu Zhong + 1 Michał Pałka + 1 Natsu Kagami + 1 Rui Chen + 1 dependabot[bot] + 1 friendseeker + 1 tgodzik +``` diff --git a/community-build/community-projects/munit b/community-build/community-projects/munit index 5c77d7316fc6..c5d6f474fa0d 160000 --- a/community-build/community-projects/munit +++ 
b/community-build/community-projects/munit @@ -1 +1 @@ -Subproject commit 5c77d7316fc66adaed64e9532ee0a45a668b01ec +Subproject commit c5d6f474fa0d481e2c29f15d6a67d10ef2099e78 diff --git a/community-build/community-projects/parboiled2 b/community-build/community-projects/parboiled2 index 628127744bde..3fb32f833f8c 160000 --- a/community-build/community-projects/parboiled2 +++ b/community-build/community-projects/parboiled2 @@ -1 +1 @@ -Subproject commit 628127744bde8dc2e01432badd68886a5f722f71 +Subproject commit 3fb32f833f8c6a2fca25474c189efd91ffb65557 diff --git a/community-build/community-projects/scala-collection-compat b/community-build/community-projects/scala-collection-compat index b39b4b64732d..c9d3a8b160a3 160000 --- a/community-build/community-projects/scala-collection-compat +++ b/community-build/community-projects/scala-collection-compat @@ -1 +1 @@ -Subproject commit b39b4b64732d9dd5e0f065e4180f656237ac4444 +Subproject commit c9d3a8b160a35c9915816dd84a1063e18db4a84a diff --git a/community-build/community-projects/scala-stm b/community-build/community-projects/scala-stm index cf204977752a..8d443ab107e7 160000 --- a/community-build/community-projects/scala-stm +++ b/community-build/community-projects/scala-stm @@ -1 +1 @@ -Subproject commit cf204977752af7ec2ca3b50c43f27daa6a628f49 +Subproject commit 8d443ab107e75e809848c2fa3ecd666043171ad5 diff --git a/community-build/community-projects/scala-xml b/community-build/community-projects/scala-xml index 105c3dac8835..0605c07e298c 160000 --- a/community-build/community-projects/scala-xml +++ b/community-build/community-projects/scala-xml @@ -1 +1 @@ -Subproject commit 105c3dac883549eca1182b04fc5a18fe4f5ad51a +Subproject commit 0605c07e298c1bd8758f79d3c790f89db986a6bc diff --git a/community-build/community-projects/specs2 b/community-build/community-projects/specs2 index a618330aa808..005c5847ecf9 160000 --- a/community-build/community-projects/specs2 +++ b/community-build/community-projects/specs2 @@ -1 +1 @@ -Subproject commit a618330aa80833787859dae805d02e45d4304c42 +Subproject commit 005c5847ecf9439691505f0628d318b0fed9d341 diff --git a/community-build/community-projects/spire b/community-build/community-projects/spire index d60fe2c38848..993e8c8c7a8e 160000 --- a/community-build/community-projects/spire +++ b/community-build/community-projects/spire @@ -1 +1 @@ -Subproject commit d60fe2c38848ef193031c18eab3a14d3306b3761 +Subproject commit 993e8c8c7a8e55be943d63c07c8263c1021add2f diff --git a/community-build/src/scala/dotty/communitybuild/CommunityBuildRunner.scala b/community-build/src/scala/dotty/communitybuild/CommunityBuildRunner.scala index 6a0c54c4b00b..b3065fefe87f 100644 --- a/community-build/src/scala/dotty/communitybuild/CommunityBuildRunner.scala +++ b/community-build/src/scala/dotty/communitybuild/CommunityBuildRunner.scala @@ -16,13 +16,12 @@ object CommunityBuildRunner: * and avoid network overhead. See https://github.com/lampepfl/dotty-drone * for more infrastructural details. 
*/ - extension (self: CommunityProject) def run()(using suite: CommunityBuildRunner): Unit = - if self.requiresExperimental && !compilerSupportExperimental then - log(s"Skipping ${self.project} - it needs experimental features unsupported in this build.") - return - self.dependencies.foreach(_.publish()) - self.testOnlyDependencies().foreach(_.publish()) - suite.runProject(self) + extension (self: CommunityProject) + def run()(using suite: CommunityBuildRunner): Unit = + self.dependencies.foreach(_.publish()) + self.testOnlyDependencies().foreach(_.publish()) + suite.runProject(self) + end extension trait CommunityBuildRunner: diff --git a/community-build/src/scala/dotty/communitybuild/Main.scala b/community-build/src/scala/dotty/communitybuild/Main.scala index 852cee46af22..c813f5ff684b 100644 --- a/community-build/src/scala/dotty/communitybuild/Main.scala +++ b/community-build/src/scala/dotty/communitybuild/Main.scala @@ -55,10 +55,7 @@ object Main: Seq("rm", "-rf", destStr).! Files.createDirectory(dest) val (toRun, ignored) = - allProjects.partition( p => - p.docCommand != null - && (!p.requiresExperimental || compilerSupportExperimental) - ) + allProjects.partition(_.docCommand != null) val paths = toRun.map { project => val name = project.project diff --git a/community-build/src/scala/dotty/communitybuild/projects.scala b/community-build/src/scala/dotty/communitybuild/projects.scala index a0444505801a..31c1bb95743c 100644 --- a/community-build/src/scala/dotty/communitybuild/projects.scala +++ b/community-build/src/scala/dotty/communitybuild/projects.scala @@ -10,9 +10,6 @@ lazy val compilerVersion: String = val file = communitybuildDir.resolve("scala3-bootstrapped.version") new String(Files.readAllBytes(file), UTF_8) -lazy val compilerSupportExperimental: Boolean = - compilerVersion.contains("SNAPSHOT") || compilerVersion.contains("NIGHTLY") - lazy val sbtPluginFilePath: String = // Workaround for https://github.com/sbt/sbt/issues/4395 new File(sys.props("user.home") + "/.sbt/1.0/plugins").mkdirs() @@ -43,7 +40,6 @@ sealed trait CommunityProject: val testOnlyDependencies: () => List[CommunityProject] val binaryName: String val runCommandsArgs: List[String] = Nil - val requiresExperimental: Boolean val environment: Map[String, String] = Map.empty final val projectDir = communitybuildDir.resolve("community-projects").resolve(project) @@ -53,7 +49,6 @@ sealed trait CommunityProject: /** Publish this project to the local Maven repository */ final def publish(): Unit = - // TODO what should this do with .requiresExperimental? if !published then publishDependencies() log(s"Publishing $project") @@ -65,11 +60,6 @@ sealed trait CommunityProject: published = true final def doc(): Unit = - if this.requiresExperimental && !compilerSupportExperimental then - log( - s"Skipping ${this.project} - it needs experimental features unsupported in this build." 
- ) - return publishDependencies() log(s"Documenting $project") if docCommand eq null then @@ -89,8 +79,7 @@ final case class MillCommunityProject( baseCommand: String, dependencies: List[CommunityProject] = Nil, testOnlyDependencies: () => List[CommunityProject] = () => Nil, - ignoreDocs: Boolean = false, - requiresExperimental: Boolean = false, + ignoreDocs: Boolean = false ) extends CommunityProject: override val binaryName: String = "./mill" override val testCommand = s"$baseCommand.test" @@ -109,8 +98,7 @@ final case class SbtCommunityProject( testOnlyDependencies: () => List[CommunityProject] = () => Nil, sbtPublishCommand: String = null, sbtDocCommand: String = null, - scalacOptions: List[String] = SbtCommunityProject.scalacOptions, - requiresExperimental: Boolean = false, + scalacOptions: List[String] = SbtCommunityProject.scalacOptions ) extends CommunityProject: override val binaryName: String = "sbt" @@ -260,7 +248,6 @@ object projects: project = "intent", sbtTestCommand = "test", sbtDocCommand = "doc", - requiresExperimental = true, ) lazy val scalacheck = SbtCommunityProject( @@ -489,8 +476,8 @@ object projects: lazy val scalaCollectionCompat = SbtCommunityProject( project = "scala-collection-compat", - sbtTestCommand = "compat30/test", - sbtPublishCommand = "compat30/publishLocal", + sbtTestCommand = "compat3/test", + sbtPublishCommand = "compat3/publishLocal", ) lazy val scalaJava8Compat = SbtCommunityProject( diff --git a/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala b/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala index f307a6b0a8eb..6181d4c3ddec 100644 --- a/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala +++ b/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala @@ -68,7 +68,7 @@ class CommunityBuildTestC: @Test def fastparse = projects.fastparse.run() @Test def geny = projects.geny.run() @Test def intent = projects.intent.run() - @Test def jacksonModuleScala = projects.jacksonModuleScala.run() + //@Test def jacksonModuleScala = projects.jacksonModuleScala.run() @Test def libretto = projects.libretto.run() @Test def minitest = projects.minitest.run() //@Test def onnxScala = projects.onnxScala.run() @@ -95,7 +95,7 @@ class CommunityBuildTestC: @Test def sourcecode = projects.sourcecode.run() @Test def specs2 = projects.specs2.run() - @Test def stdLib213 = projects.stdLib213.run() + // @Test def stdLib213 = projects.stdLib213.run() @Test def ujson = projects.ujson.run() @Test def upickle = projects.upickle.run() @Test def utest = projects.utest.run() diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeAsmCommon.scala b/compiler/src/dotty/tools/backend/jvm/BCodeAsmCommon.scala index 4027cf9fb564..e1ff94be6362 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeAsmCommon.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeAsmCommon.scala @@ -60,7 +60,7 @@ final class BCodeAsmCommon[I <: DottyBackendInterface](val interface: I) { assert(classSym.isClass, classSym) def enclosingMethod(sym: Symbol): Option[Symbol] = { if (sym.isClass || sym == NoSymbol) None - else if (sym.is(Method)) Some(sym) + else if (sym.is(Method, butNot=Synthetic)) Some(sym) else enclosingMethod(sym.originalOwner) } enclosingMethod(classSym.originalOwner) diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala b/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala index db52a74300ef..35b24ab57b00 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala +++ 
b/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala @@ -24,6 +24,7 @@ import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Phases.* import dotty.tools.dotc.core.Decorators.em import dotty.tools.dotc.report +import dotty.tools.dotc.ast.Trees.SyntheticUnit /* * @@ -218,10 +219,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val success = new asm.Label val failure = new asm.Label - val hasElse = !elsep.isEmpty && (elsep match { - case Literal(value) if value.tag == UnitTag => false - case _ => true - }) + val hasElse = !elsep.hasAttachment(SyntheticUnit) genCond(condp, success, failure, targetIfNoJump = success) markProgramPoint(success) @@ -250,6 +248,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { if hasElse then genLoadTo(elsep, expectedType, dest) else + lineNumber(tree.cond) genAdaptAndSendToDest(UNIT, expectedType, dest) expectedType end if @@ -1144,7 +1143,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { * - Every time when generating an ATHROW, a new basic block is started. * - During classfile writing, such basic blocks are found to be dead: no branches go there * - Eliminating dead code would probably require complex shifts in the output byte buffer - * - But there's an easy solution: replace all code in the dead block with with + * - But there's an easy solution: replace all code in the dead block with * `nop; nop; ... nop; athrow`, making sure the bytecode size stays the same * - The corresponding stack frame can be easily generated: on entering a dead the block, * the frame requires a single Throwable on the stack. diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala b/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala index 394700c2898e..4f4caf36d92a 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala @@ -623,7 +623,13 @@ trait BCodeSkelBuilder extends BCodeHelpers { } if (emitLines && tree.span.exists && !tree.hasAttachment(SyntheticUnit)) { - val nr = ctx.source.offsetToLine(tree.span.point) + 1 + val nr = + val sourcePos = tree.sourcePos + ( + if sourcePos.exists then sourcePos.source.positionInUltimateSource(sourcePos).line + else ctx.source.offsetToLine(tree.span.point) // fallback + ) + 1 + if (nr != lastEmittedLineNr) { lastEmittedLineNr = nr getNonLabelNode(lastInsn) match { diff --git a/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala b/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala index b8d7ee04c870..97934935f352 100644 --- a/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala +++ b/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala @@ -113,11 +113,12 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAcce val directlyInheritedTraits = sym.directlyInheritedTraits val directlyInheritedTraitsSet = directlyInheritedTraits.toSet val allBaseClasses = directlyInheritedTraits.iterator.flatMap(_.asClass.baseClasses.drop(1)).toSet - val superCalls = superCallsMap.getOrElse(sym, Set.empty) - val additional = (superCalls -- directlyInheritedTraitsSet).filter(_.is(Trait)) + val superCalls = superCallsMap.getOrElse(sym, List.empty) + val superCallsSet = superCalls.toSet + val additional = superCalls.filter(t => !directlyInheritedTraitsSet(t) && t.is(Trait)) // if (additional.nonEmpty) // println(s"$fullName: adding supertraits $additional") - directlyInheritedTraits.filter(t => !allBaseClasses(t) || superCalls(t)) ++ additional + 
directlyInheritedTraits.filter(t => !allBaseClasses(t) || superCallsSet(t)) ++ additional } val interfaces = classSym.superInterfaces.map(classBTypeFromSymbol) diff --git a/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala b/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala index 865ee9bf4af9..cb7ed3d54788 100644 --- a/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala +++ b/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala @@ -185,5 +185,6 @@ object BackendUtils { 20 -> asm.Opcodes.V20, 21 -> asm.Opcodes.V21, 22 -> asm.Opcodes.V22, + 23 -> asm.Opcodes.V23 ) } diff --git a/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala b/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala index 44498082c697..e2730c1e84ab 100644 --- a/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala +++ b/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala @@ -1,6 +1,6 @@ package dotty.tools.backend.jvm -import java.io.{DataOutputStream, IOException, BufferedOutputStream, FileOutputStream} +import java.io.{DataOutputStream, File, IOException, BufferedOutputStream, FileOutputStream} import java.nio.ByteBuffer import java.nio.channels.{ClosedByInterruptException, FileChannel} import java.nio.charset.StandardCharsets.UTF_8 @@ -12,7 +12,7 @@ import java.util.zip.{CRC32, Deflater, ZipEntry, ZipOutputStream} import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Decorators.em -import dotty.tools.io.{AbstractFile, PlainFile} +import dotty.tools.io.{AbstractFile, PlainFile, VirtualFile} import dotty.tools.io.PlainFile.toPlainFile import BTypes.InternalName import scala.util.chaining.* @@ -26,7 +26,6 @@ import scala.language.unsafeNulls * Until then, any changes to this file should be copied to `dotty.tools.io.FileWriters` as well. 
*/ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { - type NullableFile = AbstractFile | Null import frontendAccess.{compilerSettings, backendReporting} sealed trait TastyWriter { @@ -46,7 +45,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { /** * Write a classfile */ - def writeClass(name: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): NullableFile + def writeClass(name: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): AbstractFile /** @@ -91,7 +90,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { } private final class SingleClassWriter(underlying: FileWriter) extends ClassfileWriter { - override def writeClass(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): NullableFile = { + override def writeClass(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): AbstractFile = { underlying.writeFile(classRelativePath(className), bytes) } override def writeTasty(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): Unit = { @@ -103,7 +102,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { } private final class DebugClassWriter(basic: ClassfileWriter, dump: FileWriter) extends ClassfileWriter { - override def writeClass(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): NullableFile = { + override def writeClass(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): AbstractFile = { val outFile = basic.writeClass(className, bytes, sourceFile) dump.writeFile(classRelativePath(className), bytes) outFile @@ -121,7 +120,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { } sealed trait FileWriter { - def writeFile(relativePath: String, bytes: Array[Byte]): NullableFile + def writeFile(relativePath: String, bytes: Array[Byte]): AbstractFile def close(): Unit } @@ -130,7 +129,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { if (file.isInstanceOf[JarArchive]) { val jarCompressionLevel = compilerSettings.jarCompressionLevel // Writing to non-empty JAR might be an undefined behaviour, e.g. in case if other files where - // created using `AbstractFile.bufferedOutputStream`instead of JarWritter + // created using `AbstractFile.bufferedOutputStream`instead of JarWriter val jarFile = file.underlyingSource.getOrElse{ throw new IllegalStateException("No underlying source for jar") } @@ -165,7 +164,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { lazy val crc = new CRC32 - override def writeFile(relativePath: String, bytes: Array[Byte]): NullableFile = this.synchronized { + override def writeFile(relativePath: String, bytes: Array[Byte]): AbstractFile = this.synchronized { val entry = new ZipEntry(relativePath) if (storeOnly) { // When using compression method `STORED`, the ZIP spec requires the CRC and compressed/ @@ -182,7 +181,13 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { jarWriter.putNextEntry(entry) try jarWriter.write(bytes, 0, bytes.length) finally jarWriter.flush() - null + // important detail here, even on Windows, Zinc expects the separator within the jar + // to be the system default, (even if in the actual jar file the entry always uses '/'). 
+ // see https://github.com/sbt/zinc/blob/dcddc1f9cfe542d738582c43f4840e17c053ce81/internal/compiler-bridge/src/main/scala/xsbt/JarUtils.scala#L47 + val pathInJar = + if File.separatorChar == '/' then relativePath + else relativePath.replace('/', File.separatorChar) + PlainFile.toPlainFile(Paths.get(s"${file.absolutePath}!$pathInJar")) } override def close(): Unit = this.synchronized(jarWriter.close()) @@ -230,7 +235,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { private val fastOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE) private val fallbackOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING) - override def writeFile(relativePath: String, bytes: Array[Byte]): NullableFile = { + override def writeFile(relativePath: String, bytes: Array[Byte]): AbstractFile = { val path = base.resolve(relativePath) try { ensureDirForPath(base, path) @@ -279,7 +284,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { finally out.close() } - override def writeFile(relativePath: String, bytes: Array[Byte]):NullableFile = { + override def writeFile(relativePath: String, bytes: Array[Byte]): AbstractFile = { val outFile = getFile(base, relativePath) writeBytes(outFile, bytes) outFile diff --git a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala index 8016c2bfc209..cab17b31c3f3 100644 --- a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala +++ b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala @@ -25,7 +25,7 @@ import StdNames.nme import NameKinds.{LazyBitMapName, LazyLocalName} import Names.Name -class DottyBackendInterface(val superCallsMap: ReadOnlyMap[Symbol, Set[ClassSymbol]])(using val ctx: Context) { +class DottyBackendInterface(val superCallsMap: ReadOnlyMap[Symbol, List[ClassSymbol]])(using val ctx: Context) { private val desugared = new java.util.IdentityHashMap[Type, tpd.Select] diff --git a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala index a616241d9a3e..58daa01e4bdf 100644 --- a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala +++ b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala @@ -23,10 +23,11 @@ class GenBCode extends Phase { self => override def isRunnable(using Context) = super.isRunnable && !ctx.usedBestEffortTasty - private val superCallsMap = new MutableSymbolMap[Set[ClassSymbol]] + private val superCallsMap = new MutableSymbolMap[List[ClassSymbol]] def registerSuperCall(sym: Symbol, calls: ClassSymbol): Unit = { - val old = superCallsMap.getOrElse(sym, Set.empty) - superCallsMap.update(sym, old + calls) + val old = superCallsMap.getOrElse(sym, List.empty) + if (!old.contains(calls)) + superCallsMap.update(sym, old :+ calls) } private val entryPoints = new mutable.HashSet[String]() diff --git a/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala b/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala index 45c6d6ecad44..9f172806a3b5 100644 --- a/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala +++ b/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala @@ -44,11 +44,11 @@ class PostProcessor(val frontendAccess: PostProcessorFrontendAccess, val bTypes: backendReporting.error(em"Error while emitting $internalName\n${ex.getMessage}") null - if bytes != null then - if (AsmUtils.traceSerializedClassEnabled && 
internalName.contains(AsmUtils.traceSerializedClassPattern)) - AsmUtils.traceClass(bytes) - val clsFile = classfileWriter.writeClass(internalName, bytes, sourceFile) - if clsFile != null then clazz.onFileCreated(clsFile) + if bytes != null then + if AsmUtils.traceSerializedClassEnabled && internalName.contains(AsmUtils.traceSerializedClassPattern) then + AsmUtils.traceClass(bytes) + val clsFile = classfileWriter.writeClass(internalName, bytes, sourceFile) + clazz.onFileCreated(clsFile) } def sendToDisk(tasty: GeneratedTasty, sourceFile: AbstractFile): Unit = { @@ -73,7 +73,7 @@ class PostProcessor(val frontendAccess: PostProcessorFrontendAccess, val bTypes: else s" (defined in ${pos2.source.file.name})" def nicify(name: String): String = name.replace('/', '.').nn if name1 == name2 then - backendReporting.warning( + backendReporting.error( em"${nicify(name1)} and ${nicify(name2)} produce classes that overwrite one another", pos1) else backendReporting.warning( diff --git a/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala b/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala index fbb9042affe7..c44c8f19777b 100644 --- a/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala +++ b/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala @@ -11,8 +11,11 @@ class GenSJSIR extends Phase { override def description: String = GenSJSIR.description + override def isEnabled(using Context): Boolean = + ctx.settings.scalajs.value + override def isRunnable(using Context): Boolean = - super.isRunnable && ctx.settings.scalajs.value && !ctx.usedBestEffortTasty + super.isRunnable && !ctx.usedBestEffortTasty def run(using Context): Unit = new JSCodeGen().run() diff --git a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala index 6e2449b5c299..7ba39768871b 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala @@ -25,13 +25,13 @@ import dotty.tools.dotc.transform.{Erasure, ValueClasses} import dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.report -import org.scalajs.ir -import org.scalajs.ir.{ClassKind, Position, Names => jsNames, Trees => js, Types => jstpe} -import org.scalajs.ir.Names.{ClassName, MethodName, SimpleMethodName} -import org.scalajs.ir.OriginalName -import org.scalajs.ir.OriginalName.NoOriginalName -import org.scalajs.ir.Trees.OptimizerHints -import org.scalajs.ir.Version.Unversioned +import dotty.tools.sjs.ir +import dotty.tools.sjs.ir.{ClassKind, Position, Names => jsNames, Trees => js, Types => jstpe} +import dotty.tools.sjs.ir.Names.{ClassName, MethodName, SimpleMethodName} +import dotty.tools.sjs.ir.OriginalName +import dotty.tools.sjs.ir.OriginalName.NoOriginalName +import dotty.tools.sjs.ir.Trees.OptimizerHints +import dotty.tools.sjs.ir.Version.Unversioned import dotty.tools.dotc.transform.sjs.JSSymUtils.* diff --git a/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala b/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala index 098f592daa30..9a7753680bc3 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala @@ -15,12 +15,12 @@ import StdNames.* import dotty.tools.dotc.transform.sjs.JSSymUtils.* -import org.scalajs.ir -import org.scalajs.ir.{Trees => js, Types => jstpe} -import org.scalajs.ir.Names.{LocalName, LabelName, SimpleFieldName, FieldName, SimpleMethodName, MethodName, ClassName} -import org.scalajs.ir.OriginalName -import org.scalajs.ir.OriginalName.NoOriginalName 
-import org.scalajs.ir.UTF8String +import dotty.tools.sjs.ir +import dotty.tools.sjs.ir.{Trees => js, Types => jstpe} +import dotty.tools.sjs.ir.Names.{LocalName, LabelName, SimpleFieldName, FieldName, SimpleMethodName, MethodName, ClassName} +import dotty.tools.sjs.ir.OriginalName +import dotty.tools.sjs.ir.OriginalName.NoOriginalName +import dotty.tools.sjs.ir.UTF8String import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions diff --git a/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala b/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala index b5f9446758a9..e6c73357aa4c 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala @@ -22,12 +22,12 @@ import TypeErasure.ErasedValueType import dotty.tools.dotc.util.{SourcePosition, SrcPos} import dotty.tools.dotc.report -import org.scalajs.ir.{Position, Names => jsNames, Trees => js, Types => jstpe} -import org.scalajs.ir.Names.DefaultModuleID -import org.scalajs.ir.OriginalName.NoOriginalName -import org.scalajs.ir.Position.NoPosition -import org.scalajs.ir.Trees.OptimizerHints -import org.scalajs.ir.Version.Unversioned +import dotty.tools.sjs.ir.{Position, Names => jsNames, Trees => js, Types => jstpe} +import dotty.tools.sjs.ir.Names.DefaultModuleID +import dotty.tools.sjs.ir.OriginalName.NoOriginalName +import dotty.tools.sjs.ir.Position.NoPosition +import dotty.tools.sjs.ir.Trees.OptimizerHints +import dotty.tools.sjs.ir.Version.Unversioned import dotty.tools.dotc.transform.sjs.JSExportUtils.* import dotty.tools.dotc.transform.sjs.JSSymUtils.* @@ -932,7 +932,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { InstanceOfTypeTest(tpe.tycon.typeSymbol.typeRef) case _ => - import org.scalajs.ir.Names + import dotty.tools.sjs.ir.Names (toIRType(tpe): @unchecked) match { case jstpe.AnyType => NoTypeTest diff --git a/compiler/src/dotty/tools/backend/sjs/JSPositions.scala b/compiler/src/dotty/tools/backend/sjs/JSPositions.scala index 3b25187b0acd..a229c9ea0e58 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSPositions.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSPositions.scala @@ -13,7 +13,7 @@ import dotty.tools.dotc.report import dotty.tools.dotc.util.{SourceFile, SourcePosition} import dotty.tools.dotc.util.Spans.Span -import org.scalajs.ir +import dotty.tools.sjs.ir /** Conversion utilities from dotty Positions to IR Positions. 
*/ class JSPositions()(using Context) { diff --git a/compiler/src/dotty/tools/dotc/CompilationUnit.scala b/compiler/src/dotty/tools/dotc/CompilationUnit.scala index a9e5dbacc938..0975c94e916a 100644 --- a/compiler/src/dotty/tools/dotc/CompilationUnit.scala +++ b/compiler/src/dotty/tools/dotc/CompilationUnit.scala @@ -87,7 +87,7 @@ class CompilationUnit protected (val source: SourceFile, val info: CompilationUn */ val depRecorder: sbt.DependencyRecorder = sbt.DependencyRecorder() - /** Suspends the compilation unit by thowing a SuspendException + /** Suspends the compilation unit by throwing a SuspendException * and recording the suspended compilation unit */ def suspend(hint: => String)(using Context): Nothing = diff --git a/compiler/src/dotty/tools/dotc/Compiler.scala b/compiler/src/dotty/tools/dotc/Compiler.scala index ffd3d27f7c99..d8ba1ab5dc2e 100644 --- a/compiler/src/dotty/tools/dotc/Compiler.scala +++ b/compiler/src/dotty/tools/dotc/Compiler.scala @@ -110,7 +110,7 @@ class Compiler { new LetOverApply, // Lift blocks from receivers of applications new ArrayConstructors) :: // Intercept creation of (non-generic) arrays and intrinsify. List(new Erasure) :: // Rewrite types to JVM model, erasing all type parameters, abstract types and refinements. - List(new ElimErasedValueType, // Expand erased value types to their underlying implmementation types + List(new ElimErasedValueType, // Expand erased value types to their underlying implementation types new PureStats, // Remove pure stats from blocks new VCElideAllocations, // Peep-hole optimization to eliminate unnecessary value class allocations new EtaReduce, // Reduce eta expansions of pure paths to the underlying function reference @@ -132,6 +132,7 @@ class Compiler { new ElimStaticThis, // Replace `this` references to static objects by global identifiers new CountOuterAccesses) :: // Identify outer accessors that can be dropped List(new DropOuterAccessors, // Drop unused outer accessors + new DropParentRefinements, // Drop parent refinements from a template new CheckNoSuperThis, // Check that supercalls don't contain references to `this` new Flatten, // Lift all inner classes to package scope new TransformWildcards, // Replace wildcards with default values @@ -151,7 +152,10 @@ class Compiler { List(new GenBCode) :: // Generate JVM bytecode Nil - var runId: Int = 1 + // TODO: Initially 0, so that the first nextRunId call would return InitialRunId == 1 + // Changing the initial runId from 1 to 0 makes the scala2-library-bootstrap fail to compile, + // when the underlying issue is fixed, please update dotc.profiler.RealProfiler.chromeTrace logic + private var runId: Int = 1 def nextRunId: Int = { runId += 1; runId } diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index 11a0430480d9..50fd668c7696 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -339,10 +339,9 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint if phaseWillRun then Stats.trackTime(s"phase time ms/$phase") { val start = System.currentTimeMillis - val profileBefore = profiler.beforePhase(phase) - try units = phase.runOn(units) - catch case _: InterruptedException => cancelInterrupted() - profiler.afterPhase(phase, profileBefore) + profiler.onPhase(phase): + try units = phase.runOn(units) + catch case _: InterruptedException => cancelInterrupted() if (ctx.settings.Xprint.value.containsPhase(phase)) for (unit <- units) def printCtx(unit: 
CompilationUnit) = phase.printingContext( diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index b1b771bc7512..56c153498f87 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -11,6 +11,7 @@ import NameKinds.{UniqueName, ContextBoundParamName, ContextFunctionParamName, D import typer.{Namer, Checking} import util.{Property, SourceFile, SourcePosition, SrcPos, Chars} import config.{Feature, Config} +import config.Feature.{sourceVersion, migrateTo3, enabled, betterForsEnabled} import config.SourceVersion.* import collection.mutable import reporting.* @@ -51,6 +52,10 @@ object desugar { */ val ContextBoundParam: Property.Key[Unit] = Property.StickyKey() + /** Marks a poly function apply method, so that we can handle adding evidence parameters to them in a special way + */ + val PolyFunctionApply: Property.Key[Unit] = Property.StickyKey() + /** What static check should be applied to a Match? */ enum MatchCheck { case None, Exhaustive, IrrefutablePatDef, IrrefutableGenFrom @@ -92,7 +97,24 @@ object desugar { override def ensureCompletions(using Context): Unit = { def completeConstructor(sym: Symbol) = sym.infoOrCompleter match { - case completer: Namer#ClassCompleter => + case completer: Namer#ClassCompleter if !sym.isCompleting => + // An example, derived from tests/run/t6385.scala + // + // class Test(): + // def t1: Foo = Foo(1) + // final case class Foo(value: Int) + // + // Here's the sequence of events: + // * The symbol for Foo.apply is forced to complete + // * The symbol for the `value` parameter of the apply method is forced to complete + // * Completing that value parameter requires typing its type, which is a DerivedTypeTree, + // which only types if it has an OriginalSymbol. + // * So if the case class hasn't been completed, we need (at least) its constructor to be completed + // + // Test tests/neg/i9294.scala is an example of why isCompleting is necessary. + // Annotations are added while completing the constructor, + // so the back reference to foo reaches here which re-initiates the constructor completion. + // So we just skip, as completion is already being triggered. completer.completeConstructor(sym) case _ => } @@ -188,7 +210,7 @@ object desugar { if isSetterNeeded(vdef) then val setterParam = makeSyntheticParameter(tpt = SetterParamTree().watching(vdef)) // The rhs gets filled in later, when field is generated and getter has parameters (see Memoize miniphase) - val setterRhs = if (vdef.rhs.isEmpty) EmptyTree else unitLiteral + val setterRhs = if (vdef.rhs.isEmpty) EmptyTree else syntheticUnitLiteral val setter = cpy.DefDef(vdef)( name = valName.setterName, paramss = (setterParam :: Nil) :: Nil, @@ -224,7 +246,7 @@ object desugar { * def f$default$2[T](x: Int) = x + "m" */ private def defDef(meth: DefDef, isPrimaryConstructor: Boolean = false)(using Context): Tree = - addDefaultGetters(elimContextBounds(meth, isPrimaryConstructor)) + addDefaultGetters(elimContextBounds(meth, isPrimaryConstructor).asInstanceOf[DefDef]) /** Drop context bounds in given TypeDef, replacing them with evidence ValDefs that * get added to a buffer. 
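(Illustrative sketch, not part of the patch: roughly what the context-bound elimination performed by elimContextBounds/addEvidenceParams amounts to at the source level; the synthesized parameter name evidence$1 is only indicative.)

def max[T: Ordering](x: T, y: T): T = summon[Ordering[T]].max(x, y)
// ...is rewritten so the context bound becomes an explicit evidence parameter in a using clause:
def max[T](x: T, y: T)(using evidence$1: Ordering[T]): T = summon[Ordering[T]].max(x, y)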
@@ -286,10 +308,8 @@ object desugar { tdef1 end desugarContextBounds - private def elimContextBounds(meth: DefDef, isPrimaryConstructor: Boolean)(using Context): DefDef = - val DefDef(_, paramss, tpt, rhs) = meth + def elimContextBounds(meth: Tree, isPrimaryConstructor: Boolean = false)(using Context): Tree = val evidenceParamBuf = mutable.ListBuffer[ValDef]() - var seenContextBounds: Int = 0 def freshName(unused: Tree) = seenContextBounds += 1 // Start at 1 like FreshNameCreator. @@ -299,22 +319,44 @@ object desugar { // parameters of the method since shadowing does not affect // implicit resolution in Scala 3. - val paramssNoContextBounds = - val iflag = if Feature.sourceVersion.isAtLeast(`future`) then Given else Implicit + def paramssNoContextBounds(paramss: List[ParamClause]): List[ParamClause] = + val iflag = paramss.lastOption.flatMap(_.headOption) match + case Some(param) if param.mods.isOneOf(GivenOrImplicit) => + param.mods.flags & GivenOrImplicit + case _ => + if Feature.sourceVersion.isAtLeast(`3.6`) then Given + else Implicit val flags = if isPrimaryConstructor then iflag | LocalParamAccessor else iflag | Param mapParamss(paramss) { tparam => desugarContextBounds(tparam, evidenceParamBuf, flags, freshName, paramss) }(identity) - rhs match - case MacroTree(call) => - cpy.DefDef(meth)(rhs = call).withMods(meth.mods | Macro | Erased) - case _ => - addEvidenceParams( - cpy.DefDef(meth)( - name = normalizeName(meth, tpt).asTermName, - paramss = paramssNoContextBounds), - evidenceParamBuf.toList) + meth match + case meth @ DefDef(_, paramss, tpt, rhs) => + val newParamss = paramssNoContextBounds(paramss) + rhs match + case MacroTree(call) => + cpy.DefDef(meth)(rhs = call).withMods(meth.mods | Macro | Erased) + case _ => + addEvidenceParams( + cpy.DefDef(meth)( + name = normalizeName(meth, tpt).asTermName, + paramss = newParamss + ), + evidenceParamBuf.toList + ) + case meth @ PolyFunction(tparams, fun) => + val PolyFunction(tparams: List[untpd.TypeDef] @unchecked, fun) = meth: @unchecked + val Function(vparams: List[untpd.ValDef] @unchecked, rhs) = fun: @unchecked + val newParamss = paramssNoContextBounds(tparams :: vparams :: Nil) + val params = evidenceParamBuf.toList + if params.isEmpty then + meth + else + val boundNames = getBoundNames(params, newParamss) + val recur = fitEvidenceParams(params, nme.apply, boundNames) + val (paramsFst, paramsSnd) = recur(newParamss) + functionsOf((paramsFst ++ paramsSnd).filter(_.nonEmpty), rhs) end elimContextBounds def addDefaultGetters(meth: DefDef)(using Context): Tree = @@ -442,6 +484,74 @@ object desugar { case _ => (Nil, tree) + private def referencesName(vdef: ValDef, names: Set[TermName])(using Context): Boolean = + vdef.tpt.existsSubTree: + case Ident(name: TermName) => names.contains(name) + case _ => false + + /** Fit evidence `params` into the `mparamss` parameter lists, making sure + * that all parameters referencing `params` are after them. 
+ * - for methods, the final parameter lists are := result._1 ++ result._2 + * - for poly functions, each element of the pair contains at most one term + * parameter list + * + * @param params the list of evidence parameters that should fit into `mparamss` + * @param methName the name of the method that `mparamss` belongs to + * @param boundNames the names of the evidence parameters + * @param mparamss the original parameter lists of the method + * @return a pair of parameter lists containing all parameter lists in a + * reference-correct order; `params` always ends up at the + * boundary between the two pair elements; this is relevant for poly functions, + * where `mparamss` is guaranteed to have exactly one term parameter list, + * so each pair element will have at most one term parameter list + */ + private def fitEvidenceParams( + params: List[ValDef], + methName: Name, + boundNames: Set[TermName] + )(mparamss: List[ParamClause])(using Context): (List[ParamClause], List[ParamClause]) = mparamss match + case ValDefs(mparams) :: _ if mparams.exists(referencesName(_, boundNames)) => + (params :: Nil) -> mparamss + case ValDefs(mparams @ (mparam :: _)) :: Nil if mparam.mods.isOneOf(GivenOrImplicit) => + val normParams = + if params.head.mods.flags.is(Given) != mparam.mods.flags.is(Given) then + params.map: param => + val normFlags = param.mods.flags &~ GivenOrImplicit | (mparam.mods.flags & (GivenOrImplicit)) + param.withMods(param.mods.withFlags(normFlags)) + .showing(i"adapted param $result ${result.mods.flags} for ${methName}", Printers.desugar) + else params + ((normParams ++ mparams) :: Nil) -> Nil + case mparams :: mparamss1 => + val (fst, snd) = fitEvidenceParams(params, methName, boundNames)(mparamss1) + (mparams :: fst) -> snd + case Nil => + Nil -> (params :: Nil) + + /** Create a chain of possibly contextual functions from the parameter lists */ + private def functionsOf(paramss: List[ParamClause], rhs: Tree)(using Context): Tree = paramss match + case Nil => rhs + case ValDefs(head @ (fst :: _)) :: rest if fst.mods.isOneOf(GivenOrImplicit) => + val paramTpts = head.map(_.tpt) + val paramNames = head.map(_.name) + val paramsErased = head.map(_.mods.flags.is(Erased)) + makeContextualFunction(paramTpts, paramNames, functionsOf(rest, rhs), paramsErased).withSpan(rhs.span) + case ValDefs(head) :: rest => + Function(head, functionsOf(rest, rhs)) + case TypeDefs(head) :: rest => + PolyFunction(head, functionsOf(rest, rhs)) + case _ => + assert(false, i"unexpected paramss $paramss") + EmptyTree + + private def getBoundNames(params: List[ValDef], paramss: List[ParamClause])(using Context): Set[TermName] = + var boundNames = params.map(_.name).toSet // all evidence parameter + context bound proxy names + for mparams <- paramss; mparam <- mparams do + mparam match + case tparam: TypeDef if tparam.mods.annotations.exists(WitnessNamesAnnot.unapply(_).isDefined) => + boundNames += tparam.name.toTermName + case _ => + boundNames + /** Add all evidence parameters in `params` as implicit parameters to `meth`.
* The position of the added parameters is determined as follows: * @@ -456,29 +566,23 @@ object desugar { private def addEvidenceParams(meth: DefDef, params: List[ValDef])(using Context): DefDef = if params.isEmpty then return meth - var boundNames = params.map(_.name).toSet // all evidence parameter + context bound proxy names - for mparams <- meth.paramss; mparam <- mparams do - mparam match - case tparam: TypeDef if tparam.mods.annotations.exists(WitnessNamesAnnot.unapply(_).isDefined) => - boundNames += tparam.name.toTermName - case _ => + val boundNames = getBoundNames(params, meth.paramss) - def referencesBoundName(vdef: ValDef): Boolean = - vdef.tpt.existsSubTree: - case Ident(name: TermName) => boundNames.contains(name) - case _ => false + val fitParams = fitEvidenceParams(params, meth.name, boundNames) - def recur(mparamss: List[ParamClause]): List[ParamClause] = mparamss match - case ValDefs(mparams) :: _ if mparams.exists(referencesBoundName) => - params :: mparamss - case ValDefs(mparams @ (mparam :: _)) :: Nil if mparam.mods.isOneOf(GivenOrImplicit) => - (params ++ mparams) :: Nil - case mparams :: mparamss1 => - mparams :: recur(mparamss1) - case Nil => - params :: Nil - - cpy.DefDef(meth)(paramss = recur(meth.paramss)) + if meth.removeAttachment(PolyFunctionApply).isDefined then + // for PolyFunctions we are limited to a single term param list, so we + // reuse the fitEvidenceParams logic to compute the new parameter lists + // and then we add the other parameter lists as function types to the + // return type + val (paramsFst, paramsSnd) = fitParams(meth.paramss) + if ctx.mode.is(Mode.Type) then + cpy.DefDef(meth)(paramss = paramsFst, tpt = functionsOf(paramsSnd, meth.tpt)) + else + cpy.DefDef(meth)(paramss = paramsFst, rhs = functionsOf(paramsSnd, meth.rhs)) + else + val (paramsFst, paramsSnd) = fitParams(meth.paramss) + cpy.DefDef(meth)(paramss = paramsFst ++ paramsSnd) end addEvidenceParams /** The parameters generated from the contextual bounds of `meth`, as generated by `desugar.defDef` */ @@ -1069,8 +1173,8 @@ object desugar { paramss match case rightParam :: paramss1 => // `rightParam` must have a single parameter and without `given` flag - def badRightAssoc(problem: String) = - report.error(em"right-associative extension method $problem", mdef.srcPos) + def badRightAssoc(problem: String, pos: SrcPos) = + report.error(em"right-associative extension method $problem", pos) extParamss ++ mdef.paramss rightParam match @@ -1086,11 +1190,23 @@ object desugar { // // If you change the names of the clauses below, also change them in right-associative-extension-methods.md val (leftTyParamsAndLeadingUsing, leftParamAndTrailingUsing) = extParamss.span(isUsingOrTypeParamClause) + + val names = (for ps <- mdef.paramss; p <- ps yield p.name).toSet[Name] + + val tt = new untpd.UntypedTreeTraverser: + def traverse(tree: Tree)(using Context): Unit = tree match + case tree: Ident if names.contains(tree.name) => + badRightAssoc(s"cannot have a forward reference to ${tree.name}", tree.srcPos) + case _ => traverseChildren(tree) + + for ts <- leftParamAndTrailingUsing; t <- ts do + tt.traverse(t) + leftTyParamsAndLeadingUsing ::: rightTyParams ::: rightParam :: leftParamAndTrailingUsing ::: paramss1 else - badRightAssoc("cannot start with using clause") + badRightAssoc("cannot start with using clause", mdef.srcPos) case _ => - badRightAssoc("must start with a single parameter") + badRightAssoc("must start with a single parameter", mdef.srcPos) case _ => // no value parameters, so not an 
infix operator. extParamss ++ mdef.paramss @@ -1182,27 +1298,29 @@ object desugar { /** Desugar [T_1, ..., T_M] => (P_1, ..., P_N) => R * Into scala.PolyFunction { def apply[T_1, ..., T_M](x$1: P_1, ..., x$N: P_N): R } */ - def makePolyFunctionType(tree: PolyFunction)(using Context): RefinedTypeTree = - val PolyFunction(tparams: List[untpd.TypeDef] @unchecked, fun @ untpd.Function(vparamTypes, res)) = tree: @unchecked - val paramFlags = fun match - case fun: FunctionWithMods => - // TODO: make use of this in the desugaring when pureFuns is enabled. - // val isImpure = funFlags.is(Impure) - - // Function flags to be propagated to each parameter in the desugared method type. - val givenFlag = fun.mods.flags.toTermFlags & Given - fun.erasedParams.map(isErased => if isErased then givenFlag | Erased else givenFlag) - case _ => - vparamTypes.map(_ => EmptyFlags) - - val vparams = vparamTypes.lazyZip(paramFlags).zipWithIndex.map { - case ((p: ValDef, paramFlags), n) => p.withAddedFlags(paramFlags) - case ((p, paramFlags), n) => makeSyntheticParameter(n + 1, p).withAddedFlags(paramFlags) - }.toList - - RefinedTypeTree(ref(defn.PolyFunctionType), List( - DefDef(nme.apply, tparams :: vparams :: Nil, res, EmptyTree).withFlags(Synthetic) - )).withSpan(tree.span) + def makePolyFunctionType(tree: PolyFunction)(using Context): RefinedTypeTree = (tree: @unchecked) match + case PolyFunction(tparams: List[untpd.TypeDef] @unchecked, fun @ untpd.Function(vparamTypes, res)) => + val paramFlags = fun match + case fun: FunctionWithMods => + // TODO: make use of this in the desugaring when pureFuns is enabled. + // val isImpure = funFlags.is(Impure) + + // Function flags to be propagated to each parameter in the desugared method type. + val givenFlag = fun.mods.flags.toTermFlags & Given + fun.erasedParams.map(isErased => if isErased then givenFlag | Erased else givenFlag) + case _ => + vparamTypes.map(_ => EmptyFlags) + + val vparams = vparamTypes.lazyZip(paramFlags).zipWithIndex.map { + case ((p: ValDef, paramFlags), n) => p.withAddedFlags(paramFlags) + case ((p, paramFlags), n) => makeSyntheticParameter(n + 1, p).withAddedFlags(paramFlags) + }.toList + + RefinedTypeTree(ref(defn.PolyFunctionType), List( + DefDef(nme.apply, tparams :: vparams :: Nil, res, EmptyTree) + .withFlags(Synthetic) + .withAttachment(PolyFunctionApply, ()) + )).withSpan(tree.span) end makePolyFunctionType /** Invent a name for an anonympus given of type or template `impl`. */ @@ -1219,7 +1337,7 @@ object desugar { str.toTermName.asSimpleName /** Extract a synthesized given name from a type tree. This is used for - * both anonymous givens and (under x.modularity) deferred givens. + * both anonymous givens and deferred givens. 
* @param followArgs if true include argument types in the name */ private class NameExtractor(followArgs: Boolean) extends UntypedTreeAccumulator[String] { @@ -1489,7 +1607,7 @@ object desugar { def block(tree: Block)(using Context): Block = tree.expr match { case EmptyTree => cpy.Block(tree)(tree.stats, - unitLiteral.withSpan(if (tree.stats.isEmpty) tree.span else tree.span.endPos)) + syntheticUnitLiteral.withSpan(if (tree.stats.isEmpty) tree.span else tree.span.endPos)) case _ => tree } @@ -1563,9 +1681,10 @@ object desugar { /** Translate tuple expressions * - * () ==> () - * (t) ==> t - * (t1, ..., tN) ==> TupleN(t1, ..., tN) + * () ==> () + * (t) ==> t + * (t1, ..., tN) ==> TupleN(t1, ..., tN) + * (n1 = t1, ..., nN = tN) ==> NamedTuple.build[(n1, ..., nN)]()(TupleN(t1, ..., tN)) */ def tuple(tree: Tuple, pt: Type)(using Context): Tree = var elems = checkWellFormedTupleElems(tree.trees) @@ -1596,9 +1715,13 @@ object desugar { if ctx.mode.is(Mode.Type) then AppliedTypeTree(ref(defn.NamedTupleTypeRef), namesTuple :: tup :: Nil) else - TypeApply( - Apply(Select(ref(defn.NamedTupleModule), nme.withNames), tup), - namesTuple :: Nil) + Apply( + Apply( + TypeApply( + Select(ref(defn.NamedTupleModule), nme.build), // NamedTuple.build + namesTuple :: Nil), // ++ [(names...)] + Nil), // ++ () + tup :: Nil) // .++ ((values...)) /** When desugaring a list pattern arguments `elems` adapt them and the * expected type `pt` to each other. This means: @@ -1799,46 +1922,81 @@ object desugar { /** Create tree for for-comprehension `` or * `` where mapName and flatMapName are chosen * corresponding to whether this is a for-do or a for-yield. - * The creation performs the following rewrite rules: + * If betterFors are enabled, the creation performs the following rewrite rules: * - * 1. + * 1. if betterFors is enabled: * - * for (P <- G) E ==> G.foreach (P => E) + * for () do E ==> E + * or + * for () yield E ==> E * - * Here and in the following (P => E) is interpreted as the function (P => E) - * if P is a variable pattern and as the partial function { case P => E } otherwise. + * (Where empty for-comprehensions are excluded by the parser) * * 2. * - * for (P <- G) yield E ==> G.map (P => E) + * for (P <- G) do E ==> G.foreach (P => E) + * + * Here and in the following (P => E) is interpreted as the function (P => E) + * if P is a variable pattern and as the partial function { case P => E } otherwise. * * 3. * + * for (P <- G) yield P ==> G + * + * If betterFors is enabled, P is a variable or a tuple of variables and G is not a withFilter. + * + * for (P <- G) yield E ==> G.map (P => E) + * + * Otherwise + * + * 4. + * * for (P_1 <- G_1; P_2 <- G_2; ...) ... * ==> * G_1.flatMap (P_1 => for (P_2 <- G_2; ...) ...) * - * 4. + * 5. + * + * for (P <- G; if E; ...) ... + * ==> + * for (P <- G.withFilter (P => E); ...) ... * - * for (P <- G; E; ...) ... - * => - * for (P <- G.filter (P => E); ...) ... + * 6. For any N, if betterFors is enabled: * - * 5. For any N: + * for (P <- G; P_1 = E_1; ... P_N = E_N; P1 <- G1; ...) ... + * ==> + * G.flatMap (P => for (P_1 = E_1; ... P_N = E_N; ...)) + * + * 7. For any N, if betterFors is enabled: + * + * for (P <- G; P_1 = E_1; ... P_N = E_N) ... + * ==> + * G.map (P => for (P_1 = E_1; ... P_N = E_N) ...) + * + * 8. For any N: * - * for (P_1 <- G; P_2 = E_2; val P_N = E_N; ...) + * for (P <- G; P_1 = E_1; ... P_N = E_N; ...) * ==> - * for (TupleN(P_1, P_2, ... P_N) <- - * for (x_1 @ P_1 <- G) yield { - * val x_2 @ P_2 = E_2 + * for (TupleN(P, P_1, ... 
P_N) <- + * for (x @ P <- G) yield { + * val x_1 @ P_1 = E_2 * ... - * val x_N & P_N = E_N - * TupleN(x_1, ..., x_N) - * } ...) + * val x_N @ P_N = E_N + * TupleN(x, x_1, ..., x_N) + * }; if E; ...) * * If any of the P_i are variable patterns, the corresponding `x_i @ P_i` is not generated * and the variable constituting P_i is used instead of x_i * + * 9. For any N, if betterFors is enabled: + * + * for (P_1 = E_1; ... P_N = E_N; ...) + * ==> + * { + * val x_N @ P_N = E_N + * for (...) + * } + * * @param mapName The name to be used for maps (either map or foreach) * @param flatMapName The name to be used for flatMaps (either flatMap or foreach) * @param enums The enumerators in the for expression @@ -1947,7 +2105,7 @@ object desugar { case GenCheckMode.FilterAlways => false // pattern was prefixed by `case` case GenCheckMode.FilterNow | GenCheckMode.CheckAndFilter => isVarBinding(gen.pat) || isIrrefutable(gen.pat, gen.expr) case GenCheckMode.Check => true - case GenCheckMode.Ignore => true + case GenCheckMode.Ignore | GenCheckMode.Filtered => true /** rhs.name with a pattern filter on rhs unless `pat` is irrefutable when * matched against `rhs`. @@ -1957,12 +2115,31 @@ object desugar { Select(rhs, name) } + def deepEquals(t1: Tree, t2: Tree): Boolean = + (unsplice(t1), unsplice(t2)) match + case (Ident(n1), Ident(n2)) => n1 == n2 + case (Tuple(ts1), Tuple(ts2)) => ts1.corresponds(ts2)(deepEquals) + case _ => false + enums match { + case Nil if betterForsEnabled => body case (gen: GenFrom) :: Nil => - Apply(rhsSelect(gen, mapName), makeLambda(gen, body)) + if betterForsEnabled + && gen.checkMode != GenCheckMode.Filtered // results of withFilter have the wrong type + && deepEquals(gen.pat, body) + then gen.expr // avoid a redundant map with identity + else Apply(rhsSelect(gen, mapName), makeLambda(gen, body)) case (gen: GenFrom) :: (rest @ (GenFrom(_, _, _) :: _)) => val cont = makeFor(mapName, flatMapName, rest, body) Apply(rhsSelect(gen, flatMapName), makeLambda(gen, cont)) + case (gen: GenFrom) :: rest + if betterForsEnabled + && rest.dropWhile(_.isInstanceOf[GenAlias]).headOption.forall(e => e.isInstanceOf[GenFrom]) => // possible aliases followed by a generator or end of for + val cont = makeFor(mapName, flatMapName, rest, body) + val selectName = + if rest.exists(_.isInstanceOf[GenFrom]) then flatMapName + else mapName + Apply(rhsSelect(gen, selectName), makeLambda(gen, cont)) case (gen: GenFrom) :: (rest @ GenAlias(_, _) :: _) => val (valeqs, rest1) = rest.span(_.isInstanceOf[GenAlias]) val pats = valeqs map { case GenAlias(pat, _) => pat } @@ -1981,8 +2158,20 @@ object desugar { makeFor(mapName, flatMapName, vfrom1 :: rest1, body) case (gen: GenFrom) :: test :: rest => val filtered = Apply(rhsSelect(gen, nme.withFilter), makeLambda(gen, test)) - val genFrom = GenFrom(gen.pat, filtered, GenCheckMode.Ignore) + val genFrom = GenFrom(gen.pat, filtered, if betterForsEnabled then GenCheckMode.Filtered else GenCheckMode.Ignore) makeFor(mapName, flatMapName, genFrom :: rest, body) + case GenAlias(_, _) :: _ if betterForsEnabled => + val (valeqs, rest) = enums.span(_.isInstanceOf[GenAlias]) + val pats = valeqs.map { case GenAlias(pat, _) => pat } + val rhss = valeqs.map { case GenAlias(_, rhs) => rhs } + val (defpats, ids) = pats.map(makeIdPat).unzip + val pdefs = valeqs.lazyZip(defpats).lazyZip(rhss).map { (valeq, defpat, rhs) => + val mods = defpat match + case defTree: DefTree => defTree.mods + case _ => Modifiers() + makePatDef(valeq, mods, defpat, rhs) + } + Block(pdefs, 
makeFor(mapName, flatMapName, rest, body)) case _ => EmptyTree //may happen for erroneous input } @@ -2013,7 +2202,7 @@ object desugar { case ts: Thicket => ts.trees.tail case t => Nil } map { - case Block(Nil, EmptyTree) => unitLiteral // for s"... ${} ..." + case Block(Nil, EmptyTree) => syntheticUnitLiteral // for s"... ${} ..." case Block(Nil, expr) => expr // important for interpolated string as patterns, see i1773.scala case t => t } @@ -2046,7 +2235,7 @@ object desugar { val pats1 = if (tpt.isEmpty) pats else pats map (Typed(_, tpt)) flatTree(pats1 map (makePatDef(tree, mods, _, rhs))) case ext: ExtMethods => - Block(List(ext), unitLiteral.withSpan(ext.span)) + Block(List(ext), syntheticUnitLiteral.withSpan(ext.span)) case f: FunctionWithMods if f.hasErasedParams => makeFunctionWithValDefs(f, pt) } desugared.withSpan(tree.span) @@ -2214,7 +2403,7 @@ object desugar { case Quote(body, _) => new UntypedTreeTraverser { def traverse(tree: untpd.Tree)(using Context): Unit = tree match { - case SplicePattern(body, _) => collect(body) + case SplicePattern(body, _, _) => collect(body) case _ => traverseChildren(tree) } }.traverse(body) diff --git a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala index f83f12e1c027..e77642a8e2b9 100644 --- a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala +++ b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala @@ -3,6 +3,7 @@ package ast import core.Contexts.* import core.Decorators.* +import core.StdNames import util.Spans.* import Trees.{Closure, MemberDef, DefTree, WithLazyFields} import dotty.tools.dotc.core.Types.AnnotatedType @@ -74,21 +75,50 @@ object NavigateAST { def pathTo(span: Span, from: List[Positioned], skipZeroExtent: Boolean = false)(using Context): List[Positioned] = { def childPath(it: Iterator[Any], path: List[Positioned]): List[Positioned] = { var bestFit: List[Positioned] = path - while (it.hasNext) { - val path1 = it.next() match { + while (it.hasNext) do + val path1 = it.next() match + case sel: untpd.Select if isRecoveryTree(sel) => path + case sel: untpd.Ident if isPatternRecoveryTree(sel) => path case p: Positioned if !p.isInstanceOf[Closure[?]] => singlePath(p, path) case m: untpd.Modifiers => childPath(m.productIterator, path) case xs: List[?] => childPath(xs.iterator, path) case _ => path - } - if ((path1 ne path) && - ((bestFit eq path) || - bestFit.head.span != path1.head.span && - bestFit.head.span.contains(path1.head.span))) + + if (path1 ne path) && ((bestFit eq path) || isBetterFit(bestFit, path1)) then bestFit = path1 - } + bestFit } + + /** + * When choosing better fit we compare spans. If candidate span has starting or ending point inside (exclusive) + * current best fit it is selected as new best fit. This means that same spans are failing the first predicate. + * + * In case when spans start and end at same offsets we prefer non synthethic one. + */ + def isBetterFit(currentBest: List[Positioned], candidate: List[Positioned]): Boolean = + if currentBest.isEmpty && candidate.nonEmpty then true + else if currentBest.nonEmpty && candidate.nonEmpty then + val bestSpan = currentBest.head.span + val candidateSpan = candidate.head.span + + bestSpan != candidateSpan && + envelops(bestSpan, candidateSpan) || + bestSpan.contains(candidateSpan) && bestSpan.isSynthetic && !candidateSpan.isSynthetic + else false + + def isRecoveryTree(sel: untpd.Select): Boolean = + sel.span.isSynthetic + && (sel.name == StdNames.nme.??? 
&& sel.qualifier.symbol.name == StdNames.nme.Predef) + + def isPatternRecoveryTree(ident: untpd.Ident): Boolean = + ident.span.isSynthetic && StdNames.nme.WILDCARD == ident.name + + def envelops(a: Span, b: Span): Boolean = + !b.exists || a.exists && ( + (a.start < b.start && a.end >= b.end ) || (a.start <= b.start && a.end > b.end) + ) + /* * Annotations trees are located in the Type */ diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index 97de434ba9d5..09c855847fac 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -141,9 +141,17 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => loop(tree, Nil) /** All term arguments of an application in a single flattened list */ + def allTermArguments(tree: Tree): List[Tree] = unsplice(tree) match { + case Apply(fn, args) => allTermArguments(fn) ::: args + case TypeApply(fn, args) => allTermArguments(fn) + case Block(_, expr) => allTermArguments(expr) + case _ => Nil + } + + /** All type and term arguments of an application in a single flattened list */ def allArguments(tree: Tree): List[Tree] = unsplice(tree) match { case Apply(fn, args) => allArguments(fn) ::: args - case TypeApply(fn, _) => allArguments(fn) + case TypeApply(fn, args) => allArguments(fn) ::: args case Block(_, expr) => allArguments(expr) case _ => Nil } @@ -806,17 +814,31 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => case _ => false } - /** An extractor for closures, either contained in a block or standalone. + /** An extractor for closures, possibly typed, and possibly including the + * definition of the anonymous def. */ object closure { - def unapply(tree: Tree): Option[(List[Tree], Tree, Tree)] = tree match { - case Block(_, expr) => unapply(expr) - case Closure(env, meth, tpt) => Some(env, meth, tpt) - case Typed(expr, _) => unapply(expr) + def unapply(tree: Tree)(using Context): Option[(List[Tree], Tree, Tree)] = tree match { + case Block((meth : DefDef) :: Nil, closure: Closure) if meth.symbol == closure.meth.symbol => + unapply(closure) + case Block(Nil, expr) => + unapply(expr) + case Closure(env, meth, tpt) => + Some(env, meth, tpt) + case Typed(expr, _) => + unapply(expr) case _ => None } } + /** An extractor for a closure or a block ending in one. This was + * previously `closure` before that one was tightened. + */ + object blockEndingInClosure: + def unapply(tree: Tree)(using Context): Option[(List[Tree], Tree, Tree)] = tree match + case Block(_, expr) => unapply(expr) + case _ => closure.unapply(tree) + /** An extractor for def of a closure contained the block of the closure. 
*/ object closureDef { def unapply(tree: Tree)(using Context): Option[DefDef] = tree match { @@ -871,7 +893,7 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => } private object quotePatVars extends TreeAccumulator[List[Symbol]] { def apply(syms: List[Symbol], tree: Tree)(using Context) = tree match { - case SplicePattern(pat, _) => outer.apply(syms, pat) + case SplicePattern(pat, _, _) => outer.apply(syms, pat) case _ => foldOver(syms, tree) } } diff --git a/compiler/src/dotty/tools/dotc/ast/TreeMapWithTrackedStats.scala b/compiler/src/dotty/tools/dotc/ast/TreeMapWithTrackedStats.scala index b302a2463a4e..6f0723bf8f35 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeMapWithTrackedStats.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeMapWithTrackedStats.scala @@ -32,7 +32,7 @@ abstract class TreeMapWithTrackedStats extends TreeMapWithImplicits: case _ => tree end updateTracked - /** Process a list of trees and give the priority to trakced trees */ + /** Process a list of trees and give the priority to tracked trees */ private final def withUpdatedTrackedTrees(stats: List[Tree])(using Context) = val trackedTrees = TreeMapWithTrackedStats.trackedTrees stats.mapConserve: @@ -67,7 +67,7 @@ end TreeMapWithTrackedStats object TreeMapWithTrackedStats: private val TrackedTrees = new Property.Key[mutable.Map[Symbol, tpd.MemberDef]] - /** Fetch the tracked trees in the cuurent context */ + /** Fetch the tracked trees in the current context */ private def trackedTrees(using Context): mutable.Map[Symbol, MemberDef] = ctx.property(TrackedTrees).get diff --git a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala index 668daea5f1fd..98d9a0ca85f6 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala @@ -69,7 +69,12 @@ class TreeTypeMap( } def mapType(tp: Type): Type = - mapOwnerThis(typeMap(tp).substSym(substFrom, substTo)) + val substMap = new TypeMap(): + def apply(tp: Type): Type = tp match + case tp: TermRef if tp.symbol.isImport => mapOver(tp) + case tp => tp.substSym(substFrom, substTo) + mapOwnerThis(substMap(typeMap(tp))) + end mapType private def updateDecls(prevStats: List[Tree], newStats: List[Tree]): Unit = if (prevStats.isEmpty) assert(newStats.isEmpty) diff --git a/compiler/src/dotty/tools/dotc/ast/Trees.scala b/compiler/src/dotty/tools/dotc/ast/Trees.scala index 41899ed661f5..4c7ca396117e 100644 --- a/compiler/src/dotty/tools/dotc/ast/Trees.scala +++ b/compiler/src/dotty/tools/dotc/ast/Trees.scala @@ -763,9 +763,10 @@ object Trees { * `SplicePattern` can only be contained within a `QuotePattern`. * * @param body The tree that was spliced + * @param typeargs The type arguments of the splice (the HOAS arguments) * @param args The arguments of the splice (the HOAS arguments) */ - case class SplicePattern[+T <: Untyped] private[ast] (body: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) + case class SplicePattern[+T <: Untyped] private[ast] (body: Tree[T], typeargs: List[Tree[T]], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends TermTree[T] { type ThisTree[+T <: Untyped] = SplicePattern[T] } @@ -777,6 +778,7 @@ object Trees { override def isEmpty: Boolean = !hasType override def toString: String = s"TypeTree${if (hasType) s"[$typeOpt]" else ""}" + def isInferred = false } /** Tree that replaces a level 1 splices in pickled (level 0) quotes. 
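The `isInferred` hook added to `TypeTree` above (and overridden to `true` by `InferredTypeTree` in the next hunk) lets consumers of trees distinguish inferred types from type trees that were written in source or synthesised from an already-known type. A minimal sketch of how a tree consumer might branch on it (assumed usage, not part of this patch):

    import dotty.tools.dotc.ast.tpd

    def describeTpt(tpt: tpd.Tree): String = tpt match
      case tpt: tpd.TypeTree if tpt.isInferred => "inferred by the compiler (InferredTypeTree)"
      case tpt: tpd.TypeTree                   => "synthesised from an already-known Type"
      case _                                   => "written in source"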
@@ -799,6 +801,7 @@ object Trees { */ class InferredTypeTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends TypeTree[T]: type ThisTree[+T <: Untyped] <: InferredTypeTree[T] + override def isInferred = true /** ref.type */ case class SingletonTypeTree[+T <: Untyped] private[ast] (ref: Tree[T])(implicit @constructorOnly src: SourceFile) @@ -1372,9 +1375,9 @@ object Trees { case tree: QuotePattern if (bindings eq tree.bindings) && (body eq tree.body) && (quotes eq tree.quotes) => tree case _ => finalize(tree, untpd.QuotePattern(bindings, body, quotes)(sourceFile(tree))) } - def SplicePattern(tree: Tree)(body: Tree, args: List[Tree])(using Context): SplicePattern = tree match { - case tree: SplicePattern if (body eq tree.body) && (args eq tree.args) => tree - case _ => finalize(tree, untpd.SplicePattern(body, args)(sourceFile(tree))) + def SplicePattern(tree: Tree)(body: Tree, typeargs: List[Tree], args: List[Tree])(using Context): SplicePattern = tree match { + case tree: SplicePattern if (body eq tree.body) && (typeargs eq tree.typeargs) & (args eq tree.args) => tree + case _ => finalize(tree, untpd.SplicePattern(body, typeargs, args)(sourceFile(tree))) } def SingletonTypeTree(tree: Tree)(ref: Tree)(using Context): SingletonTypeTree = tree match { case tree: SingletonTypeTree if (ref eq tree.ref) => tree @@ -1622,8 +1625,8 @@ object Trees { cpy.Splice(tree)(transform(expr)(using spliceContext)) case tree @ QuotePattern(bindings, body, quotes) => cpy.QuotePattern(tree)(transform(bindings), transform(body)(using quoteContext), transform(quotes)) - case tree @ SplicePattern(body, args) => - cpy.SplicePattern(tree)(transform(body)(using spliceContext), transform(args)) + case tree @ SplicePattern(body, targs, args) => + cpy.SplicePattern(tree)(transform(body)(using spliceContext), transform(targs), transform(args)) case tree @ Hole(isTerm, idx, args, content) => cpy.Hole(tree)(isTerm, idx, transform(args), transform(content)) case _ => @@ -1771,8 +1774,8 @@ object Trees { this(x, expr)(using spliceContext) case QuotePattern(bindings, body, quotes) => this(this(this(x, bindings), body)(using quoteContext), quotes) - case SplicePattern(body, args) => - this(this(x, body)(using spliceContext), args) + case SplicePattern(body, typeargs, args) => + this(this(this(x, body)(using spliceContext), typeargs), args) case Hole(_, _, args, content) => this(this(x, args), content) case _ => diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala index faace26de84d..55021bf50ace 100644 --- a/compiler/src/dotty/tools/dotc/ast/tpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala @@ -11,6 +11,7 @@ import Symbols.*, StdNames.*, Annotations.*, Trees.*, Symbols.* import Decorators.*, DenotTransformers.* import collection.{immutable, mutable} import util.{Property, SourceFile} +import config.Printers.typr import NameKinds.{TempResultName, OuterSelectName} import typer.ConstFold @@ -181,8 +182,8 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def Splice(expr: Tree)(using Context): Splice = ta.assignType(untpd.Splice(expr), expr) - def SplicePattern(pat: Tree, args: List[Tree], tpe: Type)(using Context): SplicePattern = - untpd.SplicePattern(pat, args).withType(tpe) + def SplicePattern(pat: Tree, targs: List[Tree], args: List[Tree], tpe: Type)(using Context): SplicePattern = + untpd.SplicePattern(pat, targs, args).withType(tpe) def Hole(isTerm: Boolean, idx: Int, args: List[Tree], content: Tree, tpe: Type)(using Context): Hole = 
untpd.Hole(isTerm, idx, args, content).withType(tpe) @@ -315,24 +316,41 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def TypeDef(sym: TypeSymbol)(using Context): TypeDef = ta.assignType(untpd.TypeDef(sym.name, TypeTree(sym.info)), sym) - def ClassDef(cls: ClassSymbol, constr: DefDef, body: List[Tree], superArgs: List[Tree] = Nil)(using Context): TypeDef = { + /** Create a class definition + * @param cls the class symbol of the created class + * @param constr its primary constructor + * @param body the statements in its template + * @param superArgs the arguments to pass to the superclass constructor + * @param adaptVarargs if true, allow matching a vararg superclass constructor + * with a missing argument in superArgs, and synthesize an + * empty repeated parameter in the supercall in this case + */ + def ClassDef(cls: ClassSymbol, constr: DefDef, body: List[Tree], + superArgs: List[Tree] = Nil, adaptVarargs: Boolean = false)(using Context): TypeDef = val firstParent :: otherParents = cls.info.parents: @unchecked + + def adaptedSuperArgs(ctpe: Type): List[Tree] = ctpe match + case ctpe: PolyType => + adaptedSuperArgs(ctpe.instantiate(firstParent.argTypes)) + case ctpe: MethodType + if ctpe.paramInfos.length == superArgs.length + 1 => + // last argument must be a vararg, otherwise isApplicable would have failed + superArgs :+ + repeated(Nil, TypeTree(ctpe.paramInfos.last.argInfos.head, inferred = true)) + case _ => + superArgs + val superRef = - if (cls.is(Trait)) TypeTree(firstParent) - else { - def isApplicable(ctpe: Type): Boolean = ctpe match { - case ctpe: PolyType => - isApplicable(ctpe.instantiate(firstParent.argTypes)) - case ctpe: MethodType => - (superArgs corresponds ctpe.paramInfos)(_.tpe <:< _) - case _ => - false - } - val constr = firstParent.decl(nme.CONSTRUCTOR).suchThat(constr => isApplicable(constr.info)) - New(firstParent, constr.symbol.asTerm, superArgs) - } + if cls.is(Trait) then TypeTree(firstParent) + else + val parentConstr = firstParent.applicableConstructors(superArgs.tpes, adaptVarargs) match + case Nil => assert(false, i"no applicable parent constructor of $firstParent for supercall arguments $superArgs") + case constr :: Nil => constr + case _ => assert(false, i"multiple applicable parent constructors of $firstParent for supercall arguments $superArgs") + New(firstParent, parentConstr.asTerm, adaptedSuperArgs(parentConstr.info)) + ClassDefWithParents(cls, constr, superRef :: otherParents.map(TypeTree(_)), body) - } + end ClassDef def ClassDefWithParents(cls: ClassSymbol, constr: DefDef, parents: List[Tree], body: List[Tree])(using Context): TypeDef = { val selfType = @@ -359,13 +377,18 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { * @param parents a non-empty list of class types * @param termForwarders a non-empty list of forwarding definitions specified by their name and the definition they forward to. * @param typeMembers a possibly-empty list of type members specified by their name and their right hand side. + * @param adaptVarargs if true, allow matching a vararg superclass constructor + * with a missing argument in superArgs, and synthesize an + * empty repeated parameter in the supercall in this case * * The class has the same owner as the first function in `termForwarders`. * Its position is the union of all symbols in `termForwarders`. 
*/ - def AnonClass(parents: List[Type], termForwarders: List[(TermName, TermSymbol)], - typeMembers: List[(TypeName, TypeBounds)] = Nil)(using Context): Block = { - AnonClass(termForwarders.head._2.owner, parents, termForwarders.map(_._2.span).reduceLeft(_ union _)) { cls => + def AnonClass(parents: List[Type], + termForwarders: List[(TermName, TermSymbol)], + typeMembers: List[(TypeName, TypeBounds)], + adaptVarargs: Boolean)(using Context): Block = { + AnonClass(termForwarders.head._2.owner, parents, termForwarders.map(_._2.span).reduceLeft(_ union _), adaptVarargs) { cls => def forwarder(name: TermName, fn: TermSymbol) = { val fwdMeth = fn.copy(cls, name, Synthetic | Method | Final).entered.asTerm for overridden <- fwdMeth.allOverriddenSymbols do @@ -385,6 +408,9 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { * with the specified owner and position. */ def AnonClass(owner: Symbol, parents: List[Type], coord: Coord)(body: ClassSymbol => List[Tree])(using Context): Block = + AnonClass(owner, parents, coord, adaptVarargs = false)(body) + + private def AnonClass(owner: Symbol, parents: List[Type], coord: Coord, adaptVarargs: Boolean)(body: ClassSymbol => List[Tree])(using Context): Block = val parents1 = if (parents.head.classSymbol.is(Trait)) { val head = parents.head.parents.head @@ -393,7 +419,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { else parents val cls = newNormalizedClassSymbol(owner, tpnme.ANON_CLASS, Synthetic | Final, parents1, coord = coord) val constr = newConstructor(cls, Synthetic, Nil, Nil).entered - val cdef = ClassDef(cls, DefDef(constr), body(cls)) + val cdef = ClassDef(cls, DefDef(constr), body(cls), Nil, adaptVarargs) Block(cdef :: Nil, New(cls.typeRef, Nil)) def Import(expr: Tree, selectors: List[untpd.ImportSelector])(using Context): Import = @@ -801,6 +827,14 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { Closure(tree: Tree)(env, meth, tpt) } + // This is a more fault-tolerant copier that does not cause errors when + // function types in applications are undefined. + // This was called `Inliner.InlineCopier` before 3.6.3. + class ConservativeTreeCopier() extends TypedTreeCopier: + override def Apply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): Apply = + if fun.tpe.widen.exists then super.Apply(tree)(fun, args) + else untpd.cpy.Apply(tree)(fun, args).withTypeUnchecked(tree.tpe) + override def skipTransform(tree: Tree)(using Context): Boolean = tree.tpe.isError implicit class TreeOps[ThisTree <: tpd.Tree](private val tree: ThisTree) extends AnyVal { @@ -1140,6 +1174,21 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { tree } + /** Make sure tree has given symbol. This is called when typing or unpickling + * a ValDef or DefDef. It turns out that under very rare circumstances the symbol + * computed for a tree is not correct. The only known test case is i21755.scala. + * Here we have a self type that mentions a supertype as well as a type parameter + * upper-bounded by the current class and it turns out that we compute the symbol + * for a member method (named `root` in this case) in a subclass to be the + * corresponding symbol in the superclass. It is not known what are the precise + * conditions where this happens, but my guess would be that it's connected to the + * recursion in the self type. 
+ */ + def ensureHasSym(sym: Symbol)(using Context): Unit = + if sym.exists && sym != tree.symbol then + typr.println(i"correcting definition symbol from ${tree.symbol.showLocated} to ${sym.showLocated}") + tree.overwriteType(NamedType(sym.owner.thisType, sym.asTerm.name, sym.denot)) + def etaExpandCFT(using Context): Tree = def expand(target: Tree, tp: Type)(using Context): Tree = tp match case defn.ContextFunctionType(argTypes, resType) => @@ -1274,7 +1323,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { !(sym.is(Method) && sym.info.isInstanceOf[MethodOrPoly]) // if is a method it is parameterless } - /** A tree traverser that generates the the same import contexts as original typer for statements. + /** A tree traverser that generates the same import contexts as original typer for statements. * TODO: Should we align TreeMapWithPreciseStatContexts and also keep track of exprOwners? */ abstract class TreeTraverserWithPreciseImportContexts extends TreeTraverser: diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index 64f9fb4df95e..e8e3646bd087 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -119,7 +119,6 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case class PatDef(mods: Modifiers, pats: List[Tree], tpt: Tree, rhs: Tree)(implicit @constructorOnly src: SourceFile) extends DefTree case class ExtMethods(paramss: List[ParamClause], methods: List[Tree])(implicit @constructorOnly src: SourceFile) extends Tree case class ContextBoundTypeTree(tycon: Tree, paramName: TypeName, ownName: TermName)(implicit @constructorOnly src: SourceFile) extends Tree - // `paramName: tycon as ownName`, ownName != EmptyTermName only under x.modularity case class MacroTree(expr: Tree)(implicit @constructorOnly src: SourceFile) extends Tree case class ImportSelector(imported: Ident, renamed: Tree = EmptyTree, bound: Tree = EmptyTree)(implicit @constructorOnly src: SourceFile) extends Tree { @@ -183,7 +182,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { /** An enum to control checking or filtering of patterns in GenFrom trees */ enum GenCheckMode { - case Ignore // neither filter nor check since filtering was done before + case Ignore // neither filter nor check since pattern is trivially irrefutable + case Filtered // neither filter nor check since filtering was done before case Check // check that pattern is irrefutable case CheckAndFilter // both check and filter (transitional period starting with 3.2) case FilterNow // filter out non-matching elements if we are not in 3.2 or later @@ -415,7 +415,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def Quote(body: Tree, tags: List[Tree])(implicit src: SourceFile): Quote = new Quote(body, tags) def Splice(expr: Tree)(implicit src: SourceFile): Splice = new Splice(expr) def QuotePattern(bindings: List[Tree], body: Tree, quotes: Tree)(implicit src: SourceFile): QuotePattern = new QuotePattern(bindings, body, quotes) - def SplicePattern(body: Tree, args: List[Tree])(implicit src: SourceFile): SplicePattern = new SplicePattern(body, args) + def SplicePattern(body: Tree, typeargs: List[Tree], args: List[Tree])(implicit src: SourceFile): SplicePattern = new SplicePattern(body, typeargs, args) def TypeTree()(implicit src: SourceFile): TypeTree = new TypeTree() def InferredTypeTree()(implicit src: SourceFile): TypeTree = new InferredTypeTree() def 
SingletonTypeTree(ref: Tree)(implicit src: SourceFile): SingletonTypeTree = new SingletonTypeTree(ref) @@ -495,7 +495,11 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def InferredTypeTree(tpe: Type)(using Context): TypedSplice = TypedSplice(new InferredTypeTree().withTypeUnchecked(tpe)) - def unitLiteral(implicit src: SourceFile): Literal = Literal(Constant(())).withAttachment(SyntheticUnit, ()) + def unitLiteral(implicit src: SourceFile): Literal = + Literal(Constant(())) + + def syntheticUnitLiteral(implicit src: SourceFile): Literal = + unitLiteral.withAttachment(SyntheticUnit, ()) def ref(tp: NamedType)(using Context): Tree = TypedSplice(tpd.ref(tp)) @@ -517,12 +521,17 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def captureRoot(using Context): Select = Select(scalaDot(nme.caps), nme.CAPTURE_ROOT) - def captureRootIn(using Context): Select = - Select(scalaDot(nme.caps), nme.capIn) - def makeRetaining(parent: Tree, refs: List[Tree], annotName: TypeName)(using Context): Annotated = Annotated(parent, New(scalaAnnotationDot(annotName), List(refs))) + def makeCapsOf(tp: RefTree)(using Context): Tree = + TypeApply(Select(scalaDot(nme.caps), nme.capsOf), tp :: Nil) + + def makeCapsBound()(using Context): Tree = + makeRetaining( + Select(scalaDot(nme.caps), tpnme.CapSet), + Nil, tpnme.retainsCap) + def makeConstructor(tparams: List[TypeDef], vparamss: List[List[ValDef]], rhs: Tree = EmptyTree)(using Context): DefDef = DefDef(nme.CONSTRUCTOR, joinParams(tparams, vparamss), TypeTree(), rhs) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 5c9946f6134a..aad6ca8ddeac 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -14,22 +14,45 @@ import tpd.* import StdNames.nme import config.Feature import collection.mutable +import CCState.* +import reporting.Message private val Captures: Key[CaptureSet] = Key() object ccConfig: - /** If true, allow mappping capture set variables under captureChecking with maps that are neither + /** If true, allow mapping capture set variables under captureChecking with maps that are neither * bijective nor idempotent. We currently do now know how to do this correctly in all * cases, though. */ inline val allowUnsoundMaps = false - /** If true, use `sealed` as encapsulation mechanism instead of the - * previous global retriction that `cap` can't be boxed or unboxed. + /** If true, when computing the memberinfo of a refined type created + * by addCaptureRefinements take the refineInfo directly without intersecting + * with the parent info. */ - def allowUniversalInBoxed(using Context) = - Feature.sourceVersion.isAtLeast(SourceVersion.`3.3`) + inline val optimizedRefinements = false + + /** If enabled, use a special path in recheckClosure for closures + * that are eta expansions. This can improve some error messages but + * currently leads to unsoundess for handling reach capabilities. + * TODO: The unsoundness needs followin up. + */ + inline val handleEtaExpansionsSpecially = false + + /** If true, use existential capture set variables */ + def useExistentials(using Context) = + Feature.sourceVersion.stable.isAtLeast(SourceVersion.`3.5`) + + /** If true, use "sealed" as encapsulation mechanism, meaning that we + * check that type variable instantiations don't have `cap` in any of + * their capture sets. 
This is an alternative of the original restriction + * that `cap` can't be boxed or unboxed. It is used in 3.3 and 3.4 but + * dropped again in 3.5. + */ + def useSealed(using Context) = + Feature.sourceVersion.stable == SourceVersion.`3.3` + || Feature.sourceVersion.stable == SourceVersion.`3.4` end ccConfig @@ -54,7 +77,7 @@ def depFun(args: List[Type], resultType: Type, isContextual: Boolean, paramNames mt.toFunctionType(alwaysDependent = true) /** An exception thrown if a @retains argument is not syntactically a CaptureRef */ -class IllegalCaptureRef(tpe: Type) extends Exception(tpe.toString) +class IllegalCaptureRef(tpe: Type)(using Context) extends Exception(tpe.show) /** Capture checking state, which is known to other capture checking components */ class CCState: @@ -64,11 +87,52 @@ class CCState: */ var levelError: Option[CaptureSet.CompareResult.LevelError] = None + /** Warnings relating to upper approximations of capture sets with + * existentially bound variables. + */ + val approxWarnings: mutable.ListBuffer[Message] = mutable.ListBuffer() + + private var curLevel: Level = outermostLevel + private val symLevel: mutable.Map[Symbol, Int] = mutable.Map() + +object CCState: + + opaque type Level = Int + + val undefinedLevel: Level = -1 + + val outermostLevel: Level = 0 + + /** The level of the current environment. Levels start at 0 and increase for + * each nested function or class. -1 means the level is undefined. + */ + def currentLevel(using Context): Level = ccState.curLevel + + inline def inNestedLevel[T](inline op: T)(using Context): T = + val ccs = ccState + val saved = ccs.curLevel + ccs.curLevel = ccs.curLevel.nextInner + try op finally ccs.curLevel = saved + + inline def inNestedLevelUnless[T](inline p: Boolean)(inline op: T)(using Context): T = + val ccs = ccState + val saved = ccs.curLevel + if !p then ccs.curLevel = ccs.curLevel.nextInner + try op finally ccs.curLevel = saved + + extension (x: Level) + def isDefined: Boolean = x >= 0 + def <= (y: Level) = (x: Int) <= y + def nextInner: Level = if isDefined then x + 1 else x + + extension (sym: Symbol)(using Context) + def ccLevel: Level = ccState.symLevel.getOrElse(sym, -1) + def recordLevel() = ccState.symLevel(sym) = currentLevel end CCState /** The currently valid CCState */ def ccState(using Context) = - Phases.checkCapturesPhase.asInstanceOf[CheckCaptures].ccState + Phases.checkCapturesPhase.asInstanceOf[CheckCaptures].ccState1 class NoCommonRoot(rs: Symbol*)(using Context) extends Exception( i"No common capture root nested in ${rs.mkString(" and ")}" @@ -76,13 +140,21 @@ class NoCommonRoot(rs: Symbol*)(using Context) extends Exception( extension (tree: Tree) - /** Map tree with CaptureRef type to its type, throw IllegalCaptureRef otherwise */ - def toCaptureRef(using Context): CaptureRef = tree match + /** Map tree with CaptureRef type to its type, + * map CapSet^{refs} to the `refs` references, + * throw IllegalCaptureRef otherwise + */ + def toCaptureRefs(using Context): List[CaptureRef] = tree match case ReachCapabilityApply(arg) => - arg.toCaptureRef.reach - case _ => tree.tpe match + arg.toCaptureRefs.map(_.reach) + case CapsOfApply(arg) => + arg.toCaptureRefs + case _ => tree.tpe.dealiasKeepAnnots match case ref: CaptureRef if ref.isTrackableRef => - ref + ref :: Nil + case AnnotatedType(parent, ann) + if ann.symbol.isRetains && parent.derivesFrom(defn.Caps_CapSet) => + ann.tree.toCaptureSet.elems.toList case tpe => throw IllegalCaptureRef(tpe) // if this was compiled from cc syntax, problem should have 
been reported at Typer @@ -93,8 +165,8 @@ extension (tree: Tree) tree.getAttachment(Captures) match case Some(refs) => refs case None => - val refs = CaptureSet(tree.retainedElems.map(_.toCaptureRef)*) - .showing(i"toCaptureSet $tree --> $result", capt) + val refs = CaptureSet(tree.retainedElems.flatMap(_.toCaptureRefs)*) + //.showing(i"toCaptureSet $tree --> $result", capt) tree.putAttachment(Captures, refs) refs @@ -109,6 +181,76 @@ extension (tree: Tree) extension (tp: Type) + /** Is this type a CaptureRef that can be tracked? + * This is true for + * - all ThisTypes and all TermParamRef, + * - stable TermRefs with NoPrefix or ThisTypes as prefixes, + * - the root capability `caps.cap` + * - abstract or parameter TypeRefs that derive from caps.CapSet + * - annotated types that represent reach or maybe capabilities + */ + final def isTrackableRef(using Context): Boolean = tp match + case _: (ThisType | TermParamRef) => + true + case tp: TermRef => + ((tp.prefix eq NoPrefix) + || tp.symbol.isField && !tp.symbol.isStatic && tp.prefix.isTrackableRef + || tp.isRootCapability + ) && !tp.symbol.isOneOf(UnstableValueFlags) + case tp: TypeRef => + tp.symbol.isAbstractOrParamType && tp.derivesFrom(defn.Caps_CapSet) + case tp: TypeParamRef => + tp.derivesFrom(defn.Caps_CapSet) + case AnnotatedType(parent, annot) => + (annot.symbol == defn.ReachCapabilityAnnot + || annot.symbol == defn.MaybeCapabilityAnnot + ) && parent.isTrackableRef + case _ => + false + + /** The capture set of a type. This is: + * - For trackable capture references: The singleton capture set consisting of + * just the reference, provided the underlying capture set of their info is not empty. + * - For other capture references: The capture set of their info + * - For all other types: The result of CaptureSet.ofType + */ + final def captureSet(using Context): CaptureSet = tp match + case tp: CaptureRef if tp.isTrackableRef => + val cs = tp.captureSetOfInfo + if cs.isAlwaysEmpty then cs else tp.singletonCaptureSet + case tp: SingletonCaptureRef => tp.captureSetOfInfo + case _ => CaptureSet.ofType(tp, followResult = false) + + /** The deep capture set of a type. + * For singleton capabilities `x` and reach capabilities `x*`, this is `{x*}`, provided + * the underlying capture set resulting from traversing the type is non-empty. + * For other types this is the union of all covariant capture sets embedded + * in the type, as computed by `CaptureSet.ofTypeDeeply`. + */ + def deepCaptureSet(using Context): CaptureSet = + val dcs = CaptureSet.ofTypeDeeply(tp) + if dcs.isAlwaysEmpty then dcs + else tp match + case tp @ ReachCapability(_) => tp.singletonCaptureSet + case tp: SingletonCaptureRef => tp.reach.singletonCaptureSet + case _ => dcs + + /** A type capturing `ref` */ + def capturing(ref: CaptureRef)(using Context): Type = + if tp.captureSet.accountsFor(ref) then tp + else CapturingType(tp, ref.singletonCaptureSet) + + /** A type capturing the capture set `cs`. If this type is already a capturing type + * the two capture sets are combined. 
+ */ + def capturing(cs: CaptureSet)(using Context): Type = + if (cs.isAlwaysEmpty || cs.isConst && cs.subCaptures(tp.captureSet, frozen = true).isOK) + && !cs.keepAlways + then tp + else tp match + case CapturingType(parent, cs1) => parent.capturing(cs1 ++ cs) + case _ => CapturingType(tp, cs) + /** @pre `tp` is a CapturingType */ def derivedCapturingType(parent: Type, refs: CaptureSet)(using Context): Type = tp match case tp @ CapturingType(p, r) => @@ -122,7 +264,9 @@ extension (tp: Type) def boxed(using Context): Type = tp.dealias match case tp @ CapturingType(parent, refs) if !tp.isBoxed && !refs.isAlwaysEmpty => tp.annot match - case ann: CaptureAnnotation => AnnotatedType(parent, ann.boxedAnnot) + case ann: CaptureAnnotation => + assert(!parent.derivesFrom(defn.Caps_CapSet)) + AnnotatedType(parent, ann.boxedAnnot) case ann => tp case tp: RealTypeBounds => tp.derivedTypeBounds(tp.lo.boxed, tp.hi.boxed) @@ -158,7 +302,23 @@ extension (tp: Type) getBoxed(tp) /** Is the boxedCaptureSet of this type nonempty? */ - def isBoxedCapturing(using Context) = !tp.boxedCaptureSet.isAlwaysEmpty + def isBoxedCapturing(using Context): Boolean = + tp match + case tp @ CapturingType(parent, refs) => + tp.isBoxed && !refs.isAlwaysEmpty || parent.isBoxedCapturing + case tp: TypeRef if tp.symbol.isAbstractOrParamType => false + case tp: TypeProxy => tp.superType.isBoxedCapturing + case tp: AndType => tp.tp1.isBoxedCapturing && tp.tp2.isBoxedCapturing + case tp: OrType => tp.tp1.isBoxedCapturing || tp.tp2.isBoxedCapturing + case _ => false + + /** Is the box status of `tp` and `tp2` compatible? I.ee they are + * box boxed, or both unboxed, or one of them has an empty capture set. + */ + def isBoxCompatibleWith(tp2: Type)(using Context): Boolean = + isBoxedCapturing == tp2.isBoxedCapturing + || tp.captureSet.isAlwaysEmpty + || tp2.captureSet.isAlwaysEmpty /** If this type is a capturing type, the version with boxed statues as given by `boxed`. * If it is a TermRef of a capturing type, and the box status flips, widen to a capturing @@ -184,16 +344,14 @@ extension (tp: Type) case _ => tp - /** Is type known to be always pure by its class structure, - * so that adding a capture set to it would not make sense? + /** Is type known to be always pure by its class structure? + * In that case, adding a capture set to it would not make sense. */ def isAlwaysPure(using Context): Boolean = tp.dealias match case tp: (TypeRef | AppliedType) => val sym = tp.typeSymbol if sym.isClass then sym.isPureClass else tp.superType.isAlwaysPure - case CapturingType(parent, refs) => - parent.isAlwaysPure || refs.isAlwaysEmpty case tp: TypeProxy => tp.superType.isAlwaysPure case tp: AndType => @@ -203,6 +361,23 @@ extension (tp: Type) case _ => false + /** Tests whether the type derives from `caps.Capability`, which means + * references of this type are maximal capabilities. 
+ */ + def derivesFromCapability(using Context): Boolean = tp.dealias match + case tp: (TypeRef | AppliedType) => + val sym = tp.typeSymbol + if sym.isClass then sym.derivesFrom(defn.Caps_Capability) + else tp.superType.derivesFromCapability + case tp: (TypeProxy & ValueType) => + tp.superType.derivesFromCapability + case tp: AndType => + tp.tp1.derivesFromCapability || tp.tp2.derivesFromCapability + case tp: OrType => + tp.tp1.derivesFromCapability && tp.tp2.derivesFromCapability + case _ => + false + /** Drop @retains annotations everywhere */ def dropAllRetains(using Context): Type = // TODO we should drop retains from inferred types before unpickling val tm = new TypeMap: @@ -290,6 +465,7 @@ extension (tp: Type) ok = false case _ => traverseChildren(t) + end CheckContraCaps object narrowCaps extends TypeMap: /** Has the variance been flipped at this point? */ @@ -302,16 +478,25 @@ extension (tp: Type) t.dealias match case t1 @ CapturingType(p, cs) if cs.isUniversal && !isFlipped => t1.derivedCapturingType(apply(p), ref.reach.singletonCaptureSet) + case t1 @ FunctionOrMethod(args, res @ Existential(_, _)) + if args.forall(_.isAlwaysPure) => + // Also map existentials in results to reach capabilities if all + // preceding arguments are known to be always pure + apply(t1.derivedFunctionOrMethod(args, Existential.toCap(res))) + case Existential(_, _) => + t case _ => t match case t @ CapturingType(p, cs) => t.derivedCapturingType(apply(p), cs) // don't map capture set variables case t => mapOver(t) finally isFlipped = saved + end narrowCaps + ref match case ref: CaptureRef if ref.isTrackableRef => val checker = new CheckContraCaps - checker.traverse(tp) + if !ccConfig.useExistentials then checker.traverse(tp) if checker.ok then val tp1 = narrowCaps(tp) if tp1 ne tp then capt.println(i"narrow $tp of $ref to $tp1") @@ -322,6 +507,12 @@ extension (tp: Type) case _ => tp + def level(using Context): Level = + tp match + case tp: TermRef => tp.symbol.ccLevel + case tp: ThisType => tp.cls.ccLevel.nextInner + case _ => undefinedLevel + extension (cls: ClassSymbol) def pureBaseClass(using Context): Option[Symbol] = @@ -337,9 +528,11 @@ extension (cls: ClassSymbol) // and err on the side of impure. && selfType.exists && selfType.captureSet.isAlwaysEmpty - def baseClassHasExplicitSelfType(using Context): Boolean = + def baseClassHasExplicitNonUniversalSelfType(using Context): Boolean = cls.baseClasses.exists: bc => - bc.is(CaptureChecked) && bc.givenSelfType.exists + bc.is(CaptureChecked) + && bc.givenSelfType.exists + && !bc.givenSelfType.captureSet.isUniversal def matchesExplicitRefsInBaseClass(refs: CaptureSet)(using Context): Boolean = cls.baseClasses.tail.exists: bc => @@ -397,43 +590,21 @@ extension (sym: Symbol) && !sym.allowsRootCapture && sym != defn.Caps_unsafeBox && sym != defn.Caps_unsafeUnbox - - /** Does this symbol define a level where we do not want to let local variables - * escape into outer capture sets? - */ - def isLevelOwner(using Context): Boolean = - sym.isClass - || sym.is(Method, butNot = Accessor) - - /** The owner of the current level. Qualifying owners are - * - methods other than constructors and anonymous functions - * - anonymous functions, provided they either define a local - * root of type caps.Cap, or they are the rhs of a val definition. 
- * - classes, if they are not staticOwners - * - _root_ - */ - def levelOwner(using Context): Symbol = - def recur(sym: Symbol): Symbol = - if !sym.exists || sym.isRoot || sym.isStaticOwner then defn.RootClass - else if sym.isLevelOwner then sym - else recur(sym.owner) - recur(sym) - - /** The outermost symbol owned by both `sym` and `other`. if none exists - * since the owning scopes of `sym` and `other` are not nested, invoke - * `onConflict` to return a symbol. - */ - def maxNested(other: Symbol, onConflict: (Symbol, Symbol) => Context ?=> Symbol)(using Context): Symbol = - if !sym.exists || other.isContainedIn(sym) then other - else if !other.exists || sym.isContainedIn(other) then sym - else onConflict(sym, other) - - /** The innermost symbol owning both `sym` and `other`. - */ - def minNested(other: Symbol)(using Context): Symbol = - if !other.exists || other.isContainedIn(sym) then sym - else if !sym.exists || sym.isContainedIn(other) then other - else sym.owner.minNested(other.owner) + && !defn.isPolymorphicAfterErasure(sym) + && !defn.isTypeTestOrCast(sym) + + def isRefiningParamAccessor(using Context): Boolean = + sym.is(ParamAccessor) + && { + val param = sym.owner.primaryConstructor.paramSymss + .nestedFind(_.name == sym.name) + .getOrElse(NoSymbol) + !param.hasAnnotation(defn.ConstructorOnlyAnnot) + && !param.hasAnnotation(defn.UntrackedCapturesAnnot) + } + + def hasTrackedParts(using Context): Boolean = + !CaptureSet.ofTypeDeeply(sym.info).isAlwaysEmpty extension (tp: AnnotatedType) /** Is this a boxed capturing type? */ @@ -457,6 +628,14 @@ object ReachCapabilityApply: case Apply(reach, arg :: Nil) if reach.symbol == defn.Caps_reachCapability => Some(arg) case _ => None +/** An extractor for `caps.capsOf[X]`, which is used to express a generic capture set + * as a tree in a @retains annotation. + */ +object CapsOfApply: + def unapply(tree: TypeApply)(using Context): Option[Tree] = tree match + case TypeApply(capsOf, arg :: Nil) if capsOf.symbol == defn.Caps_capsOf => Some(arg) + case _ => None + class AnnotatedCapability(annot: Context ?=> ClassSymbol): def apply(tp: Type)(using Context) = AnnotatedType(tp, Annotation(annot, util.Spans.NoSpan)) @@ -474,4 +653,83 @@ object ReachCapability extends AnnotatedCapability(defn.ReachCapabilityAnnot) */ object MaybeCapability extends AnnotatedCapability(defn.MaybeCapabilityAnnot) +/** Offers utility method to be used for type maps that follow aliases */ +trait ConservativeFollowAliasMap(using Context) extends TypeMap: + + /** If `mapped` is a type alias, apply the map to the alias, while keeping + * annotations. If the result is different, return it, otherwise return `mapped`. + * Furthermore, if `original` is a LazyRef or TypeVar and the mapped result is + * the same as the underlying type, keep `original`. This avoids spurious differences + * which would lead to spurious dealiasing in the result + */ + protected def applyToAlias(original: Type, mapped: Type) = + val mapped1 = mapped match + case t: (TypeRef | AppliedType) => + val t1 = t.dealiasKeepAnnots + if t1 eq t then t + else + // If we see a type alias, map the alias type and keep it if it's different + val t2 = apply(t1) + if t2 ne t1 then t2 else t + case _ => + mapped + original match + case original: (LazyRef | TypeVar) if mapped1 eq original.underlying => + original + case _ => + mapped1 +end ConservativeFollowAliasMap + +/** An extractor for all kinds of function types as well as method and poly types. + * It includes aliases of function types such as `=>`. 
TODO: Can we do without? + * @return 1st half: The argument types or empty if this is a type function + * 2nd half: The result type + */ +object FunctionOrMethod: + def unapply(tp: Type)(using Context): Option[(List[Type], Type)] = tp match + case defn.FunctionOf(args, res, isContextual) => Some((args, res)) + case mt: MethodType => Some((mt.paramInfos, mt.resType)) + case mt: PolyType => Some((Nil, mt.resType)) + case defn.RefinedFunctionOf(rinfo) => unapply(rinfo) + case _ => None +/** If `tp` is a function or method, a type of the same kind with the given + * argument and result types. + */ +extension (self: Type) + def derivedFunctionOrMethod(argTypes: List[Type], resType: Type)(using Context): Type = self match + case self @ AppliedType(tycon, args) if defn.isNonRefinedFunction(self) => + val args1 = argTypes :+ resType + if args.corresponds(args1)(_ eq _) then self + else self.derivedAppliedType(tycon, args1) + case self @ defn.RefinedFunctionOf(rinfo) => + val rinfo1 = rinfo.derivedFunctionOrMethod(argTypes, resType) + if rinfo1 eq rinfo then self + else if rinfo1.isInstanceOf[PolyType] then self.derivedRefinedType(refinedInfo = rinfo1) + else rinfo1.toFunctionType(alwaysDependent = true) + case self: MethodType => + self.derivedLambdaType(paramInfos = argTypes, resType = resType) + case self: PolyType => + assert(argTypes.isEmpty) + self.derivedLambdaType(resType = resType) + case _ => + self + +/** An extractor for a contains argument */ +object ContainsImpl: + def unapply(tree: TypeApply)(using Context): Option[(Tree, Tree)] = + tree.fun.tpe.widen match + case fntpe: PolyType if tree.fun.symbol == defn.Caps_containsImpl => + tree.args match + case csArg :: refArg :: Nil => Some((csArg, refArg)) + case _ => None + case _ => None + +/** An extractor for a contains parameter */ +object ContainsParam: + def unapply(sym: Symbol)(using Context): Option[(TypeRef, CaptureRef)] = + sym.info.dealias match + case AppliedType(tycon, (cs: TypeRef) :: (ref: CaptureRef) :: Nil) + if tycon.typeSymbol == defn.Caps_ContainsTrait + && cs.typeSymbol.isAbstractOrParamType => Some((cs, ref)) + case _ => None diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala b/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala new file mode 100644 index 000000000000..590beda42903 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala @@ -0,0 +1,153 @@ +package dotty.tools +package dotc +package cc + +import core.* +import Types.*, Symbols.*, Contexts.*, Decorators.* +import util.{SimpleIdentitySet, Property} +import typer.ErrorReporting.Addenda +import TypeComparer.subsumesExistentially +import util.common.alwaysTrue +import scala.collection.mutable +import CCState.* +import Periods.NoRunId +import compiletime.uninitialized +import StdNames.nme + +/** A trait for references in CaptureSets. These can be NamedTypes, ThisTypes or ParamRefs, + * as well as two kinds of AnnotatedTypes representing reach and maybe capabilities. + */ +trait CaptureRef extends TypeProxy, ValueType: + private var myCaptureSet: CaptureSet | Null = uninitialized + private var myCaptureSetRunId: Int = NoRunId + private var mySingletonCaptureSet: CaptureSet.Const | Null = null + + /** Is the reference tracked? This is true if it can be tracked and the capture + * set of the underlying type is not always empty. + */ + final def isTracked(using Context): Boolean = + this.isTrackableRef && (isMaxCapability || !captureSetOfInfo.isAlwaysEmpty) + + /** Is this a reach reference of the form `x*`? 
*/ + final def isReach(using Context): Boolean = this match + case AnnotatedType(_, annot) => annot.symbol == defn.ReachCapabilityAnnot + case _ => false + + /** Is this a maybe reference of the form `x?`? */ + final def isMaybe(using Context): Boolean = this match + case AnnotatedType(_, annot) => annot.symbol == defn.MaybeCapabilityAnnot + case _ => false + + final def stripReach(using Context): CaptureRef = + if isReach then + val AnnotatedType(parent: CaptureRef, _) = this: @unchecked + parent + else this + + final def stripMaybe(using Context): CaptureRef = + if isMaybe then + val AnnotatedType(parent: CaptureRef, _) = this: @unchecked + parent + else this + + /** Is this reference the generic root capability `cap`? */ + final def isRootCapability(using Context): Boolean = this match + case tp: TermRef => tp.name == nme.CAPTURE_ROOT && tp.symbol == defn.captureRoot + case _ => false + + /** Is this reference a capability that does not derive from another capability? */ + final def isMaxCapability(using Context): Boolean = this match + case tp: TermRef => tp.isRootCapability || tp.info.derivesFrom(defn.Caps_Exists) + case tp: TermParamRef => tp.underlying.derivesFrom(defn.Caps_Exists) + case _ => false + + // With the support of paths, we don't need to normalize the `TermRef`s anymore. + // /** Normalize reference so that it can be compared with `eq` for equality */ + // final def normalizedRef(using Context): CaptureRef = this match + // case tp @ AnnotatedType(parent: CaptureRef, annot) if tp.isTrackableRef => + // tp.derivedAnnotatedType(parent.normalizedRef, annot) + // case tp: TermRef if tp.isTrackableRef => + // tp.symbol.termRef + // case _ => this + + /** The capture set consisting of exactly this reference */ + final def singletonCaptureSet(using Context): CaptureSet.Const = + if mySingletonCaptureSet == null then + mySingletonCaptureSet = CaptureSet(this) + mySingletonCaptureSet.uncheckedNN + + /** The capture set of the type underlying this reference */ + final def captureSetOfInfo(using Context): CaptureSet = + if ctx.runId == myCaptureSetRunId then myCaptureSet.nn + else if myCaptureSet.asInstanceOf[AnyRef] eq CaptureSet.Pending then CaptureSet.empty + else + myCaptureSet = CaptureSet.Pending + val computed = CaptureSet.ofInfo(this) + if !isCaptureChecking || underlying.isProvisional then + myCaptureSet = null + else + myCaptureSet = computed + myCaptureSetRunId = ctx.runId + computed + + final def invalidateCaches() = + myCaptureSetRunId = NoRunId + + /** x subsumes x + * this subsumes this.f + * x subsumes y ==> x* subsumes y, x subsumes y? + * x subsumes y ==> x* subsumes y*, x? subsumes y?
+ * x: x1.type /\ x1 subsumes y ==> x subsumes y + * TODO: Document path cases + */ + final def subsumes(y: CaptureRef)(using Context): Boolean = + + def subsumingRefs(x: Type, y: Type): Boolean = x match + case x: CaptureRef => y match + case y: CaptureRef => x.subsumes(y) + case _ => false + case _ => false + + def viaInfo(info: Type)(test: Type => Boolean): Boolean = info.match + case info: SingletonCaptureRef => test(info) + case info: AndType => viaInfo(info.tp1)(test) || viaInfo(info.tp2)(test) + case info: OrType => viaInfo(info.tp1)(test) && viaInfo(info.tp2)(test) + case _ => false + + (this eq y) + || this.isRootCapability + || y.match + case y: TermRef => + y.prefix.match + case ypre: CaptureRef => + this.subsumes(ypre) + || this.match + case x @ TermRef(xpre: CaptureRef, _) if x.symbol == y.symbol => + // To show `{x.f} <:< {y.f}`, it is important to prove `x` and `y` + // are equivalent, which means `x =:= y` in terms of subtyping, + // not just `{x} =:= {y}` in terms of subcapturing. + // It is possible to construct two singleton types `x` and `y`, + // which subsume each other, but are not equal references. + // See `tests/neg-custom-args/captures/path-prefix.scala` for example. + withMode(Mode.IgnoreCaptures) {TypeComparer.isSameRef(xpre, ypre)} + case _ => + false + case _ => false + || viaInfo(y.info)(subsumingRefs(this, _)) + case MaybeCapability(y1) => this.stripMaybe.subsumes(y1) + case _ => false + || this.match + case ReachCapability(x1) => x1.subsumes(y.stripReach) + case x: TermRef => viaInfo(x.info)(subsumingRefs(_, y)) + case x: TermParamRef => subsumesExistentially(x, y) + case x: TypeRef => assumedContainsOf(x).contains(y) + case _ => false + end subsumes + + def assumedContainsOf(x: TypeRef)(using Context): SimpleIdentitySet[CaptureRef] = + CaptureSet.assumedContains.getOrElse(x, SimpleIdentitySet.empty) + +end CaptureRef + +trait SingletonCaptureRef extends SingletonType, CaptureRef + diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index d1a5a07f6a0f..81b4287961ba 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -14,8 +14,10 @@ import printing.{Showable, Printer} import printing.Texts.* import util.{SimpleIdentitySet, Property} import typer.ErrorReporting.Addenda +import TypeComparer.subsumesExistentially import util.common.alwaysTrue -import scala.collection.mutable +import scala.collection.{mutable, immutable} +import CCState.* /** A class for capture sets. Capture sets can be constants or variables. * Capture sets support inclusion constraints <:< where <:< is subcapturing. @@ -55,10 +57,14 @@ sealed abstract class CaptureSet extends Showable: */ def isAlwaysEmpty: Boolean - /** An optional level limit, or NoSymbol if none exists. All elements of the set - * must be in scopes visible from the level limit. + /** An optional level limit, or undefinedLevel if none exists. All elements of the set + * must be at levels equal to or smaller than the level of the set, if it is defined. */ - def levelLimit: Symbol + def level: Level + + /** An optional owner, or NoSymbol if none exists. Used for diagnostics + */ + def owner: Symbol /** Is this capture set definitely non-empty?
*/ final def isNotEmpty: Boolean = !elems.isEmpty @@ -79,6 +85,11 @@ sealed abstract class CaptureSet extends Showable: final def isUniversal(using Context) = elems.exists(_.isRootCapability) + final def isUnboxable(using Context) = + elems.exists(elem => elem.isRootCapability || Existential.isExistentialVar(elem)) + + final def keepAlways: Boolean = this.isInstanceOf[EmptyWithProvenance] + /** Try to include an element in this capture set. * @param elem The element to be added * @param origin The set that originated the request, or `empty` if the request came from outside. @@ -115,7 +126,7 @@ sealed abstract class CaptureSet extends Showable: * capture set. */ protected final def addNewElem(elem: CaptureRef)(using Context, VarState): CompareResult = - if elem.isRootCapability || summon[VarState] == FrozenState then + if elem.isMaxCapability || summon[VarState] == FrozenState then addThisElem(elem) else addThisElem(elem).orElse: @@ -143,35 +154,17 @@ sealed abstract class CaptureSet extends Showable: cs.addDependent(this)(using ctx, UnrecordedState) this - /** x subsumes x - * this subsumes this.f - * x subsumes y ==> x* subsumes y, x subsumes y? - * x subsumes y ==> x* subsumes y*, x? subsumes y? - */ - extension (x: CaptureRef) - private def subsumes(y: CaptureRef)(using Context): Boolean = - (x eq y) - || x.isRootCapability - || y.match - case y: TermRef => y.prefix eq x - case MaybeCapability(y1) => x.stripMaybe.subsumes(y1) - case _ => false - || x.match - case ReachCapability(x1) => x1.subsumes(y.stripReach) - case _ => false - /** {x} <:< this where <:< is subcapturing, but treating all variables * as frozen. */ def accountsFor(x: CaptureRef)(using Context): Boolean = - if comparer.isInstanceOf[ExplainingTypeComparer] then // !!! DEBUG - reporting.trace.force(i"$this accountsFor $x, ${x.captureSetOfInfo}?", show = true): - elems.exists(_.subsumes(x)) - || !x.isRootCapability && x.captureSetOfInfo.subCaptures(this, frozen = true).isOK - else - reporting.trace(i"$this accountsFor $x, ${x.captureSetOfInfo}?", show = true): - elems.exists(_.subsumes(x)) - || !x.isRootCapability && x.captureSetOfInfo.subCaptures(this, frozen = true).isOK + def debugInfo(using Context) = i"$this accountsFor $x, which has capture set ${x.captureSetOfInfo}" + def test(using Context) = reporting.trace(debugInfo): + elems.exists(_.subsumes(x)) + || !x.isMaxCapability && x.captureSetOfInfo.subCaptures(this, frozen = true).isOK + comparer match + case comparer: ExplainingTypeComparer => comparer.traceIndented(debugInfo)(test) + case _ => test /** A more optimistic version of accountsFor, which does not take variable supersets * of the `x` reference into account. 
A set might account for `x` if it accounts @@ -183,7 +176,7 @@ sealed abstract class CaptureSet extends Showable: def mightAccountFor(x: CaptureRef)(using Context): Boolean = reporting.trace(i"$this mightAccountFor $x, ${x.captureSetOfInfo}?", show = true) { elems.exists(_.subsumes(x)) - || !x.isRootCapability + || !x.isMaxCapability && { val elems = x.captureSetOfInfo.elems !elems.isEmpty && elems.forall(mightAccountFor) @@ -227,13 +220,11 @@ sealed abstract class CaptureSet extends Showable: * `this` and `that` */ def ++ (that: CaptureSet)(using Context): CaptureSet = - if this.subCaptures(that, frozen = true).isOK then that + if this.subCaptures(that, frozen = true).isOK then + if that.isAlwaysEmpty && this.keepAlways then this else that else if that.subCaptures(this, frozen = true).isOK then this else if this.isConst && that.isConst then Const(this.elems ++ that.elems) - else Var( - this.levelLimit.maxNested(that.levelLimit, onConflict = (sym1, sym2) => sym1), - this.elems ++ that.elems) - .addAsDependentTo(this).addAsDependentTo(that) + else Union(this, that) /** The smallest superset (via <:<) of this capture set that also contains `ref`. */ @@ -246,7 +237,7 @@ sealed abstract class CaptureSet extends Showable: if this.subCaptures(that, frozen = true).isOK then this else if that.subCaptures(this, frozen = true).isOK then that else if this.isConst && that.isConst then Const(elemIntersection(this, that)) - else Intersected(this, that) + else Intersection(this, that) /** The largest subset (via <:<) of this capture set that does not account for * any of the elements in the constant capture set `that` @@ -305,7 +296,7 @@ sealed abstract class CaptureSet extends Showable: case _ => val mapped = mapRefs(elems, tm, tm.variance) if isConst then - if mapped.isConst && mapped.elems == elems then this + if mapped.isConst && mapped.elems == elems && !mapped.keepAlways then this else mapped else Mapped(asVar, tm, tm.variance, mapped) @@ -317,7 +308,7 @@ sealed abstract class CaptureSet extends Showable: /** Invoke handler if this set has (or later aquires) the root capability `cap` */ def disallowRootCapability(handler: () => Context ?=> Unit)(using Context): this.type = - if isUniversal then handler() + if isUnboxable then handler() this /** Invoke handler on the elements to ensure wellformedness of the capture set. @@ -383,7 +374,7 @@ object CaptureSet: def apply(elems: CaptureRef*)(using Context): CaptureSet.Const = if elems.isEmpty then empty - else Const(SimpleIdentitySet(elems.map(_.normalizedRef)*)) + else Const(SimpleIdentitySet(elems.map(_.ensuring(_.isTrackableRef))*)) def apply(elems: Refs)(using Context): CaptureSet.Const = if elems.isEmpty then empty else Const(elems) @@ -402,11 +393,19 @@ object CaptureSet: def withDescription(description: String): Const = Const(elems, description) - def levelLimit = NoSymbol + def level = undefinedLevel + + def owner = NoSymbol override def toString = elems.toString end Const + case class EmptyWithProvenance(ref: CaptureRef, mapped: Type) extends Const(SimpleIdentitySet.empty): + override def optionalInfo(using Context): String = + if ctx.settings.YccDebug.value + then i" under-approximating the result of mapping $ref to $mapped" + else "" + /** A special capture set that gets added to the types of symbols that were not * themselves capture checked, in order to admit arbitrary corresponding capture * sets in subcapturing comparisons. 
Similar to platform types for explicit @@ -422,7 +421,7 @@ object CaptureSet: end Fluid /** The subclass of captureset variables with given initial elements */ - class Var(directOwner: Symbol, initialElems: Refs = emptySet)(using @constructorOnly ictx: Context) extends CaptureSet: + class Var(override val owner: Symbol = NoSymbol, initialElems: Refs = emptySet, val level: Level = undefinedLevel, underBox: Boolean = false)(using @constructorOnly ictx: Context) extends CaptureSet: /** A unique identification number for diagnostics */ val id = @@ -431,9 +430,6 @@ object CaptureSet: //assert(id != 40) - override val levelLimit = - if directOwner.exists then directOwner.levelOwner else NoSymbol - /** A variable is solved if it is aproximated to a from-then-on constant set. */ private var isSolved: Boolean = false @@ -446,7 +442,7 @@ object CaptureSet: var deps: Deps = emptySet def isConst = isSolved - def isAlwaysEmpty = false + def isAlwaysEmpty = isSolved && elems.isEmpty def isMaybeSet = false // overridden in BiMapped @@ -485,12 +481,16 @@ object CaptureSet: deps = state.deps(this) final def addThisElem(elem: CaptureRef)(using Context, VarState): CompareResult = - if isConst || !recordElemsState() then - CompareResult.Fail(this :: Nil) // fail if variable is solved or given VarState is frozen + if isConst // Fail if variable is solved, + || !recordElemsState() // or given VarState is frozen, + || Existential.isBadExistential(elem) // or `elem` is an out-of-scope existential, + then + CompareResult.Fail(this :: Nil) else if !levelOK(elem) then - CompareResult.LevelError(this, elem) + CompareResult.LevelError(this, elem) // or `elem` is not visible at the level of the set. else //if id == 34 then assert(!elem.isUniversalRootCapability) + assert(elem.isTrackableRef, elem) elems += elem if elem.isRootCapability then rootAddedHandler() @@ -504,14 +504,17 @@ object CaptureSet: res.addToTrace(this) private def levelOK(elem: CaptureRef)(using Context): Boolean = - if elem.isRootCapability then !noUniversal + if elem.isRootCapability || Existential.isExistentialVar(elem) then + !noUniversal else elem match - case elem: TermRef if levelLimit.exists => - var sym = elem.symbol - if sym.isLevelOwner then sym = sym.owner - levelLimit.isContainedIn(sym.levelOwner) - case elem: ThisType if levelLimit.exists => - levelLimit.isContainedIn(elem.cls.levelOwner) + case elem: TermRef if level.isDefined => + elem.prefix match + case prefix: CaptureRef => + levelOK(prefix) + case _ => + elem.symbol.ccLevel <= level + case elem: ThisType if level.isDefined => + elem.cls.ccLevel.nextInner <= level case ReachCapability(elem1) => levelOK(elem1) case MaybeCapability(elem1) => @@ -550,7 +553,14 @@ object CaptureSet: universal else computingApprox = true - try computeApprox(origin).ensuring(_.isConst) + try + val approx = computeApprox(origin).ensuring(_.isConst) + if approx.elems.exists(Existential.isExistentialVar(_)) then + ccState.approxWarnings += + em"""Capture set variable $this gets upper-approximated + |to existential variable from $approx, using {cap} instead.""" + universal + else approx finally computingApprox = false /** The intersection of all upper approximations of dependent sets */ @@ -587,10 +597,12 @@ object CaptureSet: override def optionalInfo(using Context): String = for vars <- ctx.property(ShownVars) do vars += this val debugInfo = - if !isConst && ctx.settings.YccDebug.value then ids else "" + if !ctx.settings.YccDebug.value then "" + else if isConst then ids ++ "(solved)" + else ids val 
limitInfo = - if ctx.settings.YprintLevel.value && levelLimit.exists - then i"" + if ctx.settings.YprintLevel.value && level.isDefined + then i"" else "" debugInfo ++ limitInfo @@ -613,7 +625,7 @@ object CaptureSet: * capture set, since they represent only what is the result of the constructor. * Test case: Without that tweak, logger.scala would not compile. */ - class RefiningVar(directOwner: Symbol)(using Context) extends Var(directOwner): + class RefiningVar(owner: Symbol)(using Context) extends Var(owner): override def disallowRootCapability(handler: () => Context ?=> Unit)(using Context) = this /** A variable that is derived from some other variable via a map or filter. */ @@ -644,7 +656,7 @@ object CaptureSet: */ class Mapped private[CaptureSet] (val source: Var, tm: TypeMap, variance: Int, initial: CaptureSet)(using @constructorOnly ctx: Context) - extends DerivedVar(source.levelLimit, initial.elems): + extends DerivedVar(source.owner, initial.elems): addAsDependentTo(initial) // initial mappings could change by propagation private def mapIsIdempotent = tm.isInstanceOf[IdempotentCaptRefMap] @@ -682,19 +694,10 @@ object CaptureSet: if cond then propagate else CompareResult.OK val mapped = extrapolateCaptureRef(elem, tm, variance) + def isFixpoint = mapped.isConst && mapped.elems.size == 1 && mapped.elems.contains(elem) - def addMapped = - val added = mapped.elems.filter(!accountsFor(_)) - addNewElems(added) - .andAlso: - if mapped.isConst then CompareResult.OK - else if mapped.asVar.recordDepsState() then { addAsDependentTo(mapped); CompareResult.OK } - else CompareResult.Fail(this :: Nil) - .andAlso: - propagateIf(!added.isEmpty) - def failNoFixpoint = val reason = if variance <= 0 then i"the set's variance is $variance" @@ -704,11 +707,14 @@ object CaptureSet: CompareResult.Fail(this :: Nil) if origin eq source then // elements have to be mapped - addMapped + val added = mapped.elems.filter(!accountsFor(_)) + addNewElems(added) .andAlso: if mapped.isConst then CompareResult.OK else if mapped.asVar.recordDepsState() then { addAsDependentTo(mapped); CompareResult.OK } else CompareResult.Fail(this :: Nil) + .andAlso: + propagateIf(!added.isEmpty) else if accountsFor(elem) then CompareResult.OK else if variance > 0 then @@ -741,7 +747,7 @@ object CaptureSet: */ final class BiMapped private[CaptureSet] (val source: Var, bimap: BiTypeMap, initialElems: Refs)(using @constructorOnly ctx: Context) - extends DerivedVar(source.levelLimit, initialElems): + extends DerivedVar(source.owner, initialElems): override def tryInclude(elem: CaptureRef, origin: CaptureSet)(using Context, VarState): CompareResult = if origin eq source then @@ -752,9 +758,8 @@ object CaptureSet: CompareResult.OK else source.tryInclude(bimap.backward(elem), this) - .showing(i"propagating new elem $elem backward from $this to $source = $result", capt) - .andAlso: - addNewElem(elem) + .showing(i"propagating new elem $elem backward from $this to $source = $result", captDebug) + .andAlso(addNewElem(elem)) /** For a BiTypeMap, supertypes of the mapped type also constrain * the source via the inverse type mapping and vice versa. 
That is, if @@ -775,7 +780,7 @@ object CaptureSet: /** A variable with elements given at any time as { x <- source.elems | p(x) } */ class Filtered private[CaptureSet] (val source: Var, p: Context ?=> CaptureRef => Boolean)(using @constructorOnly ctx: Context) - extends DerivedVar(source.levelLimit, source.elems.filter(p)): + extends DerivedVar(source.owner, source.elems.filter(p)): override def tryInclude(elem: CaptureRef, origin: CaptureSet)(using Context, VarState): CompareResult = if accountsFor(elem) then @@ -804,8 +809,30 @@ object CaptureSet: class Diff(source: Var, other: Const)(using Context) extends Filtered(source, !other.accountsFor(_)) - class Intersected(cs1: CaptureSet, cs2: CaptureSet)(using Context) - extends Var(cs1.levelLimit.minNested(cs2.levelLimit), elemIntersection(cs1, cs2)): + class Union(cs1: CaptureSet, cs2: CaptureSet)(using Context) + extends Var(initialElems = cs1.elems ++ cs2.elems): + addAsDependentTo(cs1) + addAsDependentTo(cs2) + + override def tryInclude(elem: CaptureRef, origin: CaptureSet)(using Context, VarState): CompareResult = + if accountsFor(elem) then CompareResult.OK + else + val res = super.tryInclude(elem, origin) + // If this is the union of a constant and a variable, + // propagate `elem` to the variable part to avoid slack + // between the operands and the union. + if res.isOK && (origin ne cs1) && (origin ne cs2) then + if cs1.isConst then cs2.tryInclude(elem, origin) + else if cs2.isConst then cs1.tryInclude(elem, origin) + else res + else res + + override def propagateSolved()(using Context) = + if cs1.isConst && cs2.isConst && !isConst then markSolved() + end Union + + class Intersection(cs1: CaptureSet, cs2: CaptureSet)(using Context) + extends Var(initialElems = elemIntersection(cs1, cs2)): addAsDependentTo(cs1) addAsDependentTo(cs2) deps += cs1 @@ -829,7 +856,7 @@ object CaptureSet: override def propagateSolved()(using Context) = if cs1.isConst && cs2.isConst && !isConst then markSolved() - end Intersected + end Intersection def elemIntersection(cs1: CaptureSet, cs2: CaptureSet)(using Context): Refs = cs1.elems.filter(cs2.mightAccountFor) ++ cs2.elems.filter(cs1.mightAccountFor) @@ -846,9 +873,11 @@ object CaptureSet: val r1 = tm(r) val upper = r1.captureSet def isExact = - upper.isAlwaysEmpty || upper.isConst && upper.elems.size == 1 && upper.elems.contains(r1) + upper.isAlwaysEmpty + || upper.isConst && upper.elems.size == 1 && upper.elems.contains(r1) + || r.derivesFrom(defn.Caps_CapSet) if variance > 0 || isExact then upper - else if variance < 0 then CaptureSet.empty + else if variance < 0 then CaptureSet.EmptyWithProvenance(r, r1) else upper.maybe /** Apply `f` to each element in `xs`, and join result sets with `++` */ @@ -895,7 +924,7 @@ object CaptureSet: if ctx.settings.YccDebug.value then printer.toText(trace, ", ") else blocking.show case LevelError(cs: CaptureSet, elem: CaptureRef) => - Str(i"($elem at wrong level for $cs in ${cs.levelLimit})") + Str(i"($elem at wrong level for $cs at level ${cs.level.toString})") /** The result is OK */ def isOK: Boolean = this == OK @@ -1032,7 +1061,9 @@ object CaptureSet: /** The capture set of the type underlying CaptureRef */ def ofInfo(ref: CaptureRef)(using Context): CaptureSet = ref match - case ref: TermRef if ref.isRootCapability => ref.singletonCaptureSet + case ref: (TermRef | TermParamRef) if ref.isMaxCapability => + if ref.isTrackableRef then ref.singletonCaptureSet + else CaptureSet.universal case ReachCapability(ref1) => deepCaptureSet(ref1.widen) .showing(i"Deep capture 
set of $ref: ${ref1.widen} = $result", capt) case _ => ofType(ref.underlying, followResult = true) @@ -1040,17 +1071,23 @@ object CaptureSet: /** Capture set of a type */ def ofType(tp: Type, followResult: Boolean)(using Context): CaptureSet = def recur(tp: Type): CaptureSet = trace(i"ofType $tp, ${tp.getClass} $followResult", show = true): - tp.dealias match + tp.dealiasKeepAnnots match case tp: TermRef => tp.captureSet case tp: TermParamRef => tp.captureSet - case tp: TypeRef => - if tp.typeSymbol == defn.Caps_Cap then universal else empty + case _: TypeRef => + empty case _: TypeParamRef => empty case CapturingType(parent, refs) => recur(parent) ++ refs + case tp @ AnnotatedType(parent, ann) if ann.hasSymbol(defn.ReachCapabilityAnnot) => + parent match + case parent: SingletonCaptureRef if parent.isTrackableRef => + tp.singletonCaptureSet + case _ => + CaptureSet.ofTypeDeeply(parent.widen) case tpd @ defn.RefinedFunctionOf(rinfo: MethodType) if followResult => ofType(tpd.parent, followResult = false) // pick up capture set from parent type ++ (recur(rinfo.resType) // add capture set of result @@ -1066,7 +1103,7 @@ object CaptureSet: case tparams @ (LambdaParam(tl, _) :: _) => cs.substParams(tl, args) case _ => cs case tp: TypeProxy => - recur(tp.underlying) + recur(tp.superType) case AndType(tp1, tp2) => recur(tp1) ** recur(tp2) case OrType(tp1, tp2) => @@ -1074,18 +1111,29 @@ object CaptureSet: case _ => empty recur(tp) - .showing(i"capture set of $tp = $result", captDebug) + //.showing(i"capture set of $tp = $result", captDebug) - private def deepCaptureSet(tp: Type)(using Context): CaptureSet = + /** The deep capture set of a type is the union of all covariant occurrences of + * capture sets. Nested existential sets are approximated with `cap`. + */ + def ofTypeDeeply(tp: Type)(using Context): CaptureSet = val collect = new TypeAccumulator[CaptureSet]: def apply(cs: CaptureSet, t: Type) = t.dealias match case t @ CapturingType(p, cs1) => val cs2 = apply(cs, p) if variance > 0 then cs2 ++ cs1 else cs2 + case t @ Existential(_, _) => + apply(cs, Existential.toCap(t)) case _ => foldOver(cs, t) collect(CaptureSet.empty, tp) + type AssumedContains = immutable.Map[TypeRef, SimpleIdentitySet[CaptureRef]] + val AssumedContains: Property.Key[AssumedContains] = Property.Key() + + def assumedContains(using Context): AssumedContains = + ctx.property(AssumedContains).getOrElse(immutable.Map.empty) + private val ShownVars: Property.Key[mutable.Set[Var]] = Property.Key() /** Perform `op`. Under -Ycc-debug, collect and print info about all variables reachable @@ -1135,6 +1183,6 @@ object CaptureSet: i""" | |Note that reference ${ref}$levelStr - |cannot be included in outer capture set $cs which is associated with ${cs.levelLimit}""" + |cannot be included in outer capture set $cs""" end CaptureSet diff --git a/compiler/src/dotty/tools/dotc/cc/CapturingType.scala b/compiler/src/dotty/tools/dotc/cc/CapturingType.scala index ee0cad4d4d03..9f9b923b2c88 100644 --- a/compiler/src/dotty/tools/dotc/cc/CapturingType.scala +++ b/compiler/src/dotty/tools/dotc/cc/CapturingType.scala @@ -28,16 +28,14 @@ object CapturingType: /** Smart constructor that * - drops empty capture sets - * - drops a capability class expansion if it is further refined with another capturing type * - fuses compatible capturing types. * An outer type capturing type A can be fused with an inner capturing type B if their * boxing status is the same or if A is boxed. 
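 *
 * For illustration (a sketch with hypothetical tracked references `a` and `b`, not taken from this change):
 * {{{
 *    CapturingType(CapturingType(T, {a}), {b})   // fuses to T^{a, b}
 * }}}
 * provided the outer application is boxed or the inner capturing type is unboxed;
 * otherwise the two capturing layers are kept separate.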
*/ def apply(parent: Type, refs: CaptureSet, boxed: Boolean = false)(using Context): Type = - if refs.isAlwaysEmpty then parent + assert(!boxed || !parent.derivesFrom(defn.Caps_CapSet)) + if refs.isAlwaysEmpty && !refs.keepAlways then parent else parent match - case parent @ CapturingType(parent1, refs1) if refs1 eq defn.expandedUniversalSet => - apply(parent1, refs, boxed) case parent @ CapturingType(parent1, refs1) if boxed || !parent.isBoxed => apply(parent1, refs ++ refs1, boxed) case _ => diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index a5bb8792af2c..77d893ad49b9 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -12,21 +12,25 @@ import ast.{tpd, untpd, Trees} import Trees.* import typer.RefChecks.{checkAllOverrides, checkSelfAgainstParents, OverridingPairsChecker} import typer.Checking.{checkBounds, checkAppliedTypesIn} -import typer.ErrorReporting.{Addenda, err} -import typer.ProtoTypes.{AnySelectionProto, LhsProto} +import typer.ErrorReporting.{Addenda, NothingToAdd, err} +import typer.ProtoTypes.{LhsProto, WildcardSelectionProto} import util.{SimpleIdentitySet, EqHashMap, EqHashSet, SrcPos, Property} import transform.{Recheck, PreRecheck, CapturedVars} import Recheck.* import scala.collection.mutable import CaptureSet.{withCaptureSetsExplained, IdempotentCaptRefMap, CompareResult} +import CCState.* import StdNames.nme import NameKinds.{DefaultGetterName, WildcardParamName, UniqueNameKind} -import reporting.trace +import reporting.{trace, Message, OverrideError} /** The capture checker */ object CheckCaptures: import ast.tpd.* + val name: String = "cc" + val description: String = "capture checking" + enum EnvKind: case Regular // normal case case NestedInOwner // environment is a temporary one nested in the owner's environment, @@ -38,7 +42,7 @@ object CheckCaptures: /** A class describing environments. * @param owner the current owner * @param kind the environment's kind - * @param captured the caputure set containing all references to tracked free variables outside of boxes + * @param captured the capture set containing all references to tracked free variables outside of boxes * @param outer0 the next enclosing environment */ case class Env( @@ -61,6 +65,9 @@ object CheckCaptures: val res = cur cur = cur.outer res + + def ownerString(using Context): String = + if owner.isAnonymousFunction then "enclosing function" else owner.show end Env /** Similar normal substParams, but this is an approximating type map that @@ -118,20 +125,24 @@ object CheckCaptures: * This check is performed at Typer. 
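 *
 * A rough sketch of what is accepted (hypothetical declarations, not from this change):
 * {{{
 *    def f(io: IO): Unit ->{io} Unit          // ok: `io` is a parameter
 *    def g(xs: List[IO]): Unit ->{xs*} Unit   // ok: reach capability over a parameter
 *    var cur: IO = ???
 *    def h(): Unit ->{cur} Unit               // error: `cur` is not a parameter or local value
 * }}}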
*/ def checkWellformed(parent: Tree, ann: Tree)(using Context): Unit = - parent.tpe match - case _: SingletonType => - report.error(em"Singleton type $parent cannot have capture set", parent.srcPos) - case _ => + def check(elem: Tree, pos: SrcPos): Unit = elem.tpe match + case ref: CaptureRef => + if !ref.isTrackableRef then + report.error(em"$elem cannot be tracked since it is not a parameter or local value", pos) + case tpe => + report.error(em"$elem: $tpe is not a legal element of a capture set", pos) for elem <- ann.retainedElems do - val elem1 = elem match - case ReachCapabilityApply(arg) => arg - case _ => elem - elem1.tpe match - case ref: CaptureRef => - if !ref.isTrackableRef then - report.error(em"$elem cannot be tracked since it is not a parameter or local value", elem.srcPos) - case tpe => - report.error(em"$elem: $tpe is not a legal element of a capture set", elem.srcPos) + elem match + case CapsOfApply(arg) => + def isLegalCapsOfArg = + arg.symbol.isAbstractOrParamType && arg.symbol.info.derivesFrom(defn.Caps_CapSet) + if !isLegalCapsOfArg then + report.error( + em"""$arg is not a legal prefix for `^` here, + |it must be a type parameter or abstract type with a caps.CapSet upper bound.""", + elem.srcPos) + case ReachCapabilityApply(arg) => check(arg, elem.srcPos) + case _ => check(elem, elem.srcPos) /** Report an error if some part of `tp` contains the root capability in its capture set * or if it refers to an unsealed type parameter that could possibly be instantiated with @@ -157,29 +168,36 @@ object CheckCaptures: case _ => case AnnotatedType(_, ann) if ann.symbol == defn.UncheckedCapturesAnnot => () - case t => + case CapturingType(parent, refs) => if variance >= 0 then - t.captureSet.disallowRootCapability: () => + refs.disallowRootCapability: () => def part = if t eq tp then "" else i"the part $t of " report.error( em"""$what cannot $have $tp since |${part}that type captures the root capability `cap`.
|$addendum""", pos) + traverse(parent) + case t => traverseChildren(t) - check.traverse(tp) + if ccConfig.useSealed then check.traverse(tp) end disallowRootCapabilitiesIn /** Attachment key for bodies of closures, provided they are values */ val ClosureBodyValue = Property.Key[Unit] + /** A prototype that indicates selection with an immutable value */ + class PathSelectionProto(val sym: Symbol, val pt: Type)(using Context) extends WildcardSelectionProto + class CheckCaptures extends Recheck, SymTransformer: thisPhase => import ast.tpd.* import CheckCaptures.* - def phaseName: String = "cc" + override def phaseName: String = CheckCaptures.name + + override def description: String = CheckCaptures.description override def isRunnable(using Context) = super.isRunnable && Feature.ccEnabledSomewhere @@ -189,7 +207,7 @@ class CheckCaptures extends Recheck, SymTransformer: if Feature.ccEnabled then super.run - val ccState = new CCState + val ccState1 = new CCState // Dotty problem: Rename to ccState ==> Crash in ExplicitOuter class CaptureChecker(ictx: Context) extends Rechecker(ictx): @@ -220,6 +238,9 @@ class CheckCaptures extends Recheck, SymTransformer: if tpt.isInstanceOf[InferredTypeTree] then interpolator().traverse(tpt.knownType) .showing(i"solved vars in ${tpt.knownType}", capt) + for msg <- ccState.approxWarnings do + report.warning(msg, tpt.srcPos) + ccState.approxWarnings.clear() /** Assert subcapturing `cs1 <: cs2` */ def assertSub(cs1: CaptureSet, cs2: CaptureSet)(using Context) = @@ -253,7 +274,7 @@ class CheckCaptures extends Recheck, SymTransformer: ctx.printer.toTextCaptureRef(ref).show // Uses 4-space indent as a trial - def checkReachCapsIsolated(tpe: Type, pos: SrcPos)(using Context): Unit = + private def checkReachCapsIsolated(tpe: Type, pos: SrcPos)(using Context): Unit = object checker extends TypeTraverser: var refVariances: Map[Boolean, Int] = Map.empty @@ -309,7 +330,7 @@ class CheckCaptures extends Recheck, SymTransformer: def capturedVars(sym: Symbol)(using Context): CaptureSet = myCapturedVars.getOrElseUpdate(sym, if sym.ownersIterator.exists(_.isTerm) - then CaptureSet.Var(sym.owner) + then CaptureSet.Var(sym.owner, level = sym.ccLevel) else CaptureSet.empty) /** For all nested environments up to `limit` or a closed environment perform `op`, @@ -344,56 +365,136 @@ class CheckCaptures extends Recheck, SymTransformer: * the environment in which `sym` is defined. */ def markFree(sym: Symbol, pos: SrcPos)(using Context): Unit = - if sym.exists then - val ref = sym.termRef - if ref.isTracked then - forallOuterEnvsUpTo(sym.enclosure): env => - capt.println(i"Mark $sym with cs ${ref.captureSet} free in ${env.owner}") - checkElem(ref, env.captured, pos, provenance(env)) + markFree(sym, sym.termRef, pos) + + def markFree(sym: Symbol, ref: TermRef, pos: SrcPos)(using Context): Unit = + if sym.exists && ref.isTracked then + forallOuterEnvsUpTo(sym.enclosure): env => + capt.println(i"Mark $sym with cs ${ref.captureSet} free in ${env.owner}") + checkElem(ref, env.captured, pos, provenance(env)) /** Make sure (projected) `cs` is a subset of the capture sets of all enclosing * environments. At each stage, only include references from `cs` that are outside * the environment's owner */ def markFree(cs: CaptureSet, pos: SrcPos)(using Context): Unit = + // A captured reference with the symbol `sym` is visible from the environment + // if `sym` is not defined inside the owner of the environment. 
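+      // For example (sketch): from the environment of a lambda nested in a method, a parameter
+      // of that enclosing method is visible, whereas a local `val` defined inside the lambda is not.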
+ inline def isVisibleFromEnv(sym: Symbol, env: Env) = + if env.kind == EnvKind.NestedInOwner then + !sym.isProperlyContainedIn(env.owner) + else + !sym.isContainedIn(env.owner) + + def checkSubsetEnv(cs: CaptureSet, env: Env)(using Context): Unit = + // Only captured references that are visible from the environment + // should be included. + val included = cs.filter: c => + c.stripReach match + case ref: NamedType => + val refSym = ref.symbol + val refOwner = refSym.owner + val isVisible = isVisibleFromEnv(refOwner, env) + if isVisible && !ref.isRootCapability then + ref match + case ref: TermRef if ref.prefix `ne` NoPrefix => + // If c is a path of a class defined outside the environment, + // we check the capture set of its info. + checkSubsetEnv(ref.captureSetOfInfo, env) + case _ => + if !isVisible + && (c.isReach || ref.isType) + && (!ccConfig.useSealed || refSym.is(Param)) + && refOwner == env.owner + then + if refSym.hasAnnotation(defn.UnboxAnnot) then + capt.println(i"exempt: $ref in $refOwner") + else + // Reach capabilities that go out of scope have to be approximated + // by their underlying capture set, which cannot be universal. + // Reach capabilities of @unboxed parameters are exempted. + val cs = CaptureSet.ofInfo(c) + cs.disallowRootCapability: () => + report.error(em"Local reach capability $c leaks into capture scope of ${env.ownerString}", pos) + checkSubset(cs, env.captured, pos, provenance(env)) + isVisible + case ref: ThisType => isVisibleFromEnv(ref.cls, env) + case _ => false + checkSubset(included, env.captured, pos, provenance(env)) + capt.println(i"Include call or box capture $included from $cs in ${env.owner} --> ${env.captured}") + if !cs.isAlwaysEmpty then forallOuterEnvsUpTo(ctx.owner.topLevelClass): env => - // Whether a symbol is defined inside the owner of the environment? - inline def isContainedInEnv(sym: Symbol) = - if env.kind == EnvKind.NestedInOwner then - sym.isProperlyContainedIn(env.owner) - else - sym.isContainedIn(env.owner) - // A captured reference with the symbol `sym` is visible from the environment - // if `sym` is not defined inside the owner of the environment - inline def isVisibleFromEnv(sym: Symbol) = !isContainedInEnv(sym) - // Only captured references that are visible from the environment - // should be included. 
- val included = cs.filter: - case ref: TermRef => isVisibleFromEnv(ref.symbol.owner) - case ref: ThisType => isVisibleFromEnv(ref.cls) - case _ => false - capt.println(i"Include call capture $included in ${env.owner}") - checkSubset(included, env.captured, pos, provenance(env)) + checkSubsetEnv(cs, env) + end markFree /** Include references captured by the called method in the current environment stack */ def includeCallCaptures(sym: Symbol, pos: SrcPos)(using Context): Unit = if sym.exists && curEnv.isOpen then markFree(capturedVars(sym), pos) + private val prefixCalls = util.EqHashSet[GenericApply]() + private val unboxedArgs = util.EqHashSet[Tree]() + + def handleCall(meth: Symbol, call: GenericApply, eval: () => Type)(using Context): Type = + if prefixCalls.remove(call) then return eval() + + val unboxedParamNames = + meth.rawParamss.flatMap: params => + params.collect: + case param if param.hasAnnotation(defn.UnboxAnnot) => + param.name + .toSet + + def markUnboxedArgs(call: GenericApply): Unit = call.fun.tpe.widen match + case MethodType(pnames) => + for (pname, arg) <- pnames.lazyZip(call.args) do + if unboxedParamNames.contains(pname) then + unboxedArgs.add(arg) + case _ => + + def markPrefixCalls(tree: Tree): Unit = tree match + case tree: GenericApply => + prefixCalls.add(tree) + markUnboxedArgs(tree) + markPrefixCalls(tree.fun) + case _ => + + markUnboxedArgs(call) + markPrefixCalls(call.fun) + val res = eval() + includeCallCaptures(meth, call.srcPos) + res + end handleCall + override def recheckIdent(tree: Ident, pt: Type)(using Context): Type = if tree.symbol.is(Method) then if tree.symbol.info.isParameterless then // there won't be an apply; need to include call captures now includeCallCaptures(tree.symbol, tree.srcPos) - else + else if !tree.symbol.isStatic then //debugShowEnvs() - markFree(tree.symbol, tree.srcPos) + def addSelects(ref: TermRef, pt: Type): TermRef = pt match + case pt: PathSelectionProto if ref.isTracked => + // if `ref` is not tracked then the selection could not give anything new + // class SerializationProxy in stdlib-cc/../LazyListIterable.scala has an example where this matters. + addSelects(ref.select(pt.sym).asInstanceOf[TermRef], pt.pt) + case _ => ref + val ref = tree.symbol.termRef + val pathRef = addSelects(ref, pt) + //if pathRef ne ref then + // println(i"add selects $ref --> $pathRef") + markFree(tree.symbol, if false then ref else pathRef, tree.srcPos) super.recheckIdent(tree, pt) + override def selectionProto(tree: Select, pt: Type)(using Context): Type = + val sym = tree.symbol + if !sym.isOneOf(UnstableValueFlags) && !sym.isStatic then PathSelectionProto(sym, pt) + else super.selectionProto(tree, pt) + /** A specialized implementation of the selection rule. 
* - * E |- f: f{ m: Cr R }^Cf - * ----------------------- + * E |- f: T{ m: R^Cr }^{f} + * ------------------------ * E |- f.m: R^C * * The implementation picks as `C` one of `{f}` or `Cr`, depending on the @@ -416,42 +517,34 @@ class CheckCaptures extends Recheck, SymTransformer: case _ => denot val selType = recheckSelection(tree, qualType, name, disambiguate) - val selCs = selType.widen.captureSet - if selCs.isAlwaysEmpty - || selType.widen.isBoxedCapturing + val selWiden = selType.widen + + if pt == LhsProto || qualType.isBoxedCapturing - || pt == LhsProto + || selType.isTrackableRef + || selWiden.isBoxedCapturing + || selWiden.captureSet.isAlwaysEmpty then selType else val qualCs = qualType.captureSet - capt.println(i"pick one of $qualType, ${selType.widen}, $qualCs, $selCs in $tree") + val selCs = selType.captureSet + capt.println(i"pick one of $qualType, ${selType.widen}, $qualCs, $selCs ${selWiden.captureSet} in $tree") + if qualCs.mightSubcapture(selCs) && !selCs.mightSubcapture(qualCs) && !pt.stripCapturing.isInstanceOf[SingletonType] then - selType.widen.stripCapturing.capturing(qualCs) + selWiden.stripCapturing.capturing(qualCs) .showing(i"alternate type for select $tree: $selType --> $result, $qualCs / $selCs", capt) else selType }//.showing(i"recheck sel $tree, $qualType = $result") - /** A specialized implementation of the apply rule. - * - * E |- f: Ra ->Cf Rr^Cr - * E |- a: Ra^Ca - * --------------------- - * E |- f a: Rr^C - * - * The implementation picks as `C` one of `{f, a}` or `Cr`, depending on the - * outcome of a `mightSubcapture` test. It picks `{f, a}` if this might subcapture Cr - * and Cr otherwise. - */ override def recheckApply(tree: Apply, pt: Type)(using Context): Type = val meth = tree.fun.symbol - includeCallCaptures(meth, tree.srcPos) - // Unsafe box/unbox handlng, only for versions < 3.3 + // Unsafe box/unbox handling, only for versions < 3.3 def mapArgUsing(f: Type => Type) = val arg :: Nil = tree.args: @unchecked val argType0 = f(recheckStart(arg, pt)) @@ -482,24 +575,58 @@ class CheckCaptures extends Recheck, SymTransformer: tp.derivedCapturingType(forceBox(parent), refs) mapArgUsing(forceBox) else - super.recheckApply(tree, pt) match - case appType @ CapturingType(appType1, refs) => - tree.fun match - case Select(qual, _) - if !tree.fun.symbol.isConstructor - && !qual.tpe.isBoxedCapturing - && !tree.args.exists(_.tpe.isBoxedCapturing) - && qual.tpe.captureSet.mightSubcapture(refs) - && tree.args.forall(_.tpe.captureSet.mightSubcapture(refs)) - => - val callCaptures = tree.args.foldLeft(qual.tpe.captureSet): (cs, arg) => - cs ++ arg.tpe.captureSet - appType.derivedCapturingType(appType1, callCaptures) - .showing(i"narrow $tree: $appType, refs = $refs, qual = ${qual.tpe.captureSet} --> $result", capt) - case _ => appType - case appType => appType + handleCall(meth, tree, () => super.recheckApply(tree, pt)) end recheckApply + protected override + def recheckArg(arg: Tree, formal: Type)(using Context): Type = + val argType = recheck(arg, formal) + if unboxedArgs.contains(arg) then + capt.println(i"charging deep capture set of $arg: ${argType} = ${argType.deepCaptureSet}") + markFree(argType.deepCaptureSet, arg.srcPos) + argType + + /** A specialized implementation of the apply rule. 
+ * + * E |- q: Tq^Cq + * E |- q.f: Ta^Ca ->Cf Tr^Cr + * E |- a: Ta + * --------------------- + * E |- f(a): Tr^C + * + * If the function `f` does not have an `@unboxed` parameter, then + * any unboxing it does would be charged to the environment of the function + * so they have to appear in Cq. Since any capabilities of the result of the + * application must already be present in the application, an upper + * approximation of the result capture set is Cq \union Ca, where `Ca` + * is the capture set of the argument. + * If the function `f` does have an `@unboxed` parameter, then it could in addition + * unbox reach capabilities over its formal parameter. Therefore, the approximation + * would be `Cq \union dcs(Ca)` instead. + * If the approximation is known to subcapture the declared result Cr, we pick it for C + * otherwise we pick Cr. + */ + protected override + def recheckApplication(tree: Apply, qualType: Type, funType: MethodType, argTypes: List[Type])(using Context): Type = + val appType = Existential.toCap(super.recheckApplication(tree, qualType, funType, argTypes)) + val qualCaptures = qualType.captureSet + val argCaptures = + for (arg, argType) <- tree.args.lazyZip(argTypes) yield + if unboxedArgs.remove(arg) // need to ensure the remove happens, that's why argCaptures is computed even if not needed. + then argType.deepCaptureSet + else argType.captureSet + appType match + case appType @ CapturingType(appType1, refs) + if qualType.exists + && !tree.fun.symbol.isConstructor + && qualCaptures.mightSubcapture(refs) + && argCaptures.forall(_.mightSubcapture(refs)) => + val callCaptures = argCaptures.foldLeft(qualCaptures)(_ ++ _) + appType.derivedCapturingType(appType1, callCaptures) + .showing(i"narrow $tree: $appType, refs = $refs, qual-cs = ${qualType.captureSet} = $result", capt) + case appType => + appType + private def isDistinct(xs: List[Type]): Boolean = xs match case x :: xs1 => xs1.isEmpty || !xs1.contains(x) && isDistinct(xs1) case Nil => true @@ -537,11 +664,11 @@ class CheckCaptures extends Recheck, SymTransformer: */ def addParamArgRefinements(core: Type, initCs: CaptureSet): (Type, CaptureSet) = var refined: Type = core - var allCaptures: CaptureSet = if setup.isCapabilityClassRef(core) - then CaptureSet.universal else initCs + var allCaptures: CaptureSet = + if core.derivesFromCapability then defn.universalCSImpliedByCapability else initCs for (getterName, argType) <- mt.paramNames.lazyZip(argTypes) do - val getter = cls.info.member(getterName).suchThat(_.is(ParamAccessor)).symbol - if getter.termRef.isTracked && !getter.is(Private) then + val getter = cls.info.member(getterName).suchThat(_.isRefiningParamAccessor).symbol + if !getter.is(Private) && getter.hasTrackedParts then refined = RefinedType(refined, getterName, argType) allCaptures ++= argType.captureSet (refined, allCaptures) @@ -568,18 +695,41 @@ class CheckCaptures extends Recheck, SymTransformer: end instantiate override def recheckTypeApply(tree: TypeApply, pt: Type)(using Context): Type = - if ccConfig.allowUniversalInBoxed then + val meth = tree.symbol + if ccConfig.useSealed then val TypeApply(fn, args) = tree val polyType = atPhase(thisPhase.prev): fn.tpe.widen.asInstanceOf[TypeLambda] + def isExempt(sym: Symbol) = + sym.isTypeTestOrCast || sym == defn.Compiletime_erasedValue for case (arg: TypeTree, formal, pname) <- args.lazyZip(polyType.paramRefs).lazyZip((polyType.paramNames)) do - if !tree.symbol.isTypeTestOrCast then - def where = if fn.symbol.exists then i" in an argument of ${fn.symbol}" else 
"" + if !isExempt(meth) then + def where = if meth.exists then i" in an argument of $meth" else "" disallowRootCapabilitiesIn(arg.knownType, NoSymbol, i"Sealed type variable $pname", "be instantiated to", i"This is often caused by a local capability$where\nleaking as part of its result.", tree.srcPos) - super.recheckTypeApply(tree, pt) + try handleCall(meth, tree, () => Existential.toCap(super.recheckTypeApply(tree, pt))) + finally checkContains(tree) + end recheckTypeApply + + /** Faced with a tree of form `caps.contansImpl[CS, r.type]`, check that `R` is a tracked + * capability and assert that `{r} <:CS`. + */ + def checkContains(tree: TypeApply)(using Context): Unit = tree match + case ContainsImpl(csArg, refArg) => + val cs = csArg.knownType.captureSet + val ref = refArg.knownType + capt.println(i"check contains $cs , $ref") + ref match + case ref: CaptureRef if ref.isTracked => + checkElem(ref, cs, tree.srcPos) + case _ => + report.error(em"$refArg is not a tracked capability", refArg.srcPos) + case _ => + + override def recheckBlock(tree: Block, pt: Type)(using Context): Type = + inNestedLevel(super.recheckBlock(tree, pt)) override def recheckClosure(tree: Closure, pt: Type, forceDependent: Boolean)(using Context): Type = val cs = capturedVars(tree.meth.symbol) @@ -597,25 +747,33 @@ class CheckCaptures extends Recheck, SymTransformer: mdef.rhs.putAttachment(ClosureBodyValue, ()) case _ => - // Constrain closure's parameters and result from the expected type before - // rechecking the body. openClosures = (mdef.symbol, pt) :: openClosures try + // Constrain closure's parameters and result from the expected type before + // rechecking the body. val res = recheckClosure(expr, pt, forceDependent = true) - if !isEtaExpansion(mdef) then + if !(isEtaExpansion(mdef) && ccConfig.handleEtaExpansionsSpecially) then // If closure is an eta expanded method reference it's better to not constrain // its internals early since that would give error messages in generated code // which are less intelligible. // Example is the line `a = x` in neg-custom-args/captures/vars.scala. // For all other closures, early constraints are preferred since they // give more localized error messages. - checkConformsExpr(res, pt, expr) + val res1 = Existential.toCapDeeply(res) + val pt1 = Existential.toCapDeeply(pt) + // We need to open existentials here in order not to get vars mixed up in them + // We do the proper check with existentials when we are finished with the closure block. + capt.println(i"pre-check closure $expr of type $res1 against $pt1") + checkConformsExpr(res1, pt1, expr) recheckDef(mdef, mdef.symbol) res finally openClosures = openClosures.tail end recheckClosureBlock + override def seqLiteralElemProto(tree: SeqLiteral, pt: Type, declared: Type)(using Context) = + super.seqLiteralElemProto(tree, pt, declared).boxed + /** Maps mutable variables to the symbols that capture them (in the * CheckCaptures sense, i.e. symbol is referred to from a different method * than the one it is defined in). @@ -684,13 +842,25 @@ class CheckCaptures extends Recheck, SymTransformer: val localSet = capturedVars(sym) if !localSet.isAlwaysEmpty then curEnv = Env(sym, EnvKind.Regular, localSet, curEnv) - try checkInferredResult(super.recheckDefDef(tree, sym), tree) - finally - if !sym.isAnonymousFunction then - // Anonymous functions propagate their type to the enclosing environment - // so it is not in general sound to interpolate their types. 
- interpolateVarsIn(tree.tpt) - curEnv = saved + + // ctx with AssumedContains entries for each Contains parameter + val bodyCtx = + var ac = CaptureSet.assumedContains + for paramSyms <- sym.paramSymss do + for case ContainsParam(cs, ref) <- paramSyms do + ac = ac.updated(cs, ac.getOrElse(cs, SimpleIdentitySet.empty) + ref) + if ac.isEmpty then ctx + else ctx.withProperty(CaptureSet.AssumedContains, Some(ac)) + + inNestedLevel: // TODO: needed here? + try checkInferredResult(super.recheckDefDef(tree, sym)(using bodyCtx), tree) + finally + if !sym.isAnonymousFunction then + // Anonymous functions propagate their type to the enclosing environment + // so it is not in general sound to interpolate their types. + interpolateVarsIn(tree.tpt) + curEnv = saved + end recheckDefDef /** If val or def definition with inferred (result) type is visible * in other compilation units, check that the actual inferred type @@ -748,7 +918,8 @@ class CheckCaptures extends Recheck, SymTransformer: val thisSet = cls.classInfo.selfType.captureSet.withDescription(i"of the self type of $cls") checkSubset(localSet, thisSet, tree.srcPos) // (2) for param <- cls.paramGetters do - if !param.hasAnnotation(defn.ConstructorOnlyAnnot) then + if !param.hasAnnotation(defn.ConstructorOnlyAnnot) + && !param.hasAnnotation(defn.UntrackedCapturesAnnot) then checkSubset(param.termRef.captureSet, thisSet, param.srcPos) // (3) for pureBase <- cls.pureBaseClass do // (4) def selfType = impl.body @@ -760,7 +931,8 @@ class CheckCaptures extends Recheck, SymTransformer: checkSubset(thisSet, CaptureSet.empty.withDescription(i"of pure base class $pureBase"), selfType.srcPos, cs1description = " captured by this self type") - super.recheckClassDef(tree, impl, cls) + inNestedLevelUnless(cls.is(Module)): + super.recheckClassDef(tree, impl, cls) finally curEnv = saved @@ -780,7 +952,7 @@ class CheckCaptures extends Recheck, SymTransformer: override def recheckTry(tree: Try, pt: Type)(using Context): Type = val tp = super.recheckTry(tree, pt) - if ccConfig.allowUniversalInBoxed && Feature.enabled(Feature.saferExceptions) then + if ccConfig.useSealed && Feature.enabled(Feature.saferExceptions) then disallowRootCapabilitiesIn(tp, ctx.owner, "result of `try`", "have type", "This is often caused by a locally generated exception capability leaking as part of its result.", @@ -812,9 +984,9 @@ class CheckCaptures extends Recheck, SymTransformer: val saved = curEnv tree match case _: RefTree | closureDef(_) if pt.isBoxedCapturing => - curEnv = Env(curEnv.owner, EnvKind.Boxed, CaptureSet.Var(curEnv.owner), curEnv) + curEnv = Env(curEnv.owner, EnvKind.Boxed, CaptureSet.Var(curEnv.owner, level = currentLevel), curEnv) case _ if tree.hasAttachment(ClosureBodyValue) => - curEnv = Env(curEnv.owner, EnvKind.ClosureResult, CaptureSet.Var(curEnv.owner), curEnv) + curEnv = Env(curEnv.owner, EnvKind.ClosureResult, CaptureSet.Var(curEnv.owner, level = currentLevel), curEnv) case _ => val res = try @@ -828,8 +1000,9 @@ class CheckCaptures extends Recheck, SymTransformer: tree.tpe finally curEnv = saved if tree.isTerm then - checkReachCapsIsolated(res.widen, tree.srcPos) - if !pt.isBoxedCapturing then + if !ccConfig.useExistentials then + checkReachCapsIsolated(res.widen, tree.srcPos) + if !pt.isBoxedCapturing && pt != LhsProto then markFree(res.boxedCaptureSet, tree.srcPos) res @@ -838,18 +1011,28 @@ class CheckCaptures extends Recheck, SymTransformer: case _: RefTree | _: Apply | _: TypeApply => tree.symbol.unboxesResult case _: Try => true case _ => false - def 
checkNotUniversal(tp: Type): Unit = tp.widenDealias match - case wtp @ CapturingType(parent, refs) => - refs.disallowRootCapability { () => - report.error( - em"""The expression's type $wtp is not allowed to capture the root capability `cap`. - |This usually means that a capability persists longer than its allowed lifetime.""", - tree.srcPos) - } - checkNotUniversal(parent) - case _ => - if !ccConfig.allowUniversalInBoxed && needsUniversalCheck then - checkNotUniversal(tpe) + + object checkNotUniversal extends TypeTraverser: + def traverse(tp: Type) = + tp.dealias match + case wtp @ CapturingType(parent, refs) => + if variance > 0 then + refs.disallowRootCapability: () => + def part = if wtp eq tpe.widen then "" else i" in its part $wtp" + report.error( + em"""The expression's type ${tpe.widen} is not allowed to capture the root capability `cap`$part. + |This usually means that a capability persists longer than its allowed lifetime.""", + tree.srcPos) + if !wtp.isBoxed then traverse(parent) + case tp => + traverseChildren(tp) + + if !ccConfig.useSealed + && !tpe.hasAnnotation(defn.UncheckedCapturesAnnot) + && needsUniversalCheck + && tpe.widen.isValueType + then + checkNotUniversal.traverse(tpe.widen) super.recheckFinish(tpe, tree, pt) end recheckFinish @@ -866,6 +1049,36 @@ class CheckCaptures extends Recheck, SymTransformer: private inline val debugSuccesses = false + type BoxErrors = mutable.ListBuffer[Message] | Null + + private def boxErrorAddenda(boxErrors: BoxErrors) = + if boxErrors == null then NothingToAdd + else new Addenda: + override def toAdd(using Context): List[String] = + boxErrors.toList.map: msg => + i""" + | + |Note that ${msg.toString}""" + + private def addApproxAddenda(using Context) = + new TypeAccumulator[Addenda]: + def apply(add: Addenda, t: Type) = t match + case CapturingType(t, CaptureSet.EmptyWithProvenance(ref, mapped)) => + /* val (origCore, kind) = original match + case tp @ AnnotatedType(parent, ann) if ann.hasSymbol(defn.ReachCapabilityAnnot) => + (parent, " deep") + case _ => + (original, "")*/ + add ++ new Addenda: + override def toAdd(using Context): List[String] = + i""" + | + |Note that a capability $ref in a capture set appearing in contravariant position + |was mapped to $mapped which is not a capability. Therefore, it was under-approximated to the empty set.""" + :: Nil + case _ => + foldOver(add, t) + /** Massage `actual` and `expected` types before checking conformance. 
* Massaging is done by the methods following this one: * - align dependent function types and add outer references in the expected type @@ -875,12 +1088,13 @@ class CheckCaptures extends Recheck, SymTransformer: */ override def checkConformsExpr(actual: Type, expected: Type, tree: Tree, addenda: Addenda)(using Context): Type = var expected1 = alignDependentFunction(expected, actual.stripCapturing) - val actualBoxed = adaptBoxed(actual, expected1, tree.srcPos) + val boxErrors = new mutable.ListBuffer[Message] + val actualBoxed = adapt(actual, expected1, tree.srcPos, boxErrors) //println(i"check conforms $actualBoxed <<< $expected1") if actualBoxed eq actual then // Only `addOuterRefs` when there is no box adaptation - expected1 = addOuterRefs(expected1, actual) + expected1 = addOuterRefs(expected1, actual, tree.srcPos) if isCompatible(actualBoxed, expected1) then if debugSuccesses then tree match case Ident(_) => @@ -889,7 +1103,10 @@ class CheckCaptures extends Recheck, SymTransformer: actualBoxed else capt.println(i"conforms failed for ${tree}: $actual vs $expected") - err.typeMismatch(tree.withType(actualBoxed), expected1, addenda ++ CaptureSet.levelErrors) + err.typeMismatch(tree.withType(actualBoxed), expected1, + addApproxAddenda( + addenda ++ CaptureSet.levelErrors ++ boxErrorAddenda(boxErrors), + expected1)) actual end checkConformsExpr @@ -903,8 +1120,7 @@ class CheckCaptures extends Recheck, SymTransformer: case expected @ defn.FunctionOf(args, resultType, isContextual) if defn.isNonRefinedFunction(expected) => actual match - case RefinedType(parent, nme.apply, rinfo: MethodType) - if defn.isFunctionNType(actual) => + case defn.RefinedFunctionOf(rinfo: MethodType) => depFun(args, resultType, isContextual, rinfo.paramNames) case _ => expected case _ => expected @@ -919,8 +1135,12 @@ class CheckCaptures extends Recheck, SymTransformer: * that are outside `Cls`. These are all accessed through `Cls.this`, * so we can assume they are already accounted for by `Ce` and adding * them explicitly to `Ce` changes nothing. + * - To make up for this, we also add these variables to the capture set of `Cls`, + * so that all instances of `Cls` will capture these outer references. + * So in a sense we use `{Cls.this}` as a placeholder for certain outer captures. + * that we needed to be subsumed by `Cls.this`. */ - private def addOuterRefs(expected: Type, actual: Type)(using Context): Type = + private def addOuterRefs(expected: Type, actual: Type, pos: SrcPos)(using Context): Type = def isPure(info: Type): Boolean = info match case info: PolyType => isPure(info.resType) @@ -933,16 +1153,40 @@ class CheckCaptures extends Recheck, SymTransformer: else isPure(owner.info) && isPureContext(owner.owner, limit) // Augment expeced capture set `erefs` by all references in actual capture - // set `arefs` that are outside some `this.type` reference in `erefs` + // set `arefs` that are outside some `C.this.type` reference in `erefs` for an enclosing + // class `C`. If an added reference is not a ThisType itself, add it to the capture set + // (i.e. use set) of the `C`. This makes sure that any outer reference implicitly subsumed + // by `C.this` becomes a capture reference of every instance of `C`. 
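To illustrate the situation `addOuterRefs` compensates for, here is a rough sketch in user code; the names are made up and this is not part of the patch, only a reading aid assuming an ordinary tracked parameter:

    import language.experimental.captureChecking

    def run(io: AnyRef^) =               // `io` is a tracked capability parameter
      class Worker:                      // `io` is declared outside Worker
        def step(): Unit = println(io)   // the body uses the outer reference `io`
      // Inside Worker, `io` is only reachable through `Worker.this`, so when the
      // captures of `step` are compared against an expected set containing
      // `Worker.this`, the outer reference counts as subsumed by `this`.
      // In exchange, the checker also records `io` in the capture set of Worker
      // itself (see `augment` below), so instances of Worker end up typed
      // roughly as Worker^{io}.
      new Worker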
def augment(erefs: CaptureSet, arefs: CaptureSet): CaptureSet = (erefs /: erefs.elems): (erefs, eref) => eref match case eref: ThisType if isPureContext(ctx.owner, eref.cls) => - erefs ++ arefs.filter { - case aref: TermRef => eref.cls.isProperlyContainedIn(aref.symbol.owner) + + def pathRoot(aref: Type): Type = aref match + case aref: NamedType if aref.symbol.owner.isClass => pathRoot(aref.prefix) + case _ => aref + + def isOuterRef(aref: Type): Boolean = pathRoot(aref) match + case aref: NamedType => eref.cls.isProperlyContainedIn(aref.symbol.owner) case aref: ThisType => eref.cls.isProperlyContainedIn(aref.cls) case _ => false - } + + val outerRefs = arefs.filter(isOuterRef) + + // Include implicitly added outer references in the capture set of the class of `eref`. + for outerRef <- outerRefs.elems do + if !erefs.elems.contains(outerRef) + && !pathRoot(outerRef).isInstanceOf[ThisType] + // we don't need to add outer ThisTypes as these are anyway added as path + // prefixes at the use site. And this exemption is required since capture sets + // of non-local classes are always empty, so we can't add an outer this to them. + then + def provenance = + i""" of the enclosing class ${eref.cls}. + |The reference was included since we tried to establish that $arefs <: $erefs""" + checkElem(outerRef, capturedVars(eref.cls), pos, provenance) + + erefs ++ outerRefs case _ => erefs @@ -974,166 +1218,146 @@ class CheckCaptures extends Recheck, SymTransformer: * * @param alwaysConst always make capture set variables constant after adaptation */ - def adaptBoxed(actual: Type, expected: Type, pos: SrcPos, alwaysConst: Boolean = false)(using Context): Type = + def adaptBoxed(actual: Type, expected: Type, pos: SrcPos, covariant: Boolean, alwaysConst: Boolean, boxErrors: BoxErrors)(using Context): Type = - inline def inNestedEnv[T](boxed: Boolean)(op: => T): T = - val saved = curEnv - curEnv = Env(curEnv.owner, EnvKind.NestedInOwner, CaptureSet.Var(curEnv.owner), if boxed then null else curEnv) - try op - finally curEnv = saved + def recur(actual: Type, expected: Type, covariant: Boolean): Type = - /** Adapt function type `actual`, which is `aargs -> ares` (possibly with dependencies) - * to `expected` type. - * It returns the adapted type along with a capture set consisting of the references - * that were additionally captured during adaptation. - * @param reconstruct how to rebuild the adapted function type - */ - def adaptFun(actual: Type, aargs: List[Type], ares: Type, expected: Type, - covariant: Boolean, boxed: Boolean, - reconstruct: (List[Type], Type) => Type): (Type, CaptureSet) = - inNestedEnv(boxed): - val (eargs, eres) = expected.dealias.stripCapturing match - case defn.FunctionOf(eargs, eres, _) => (eargs, eres) - case expected: MethodType => (expected.paramInfos, expected.resType) - case expected @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionNType(expected) => (rinfo.paramInfos, rinfo.resType) - case _ => (aargs.map(_ => WildcardType), WildcardType) - val aargs1 = aargs.zipWithConserve(eargs) { (aarg, earg) => adapt(aarg, earg, !covariant) } - val ares1 = adapt(ares, eres, covariant) - - val resTp = - if (ares1 eq ares) && (aargs1 eq aargs) then actual - else reconstruct(aargs1, ares1) - - (resTp, curEnv.captured) - end adaptFun - - /** Adapt type function type `actual` to the expected type. 
- * @see [[adaptFun]] - */ - def adaptTypeFun( - actual: Type, ares: Type, expected: Type, - covariant: Boolean, boxed: Boolean, - reconstruct: Type => Type): (Type, CaptureSet) = - inNestedEnv(boxed): - val eres = expected.dealias.stripCapturing match - case defn.PolyFunctionOf(rinfo: PolyType) => rinfo.resType - case expected: PolyType => expected.resType - case _ => WildcardType - - val ares1 = adapt(ares, eres, covariant) - - val resTp = - if ares1 eq ares then actual - else reconstruct(ares1) - - (resTp, curEnv.captured) - end adaptTypeFun - - def adaptInfo(actual: Type, expected: Type, covariant: Boolean): String = - val arrow = if covariant then "~~>" else "<~~" - i"adapting $actual $arrow $expected" - - def adapt(actual: Type, expected: Type, covariant: Boolean): Type = trace(adaptInfo(actual, expected, covariant), recheckr, show = true) { - if expected.isInstanceOf[WildcardType] then actual - else - // Decompose the actual type into the inner shape type, the capture set and the box status - val styp = if actual.isFromJavaObject then actual else actual.stripCapturing - val cs = actual.captureSet - val boxed = actual.isBoxedCapturing - - // A box/unbox should be inserted, if the actual box status mismatches with the expectation - val needsAdaptation = boxed != expected.isBoxedCapturing - // Whether to insert a box or an unbox? - val insertBox = needsAdaptation && covariant != boxed - - // Adapt the inner shape type: get the adapted shape type, and the capture set leaked during adaptation - val (styp1, leaked) = styp match { - case actual @ AppliedType(tycon, args) if defn.isNonRefinedFunction(actual) => - adaptFun(actual, args.init, args.last, expected, covariant, insertBox, - (aargs1, ares1) => actual.derivedAppliedType(tycon, aargs1 :+ ares1)) - case actual @ defn.RefinedFunctionOf(rinfo: MethodType) => - // TODO Find a way to combine handling of generic and dependent function types (here and elsewhere) - adaptFun(actual, rinfo.paramInfos, rinfo.resType, expected, covariant, insertBox, - (aargs1, ares1) => - rinfo.derivedLambdaType(paramInfos = aargs1, resType = ares1) - .toFunctionType(alwaysDependent = true)) - case actual: MethodType => - adaptFun(actual, actual.paramInfos, actual.resType, expected, covariant, insertBox, - (aargs1, ares1) => - actual.derivedLambdaType(paramInfos = aargs1, resType = ares1)) - case actual @ defn.RefinedFunctionOf(rinfo: PolyType) => - adaptTypeFun(actual, rinfo.resType, expected, covariant, insertBox, - ares1 => - val rinfo1 = rinfo.derivedLambdaType(rinfo.paramNames, rinfo.paramInfos, ares1) - val actual1 = actual.derivedRefinedType(refinedInfo = rinfo1) - actual1 - ) - case _ => - (styp, CaptureSet()) - } + /** Adapt the inner shape type: get the adapted shape type, and the capture set leaked during adaptation + * @param boxed if true we adapt to a boxed expected type + */ + def adaptShape(actualShape: Type, boxed: Boolean): (Type, CaptureSet) = actualShape match + case FunctionOrMethod(aargs, ares) => + val saved = curEnv + curEnv = Env( + curEnv.owner, EnvKind.NestedInOwner, + CaptureSet.Var(curEnv.owner, level = currentLevel), + if boxed then null else curEnv) + try + val (eargs, eres) = expected.dealias.stripCapturing match + case FunctionOrMethod(eargs, eres) => (eargs, eres) + case _ => (aargs.map(_ => WildcardType), WildcardType) + val aargs1 = aargs.zipWithConserve(eargs): + recur(_, _, !covariant) + val ares1 = recur(ares, eres, covariant) + val resTp = + if (aargs1 eq aargs) && (ares1 eq ares) then actualShape // optimize to avoid 
redundant matches + else actualShape.derivedFunctionOrMethod(aargs1, ares1) + (resTp, CaptureSet(curEnv.captured.elems)) + finally curEnv = saved + case _ => + (actualShape, CaptureSet()) + end adaptShape - // Capture set of the term after adaptation - val cs1 = - if covariant then cs ++ leaked - else - if !leaked.subCaptures(cs, frozen = false).isOK then - report.error( - em"""$expected cannot be box-converted to $actual - |since the additional capture set $leaked resulted from box conversion is not allowed in $actual""", pos) - cs - - // Compute the adapted type - def adaptedType(resultBoxed: Boolean) = - if (styp1 eq styp) && leaked.isAlwaysEmpty && boxed == resultBoxed then actual - else styp1.capturing(if alwaysConst then CaptureSet(cs1.elems) else cs1).forceBoxStatus(resultBoxed) - - if needsAdaptation then - val criticalSet = // the set which is not allowed to have `cap` - if covariant then cs1 // can't box with `cap` - else expected.captureSet // can't unbox with `cap` - if criticalSet.isUniversal && expected.isValueType && !ccConfig.allowUniversalInBoxed then - // We can't box/unbox the universal capability. Leave `actual` as it is - // so we get an error in checkConforms. This tends to give better error - // messages than disallowing the root capability in `criticalSet`. - if ctx.settings.YccDebug.value then - println(i"cannot box/unbox $actual vs $expected") - actual - else - if !ccConfig.allowUniversalInBoxed then - // Disallow future addition of `cap` to `criticalSet`. - criticalSet.disallowRootCapability { () => - report.error( - em"""$actual cannot be box-converted to $expected - |since one of their capture sets contains the root capability `cap`""", - pos) - } - if !insertBox then // unboxing - //debugShowEnvs() - markFree(criticalSet, pos) - adaptedType(!boxed) + def adaptStr = i"adapting $actual ${if covariant then "~~>" else "<~~"} $expected" + + actual match + case actual @ Existential(_, actualUnpacked) => + return Existential.derivedExistentialType(actual): + recur(actualUnpacked, expected, covariant) + case _ => + expected match + case expected @ Existential(_, expectedUnpacked) => + return recur(actual, expectedUnpacked, covariant) + case _: WildcardType => + return actual + case _ => + + trace(adaptStr, capt, show = true) { + + // Decompose the actual type into the inner shape type, the capture set and the box status + val actualShape = if actual.isFromJavaObject then actual else actual.stripCapturing + val actualIsBoxed = actual.isBoxedCapturing + + // A box/unbox should be inserted, if the actual box status mismatches with the expectation + val needsAdaptation = actualIsBoxed != expected.isBoxedCapturing + // Whether to insert a box or an unbox? 
+ val insertBox = needsAdaptation && covariant != actualIsBoxed + + // Adapt the inner shape type: get the adapted shape type, and the capture set leaked during adaptation + val (adaptedShape, leaked) = adaptShape(actualShape, insertBox) + + // Capture set of the term after adaptation + val captures = + val cs = actual.captureSet + if covariant then cs ++ leaked else - adaptedType(boxed) - } + if !leaked.subCaptures(cs, frozen = false).isOK then + report.error( + em"""$expected cannot be box-converted to $actual + |since the additional capture set $leaked resulted from box conversion is not allowed in $actual""", pos) + cs + + // Compute the adapted type + def adaptedType(resultBoxed: Boolean) = + if (adaptedShape eq actualShape) && leaked.isAlwaysEmpty && actualIsBoxed == resultBoxed + then actual + else adaptedShape + .capturing(if alwaysConst then CaptureSet(captures.elems) else captures) + .forceBoxStatus(resultBoxed) + + if needsAdaptation then + val criticalSet = // the set which is not allowed to have `cap` + if covariant then captures // can't box with `cap` + else expected.captureSet // can't unbox with `cap` + def msg = em"""$actual cannot be box-converted to $expected + |since at least one of their capture sets contains the root capability `cap`""" + def allowUniversalInBoxed = + ccConfig.useSealed + || expected.hasAnnotation(defn.UncheckedCapturesAnnot) + || actual.widen.hasAnnotation(defn.UncheckedCapturesAnnot) + if criticalSet.isUnboxable && expected.isValueType && !allowUniversalInBoxed then + // We can't box/unbox the universal capability. Leave `actual` as it is + // so we get an error in checkConforms. Add the error message generated + // from boxing as an addendum. This tends to give better error + // messages than disallowing the root capability in `criticalSet`. + if boxErrors != null then boxErrors += msg + if ctx.settings.YccDebug.value then + println(i"cannot box/unbox $actual vs $expected") + actual + else + if !allowUniversalInBoxed then + // Disallow future addition of `cap` to `criticalSet`. + criticalSet.disallowRootCapability: () => + report.error(msg, pos) + if !insertBox then // unboxing + //debugShowEnvs() + markFree(criticalSet, pos) + adaptedType(!actualIsBoxed) + else + adaptedType(actualIsBoxed) + } + end recur + + recur(actual, expected, covariant) + end adaptBoxed + + /** If actual is a tracked CaptureRef `a` and widened is a capturing type T^C, + * improve `T^C` to `T^{a}`, following the VAR rule of CC. 
+ */ + private def improveCaptures(widened: Type, actual: Type)(using Context): Type = actual match + case ref: CaptureRef if ref.isTracked => + widened match + case CapturingType(p, refs) if ref.singletonCaptureSet.mightSubcapture(refs) => + widened.derivedCapturingType(p, ref.singletonCaptureSet) + .showing(i"improve $widened to $result", capt) + case _ => widened + case _ => widened + /** Adapt `actual` type to `expected` type by inserting boxing and unboxing conversions + * + * @param alwaysConst always make capture set variables constant after adaptation + */ + def adapt(actual: Type, expected: Type, pos: SrcPos, boxErrors: BoxErrors)(using Context): Type = if expected == LhsProto || expected.isSingleton && actual.isSingleton then actual else - var actualw = actual.widenDealias - actual match - case ref: CaptureRef if ref.isTracked => - actualw match - case CapturingType(p, refs) if ref.singletonCaptureSet.mightSubcapture(refs) => - actualw = actualw.derivedCapturingType(p, ref.singletonCaptureSet) - .showing(i"improve $actualw to $result", capt) - // given `a: T^C`, improve `T^C` to `T^{a}` - case _ => - case _ => - val adapted = adapt(actualw.withReachCaptures(actual), expected, covariant = true) - if adapted ne actualw then - capt.println(i"adapt boxed $actual vs $expected ===> $adapted") - adapted - else actual - end adaptBoxed + val widened = improveCaptures(actual.widen.dealiasKeepAnnots, actual) + val adapted = adaptBoxed( + widened.withReachCaptures(actual), expected, pos, + covariant = true, alwaysConst = false, boxErrors) + if adapted eq widened then actual + else adapted.showing(i"adapt boxed $actual vs $expected = $adapted", capt) + end adapt /** Check overrides again, taking capture sets into account. * TODO: Can we avoid doing overrides checks twice? 
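The `improveCaptures` step above implements the VAR rule: a use of a tracked reference is typed with its own singleton capture set rather than with the capture set of its underlying type. A minimal sketch of the intended effect in user code, with made-up names and only indicative types:

    import language.experimental.captureChecking

    def copy(out: AnyRef^) =
      val a: AnyRef^{out} = out
      // When `a` is used below, its widened type would be AnyRef^{out}.
      // Since `a` is itself tracked and {a} subcaptures {out}, the widened type
      // is improved to AnyRef^{a}, so later uses charge the more precise
      // reference `a` instead of the original capability `out`.
      val b = a
      b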
@@ -1147,12 +1371,13 @@ class CheckCaptures extends Recheck, SymTransformer: * @param sym symbol of the field definition that is being checked */ override def checkSubType(actual: Type, expected: Type)(using Context): Boolean = - val expected1 = alignDependentFunction(addOuterRefs(expected, actual), actual.stripCapturing) + val expected1 = alignDependentFunction(addOuterRefs(expected, actual, srcPos), actual.stripCapturing) val actual1 = val saved = curEnv try curEnv = Env(clazz, EnvKind.NestedInOwner, capturedVars(clazz), outer0 = curEnv) - val adapted = adaptBoxed(actual, expected1, srcPos, alwaysConst = true) + val adapted = + adaptBoxed(actual, expected1, srcPos, covariant = true, alwaysConst = true, null) actual match case _: MethodType => // We remove the capture set resulted from box adaptation for method types, @@ -1168,6 +1393,21 @@ class CheckCaptures extends Recheck, SymTransformer: !setup.isPreCC(overriding) && !setup.isPreCC(overridden) override def checkInheritedTraitParameters: Boolean = false + + /** Check that overrides don't change the @unbox status of their parameters */ + override def additionalChecks(member: Symbol, other: Symbol)(using Context): Unit = + for + (params1, params2) <- member.rawParamss.lazyZip(other.rawParamss) + (param1, param2) <- params1.lazyZip(params2) + do + if param1.hasAnnotation(defn.UnboxAnnot) != param2.hasAnnotation(defn.UnboxAnnot) then + report.error( + OverrideError( + i"has a parameter ${param1.name} with different @unbox status than the corresponding parameter in the overridden definition", + self, member, other, self.memberInfo(member), self.memberInfo(other) + ), + if member.owner == clazz then member.srcPos else clazz.srcPos + ) end OverridingPairsCheckerCC def traverse(t: Tree)(using Context) = @@ -1177,6 +1417,11 @@ class CheckCaptures extends Recheck, SymTransformer: case _ => traverseChildren(t) + private val completed = new mutable.HashSet[Symbol] + + override def skipRecheck(sym: Symbol)(using Context): Boolean = + completed.contains(sym) + /** Check a ValDef or DefDef as an action performed in a completer. Since * these checks can appear out of order, we need to firsty create the correct * environment for checking the definition. 
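The `additionalChecks` override above rejects overrides that change the `@unbox` status of a parameter. A hedged sketch of source code this check is meant to flag, assuming the `@unbox` parameter annotation from `scala.caps` (names are hypothetical):

    import language.experimental.captureChecking
    import scala.caps.unbox

    trait Runner:
      def runAll(@unbox tasks: List[() => Unit]): Unit

    class SeqRunner extends Runner:
      // error: has a parameter tasks with different @unbox status than the
      // corresponding parameter in the overridden definition
      def runAll(tasks: List[() => Unit]): Unit = tasks.foreach(f => f())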
@@ -1197,7 +1442,8 @@ class CheckCaptures extends Recheck, SymTransformer: case None => Env(sym, EnvKind.Regular, localSet, restoreEnvFor(sym.owner)) curEnv = restoreEnvFor(sym.owner) capt.println(i"Complete $sym in ${curEnv.outersIterator.toList.map(_.owner)}") - recheckDef(tree, sym) + try recheckDef(tree, sym) + finally completed += sym finally curEnv = saved @@ -1215,8 +1461,9 @@ class CheckCaptures extends Recheck, SymTransformer: withCaptureSetsExplained: super.checkUnit(unit) checkOverrides.traverse(unit.tpdTree) - checkSelfTypes(unit.tpdTree) postCheck(unit.tpdTree) + checkSelfTypes(unit.tpdTree) + postCheckWF(unit.tpdTree) if ctx.settings.YccDebug.value then show(unit.tpdTree) // this does not print tree, but makes its variables visible for dependency printing @@ -1305,7 +1552,7 @@ class CheckCaptures extends Recheck, SymTransformer: case ref: TermParamRef if !allowed.contains(ref) && !seen.contains(ref) => seen += ref - if ref.underlying.isRef(defn.Caps_Cap) then + if ref.isMaxCapability then report.error(i"escaping local reference $ref", tree.srcPos) else val widened = ref.captureSetOfInfo @@ -1366,7 +1613,6 @@ class CheckCaptures extends Recheck, SymTransformer: check.traverse(tp) /** Perform the following kinds of checks - * - Check all explicitly written capturing types for well-formedness using `checkWellFormedPost`. * - Check that arguments of TypeApplys and AppliedTypes conform to their bounds. * - Heal ill-formed capture sets of type parameters. See `healTypeParam`. */ @@ -1394,10 +1640,8 @@ class CheckCaptures extends Recheck, SymTransformer: case _ => end check end checker - checker.traverse(unit)(using ctx.withOwner(defn.RootClass)) - for chk <- todoAtPostCheck do chk() - setup.postCheck() + checker.traverse(unit)(using ctx.withOwner(defn.RootClass)) if !ctx.reporter.errorsReported then // We dont report errors here if previous errors were reported, because other // errors often result in bad applied types, but flagging these bad types gives @@ -1409,5 +1653,15 @@ class CheckCaptures extends Recheck, SymTransformer: case tree: TypeTree => checkAppliedTypesIn(tree.withKnownType) case _ => traverseChildren(t) checkApplied.traverse(unit) + end postCheck + + /** Perform the following kinds of checks: + * - Check all explicitly written capturing types for well-formedness using `checkWellFormedPost`. + * - Check that publicly visible inferred types correspond to the type + * they have without capture checking. 
+ */ + def postCheckWF(unit: tpd.Tree)(using Context): Unit = + for chk <- todoAtPostCheck do chk() + setup.postCheck() end CaptureChecker end CheckCaptures diff --git a/compiler/src/dotty/tools/dotc/cc/Existential.scala b/compiler/src/dotty/tools/dotc/cc/Existential.scala new file mode 100644 index 000000000000..732510789e28 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/cc/Existential.scala @@ -0,0 +1,386 @@ +package dotty.tools +package dotc +package cc + +import core.* +import Types.*, Symbols.*, Contexts.*, Annotations.*, Flags.* +import CaptureSet.IdempotentCaptRefMap +import StdNames.nme +import ast.tpd.* +import Decorators.* +import typer.ErrorReporting.errorType +import Names.TermName +import NameKinds.ExistentialBinderName +import NameOps.isImpureFunction +import reporting.Message + +/** + +Handling existentials in CC: + + - We generally use existentials only in function and method result types + - All occurrences of an EX-bound variable appear co-variantly in the bound type + +In Setup: + + - Convert occurrences of `cap` in function results to existentials. Precise rules below. + - Conversions are done in two places: + + + As part of mapping from local types of parameters and results to infos of methods. + The local types just use `cap`, whereas the result type in the info uses EX-bound variables. + + When converting functions or methods appearing in explicitly declared types. + Here again, we only replace cap's in function results. + + - Conversion is done with a BiTypeMap in `Existential.mapCap`. + +In recheckApply and recheckTypeApply: + + - If an EX is toplevel in the result type, replace its bound variable + occurrences with `cap`. + +Level checking and avoidance: + + - Environments, capture refs, and capture set variables carry levels + + + levels start at 0 + + The level of a block or template statement sequence is one higher than the level of + its environment + + The level of a TermRef is the level of the environment where its symbol is defined. + + The level of a ThisType is the level of the statements of the class to which it belongs. + + The level of a TermParamRef is currently -1 (i.e. TermParamRefs are not yet checked using this system) + + The level of a capture set variable is the level of the environment where it is created. + + - Variables also carry info on whether they accept `cap` or not. Variables introduced under a box + don't, the others do. + + - Capture set variables do not accept elements of level higher than the variable's level + + - We use avoidance to heal such cases: If the level-incorrect ref appears + + covariantly: widen to underlying capture set, reject if that is cap and the variable does not allow it + + contravariantly: narrow to {} + + invariantly: reject with error + +In cv-computation (markFree): + + - Reach capabilities x* of a parameter x cannot appear in the capture set of + the owning method. They have to be widened to dcs(x), or, where this is not + possible, it's an error. + +In box adaptation: + + - Check that existential variables are not boxed or unboxed. + +Subtype rules: + + - new alphabet: existentially bound variables `a`. + - they can be stored in environments Gamma. 
+ - they are alpha-renamable, usual hygiene conditions apply + + Gamma |- EX a.T <: U + if Gamma, a |- T <: U + + Gamma |- T <: EX a.U + if there exists a capture set C consisting of capture refs and ex-bound variables + bound in Gamma such that Gamma |- T <: [a := C]U + +Representation: + + EX a.T[a] is represented as a dependent function type + + (a: Exists) => T[a] + + where Exists is defined in caps like this: + + sealed trait Exists extends Capability + + The defn.RefinedFunctionOf extractor will exclude existential types from + its results, so only normal refined functions match. + + Let `boundvar(ex)` be the TermParamRef defined by the existential type `ex`. + +Subtype checking algorithm, general scheme: + + Maintain two structures in TypeComparer: + + openExistentials: List[TermParamRef] + assocExistentials: Map[TermParamRef, List[TermParamRef]] + + `openExistentials` corresponds to the list of existential variables stored in the environment. + `assocExistentials` maps existential variables bound by existentials appearing on the right + to the value of `openExistentials` at the time when the existential on the right was dropped. + +Subtype checking algorithm, steps to add for tp1 <:< tp2: + + If tp1 is an existential EX a.tp1a: + + val saved = openExistentials + openExistentials = boundvar(tp1) :: openExistentials + try tp1a <:< tp2 + finally openExistentials = saved + + If tp2 is an existential EX a.tp2a: + + val saved = assocExistentials + assocExistentials = assocExistentials + (boundvar(tp2) -> openExistentials) + try tp1 <:< tp2a + finally assocExistentials = saved + + If tp2 is an existentially bound variable: + assocExistentials(tp2).isDefined + && (assocExistentials(tp2).contains(tp1) || tp1 is not existentially bound) + +Subtype checking algorithm, comparing two capture sets CS1 <:< CS2: + + We need to map the (possibly to-be-added) existentials in CS1 to existentials + in CS2 so that we can compare them. We use `assocExistentials` for that: + To map an EX-variable V1 in CS1, pick the last (i.e. outermost, leading to the smallest + type) EX-variable in `assocExistentials` that has V1 in its possible instances. + To go the other way (and thereby produce a BiTypeMap), map an EX-variable + V2 in CS2 to the first (i.e. innermost) EX-variable it can be instantiated to. + If either direction is not defined, we choose a special "bad-existential" value + that represents an out-of-scope existential. This leads to failure + of the comparison. + +Existential source syntax: + + Existential types are usually not written in source, since we still allow the `^` + syntax that can express most of them more concisely (see below for translation rules). + But we should also allow writing existential types explicitly, even if it ends up mainly + for debugging. To express them, we use the encoding with `Exists`, so a typical + expression of an existential would be + + (x: Exists) => A ->{x} B + + Existential types can appear only at the top level of the result type + of a function or method. + +Restrictions on Existential Types: (to be implemented if we want to +keep the source syntax for users). + + - An existential capture ref must be the only member of its set. This is + intended to model the idea that existential variables effectively range + over capture sets, not capture references. But so far our calculus + and implementation does not yet accommodate first-class capture sets.
+ - Existential capture refs must appear co-variantly in their bound type + + So the following would all be illegal: + + EX x.C^{x, io} // error: multiple members + EX x.() => EX y.C^{x, y} // error: multiple members + EX x.C^{x} ->{x} D // error: contra-variant occurrence + EX x.Set[C^{x}] // error: invariant occurrence + +Expansion of ^: + + We expand all occurrences of `cap` in the result types of functions or methods + to existentially quantified types. Nested scopes are expanded before outer ones. + + The expansion algorithm is then defined as follows: + + 1. In a result type, replace every occurrence of ^ with a fresh existentially + bound variable and quantify over all variables such introduced. + + 2. After this step, type aliases are expanded. If aliases have aliases in arguments, + the outer alias is expanded before the aliases in the arguments. Each time an alias + is expanded that reveals a `^`, apply step (1). + + 3. The algorithm ends when no more alieases remain to be expanded. + + Examples: + + - `A => B` is an alias type that expands to `(A -> B)^`, therefore + `() -> A => B` expands to `() -> EX c. A ->{c} B`. + + - `() => Iterator[A => B]` expands to `() => EX c. Iterator[A ->{c} B]` + + - `A -> B^` expands to `A -> EX c.B^{c}`. + + - If we define `type Fun[T] = A -> T`, then `() -> Fun[B^]` expands to `() -> EX c.Fun[B^{c}]`, which + dealiases to `() -> EX c.A -> B^{c}`. + + - If we define + + type F = A -> Fun[B^] + + then the type alias expands to + + type F = A -> EX c.A -> B^{c} +*/ +object Existential: + + type Carrier = RefinedType + + def unapply(tp: Carrier)(using Context): Option[(TermParamRef, Type)] = + tp.refinedInfo match + case mt: MethodType + if isExistentialMethod(mt) && defn.isNonRefinedFunction(tp.parent) => + Some(mt.paramRefs.head, mt.resultType) + case _ => None + + /** Create method type in the refinement of an existential type */ + private def exMethodType(using Context)( + mk: TermParamRef => Type, + boundName: TermName = ExistentialBinderName.fresh() + ): MethodType = + MethodType(boundName :: Nil)( + mt => defn.Caps_Exists.typeRef :: Nil, + mt => mk(mt.paramRefs.head)) + + /** Create existential */ + def apply(mk: TermParamRef => Type)(using Context): Type = + exMethodType(mk).toFunctionType(alwaysDependent = true) + + /** Create existential if bound variable appears in result of `mk` */ + def wrap(mk: TermParamRef => Type)(using Context): Type = + val mt = exMethodType(mk) + if mt.isResultDependent then mt.toFunctionType() else mt.resType + + extension (tp: Carrier) + def derivedExistentialType(core: Type)(using Context): Type = tp match + case Existential(boundVar, unpacked) => + if core eq unpacked then tp + else apply(bv => core.substParam(boundVar, bv)) + case _ => + core + + /** Map top-level existentials to `cap`. Do the same for existentials + * in function results if all preceding arguments are known to be always pure. 
+ */ + def toCap(tp: Type)(using Context): Type = tp.dealiasKeepAnnots match + case Existential(boundVar, unpacked) => + val transformed = unpacked.substParam(boundVar, defn.captureRoot.termRef) + transformed match + case FunctionOrMethod(args, res @ Existential(_, _)) + if args.forall(_.isAlwaysPure) => + transformed.derivedFunctionOrMethod(args, toCap(res)) + case _ => + transformed + case tp1 @ CapturingType(parent, refs) => + tp1.derivedCapturingType(toCap(parent), refs) + case tp1 @ AnnotatedType(parent, ann) => + tp1.derivedAnnotatedType(toCap(parent), ann) + case _ => tp + + /** Map existentials at the top-level and in all nested result types to `cap` + */ + def toCapDeeply(tp: Type)(using Context): Type = tp.dealiasKeepAnnots match + case Existential(boundVar, unpacked) => + toCapDeeply(unpacked.substParam(boundVar, defn.captureRoot.termRef)) + case tp1 @ FunctionOrMethod(args, res) => + val tp2 = tp1.derivedFunctionOrMethod(args, toCapDeeply(res)) + if tp2 ne tp1 then tp2 else tp + case tp1 @ CapturingType(parent, refs) => + tp1.derivedCapturingType(toCapDeeply(parent), refs) + case tp1 @ AnnotatedType(parent, ann) => + tp1.derivedAnnotatedType(toCapDeeply(parent), ann) + case _ => tp + + /** Knowing that `tp` is a function type, is an alias to a function other + * than `=>`? + */ + private def isAliasFun(tp: Type)(using Context) = tp match + case AppliedType(tycon, _) => !defn.isFunctionSymbol(tycon.typeSymbol) + case _ => false + + /** Replace all occurrences of `cap` in parts of this type by an existentially bound + * variable. If there are such occurrences, or there might be in the future due to embedded + * capture set variables, create an existential with the variable wrapping the type. + * Stop at function or method types since these have been mapped before. + */ + def mapCap(tp: Type, fail: Message => Unit)(using Context): Type = + var needsWrap = false + + abstract class CapMap extends BiTypeMap: + override def mapOver(t: Type): Type = t match + case t @ FunctionOrMethod(args, res) if variance > 0 && !isAliasFun(t) => + t // `t` should be mapped in this case by a different call to `mapCap`. 
+ case Existential(_, _) => + t + case t: (LazyRef | TypeVar) => + mapConserveSuper(t) + case _ => + super.mapOver(t) + + class Wrap(boundVar: TermParamRef) extends CapMap: + def apply(t: Type) = t match + case t: TermRef if t.isRootCapability => + if variance > 0 then + needsWrap = true + boundVar + else + if variance == 0 then + fail(em"""$tp captures the root capability `cap` in invariant position""") + // we accept variance < 0, and leave the cap as it is + super.mapOver(t) + case t @ CapturingType(parent, refs: CaptureSet.Var) => + if variance > 0 then needsWrap = true + super.mapOver(t) + case defn.FunctionNOf(args, res, contextual) if t.typeSymbol.name.isImpureFunction => + if variance > 0 then + needsWrap = true + super.mapOver: + defn.FunctionNOf(args, res, contextual).capturing(boundVar.singletonCaptureSet) + else mapOver(t) + case _ => + mapOver(t) + //.showing(i"mapcap $t = $result") + + lazy val inverse = new BiTypeMap: + def apply(t: Type) = t match + case t: TermParamRef if t eq boundVar => defn.captureRoot.termRef + case _ => mapOver(t) + def inverse = Wrap.this + override def toString = "Wrap.inverse" + end Wrap + + if ccConfig.useExistentials then + val wrapped = apply(Wrap(_)(tp)) + if needsWrap then wrapped else tp + else tp + end mapCap + + def mapCapInResults(fail: Message => Unit)(using Context): TypeMap = new: + + def mapFunOrMethod(tp: Type, args: List[Type], res: Type): Type = + val args1 = atVariance(-variance)(args.map(this)) + val res1 = res match + case res: MethodType => mapFunOrMethod(res, res.paramInfos, res.resType) + case res: PolyType => mapFunOrMethod(res, Nil, res.resType) // TODO: Also map bounds of PolyTypes + case _ => mapCap(apply(res), fail) + //.showing(i"map cap res $res / ${apply(res)} of $tp = $result") + tp.derivedFunctionOrMethod(args1, res1) + + def apply(t: Type): Type = t match + case FunctionOrMethod(args, res) if variance > 0 && !isAliasFun(t) => + mapFunOrMethod(t, args, res) + case CapturingType(parent, refs) => + t.derivedCapturingType(this(parent), refs) + case Existential(_, _) => + t + case t: (LazyRef | TypeVar) => + mapConserveSuper(t) + case _ => + mapOver(t) + end mapCapInResults + + /** Is `mt` a method represnting an existential type when used in a refinement? */ + def isExistentialMethod(mt: TermLambda)(using Context): Boolean = mt.paramInfos match + case (info: TypeRef) :: rest => info.symbol == defn.Caps_Exists && rest.isEmpty + case _ => false + + /** Is `ref` this an existentially bound variable? */ + def isExistentialVar(ref: CaptureRef)(using Context) = ref match + case ref: TermParamRef => isExistentialMethod(ref.binder) + case _ => false + + /** An value signalling an out-of-scope existential that should + * lead to a compare failure. 
+ */ + def badExistential(using Context): TermParamRef = + exMethodType(identity, nme.OOS_EXISTENTIAL).paramRefs.head + + def isBadExistential(ref: CaptureRef) = ref match + case ref: TermParamRef => ref.paramName == nme.OOS_EXISTENTIAL + case _ => false + +end Existential diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index e6953dbf67b7..3147a0f7bd47 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -14,8 +14,10 @@ import transform.{PreRecheck, Recheck}, Recheck.* import CaptureSet.{IdentityCaptRefMap, IdempotentCaptRefMap} import Synthetics.isExcluded import util.Property +import reporting.Message import printing.{Printer, Texts}, Texts.{Text, Str} import collection.mutable +import CCState.* /** Operations accessed from CheckCaptures */ trait SetupAPI: @@ -23,10 +25,12 @@ trait SetupAPI: def setupUnit(tree: Tree, recheckDef: DefRecheck)(using Context): Unit def isPreCC(sym: Symbol)(using Context): Boolean def postCheck()(using Context): Unit - def isCapabilityClassRef(tp: Type)(using Context): Boolean object Setup: + val name: String = "ccSetup" + val description: String = "prepare compilation unit for capture checking" + /** Recognizer for `res $throws exc`, returning `(res, exc)` in case of success */ object throwsAlias: def unapply(tp: Type)(using Context): Option[(Type, Type)] = tp match @@ -52,15 +56,33 @@ import Setup.* class Setup extends PreRecheck, SymTransformer, SetupAPI: thisPhase => + override def phaseName: String = Setup.name + + override def description: String = Setup.description + override def isRunnable(using Context) = super.isRunnable && Feature.ccEnabledSomewhere private val toBeUpdated = new mutable.HashSet[Symbol] private def newFlagsFor(symd: SymDenotation)(using Context): FlagSet = - if symd.isAllOf(PrivateParamAccessor) && symd.owner.is(CaptureChecked) && !symd.hasAnnotation(defn.ConstructorOnlyAnnot) - then symd.flags &~ Private | Recheck.ResetPrivate + + object containsCovarRetains extends TypeAccumulator[Boolean]: + def apply(x: Boolean, tp: Type): Boolean = + if x then true + else if tp.derivesFromCapability && variance >= 0 then true + else tp match + case AnnotatedType(_, ann) if ann.symbol.isRetains && variance >= 0 => true + case _ => foldOver(x, tp) + def apply(tp: Type): Boolean = apply(false, tp) + + if symd.symbol.isRefiningParamAccessor + && symd.is(Private) + && symd.owner.is(CaptureChecked) + && containsCovarRetains(symd.symbol.originDenotation.info) + then symd.flags &~ Private else symd.flags + end newFlagsFor def isPreCC(sym: Symbol)(using Context): Boolean = sym.isTerm && sym.maybeOwner.isClass @@ -68,31 +90,6 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: && !sym.owner.is(CaptureChecked) && !defn.isFunctionSymbol(sym.owner) - private val capabilityClassMap = new util.HashMap[Symbol, Boolean] - - /** Check if the class is capability, which means: - * 1. the class has a capability annotation, - * 2. or at least one of its parent type has universal capability. 
- */ - def isCapabilityClassRef(tp: Type)(using Context): Boolean = tp.dealiasKeepAnnots match - case _: TypeRef | _: AppliedType => - val sym = tp.classSymbol - def checkSym: Boolean = - sym.hasAnnotation(defn.CapabilityAnnot) - || sym.info.parents.exists(hasUniversalCapability) - sym.isClass && capabilityClassMap.getOrElseUpdate(sym, checkSym) - case _ => false - - private def hasUniversalCapability(tp: Type)(using Context): Boolean = tp.dealiasKeepAnnots match - case CapturingType(parent, refs) => - refs.isUniversal || hasUniversalCapability(parent) - case AnnotatedType(parent, ann) => - if ann.symbol.isRetains then - try ann.tree.toCaptureSet.isUniversal || hasUniversalCapability(parent) - catch case ex: IllegalCaptureRef => false - else hasUniversalCapability(parent) - case tp => isCapabilityClassRef(tp) - private def fluidify(using Context) = new TypeMap with IdempotentCaptRefMap: def apply(t: Type): Type = t match case t: MethodType => @@ -144,9 +141,10 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: private def box(tp: Type)(using Context): Type = def recur(tp: Type): Type = tp.dealiasKeepAnnotsAndOpaques match case tp @ CapturingType(parent, refs) => - if tp.isBoxed then tp else tp.boxed + if tp.isBoxed || parent.derivesFrom(defn.Caps_CapSet) then tp + else tp.boxed case tp @ AnnotatedType(parent, ann) => - if ann.symbol.isRetains + if ann.symbol.isRetains && !parent.derivesFrom(defn.Caps_CapSet) then CapturingType(parent, ann.tree.toCaptureSet, boxed = true) else tp.derivedAnnotatedType(box(parent), ann) case tp1 @ AppliedType(tycon, args) if defn.isNonRefinedFunction(tp1) => @@ -196,11 +194,14 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: case cls: ClassSymbol if !defn.isFunctionClass(cls) && cls.is(CaptureChecked) => cls.paramGetters.foldLeft(tp) { (core, getter) => - if atPhase(thisPhase.next)(getter.termRef.isTracked) then + if atPhase(thisPhase.next)(getter.hasTrackedParts) + && getter.isRefiningParamAccessor + && !getter.is(Tracked) + then val getterType = mapInferred(refine = false)(tp.memberInfo(getter)).strippedDealias RefinedType(core, getter.name, - CapturingType(getterType, CaptureSet.RefiningVar(ctx.owner))) + CapturingType(getterType, new CaptureSet.RefiningVar(ctx.owner))) .showing(i"add capture refinement $tp --> $result", capt) else core @@ -248,6 +249,9 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: val rinfo1 = apply(rinfo) if rinfo1 ne rinfo then rinfo1.toFunctionType(alwaysDependent = true) else tp + case Existential(_, unpacked) => + // drop the existential, the bound variables will be replaced by capture set variables + apply(unpacked) case tp: MethodType => tp.derivedLambdaType( paramInfos = mapNested(tp.paramInfos), @@ -263,11 +267,18 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: end apply end mapInferred - mapInferred(refine = true)(tp) + try + val tp1 = mapInferred(refine = true)(tp) + val tp2 = Existential.mapCapInResults(_ => assert(false))(tp1) + if tp2 ne tp then capt.println(i"expanded implicit in ${ctx.owner}: $tp --> $tp1 --> $tp2") + tp2 + catch case ex: AssertionError => + println(i"error while mapping inferred $tp") + throw ex end transformInferredType private def transformExplicitType(tp: Type, tptToCheck: Option[Tree] = None)(using Context): Type = - val expandAliases = new DeepTypeMap: + val toCapturing = new DeepTypeMap: override def toString = "expand aliases" /** Expand $throws aliases. 
This is hard-coded here since $throws aliases in stdlib @@ -295,44 +306,49 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: CapturingType(fntpe, cs, boxed = false) else fntpe - private def recur(t: Type): Type = normalizeCaptures(mapOver(t)) + def stripImpliedCaptureSet(tp: Type): Type = tp match + case tp @ CapturingType(parent, refs) + if (refs eq defn.universalCSImpliedByCapability) && !tp.isBoxedCapturing => + parent + case tp @ CapturingType(parent, refs) => tp + case _ => tp def apply(t: Type) = t match case t @ CapturingType(parent, refs) => - t.derivedCapturingType(this(parent), refs) + t.derivedCapturingType(stripImpliedCaptureSet(this(parent)), refs) case t @ AnnotatedType(parent, ann) => val parent1 = this(parent) if ann.symbol.isRetains then + val parent2 = stripImpliedCaptureSet(parent1) for tpt <- tptToCheck do - checkWellformedLater(parent1, ann.tree, tpt) - CapturingType(parent1, ann.tree.toCaptureSet) + checkWellformedLater(parent2, ann.tree, tpt) + CapturingType(parent2, ann.tree.toCaptureSet) else t.derivedAnnotatedType(parent1, ann) case throwsAlias(res, exc) => this(expandThrowsAlias(res, exc, Nil)) - case t: LazyRef => - val t1 = this(t.ref) - if t1 ne t.ref then t1 else t - case t: TypeVar => - this(t.underlying) case t => // Map references to capability classes C to C^ - if isCapabilityClassRef(t) - then CapturingType(t, defn.expandedUniversalSet, boxed = false) - else recur(t) - end expandAliases - - val tp1 = expandAliases(tp) // TODO: Do we still need to follow aliases? - if tp1 ne tp then capt.println(i"expanded in ${ctx.owner}: $tp --> $tp1") - tp1 + if t.derivesFromCapability && !t.isSingleton && t.typeSymbol != defn.Caps_Exists + then CapturingType(t, defn.universalCSImpliedByCapability, boxed = false) + else normalizeCaptures(mapOver(t)) + end toCapturing + + def fail(msg: Message) = + for tree <- tptToCheck do report.error(msg, tree.srcPos) + + val tp1 = toCapturing(tp) + val tp2 = Existential.mapCapInResults(fail)(tp1) + if tp2 ne tp then capt.println(i"expanded explicit in ${ctx.owner}: $tp --> $tp1 --> $tp2") + tp2 end transformExplicitType /** Transform type of type tree, and remember the transformed type as the type the tree */ - private def transformTT(tree: TypeTree, boxed: Boolean, exact: Boolean)(using Context): Unit = + private def transformTT(tree: TypeTree, boxed: Boolean)(using Context): Unit = if !tree.hasRememberedType then val transformed = - if tree.isInstanceOf[InferredTypeTree] && !exact + if tree.isInferred then transformInferredType(tree.tpe) else transformExplicitType(tree.tpe, tptToCheck = Some(tree)) tree.rememberType(if boxed then box(transformed) else transformed) @@ -381,7 +397,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: sym.updateInfo(thisPhase, info, newFlagsFor(sym)) toBeUpdated -= sym sym.namedType match - case ref: CaptureRef => ref.invalidateCaches() // TODO: needed? + case ref: CaptureRef if ref.isTrackableRef => ref.invalidateCaches() // TODO: needed? 
case _ => extension (sym: Symbol) def nextInfo(using Context): Type = @@ -392,10 +408,11 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: def transformResultType(tpt: TypeTree, sym: Symbol)(using Context): Unit = try transformTT(tpt, - boxed = !ccConfig.allowUniversalInBoxed && sym.is(Mutable, butNot = Method), - // types of mutable variables are boxed in pre 3.3 codee - exact = sym.allOverriddenSymbols.hasNext, - // types of symbols that override a parent don't get a capture set TODO drop + boxed = + sym.is(Mutable, butNot = Method) + && !ccConfig.useSealed + && !sym.hasAnnotation(defn.UncheckedCapturesAnnot), + // types of mutable variables are boxed in pre 3.3 code ) catch case ex: IllegalCaptureRef => capt.println(i"fail while transforming result type $tpt of $sym") @@ -403,7 +420,8 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: val addDescription = new TypeTraverser: def traverse(tp: Type) = tp match case tp @ CapturingType(parent, refs) => - if !refs.isConst then refs.withDescription(i"of $sym") + if !refs.isConst && refs.description.isEmpty then + refs.withDescription(i"of $sym") traverse(parent) case _ => traverseChildren(tp) @@ -416,14 +434,17 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: if isExcluded(meth) then return - inContext(ctx.withOwner(meth)): - paramss.foreach(traverse) - transformResultType(tpt, meth) - traverse(tree.rhs) - //println(i"TYPE of ${tree.symbol.showLocated} = ${tpt.knownType}") + meth.recordLevel() + inNestedLevel: + inContext(ctx.withOwner(meth)): + paramss.foreach(traverse) + transformResultType(tpt, meth) + traverse(tree.rhs) + //println(i"TYPE of ${tree.symbol.showLocated} = ${tpt.knownType}") case tree @ ValDef(_, tpt: TypeTree, _) => val sym = tree.symbol + sym.recordLevel() val defCtx = if sym.isOneOf(TermParamOrAccessor) then ctx else ctx.withOwner(sym) inContext(defCtx): transformResultType(tpt, sym) @@ -432,21 +453,24 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: case tree @ TypeApply(fn, args) => traverse(fn) - fn match - case Select(qual, nme.asInstanceOf_) => - // No need to box type arguments of an asInstanceOf call. See #20224. 
- case _ => - for case arg: TypeTree <- args do - transformTT(arg, boxed = true, exact = false) // type arguments in type applications are boxed + if !defn.isTypeTestOrCast(fn.symbol) then + for case arg: TypeTree <- args do + transformTT(arg, boxed = true) // type arguments in type applications are boxed case tree: TypeDef if tree.symbol.isClass => - inContext(ctx.withOwner(tree.symbol)): - traverseChildren(tree) + val sym = tree.symbol + sym.recordLevel() + inNestedLevelUnless(sym.is(Module)): + inContext(ctx.withOwner(sym)) + traverseChildren(tree) case tree @ SeqLiteral(elems, tpt: TypeTree) => traverse(elems) tpt.rememberType(box(transformInferredType(tpt.tpe))) + case tree: Block => + inNestedLevel(traverseChildren(tree)) + case _ => traverseChildren(tree) postProcess(tree) @@ -454,7 +478,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: def postProcess(tree: Tree)(using Context): Unit = tree match case tree: TypeTree => - transformTT(tree, boxed = false, exact = false) + transformTT(tree, boxed = false) case tree: ValOrDefDef => val sym = tree.symbol @@ -477,11 +501,14 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: else tree.tpt.knownType def paramSignatureChanges = tree.match - case tree: DefDef => tree.paramss.nestedExists: - case param: ValDef => param.tpt.hasRememberedType - case param: TypeDef => param.rhs.hasRememberedType + case tree: DefDef => + tree.paramss.nestedExists: + case param: ValDef => param.tpt.hasRememberedType + case param: TypeDef => param.rhs.hasRememberedType case _ => false + // A symbol's signature changes if some of its parameter types or its result type + // have a new type installed here (meaning hasRememberedType is true) def signatureChanges = tree.tpt.hasRememberedType && !sym.isConstructor || paramSignatureChanges @@ -498,6 +525,11 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: info match case mt: MethodOrPoly => val psyms = psymss.head + // TODO: the substitution does not work for param-dependent method types. + // For example, `(x: T, y: x.f.type) => Unit`. In this case, when we + // substitute `x.f.type`, `x` becomes a `TermParamRef`. But the new method + // type is still under initialization and `paramInfos` is still `null`, + // so the new `NamedType` will not have a denoation. 
mt.companion(mt.paramNames)( mt1 => if !paramSignatureChanges && !mt.isParamDependent && prevLambdas.isEmpty then @@ -516,7 +548,9 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: else SubstParams(prevPsymss, prevLambdas)(resType) if sym.exists && signatureChanges then - val newInfo = integrateRT(sym.info, sym.paramSymss, localReturnType, Nil, Nil) + val newInfo = + Existential.mapCapInResults(report.error(_, tree.srcPos)): + integrateRT(sym.info, sym.paramSymss, localReturnType, Nil, Nil) .showing(i"update info $sym: ${sym.info} = $result", capt) if newInfo ne sym.info then val updatedInfo = @@ -545,36 +579,37 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: case tree: TypeDef => tree.symbol match case cls: ClassSymbol => - val cinfo @ ClassInfo(prefix, _, ps, decls, selfInfo) = cls.classInfo - def innerModule = cls.is(ModuleClass) && !cls.isStatic - val selfInfo1 = - if (selfInfo ne NoType) && !innerModule then - // if selfInfo is explicitly given then use that one, except if - // self info applies to non-static modules, these still need to be inferred - selfInfo - else if cls.isPureClass then - // is cls is known to be pure, nothing needs to be added to self type - selfInfo - else if !cls.isEffectivelySealed && !cls.baseClassHasExplicitSelfType then - // assume {cap} for completely unconstrained self types of publicly extensible classes - CapturingType(cinfo.selfType, CaptureSet.universal) - else - // Infer the self type for the rest, which is all classes without explicit - // self types (to which we also add nested module classes), provided they are - // neither pure, nor are publicily extensible with an unconstrained self type. - CapturingType(cinfo.selfType, CaptureSet.Var(cls)) - val ps1 = inContext(ctx.withOwner(cls)): - ps.mapConserve(transformExplicitType(_)) - if (selfInfo1 ne selfInfo) || (ps1 ne ps) then - val newInfo = ClassInfo(prefix, cls, ps1, decls, selfInfo1) - updateInfo(cls, newInfo) - capt.println(i"update class info of $cls with parents $ps selfinfo $selfInfo to $newInfo") - cls.thisType.asInstanceOf[ThisType].invalidateCaches() - if cls.is(ModuleClass) then - // if it's a module, the capture set of the module reference is the capture set of the self type - val modul = cls.sourceModule - updateInfo(modul, CapturingType(modul.info, selfInfo1.asInstanceOf[Type].captureSet)) - modul.termRef.invalidateCaches() + inNestedLevelUnless(cls.is(Module)): + val cinfo @ ClassInfo(prefix, _, ps, decls, selfInfo) = cls.classInfo + def innerModule = cls.is(ModuleClass) && !cls.isStatic + val selfInfo1 = + if (selfInfo ne NoType) && !innerModule then + // if selfInfo is explicitly given then use that one, except if + // self info applies to non-static modules, these still need to be inferred + selfInfo + else if cls.isPureClass then + // is cls is known to be pure, nothing needs to be added to self type + selfInfo + else if !cls.isEffectivelySealed && !cls.baseClassHasExplicitNonUniversalSelfType then + // assume {cap} for completely unconstrained self types of publicly extensible classes + CapturingType(cinfo.selfType, CaptureSet.universal) + else + // Infer the self type for the rest, which is all classes without explicit + // self types (to which we also add nested module classes), provided they are + // neither pure, nor are publicily extensible with an unconstrained self type. 
+ CapturingType(cinfo.selfType, CaptureSet.Var(cls, level = currentLevel)) + val ps1 = inContext(ctx.withOwner(cls)): + ps.mapConserve(transformExplicitType(_)) + if (selfInfo1 ne selfInfo) || (ps1 ne ps) then + val newInfo = ClassInfo(prefix, cls, ps1, decls, selfInfo1) + updateInfo(cls, newInfo) + capt.println(i"update class info of $cls with parents $ps selfinfo $selfInfo to $newInfo") + cls.thisType.asInstanceOf[ThisType].invalidateCaches() + if cls.is(ModuleClass) then + // if it's a module, the capture set of the module reference is the capture set of the self type + val modul = cls.sourceModule + updateInfo(modul, CapturingType(modul.info, selfInfo1.asInstanceOf[Type].captureSet)) + modul.termRef.invalidateCaches() case _ => case _ => end postProcess @@ -589,10 +624,10 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: !refs.isEmpty case tp: (TypeRef | AppliedType) => val sym = tp.typeSymbol - if sym.isClass then - !sym.isPureClass - else - sym != defn.Caps_Cap && instanceCanBeImpure(tp.superType) + if sym.isClass + then !sym.isPureClass + else !tp.derivesFrom(defn.Caps_CapSet) // CapSet arguments don't get other capture set variables added + && instanceCanBeImpure(tp.superType) case tp: (RefinedOrRecType | MatchType) => instanceCanBeImpure(tp.underlying) case tp: AndType => @@ -686,11 +721,11 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: /** Add a capture set variable to `tp` if necessary, or maybe pull out * an embedded capture set variable from a part of `tp`. */ - def addVar(tp: Type, owner: Symbol)(using Context): Type = + private def addVar(tp: Type, owner: Symbol)(using Context): Type = decorate(tp, addedSet = _.dealias.match - case CapturingType(_, refs) => CaptureSet.Var(owner, refs.elems) - case _ => CaptureSet.Var(owner)) + case CapturingType(_, refs) => CaptureSet.Var(owner, refs.elems, level = currentLevel) + case _ => CaptureSet.Var(owner, level = currentLevel)) def setupUnit(tree: Tree, recheckDef: DefRecheck)(using Context): Unit = setupTraverser(recheckDef).traverse(tree)(using ctx.withPhase(thisPhase)) @@ -712,32 +747,34 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: var retained = ann.retainedElems.toArray for i <- 0 until retained.length do val refTree = retained(i) - val ref = refTree.toCaptureRef - - def pos = - if refTree.span.exists then refTree.srcPos - else if ann.span.exists then ann.srcPos - else tpt.srcPos - - def check(others: CaptureSet, dom: Type | CaptureSet): Unit = - if others.accountsFor(ref) then - report.warning(em"redundant capture: $dom already accounts for $ref", pos) - - if ref.captureSetOfInfo.elems.isEmpty then - report.error(em"$ref cannot be tracked since its capture set is empty", pos) - if parent.captureSet ne defn.expandedUniversalSet then + for ref <- refTree.toCaptureRefs do + def pos = + if refTree.span.exists then refTree.srcPos + else if ann.span.exists then ann.srcPos + else tpt.srcPos + + def check(others: CaptureSet, dom: Type | CaptureSet): Unit = + if others.accountsFor(ref) then + report.warning(em"redundant capture: $dom already accounts for $ref", pos) + + if ref.captureSetOfInfo.elems.isEmpty && !ref.derivesFrom(defn.Caps_Capability) then + report.error(em"$ref cannot be tracked since its capture set is empty", pos) check(parent.captureSet, parent) - val others = - for j <- 0 until retained.length if j != i yield retained(j).toCaptureRef - val remaining = CaptureSet(others*) - check(remaining, remaining) + val others = + for + j <- 0 until retained.length if j != i + r <- 
retained(j).toCaptureRefs + yield r + val remaining = CaptureSet(others*) + check(remaining, remaining) + end for end for end checkWellformedPost /** Check well formed at post check time */ private def checkWellformedLater(parent: Type, ann: Tree, tpt: Tree)(using Context): Unit = - if !tpt.span.isZeroExtent then + if !tpt.span.isZeroExtent && enclosingInlineds.isEmpty then todoAtPostCheck += (ctx1 => checkWellformedPost(parent, ann, tpt)(using ctx1.withOwner(ctx.owner))) diff --git a/compiler/src/dotty/tools/dotc/config/CliCommand.scala b/compiler/src/dotty/tools/dotc/config/CliCommand.scala index 5ac6b772df95..b0046ee49cd1 100644 --- a/compiler/src/dotty/tools/dotc/config/CliCommand.scala +++ b/compiler/src/dotty/tools/dotc/config/CliCommand.scala @@ -53,7 +53,7 @@ trait CliCommand: end distill /** Creates a help message for a subset of options based on cond */ - protected def availableOptionsMsg(p: Setting[?] => Boolean)(using settings: ConcreteSettings)(using SettingsState): String = + protected def availableOptionsMsg(p: Setting[?] => Boolean, showArgFileMsg: Boolean = true)(using settings: ConcreteSettings)(using SettingsState): String = // result is (Option Name, descrption\ndefault: value\nchoices: x, y, z def help(s: Setting[?]): (String, String) = // For now, skip the default values that do not make sense for the end user, such as 'false' for the version command. @@ -68,7 +68,10 @@ trait CliCommand: val ss = settings.allSettings.filter(p).toList.sortBy(_.name) val formatter = Columnator("", "", maxField = 30) val fresh = ContextBase().initialCtx.fresh.setSettings(summon[SettingsState]) - formatter(List(ss.map(help) :+ ("@", "A text file containing compiler arguments (options and source files).")))(using fresh) + var msg = ss.map(help) + if showArgFileMsg then + msg = msg :+ ("@", "A text file containing compiler arguments (options and source files).") + formatter(List(msg))(using fresh) end availableOptionsMsg protected def shortUsage: String = s"Usage: $cmdName " diff --git a/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala b/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala index 587f94dad7b3..e90bbcc36878 100644 --- a/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala +++ b/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala @@ -9,6 +9,7 @@ abstract class CompilerCommand extends CliCommand: final def helpMsg(using settings: ConcreteSettings)(using SettingsState, Context): String = settings.allSettings.find(isHelping) match + case Some(s @ settings.language) => availableOptionsMsg(_ == s, showArgFileMsg = false) case Some(s) => s.description case _ => if (settings.help.value) usageMessage diff --git a/compiler/src/dotty/tools/dotc/config/Config.scala b/compiler/src/dotty/tools/dotc/config/Config.scala index ee8ed4b215d7..e8a234ff821f 100644 --- a/compiler/src/dotty/tools/dotc/config/Config.scala +++ b/compiler/src/dotty/tools/dotc/config/Config.scala @@ -229,7 +229,7 @@ object Config { inline val reuseSymDenotations = true /** If `checkLevelsOnConstraints` is true, check levels of type variables - * and create fresh ones as needed when bounds are first entered intot he constraint. + * and create fresh ones as needed when bounds are first entered into the constraint. * If `checkLevelsOnInstantiation` is true, allow level-incorrect constraints but * fix levels on type variable instantiation. 
*/ diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index 0d551094da4d..8b9a64924ace 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -11,6 +11,7 @@ import SourceVersion.* import reporting.Message import NameKinds.QualifiedName import Annotations.ExperimentalAnnotation +import Settings.Setting.ChoiceWithHelp object Feature: @@ -36,12 +37,51 @@ object Feature: val namedTuples = experimental("namedTuples") val modularity = experimental("modularity") val betterMatchTypeExtractors = experimental("betterMatchTypeExtractors") + val quotedPatternsWithPolymorphicFunctions = experimental("quotedPatternsWithPolymorphicFunctions") + val betterFors = experimental("betterFors") def experimentalAutoEnableFeatures(using Context): List[TermName] = defn.languageExperimentalFeatures .map(sym => experimental(sym.name)) .filterNot(_ == captureChecking) // TODO is this correct? + val values = List( + (nme.help, "Display all available features"), + (nme.noAutoTupling, "Disable automatic tupling"), + (nme.dynamics, "Allow direct or indirect subclasses of scala.Dynamic"), + (nme.unsafeNulls, "Enable unsafe nulls for explicit nulls"), + (nme.postfixOps, "Allow postfix operators (not recommended)"), + (nme.strictEquality, "Enable strict equality (disable canEqualAny)"), + (nme.implicitConversions, "Allow implicit conversions without warnings"), + (nme.adhocExtensions, "Allow ad-hoc extension methods"), + (namedTypeArguments, "Allow named type arguments"), + (genericNumberLiterals, "Allow generic number literals"), + (scala2macros, "Allow Scala 2 macros"), + (dependent, "Allow dependent method types"), + (erasedDefinitions, "Allow erased definitions"), + (symbolLiterals, "Allow symbol literals"), + (fewerBraces, "Enable support for using indentation for arguments"), + (saferExceptions, "Enable safer exceptions"), + (clauseInterleaving, "Enable clause interleaving"), + (pureFunctions, "Enable pure functions for capture checking"), + (captureChecking, "Enable experimental capture checking"), + (into, "Allow into modifier on parameter types"), + (namedTuples, "Allow named tuples"), + (modularity, "Enable experimental modularity features"), + (betterMatchTypeExtractors, "Enable better match type extractors"), + (betterFors, "Enable improvements in `for` comprehensions") + ) + + // legacy language features from Scala 2 that are no longer supported. + val legacyFeatures = List( + "higherKinds", + "existentials", + "reflectiveCalls" + ) + + private def enabledLanguageFeaturesBySetting(using Context): List[String] = + ctx.settings.language.value.asInstanceOf + /** Is `feature` enabled by by a command-line setting? The enabling setting is * * -language:feature @@ -50,7 +90,7 @@ object Feature: * but subtracting the prefix `scala.language.` at the front. */ def enabledBySetting(feature: TermName)(using Context): Boolean = - ctx.base.settings.language.value.contains(feature.toString) + enabledLanguageFeaturesBySetting.contains(feature.toString) /** Is `feature` enabled by by an import? 
This is the case if the feature * is imported by a named import @@ -84,13 +124,17 @@ object Feature: def namedTypeArgsEnabled(using Context) = enabled(namedTypeArguments) - def clauseInterleavingEnabled(using Context) = enabled(clauseInterleaving) + def clauseInterleavingEnabled(using Context) = + sourceVersion.isAtLeast(`3.6`) || enabled(clauseInterleaving) + + def betterForsEnabled(using Context) = enabled(betterFors) def genericNumberLiteralsEnabled(using Context) = enabled(genericNumberLiterals) def scala2ExperimentalMacroEnabled(using Context) = enabled(scala2macros) - def betterMatchTypeExtractorsEnabled(using Context) = enabled(betterMatchTypeExtractors) + def quotedPatternsWithPolymorphicFunctionsEnabled(using Context) = + enabled(quotedPatternsWithPolymorphicFunctions) /** Is pureFunctions enabled for this compilation unit? */ def pureFunsEnabled(using Context) = diff --git a/compiler/src/dotty/tools/dotc/config/MigrationVersion.scala b/compiler/src/dotty/tools/dotc/config/MigrationVersion.scala index 4dd9d065395b..1d99caa789d3 100644 --- a/compiler/src/dotty/tools/dotc/config/MigrationVersion.scala +++ b/compiler/src/dotty/tools/dotc/config/MigrationVersion.scala @@ -6,46 +6,38 @@ import SourceVersion.* import Feature.* import core.Contexts.Context -class MigrationVersion( - val warnFrom: SourceVersion, - val errorFrom: SourceVersion): - require(warnFrom.ordinal <= errorFrom.ordinal) - - def needsPatch(using Context): Boolean = - sourceVersion.isMigrating && sourceVersion.isAtLeast(warnFrom) - - def patchFrom: SourceVersion = - warnFrom.prevMigrating - -object MigrationVersion: - - val Scala2to3 = MigrationVersion(`3.0`, `3.0`) - - val OverrideValParameter = MigrationVersion(`3.0`, future) - +enum MigrationVersion(val warnFrom: SourceVersion, val errorFrom: SourceVersion): + case Scala2to3 extends MigrationVersion(`3.0`, `3.0`) + case OverrideValParameter extends MigrationVersion(`3.0`, future) // we tighten for-comprehension without `case` to error in 3.4, // but we keep pat-defs as warnings for now ("@unchecked"), // until we propose an alternative way to assert exhaustivity to the typechecker. 
- val ForComprehensionPatternWithoutCase = MigrationVersion(`3.2`, `3.4`) - val ForComprehensionUncheckedPathDefs = MigrationVersion(`3.2`, future) - - val NonLocalReturns = MigrationVersion(`3.2`, future) - - val AscriptionAfterPattern = MigrationVersion(`3.3`, future) + case ForComprehensionPatternWithoutCase extends MigrationVersion(`3.2`, `3.4`) + case ForComprehensionUncheckedPathDefs extends MigrationVersion(`3.2`, future) + + case NonLocalReturns extends MigrationVersion(`3.2`, future) + case AscriptionAfterPattern extends MigrationVersion(`3.3`, future) + case ExplicitContextBoundArgument extends MigrationVersion(`3.4`, `3.5`) + case AlphanumericInfix extends MigrationVersion(`3.4`, future) + case RemoveThisQualifier extends MigrationVersion(`3.4`, future) + case UninitializedVars extends MigrationVersion(`3.4`, future) + case VarargSpliceAscription extends MigrationVersion(`3.4`, future) + case WildcardType extends MigrationVersion(`3.4`, future) + case WithOperator extends MigrationVersion(`3.4`, future) + case FunctionUnderscore extends MigrationVersion(`3.4`, future) + case NonNamedArgumentInJavaAnnotation extends MigrationVersion(`3.6`, `3.6`) + case AmbiguousNamedTupleSyntax extends MigrationVersion(`3.6`, future) + case ImportWildcard extends MigrationVersion(future, future) + case ImportRename extends MigrationVersion(future, future) + case ParameterEnclosedByParenthesis extends MigrationVersion(future, future) + case XmlLiteral extends MigrationVersion(future, future) + case GivenSyntax extends MigrationVersion(future, never) - val ExplicitContextBoundArgument = MigrationVersion(`3.4`, `3.5`) + require(warnFrom.ordinal <= errorFrom.ordinal) - val AlphanumericInfix = MigrationVersion(`3.4`, future) - val RemoveThisQualifier = MigrationVersion(`3.4`, future) - val UninitializedVars = MigrationVersion(`3.4`, future) - val VarargSpliceAscription = MigrationVersion(`3.4`, future) - val WildcardType = MigrationVersion(`3.4`, future) - val WithOperator = MigrationVersion(`3.4`, future) - val FunctionUnderscore = MigrationVersion(`3.4`, future) + def needsPatch(using Context): Boolean = + sourceVersion.isMigrating && sourceVersion.isAtLeast(warnFrom) - val ImportWildcard = MigrationVersion(future, future) - val ImportRename = MigrationVersion(future, future) - val ParameterEnclosedByParenthesis = MigrationVersion(future, future) - val XmlLiteral = MigrationVersion(future, future) + def patchFrom: SourceVersion = warnFrom.prevMigrating end MigrationVersion diff --git a/compiler/src/dotty/tools/dotc/config/PathResolver.scala b/compiler/src/dotty/tools/dotc/config/PathResolver.scala index 29e6e35855c8..f60727e6bba2 100644 --- a/compiler/src/dotty/tools/dotc/config/PathResolver.scala +++ b/compiler/src/dotty/tools/dotc/config/PathResolver.scala @@ -36,9 +36,16 @@ object PathResolver { /** Values found solely by inspecting environment or property variables. */ object Environment { - private def searchForBootClasspath = ( - systemProperties find (_._1 endsWith ".boot.class.path") map (_._2) getOrElse "" - ) + private def searchForBootClasspath = { + import scala.jdk.CollectionConverters.* + val props = System.getProperties + // This formulation should be immune to ConcurrentModificationExceptions when system properties + // we're unlucky enough to witness a partially published result of System.setProperty or direct + // mutation of the System property map. 
stringPropertyNames internally uses the Enumeration interface, + // rather than Iterator, and this disables the fail-fast ConcurrentModificationException. + val propNames = props.stringPropertyNames() + propNames.asScala collectFirst { case k if k endsWith ".boot.class.path" => props.getProperty(k) } getOrElse "" + } /** Environment variables which java pays attention to so it * seems we do as well. @@ -46,7 +53,8 @@ object PathResolver { def classPathEnv: String = envOrElse("CLASSPATH", "") def sourcePathEnv: String = envOrElse("SOURCEPATH", "") - def javaBootClassPath: String = propOrElse("sun.boot.class.path", searchForBootClasspath) + //using propOrNone/getOrElse instead of propOrElse so that searchForBootClasspath is lazy evaluated + def javaBootClassPath: String = propOrNone("sun.boot.class.path") getOrElse searchForBootClasspath def javaExtDirs: String = propOrEmpty("java.ext.dirs") def scalaHome: String = propOrEmpty("scala.home") diff --git a/compiler/src/dotty/tools/dotc/config/Properties.scala b/compiler/src/dotty/tools/dotc/config/Properties.scala index 2a362a707ade..41cd14955759 100644 --- a/compiler/src/dotty/tools/dotc/config/Properties.scala +++ b/compiler/src/dotty/tools/dotc/config/Properties.scala @@ -10,7 +10,7 @@ import java.io.IOException import java.util.jar.Attributes.{ Name => AttributeName } import java.nio.charset.StandardCharsets -/** Loads `library.properties` from the jar. */ +/** Loads `compiler.properties` from the jar. */ object Properties extends PropertiesTrait { protected def propCategory: String = "compiler" protected def pickJarBasedOn: Class[PropertiesTrait] = classOf[PropertiesTrait] diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 86b657ddf00d..6ef33d24f8be 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -114,7 +114,7 @@ trait CommonScalaSettings: val explainTypes: Setting[Boolean] = BooleanSetting(RootSetting, "explain-types", "Explain type errors in more detail (deprecated, use -explain instead).", aliases = List("--explain-types", "-explaintypes")) val explainCyclic: Setting[Boolean] = BooleanSetting(RootSetting, "explain-cyclic", "Explain cyclic reference errors in more detail.", aliases = List("--explain-cyclic")) val unchecked: Setting[Boolean] = BooleanSetting(RootSetting, "unchecked", "Enable additional warnings where generated code depends on assumptions.", initialValue = true, aliases = List("--unchecked")) - val language: Setting[List[String]] = MultiStringSetting(RootSetting, "language", "feature", "Enable one or more language features.", aliases = List("--language")) + val language: Setting[List[ChoiceWithHelp[String]]] = MultiChoiceHelpSetting(RootSetting, "language", "feature", "Enable one or more language features.", choices = ScalaSettingsProperties.supportedLanguageFeatures, legacyChoices = ScalaSettingsProperties.legacyLanguageFeatures, default = Nil, aliases = List("--language")) val experimental: Setting[Boolean] = BooleanSetting(RootSetting, "experimental", "Annotate all top-level definitions with @experimental. 
This enables the use of experimental features anywhere in the project.") /* Coverage settings */ @@ -158,49 +158,53 @@ private sealed trait WarningSettings: val Whelp: Setting[Boolean] = BooleanSetting(WarningSetting, "W", "Print a synopsis of warning options.") val XfatalWarnings: Setting[Boolean] = BooleanSetting(WarningSetting, "Werror", "Fail the compilation if there are any warnings.", aliases = List("-Xfatal-warnings")) - val WvalueDiscard: Setting[Boolean] = BooleanSetting(WarningSetting, "Wvalue-discard", "Warn when non-Unit expression results are unused.") - val WNonUnitStatement = BooleanSetting(WarningSetting, "Wnonunit-statement", "Warn when block statements are non-Unit expressions.") - val WenumCommentDiscard = BooleanSetting(WarningSetting, "Wenum-comment-discard", "Warn when a comment ambiguously assigned to multiple enum cases is discarded.") - val WimplausiblePatterns = BooleanSetting(WarningSetting, "Wimplausible-patterns", "Warn if comparison with a pattern value looks like it might always fail.") - val WunstableInlineAccessors = BooleanSetting(WarningSetting, "WunstableInlineAccessors", "Warn an inline methods has references to non-stable binary APIs.") - val Wunused: Setting[List[ChoiceWithHelp[String]]] = MultiChoiceHelpSetting( + val Wall: Setting[Boolean] = BooleanSetting(WarningSetting, "Wall", "Enable all warning settings.") + private val WvalueDiscard: Setting[Boolean] = BooleanSetting(WarningSetting, "Wvalue-discard", "Warn when non-Unit expression results are unused.") + private val WNonUnitStatement = BooleanSetting(WarningSetting, "Wnonunit-statement", "Warn when block statements are non-Unit expressions.") + private val WenumCommentDiscard = BooleanSetting(WarningSetting, "Wenum-comment-discard", "Warn when a comment ambiguously assigned to multiple enum cases is discarded.") + private val WimplausiblePatterns = BooleanSetting(WarningSetting, "Wimplausible-patterns", "Warn if comparison with a pattern value looks like it might always fail.") + private val WunstableInlineAccessors = BooleanSetting(WarningSetting, "WunstableInlineAccessors", "Warn an inline methods has references to non-stable binary APIs.") + private val Wunused: Setting[List[ChoiceWithHelp[String]]] = MultiChoiceHelpSetting( WarningSetting, name = "Wunused", helpArg = "warning", descr = "Enable or disable specific `unused` warnings", choices = List( ChoiceWithHelp("nowarn", ""), - ChoiceWithHelp("all",""), + ChoiceWithHelp("all", ""), ChoiceWithHelp( name = "imports", description = "Warn if an import selector is not referenced.\n" + "NOTE : overrided by -Wunused:strict-no-implicit-warn"), - ChoiceWithHelp("privates","Warn if a private member is unused"), - ChoiceWithHelp("locals","Warn if a local definition is unused"), - ChoiceWithHelp("explicits","Warn if an explicit parameter is unused"), - ChoiceWithHelp("implicits","Warn if an implicit parameter is unused"), - ChoiceWithHelp("params","Enable -Wunused:explicits,implicits"), - ChoiceWithHelp("linted","Enable -Wunused:imports,privates,locals,implicits"), - ChoiceWithHelp( - name = "strict-no-implicit-warn", - description = "Same as -Wunused:import, only for imports of explicit named members.\n" + - "NOTE : This overrides -Wunused:imports and NOT set by -Wunused:all" - ), - // ChoiceWithHelp("patvars","Warn if a variable bound in a pattern is unused"), - ChoiceWithHelp( - name = "unsafe-warn-patvars", - description = "(UNSAFE) Warn if a variable bound in a pattern is unused.\n" + - "This warning can generate false positive, as warning 
cannot be\n" + - "suppressed yet." - ) + ChoiceWithHelp("privates", "Warn if a private member is unused"), + ChoiceWithHelp("locals", "Warn if a local definition is unused"), + ChoiceWithHelp("explicits", "Warn if an explicit parameter is unused"), + ChoiceWithHelp("implicits", "Warn if an implicit parameter is unused"), + ChoiceWithHelp("params", "Enable -Wunused:explicits,implicits"), + ChoiceWithHelp("linted", "Enable -Wunused:imports,privates,locals,implicits"), + ChoiceWithHelp( + name = "strict-no-implicit-warn", + description = "Same as -Wunused:import, only for imports of explicit named members.\n" + + "NOTE : This overrides -Wunused:imports and NOT set by -Wunused:all" + ), + // ChoiceWithHelp("patvars","Warn if a variable bound in a pattern is unused"), + ChoiceWithHelp( + name = "unsafe-warn-patvars", + description = "(UNSAFE) Warn if a variable bound in a pattern is unused.\n" + + "This warning can generate false positive, as warning cannot be\n" + + "suppressed yet." + ) ), default = Nil ) object WunusedHas: def isChoiceSet(s: String)(using Context) = Wunused.value.pipe(us => us.contains(s)) - def allOr(s: String)(using Context) = Wunused.value.pipe(us => us.contains("all") || us.contains(s)) + def allOr(s: String)(using Context) = Wall.value || Wunused.value.pipe(us => us.contains("all") || us.contains(s)) def nowarn(using Context) = allOr("nowarn") + // Is any choice set for -Wunused? + def any(using Context): Boolean = Wall.value || Wunused.value.nonEmpty + // overrided by strict-no-implicit-warn def imports(using Context) = (allOr("imports") || allOr("linted")) && !(strictNoImplicitWarn) @@ -229,7 +233,7 @@ private sealed trait WarningSettings: "patterns", default = List(), descr = - s"""Configure compiler warnings. + raw"""Configure compiler warnings. |Syntax: -Wconf::,:,... |multiple are combined with &, i.e., &...& | @@ -250,6 +254,9 @@ private sealed trait WarningSettings: | - Source location: src=regex | The regex is evaluated against the full source path. | + | - Origin of warning: origin=regex + | The regex must match the full name (`package.Class.method`) of the deprecated entity. + | |In verbose warning mode the compiler prints matching filters for warnings. |Verbose mode can be enabled globally using `-Wconf:any:verbose`, or locally |using the @nowarn annotation (example: `@nowarn("v") def test = try 1`). 
@@ -269,6 +276,7 @@ private sealed trait WarningSettings: |Examples: | - change every warning into an error: -Wconf:any:error | - silence deprecations: -Wconf:cat=deprecation:s + | - silence a deprecation: -Wconf:origin=java\.lang\.Thread\.getId:s | - silence warnings in src_managed directory: -Wconf:src=src_managed/.*:s | |Note: on the command-line you might need to quote configurations containing `*` or `&` @@ -298,6 +306,16 @@ private sealed trait WarningSettings: val WcheckInit: Setting[Boolean] = BooleanSetting(WarningSetting, "Wsafe-init", "Ensure safe initialization of objects.") + object Whas: + def allOr(s: Setting[Boolean])(using Context): Boolean = + Wall.value || s.value + def valueDiscard(using Context): Boolean = allOr(WvalueDiscard) + def nonUnitStatement(using Context): Boolean = allOr(WNonUnitStatement) + def enumCommentDiscard(using Context): Boolean = allOr(WenumCommentDiscard) + def implausiblePatterns(using Context): Boolean = allOr(WimplausiblePatterns) + def unstableInlineAccessors(using Context): Boolean = allOr(WunstableInlineAccessors) + def checkInit(using Context): Boolean = allOr(WcheckInit) + /** -X "Extended" or "Advanced" settings */ private sealed trait XSettings: self: SettingGroup => @@ -423,12 +441,10 @@ private sealed trait YSettings: val YlegacyLazyVals: Setting[Boolean] = BooleanSetting(ForkSetting, "Ylegacy-lazy-vals", "Use legacy (pre 3.3.0) implementation of lazy vals.") val YcompileScala2Library: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycompile-scala2-library", "Used when compiling the Scala 2 standard library.") val YprofileEnabled: Setting[Boolean] = BooleanSetting(ForkSetting, "Yprofile-enabled", "Enable profiling.") - val YprofileDestination: Setting[String] = StringSetting(ForkSetting, "Yprofile-destination", "file", "Where to send profiling output - specify a file, default is to the console.", "") - //.withPostSetHook( _ => YprofileEnabled.value = true ) - val YprofileExternalTool: Setting[List[String]] = PhasesSetting(ForkSetting, "Yprofile-external-tool", "Enable profiling for a phase using an external tool hook. Generally only useful for a single phase.", "typer") - //.withPostSetHook( _ => YprofileEnabled.value = true ) - val YprofileRunGcBetweenPhases: Setting[List[String]] = PhasesSetting(ForkSetting, "Yprofile-run-gc", "Run a GC between phases - this allows heap size to be accurate at the expense of more time. Specify a list of phases, or *", "_") - //.withPostSetHook( _ => YprofileEnabled.value = true ) + val YprofileDestination: Setting[String] = StringSetting(ForkSetting, "Yprofile-destination", "file", "Where to send profiling output - specify a file, default is to the console.", "", depends = List(YprofileEnabled -> true)) + val YprofileExternalTool: Setting[List[String]] = PhasesSetting(ForkSetting, "Yprofile-external-tool", "Enable profiling for a phase using an external tool hook. Generally only useful for a single phase.", "typer", depends = List(YprofileEnabled -> true)) + val YprofileRunGcBetweenPhases: Setting[List[String]] = PhasesSetting(ForkSetting, "Yprofile-run-gc", "Run a GC between phases - this allows heap size to be accurate at the expense of more time. Specify a list of phases, or *", "_", depends = List(YprofileEnabled -> true)) + val YprofileTrace: Setting[String] = StringSetting(ForkSetting, "Yprofile-trace", "file", s"Capture trace of compilation in JSON Chrome Trace format to the specified file. This option requires ${YprofileEnabled.name}. 
The output file can be visualized using https://ui.perfetto.dev/.", "", depends = List(YprofileEnabled -> true)) val YbestEffort: Setting[Boolean] = BooleanSetting(ForkSetting, "Ybest-effort", "Enable best-effort compilation attempting to produce betasty to the META-INF/best-effort directory, regardless of errors, as part of the pickler phase.") val YwithBestEffortTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Ywith-best-effort-tasty", "Allow to compile using best-effort tasty files. If such file is used, the compiler will stop after the pickler phase.") @@ -492,3 +508,4 @@ private sealed trait YSettings: @deprecated(message = "Scheduled for removal.", since = "3.5.0") val YoutputOnlyTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Youtput-only-tasty", "Used to only generate the TASTy file without the classfiles", deprecation = Deprecation.removed()) end YSettings + diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettingsProperties.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettingsProperties.scala index e8a55dc6e737..e42d2d53529e 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettingsProperties.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettingsProperties.scala @@ -1,6 +1,7 @@ package dotty.tools.dotc package config +import Settings.Setting.ChoiceWithHelp import dotty.tools.backend.jvm.BackendUtils.classfileVersionMap import dotty.tools.io.{AbstractFile, Directory, JDK9Reflectors, PlainDirectory, NoAbstractFile} import scala.language.unsafeNulls @@ -24,7 +25,13 @@ object ScalaSettingsProperties: ScalaRelease.values.toList.map(_.show) def supportedSourceVersions: List[String] = - SourceVersion.values.toList.map(_.toString) + (SourceVersion.values.toList.diff(SourceVersion.illegalSourceVersionNames)).toList.map(_.toString) + + def supportedLanguageFeatures: List[ChoiceWithHelp[String]] = + Feature.values.map((n, d) => ChoiceWithHelp(n.toString, d)) + + val legacyLanguageFeatures: List[String] = + Feature.legacyFeatures def defaultClasspath: String = sys.env.getOrElse("CLASSPATH", ".") diff --git a/compiler/src/dotty/tools/dotc/config/Settings.scala b/compiler/src/dotty/tools/dotc/config/Settings.scala index 1e2ced4d65a7..f85f2cc57de4 100644 --- a/compiler/src/dotty/tools/dotc/config/Settings.scala +++ b/compiler/src/dotty/tools/dotc/config/Settings.scala @@ -47,6 +47,10 @@ object Settings: values(idx) = x changed.add(idx) this + + def reinitializedCopy(): SettingsState = + SettingsState(values.toSeq, changed.toSet) + end SettingsState case class ArgsSummary( @@ -69,6 +73,11 @@ object Settings: def validateSettingString(name: String): Unit = assert(settingCharacters.matches(name), s"Setting string $name contains invalid characters") + /** List of setting-value pairs that are required for another setting to be valid. + * For example, `s = Setting(..., depends = List(YprofileEnabled -> true))` + * means that `s` requires `YprofileEnabled` to be set to `true`. 
+ */ + type SettingDependencies = List[(Setting[?], Any)] case class Setting[T: ClassTag] private[Settings] ( category: SettingCategory, @@ -79,13 +88,15 @@ object Settings: choices: Option[Seq[?]] = None, prefix: Option[String] = None, aliases: List[String] = Nil, - depends: List[(Setting[?], Any)] = Nil, + depends: SettingDependencies = Nil, ignoreInvalidArgs: Boolean = false, preferPrevious: Boolean = false, propertyClass: Option[Class[?]] = None, deprecation: Option[Deprecation] = None, // kept only for -Xkind-projector option compatibility - legacyArgs: Boolean = false)(private[Settings] val idx: Int): + legacyArgs: Boolean = false, + // accept legacy choices (for example, valid in Scala 2 but no longer supported) + legacyChoices: Option[Seq[?]] = None)(private[Settings] val idx: Int): validateSettingString(prefix.getOrElse(name)) aliases.foreach(validateSettingString) @@ -206,9 +217,14 @@ object Settings: def appendList(strings: List[String], argValue: String, args: List[String]) = choices match - case Some(valid) => strings.filterNot(valid.contains) match - case Nil => update(strings, argValue, args) - case invalid => invalidChoices(invalid) + case Some(valid) => strings.partition(valid.contains) match + case (_, Nil) => update(strings, argValue, args) + case (validStrs, invalidStrs) => legacyChoices match + case Some(validBefore) => + invalidStrs.filterNot(validBefore.contains) match + case Nil => update(validStrs, argValue, args) + case realInvalidStrs => invalidChoices(realInvalidStrs) + case _ => invalidChoices(invalidStrs) case _ => update(strings, argValue, args) def doSet(argRest: String) = @@ -374,17 +390,17 @@ object Settings: def BooleanSetting(category: SettingCategory, name: String, descr: String, initialValue: Boolean = false, aliases: List[String] = Nil, preferPrevious: Boolean = false, deprecation: Option[Deprecation] = None, ignoreInvalidArgs: Boolean = false): Setting[Boolean] = publish(Setting(category, prependName(name), descr, initialValue, aliases = aliases, preferPrevious = preferPrevious, deprecation = deprecation, ignoreInvalidArgs = ignoreInvalidArgs)) - def StringSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: String, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[String] = - publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases, deprecation = deprecation)) + def StringSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: String, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None, depends: SettingDependencies = Nil): Setting[String] = + publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases, deprecation = deprecation, depends = depends)) def ChoiceSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[String], default: String, aliases: List[String] = Nil, legacyArgs: Boolean = false, deprecation: Option[Deprecation] = None): Setting[String] = publish(Setting(category, prependName(name), descr, default, helpArg, Some(choices), aliases = aliases, legacyArgs = legacyArgs, deprecation = deprecation)) - def MultiChoiceSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[String], default: List[String], aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[List[String]] = - publish(Setting(category, prependName(name), descr, default, helpArg, Some(choices), aliases = 
aliases, deprecation = deprecation)) + def MultiChoiceSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[String], default: List[String] = Nil, legacyChoices: List[String] = Nil, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[List[String]] = + publish(Setting(category, prependName(name), descr, default, helpArg, Some(choices), legacyChoices = Some(legacyChoices), aliases = aliases, deprecation = deprecation)) - def MultiChoiceHelpSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[ChoiceWithHelp[String]], default: List[ChoiceWithHelp[String]], aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[List[ChoiceWithHelp[String]]] = - publish(Setting(category, prependName(name), descr, default, helpArg, Some(choices), aliases = aliases, deprecation = deprecation)) + def MultiChoiceHelpSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[ChoiceWithHelp[String]], default: List[ChoiceWithHelp[String]], legacyChoices: List[String] = Nil, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[List[ChoiceWithHelp[String]]] = + publish(Setting(category, prependName(name), descr, default, helpArg, Some(choices), legacyChoices = Some(legacyChoices), aliases = aliases, deprecation = deprecation)) def IntSetting(category: SettingCategory, name: String, descr: String, default: Int, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[Int] = publish(Setting(category, prependName(name), descr, default, aliases = aliases, deprecation = deprecation)) @@ -401,12 +417,13 @@ object Settings: def PathSetting(category: SettingCategory, name: String, descr: String, default: String, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[String] = publish(Setting(category, prependName(name), descr, default, aliases = aliases, deprecation = deprecation)) - def PhasesSetting(category: SettingCategory, name: String, descr: String, default: String = "", aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[List[String]] = - publish(Setting(category, prependName(name), descr, if (default.isEmpty) Nil else List(default), aliases = aliases, deprecation = deprecation)) + def PhasesSetting(category: SettingCategory, name: String, descr: String, default: String = "", aliases: List[String] = Nil, deprecation: Option[Deprecation] = None, depends: SettingDependencies = Nil): Setting[List[String]] = + publish(Setting(category, prependName(name), descr, if (default.isEmpty) Nil else List(default), aliases = aliases, deprecation = deprecation, depends = depends)) - def PrefixSetting(category: SettingCategory, name: String, descr: String, deprecation: Option[Deprecation] = None): Setting[List[String]] = + def PrefixSetting(category: SettingCategory, name0: String, descr: String, deprecation: Option[Deprecation] = None): Setting[List[String]] = + val name = prependName(name0) val prefix = name.takeWhile(_ != '<') - publish(Setting(category, "-" + name, descr, Nil, prefix = Some(prefix), deprecation = deprecation)) + publish(Setting(category, name, descr, Nil, prefix = Some(prefix), deprecation = deprecation)) def VersionSetting(category: SettingCategory, name: String, descr: String, default: ScalaVersion = NoScalaVersion, legacyArgs: Boolean = false, deprecation: Option[Deprecation] = None): Setting[ScalaVersion] = publish(Setting(category, 
prependName(name), descr, default, legacyArgs = legacyArgs, deprecation = deprecation)) diff --git a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala index 02140c3f4e3b..3a7285751827 100644 --- a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala +++ b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala @@ -16,6 +16,8 @@ enum SourceVersion: // !!! Keep in sync with scala.runtime.stdlibPatches.language !!! case `future-migration`, `future` + case `never` // needed for MigrationVersion.errorFrom if we never want to issue an error + val isMigrating: Boolean = toString.endsWith("-migration") def stable: SourceVersion = @@ -29,10 +31,10 @@ enum SourceVersion: def isAtMost(v: SourceVersion) = stable.ordinal <= v.ordinal object SourceVersion extends Property.Key[SourceVersion]: - def defaultSourceVersion = `3.5` + def defaultSourceVersion = `3.6` /** language versions that may appear in a language import, are deprecated, but not removed from the standard library. */ - val illegalSourceVersionNames = List("3.1-migration").map(_.toTermName) + val illegalSourceVersionNames = List("3.1-migration", "never").map(_.toTermName) /** language versions that the compiler recognises. */ val validSourceVersionNames = values.toList.map(_.toString.toTermName) diff --git a/compiler/src/dotty/tools/dotc/core/Annotations.scala b/compiler/src/dotty/tools/dotc/core/Annotations.scala index a5ef4c26eed1..d6a99b12e3b3 100644 --- a/compiler/src/dotty/tools/dotc/core/Annotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Annotations.scala @@ -30,8 +30,8 @@ object Annotations { def derivedAnnotation(tree: Tree)(using Context): Annotation = if (tree eq this.tree) this else Annotation(tree) - /** All arguments to this annotation in a single flat list */ - def arguments(using Context): List[Tree] = tpd.allArguments(tree) + /** All term arguments of this annotation in a single flat list */ + def arguments(using Context): List[Tree] = tpd.allTermArguments(tree) def argument(i: Int)(using Context): Option[Tree] = { val args = arguments @@ -43,7 +43,7 @@ object Annotations { def argumentConstantString(i: Int)(using Context): Option[String] = for (case Constant(s: String) <- argumentConstant(i)) yield s - /** The tree evaluaton is in progress. */ + /** The tree evaluation is in progress. */ def isEvaluating: Boolean = false /** The tree evaluation has finished. */ @@ -54,15 +54,18 @@ object Annotations { * type, since ranges cannot be types of trees. */ def mapWith(tm: TypeMap)(using Context) = - val args = arguments + val args = tpd.allArguments(tree) if args.isEmpty then this else + // Checks if `tm` would result in any change by applying it to types + // inside the annotations' arguments and checking if the resulting types + // are different. 
val findDiff = new TreeAccumulator[Type]: def apply(x: Type, tree: Tree)(using Context): Type = if tm.isRange(x) then x else val tp1 = tm(tree.tpe) - foldOver(if tp1 frozen_=:= tree.tpe then x else tp1, tree) + foldOver(if !tp1.exists || (tp1 frozen_=:= tree.tpe) then x else tp1, tree) val diff = findDiff(NoType, args) if tm.isRange(diff) then EmptyAnnotation else if diff.exists then derivedAnnotation(tm.mapOver(tree)) diff --git a/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala index 060189016828..d8241f3ff304 100644 --- a/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala +++ b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala @@ -116,15 +116,7 @@ class CheckRealizable(using Context) { case _: SingletonType | NoPrefix => Realizable case tp => - def isConcrete(tp: Type): Boolean = tp.dealias match { - case tp: TypeRef => tp.symbol.isClass - case tp: TypeParamRef => false - case tp: TypeProxy => isConcrete(tp.underlying) - case tp: AndType => isConcrete(tp.tp1) && isConcrete(tp.tp2) - case tp: OrType => isConcrete(tp.tp1) && isConcrete(tp.tp2) - case _ => false - } - if (!isConcrete(tp)) NotConcrete + if !MatchTypes.isConcrete(tp) then NotConcrete else boundsRealizability(tp).andAlso(memberRealizability(tp)) } diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index 06711ec97abf..04d55475ec60 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -120,7 +120,7 @@ trait ConstraintHandling { */ private var myTrustBounds = true - inline def withUntrustedBounds(op: => Type): Type = + transparent inline def withUntrustedBounds(op: => Type): Type = val saved = myTrustBounds myTrustBounds = false try op finally myTrustBounds = saved @@ -295,40 +295,63 @@ trait ConstraintHandling { end legalBound protected def addOneBound(param: TypeParamRef, rawBound: Type, isUpper: Boolean)(using Context): Boolean = + + // Replace top-level occurrences of `param` in `bound` by `Nothing` + def sanitize(bound: Type): Type = + if bound.stripped eq param then defn.NothingType + else bound match + case bound: AndOrType => + bound.derivedAndOrType(sanitize(bound.tp1), sanitize(bound.tp2)) + case _ => + bound + if !constraint.contains(param) then true - else if !isUpper && param.occursIn(rawBound) then - // We don't allow recursive lower bounds when defining a type, - // so we shouldn't allow them as constraints either. - false + else if !isUpper && param.occursIn(rawBound.widen) then + val rawBound1 = sanitize(rawBound.widenDealias) + if param.occursIn(rawBound1) then + // We don't allow recursive lower bounds when defining a type, + // so we shouldn't allow them as constraints either. + false + else addOneBound(param, rawBound1, isUpper) else - val bound = legalBound(param, rawBound, isUpper) - val oldBounds @ TypeBounds(lo, hi) = constraint.nonParamBounds(param) - val equalBounds = (if isUpper then lo else hi) eq bound - if equalBounds && !bound.existsPart(_ eq param, StopAt.Static) then - // The narrowed bounds are equal and not recursive, - // so we can remove `param` from the constraint. - constraint = constraint.replace(param, bound) - true - else - // Narrow one of the bounds of type parameter `param` - // If `isUpper` is true, ensure that `param <: `bound`, otherwise ensure - // that `param >: bound`. 
- val narrowedBounds = - val saved = homogenizeArgs - homogenizeArgs = Config.alignArgsInAnd - try - withUntrustedBounds( - if isUpper then oldBounds.derivedTypeBounds(lo, hi & bound) - else oldBounds.derivedTypeBounds(lo | bound, hi)) - finally - homogenizeArgs = saved + + // Narrow one of the bounds of type parameter `param` + // If `isUpper` is true, ensure that `param <: `bound`, + // otherwise ensure that `param >: bound`. + val narrowedBounds: TypeBounds = + val bound = legalBound(param, rawBound, isUpper) + val oldBounds @ TypeBounds(lo, hi) = constraint.nonParamBounds(param) + + val saved = homogenizeArgs + homogenizeArgs = Config.alignArgsInAnd + try + withUntrustedBounds( + if isUpper then oldBounds.derivedTypeBounds(lo, hi & bound) + else oldBounds.derivedTypeBounds(lo | bound, hi)) + finally + homogenizeArgs = saved + end narrowedBounds + + // If the narrowed bounds are equal and not recursive, + // we can remove `param` from the constraint. + def tryReplace(newBounds: TypeBounds): Boolean = + val TypeBounds(lo, hi) = newBounds + val canReplace = (lo eq hi) && !newBounds.existsPart(_ eq param, StopAt.Static) + if canReplace then constraint = constraint.replace(param, lo) + canReplace + + tryReplace(narrowedBounds) || locally: //println(i"narrow bounds for $param from $oldBounds to $narrowedBounds") val c1 = constraint.updateEntry(param, narrowedBounds) (c1 eq constraint) || { constraint = c1 val TypeBounds(lo, hi) = constraint.entry(param): @unchecked - isSub(lo, hi) + val isSat = isSub(lo, hi) + if isSat then + // isSub may have narrowed the bounds further + tryReplace(constraint.nonParamBounds(param)) + isSat } end addOneBound diff --git a/compiler/src/dotty/tools/dotc/core/ContextOps.scala b/compiler/src/dotty/tools/dotc/core/ContextOps.scala index 57c369a08de6..c307b6ac569e 100644 --- a/compiler/src/dotty/tools/dotc/core/ContextOps.scala +++ b/compiler/src/dotty/tools/dotc/core/ContextOps.scala @@ -132,7 +132,7 @@ object ContextOps: } def packageContext(tree: untpd.PackageDef, pkg: Symbol): Context = inContext(ctx) { - if (pkg.is(Package)) ctx.fresh.setOwner(pkg.moduleClass).setTree(tree) + if (pkg.is(Package)) ctx.fresh.setOwner(pkg.moduleClass).setTree(tree).setNewScope else ctx } end ContextOps diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index 79a0b279aefe..d69c7408d0b1 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -265,7 +265,7 @@ object Contexts { /** SourceFile with given path, memoized */ def getSource(path: String): SourceFile = getSource(path.toTermName) - /** AbstraFile with given path name, memoized */ + /** AbstractFile with given path name, memoized */ def getFile(name: TermName): AbstractFile = base.files.get(name) match case Some(file) => file @@ -769,6 +769,7 @@ object Contexts { .updated(settingsStateLoc, settingsGroup.defaultState) .updated(notNullInfosLoc, Nil) .updated(compilationUnitLoc, NoCompilationUnit) + .updated(profilerLoc, Profiler.NoOp) c._searchHistory = new SearchRoot c._gadtState = GadtState(GadtConstraint.empty) c diff --git a/compiler/src/dotty/tools/dotc/core/Decorators.scala b/compiler/src/dotty/tools/dotc/core/Decorators.scala index 29d4b3fa4052..96a2d45db80d 100644 --- a/compiler/src/dotty/tools/dotc/core/Decorators.scala +++ b/compiler/src/dotty/tools/dotc/core/Decorators.scala @@ -292,7 +292,7 @@ object Decorators { case _ => String.valueOf(x).nn /** Returns the simple class name of `x`. 
*/ - def className: String = x.getClass.getSimpleName.nn + def className: String = if x == null then "" else x.getClass.getSimpleName.nn extension [T](x: T) def assertingErrorsReported(using Context): T = { diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 6a1332e91afb..0195a4ddbf34 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -15,7 +15,7 @@ import Comments.{Comment, docCtx} import util.Spans.NoSpan import config.Feature import Symbols.requiredModuleRef -import cc.{CaptureSet, RetainingType} +import cc.{CaptureSet, RetainingType, Existential} import ast.tpd.ref import scala.annotation.tailrec @@ -903,6 +903,7 @@ class Definitions { @tu lazy val QuotedRuntimePatterns: Symbol = requiredModule("scala.quoted.runtime.Patterns") @tu lazy val QuotedRuntimePatterns_patternHole: Symbol = QuotedRuntimePatterns.requiredMethod("patternHole") @tu lazy val QuotedRuntimePatterns_higherOrderHole: Symbol = QuotedRuntimePatterns.requiredMethod("higherOrderHole") + @tu lazy val QuotedRuntimePatterns_higherOrderHoleWithTypes: Symbol = QuotedRuntimePatterns.requiredMethod("higherOrderHoleWithTypes") @tu lazy val QuotedRuntimePatterns_patternTypeAnnot: ClassSymbol = QuotedRuntimePatterns.requiredClass("patternType") @tu lazy val QuotedRuntimePatterns_fromAboveAnnot: ClassSymbol = QuotedRuntimePatterns.requiredClass("fromAbove") @@ -991,14 +992,21 @@ class Definitions { @tu lazy val CapsModule: Symbol = requiredModule("scala.caps") @tu lazy val captureRoot: TermSymbol = CapsModule.requiredValue("cap") - @tu lazy val Caps_Cap: TypeSymbol = requiredClass("scala.caps.Cap") + @tu lazy val Caps_Capability: TypeSymbol = CapsModule.requiredType("Capability") + @tu lazy val Caps_CapSet: ClassSymbol = requiredClass("scala.caps.CapSet") @tu lazy val Caps_reachCapability: TermSymbol = CapsModule.requiredMethod("reachCapability") + @tu lazy val Caps_capsOf: TermSymbol = CapsModule.requiredMethod("capsOf") + @tu lazy val Caps_Exists: ClassSymbol = requiredClass("scala.caps.Exists") @tu lazy val CapsUnsafeModule: Symbol = requiredModule("scala.caps.unsafe") @tu lazy val Caps_unsafeAssumePure: Symbol = CapsUnsafeModule.requiredMethod("unsafeAssumePure") @tu lazy val Caps_unsafeBox: Symbol = CapsUnsafeModule.requiredMethod("unsafeBox") @tu lazy val Caps_unsafeUnbox: Symbol = CapsUnsafeModule.requiredMethod("unsafeUnbox") @tu lazy val Caps_unsafeBoxFunArg: Symbol = CapsUnsafeModule.requiredMethod("unsafeBoxFunArg") - @tu lazy val expandedUniversalSet: CaptureSet = CaptureSet(captureRoot.termRef) + @tu lazy val Caps_ContainsTrait: TypeSymbol = CapsModule.requiredType("Contains") + @tu lazy val Caps_containsImpl: TermSymbol = CapsModule.requiredMethod("containsImpl") + + /** The same as CaptureSet.universal but generated implicitly for references of Capability subtypes */ + @tu lazy val universalCSImpliedByCapability = CaptureSet(captureRoot.termRef) @tu lazy val PureClass: Symbol = requiredClass("scala.Pure") @@ -1014,7 +1022,6 @@ class Definitions { @tu lazy val BeanPropertyAnnot: ClassSymbol = requiredClass("scala.beans.BeanProperty") @tu lazy val BooleanBeanPropertyAnnot: ClassSymbol = requiredClass("scala.beans.BooleanBeanProperty") @tu lazy val BodyAnnot: ClassSymbol = requiredClass("scala.annotation.internal.Body") - @tu lazy val CapabilityAnnot: ClassSymbol = requiredClass("scala.annotation.capability") @tu lazy val ChildAnnot: ClassSymbol = 
requiredClass("scala.annotation.internal.Child") @tu lazy val ContextResultCountAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ContextResultCount") @tu lazy val ProvisionalSuperClassAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ProvisionalSuperClass") @@ -1035,6 +1042,7 @@ class Definitions { @tu lazy val TransparentTraitAnnot: ClassSymbol = requiredClass("scala.annotation.transparentTrait") @tu lazy val NativeAnnot: ClassSymbol = requiredClass("scala.native") @tu lazy val RepeatedAnnot: ClassSymbol = requiredClass("scala.annotation.internal.Repeated") + @tu lazy val RuntimeCheckedAnnot: ClassSymbol = requiredClass("scala.annotation.internal.RuntimeChecked") @tu lazy val SourceFileAnnot: ClassSymbol = requiredClass("scala.annotation.internal.SourceFile") @tu lazy val ScalaSignatureAnnot: ClassSymbol = requiredClass("scala.reflect.ScalaSignature") @tu lazy val ScalaLongSignatureAnnot: ClassSymbol = requiredClass("scala.reflect.ScalaLongSignature") @@ -1049,10 +1057,12 @@ class Definitions { @tu lazy val ExperimentalAnnot: ClassSymbol = requiredClass("scala.annotation.experimental") @tu lazy val ThrowsAnnot: ClassSymbol = requiredClass("scala.throws") @tu lazy val TransientAnnot: ClassSymbol = requiredClass("scala.transient") + @tu lazy val UnboxAnnot: ClassSymbol = requiredClass("scala.caps.unbox") @tu lazy val UncheckedAnnot: ClassSymbol = requiredClass("scala.unchecked") @tu lazy val UncheckedStableAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedStable") @tu lazy val UncheckedVarianceAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedVariance") @tu lazy val UncheckedCapturesAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedCaptures") + @tu lazy val UntrackedCapturesAnnot: ClassSymbol = requiredClass("scala.caps.untrackedCaptures") @tu lazy val VolatileAnnot: ClassSymbol = requiredClass("scala.volatile") @tu lazy val BeanGetterMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.beanGetter") @tu lazy val BeanSetterMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.beanSetter") @@ -1161,6 +1171,8 @@ class Definitions { if mt.hasErasedParams then RefinedType(PolyFunctionClass.typeRef, nme.apply, mt) else FunctionNOf(args, resultType, isContextual) + // Unlike PolyFunctionOf and RefinedFunctionOf this extractor follows aliases. + // Can we do without? Same for FunctionNOf and isFunctionNType. def unapply(ft: Type)(using Context): Option[(List[Type], Type, Boolean)] = { ft match case PolyFunctionOf(mt: MethodType) => @@ -1190,11 +1202,17 @@ class Definitions { /** Matches a refined `PolyFunction`/`FunctionN[...]`/`ContextFunctionN[...]`. * Extracts the method type type and apply info. + * Will NOT math an existential type encoded as a dependent function. */ def unapply(tpe: RefinedType)(using Context): Option[MethodOrPoly] = tpe.refinedInfo match - case mt: MethodOrPoly - if tpe.refinedName == nme.apply && isFunctionType(tpe.parent) => Some(mt) + case mt: MethodType + if tpe.refinedName == nme.apply + && isFunctionType(tpe.parent) + && !Existential.isExistentialMethod(mt) => Some(mt) + case mt: PolyType + if tpe.refinedName == nme.apply + && isFunctionType(tpe.parent) => Some(mt) case _ => None end RefinedFunctionOf @@ -1203,7 +1221,6 @@ class Definitions { /** Creates a refined `PolyFunction` with an `apply` method with the given info. 
*/ def apply(mt: MethodOrPoly)(using Context): Type = - assert(isValidPolyFunctionInfo(mt), s"Not a valid PolyFunction refinement: $mt") RefinedType(PolyFunctionClass.typeRef, nme.apply, mt) /** Matches a refined `PolyFunction` type and extracts the apply info. @@ -1757,6 +1774,12 @@ class Definitions { def isPolymorphicAfterErasure(sym: Symbol): Boolean = (sym eq Any_isInstanceOf) || (sym eq Any_asInstanceOf) || (sym eq Object_synchronized) + def isTypeTestOrCast(sym: Symbol): Boolean = + (sym eq Any_isInstanceOf) + || (sym eq Any_asInstanceOf) + || (sym eq Any_typeTest) + || (sym eq Any_typeCast) + /** Is this type a `TupleN` type? * * @return true if the dealiased type of `tp` is `TupleN[T1, T2, ..., Tn]` @@ -2033,7 +2056,7 @@ class Definitions { */ @tu lazy val ccExperimental: Set[Symbol] = Set( CapsModule, CapsModule.moduleClass, PureClass, - CapabilityAnnot, RequiresCapabilityAnnot, + RequiresCapabilityAnnot, RetainsAnnot, RetainsCapAnnot, RetainsByNameAnnot) /** Experimental language features defined in `scala.runtime.stdLibPatches.language.experimental`. diff --git a/compiler/src/dotty/tools/dotc/core/Denotations.scala b/compiler/src/dotty/tools/dotc/core/Denotations.scala index 2418aba1978b..816b28177333 100644 --- a/compiler/src/dotty/tools/dotc/core/Denotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Denotations.scala @@ -2,7 +2,7 @@ package dotty.tools package dotc package core -import SymDenotations.{ SymDenotation, ClassDenotation, NoDenotation, LazyType, stillValid, acceptStale, traceInvalid } +import SymDenotations.{ SymDenotation, ClassDenotation, NoDenotation, LazyType, stillValid, movedToCompanionClass, acceptStale, traceInvalid } import Contexts.* import Names.* import NameKinds.* @@ -755,6 +755,11 @@ object Denotations { } if (!symbol.exists) return updateValidity() if (!coveredInterval.containsPhaseId(ctx.phaseId)) return NoDenotation + // Moved to a companion class, likely at a later phase (in MoveStatics) + this match { + case symd: SymDenotation if movedToCompanionClass(symd) => return NoDenotation + case _ => + } if (ctx.debug) traceInvalid(this) staleSymbolError } @@ -956,7 +961,7 @@ object Denotations { } def staleSymbolError(using Context): Nothing = - if symbol.isPackageObject && ctx.run != null && ctx.run.nn.isCompilingSuspended + if symbol.lastKnownDenotation.isPackageObject && ctx.run != null && ctx.run.nn.isCompilingSuspended then throw StaleSymbolTypeError(symbol) else throw StaleSymbolException(staleSymbolMsg) diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala index b1bf7a266c91..b915373da021 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ -569,6 +569,7 @@ object Flags { val ConstructorProxyModule: FlagSet = ConstructorProxy | Module val DefaultParameter: FlagSet = HasDefault | Param // A Scala 2x default parameter val DeferredInline: FlagSet = Deferred | Inline + val DeferredMethod: FlagSet = Deferred | Method val DeferredOrLazy: FlagSet = Deferred | Lazy val DeferredOrLazyOrMethod: FlagSet = Deferred | Lazy | Method val DeferredOrTermParamOrAccessor: FlagSet = Deferred | ParamAccessor | TermParam // term symbols without right-hand sides diff --git a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala index 1cbfabc08958..5a8938602523 100644 --- a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala 
@@ -193,7 +193,8 @@ sealed trait GadtState { case i => pt.paramRefs(i) case tp => tp } - + if !param.info.exists then + throw TypeError(em"illegal recursive reference involving $param") val tb = param.info.bounds tb.derivedTypeBounds( lo = substDependentSyms(tb.lo, isUpper = false), diff --git a/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala b/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala index e16a950aa32a..00143f05b4fb 100644 --- a/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala +++ b/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala @@ -138,8 +138,10 @@ object MatchTypeTrace: | ${casesText(cases)}""" def illegalPatternText(scrut: Type, cas: MatchTypeCaseSpec.LegacyPatMat)(using Context): String = + val explanation = + if cas.err == null then "" else s"The pattern contains ${cas.err.explanation}.\n" i"""The match type contains an illegal case: | ${caseText(cas)} - |(this error can be ignored for now with `-source:3.3`)""" + |$explanation(this error can be ignored for now with `-source:3.3`)""" end MatchTypeTrace diff --git a/compiler/src/dotty/tools/dotc/core/MatchTypes.scala b/compiler/src/dotty/tools/dotc/core/MatchTypes.scala new file mode 100644 index 000000000000..a3becea40886 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/core/MatchTypes.scala @@ -0,0 +1,47 @@ +package dotty.tools +package dotc +package core + +import Types.*, Contexts.*, Symbols.*, Flags.*, Decorators.* + +object MatchTypes: + + /* Concreteness checking + * + * When following a baseType and reaching a non-wildcard, in-variant-pos type capture, + * we have to make sure that the scrutinee is concrete enough to uniquely determine + * the values of the captures. This comes down to checking that we do not follow any + * upper bound of an abstract type. + * + * See notably neg/wildcard-match.scala for examples of this. + * + * See neg/i13780.scala, neg/i13780-1.scala and neg/i19746.scala for + * ClassCastException reproducers if we disable this check. 
+ */ + def isConcrete(tp: Type)(using Context): Boolean = + val tp1 = tp.normalized + + tp1 match + case tp1: TypeRef => + if tp1.symbol.isClass then true + else + tp1.info match + case info: AliasingBounds => isConcrete(info.alias) + case _ => false + case tp1: AppliedType => + isConcrete(tp1.tycon) && isConcrete(tp1.superType) + case tp1: HKTypeLambda => + true + case tp1: TermRef => + !tp1.symbol.is(Param) && isConcrete(tp1.underlying) + case _: (ParamRef | MatchType) => + false + case tp1: TypeProxy => + isConcrete(tp1.underlying) + case tp1: AndOrType => + isConcrete(tp1.tp1) && isConcrete(tp1.tp2) + case _ => + false + end isConcrete + +end MatchTypes \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/core/NameKinds.scala b/compiler/src/dotty/tools/dotc/core/NameKinds.scala index 74d440562824..e9575c7d6c4a 100644 --- a/compiler/src/dotty/tools/dotc/core/NameKinds.scala +++ b/compiler/src/dotty/tools/dotc/core/NameKinds.scala @@ -325,6 +325,7 @@ object NameKinds { val TailLocalName: UniqueNameKind = new UniqueNameKind("$tailLocal") val TailTempName: UniqueNameKind = new UniqueNameKind("$tmp") val ExceptionBinderName: UniqueNameKind = new UniqueNameKind("ex") + val ExistentialBinderName: UniqueNameKind = new UniqueNameKind("ex$") val SkolemName: UniqueNameKind = new UniqueNameKind("?") val SuperArgName: UniqueNameKind = new UniqueNameKind("$superArg$") val DocArtifactName: UniqueNameKind = new UniqueNameKind("$doc") diff --git a/compiler/src/dotty/tools/dotc/core/NamerOps.scala b/compiler/src/dotty/tools/dotc/core/NamerOps.scala index 07cb9292baa4..363a01665564 100644 --- a/compiler/src/dotty/tools/dotc/core/NamerOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NamerOps.scala @@ -272,7 +272,7 @@ object NamerOps: * where * * is the CBCompanion type created in Definitions - * withnessRefK is a refence to the K'th witness. + * withnessRefK is a reference to the K'th witness. * * The companion has the same access flags as the original type. */ diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala index 7f925b0fc322..015cf6fc0f2c 100644 --- a/compiler/src/dotty/tools/dotc/core/Phases.scala +++ b/compiler/src/dotty/tools/dotc/core/Phases.scala @@ -370,7 +370,7 @@ object Phases { // Test that we are in a state where we need to check if the phase should be skipped for a java file, // this prevents checking the expensive `unit.typedAsJava` unnecessarily. 
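To make the concreteness requirement above tangible, here is a hedged source-level sketch (FirstElem and its bound are made up for the example): a match type only reduces when the captured parts of the scrutinee are uniquely determined, and that fails as soon as reduction would have to go through the upper bound of an abstract type.

    object ConcretenessSketch:
      type FirstElem[T] = T match
        case h *: _ => h

      // Concrete scrutinee: reduces to Int.
      val ok: FirstElem[(Int, String)] = 1

      // Abstract scrutinee: only the upper bound of T is a tuple, so the capture `h`
      // is not uniquely determined and FirstElem[T] does not reduce here.
      def notReduced[T <: (Int, String)](t: T): FirstElem[T] = ???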
val doCheckJava = skipIfJava && !isAfterLastJavaPhase - for unit <- units do + for unit <- units do ctx.profiler.onUnit(this, unit): given unitCtx: Context = runCtx.fresh.setPhase(this.start).setCompilationUnit(unit).withRootImports if ctx.run.enterUnit(unit) then try @@ -378,14 +378,18 @@ object Phases { () else run - catch case ex: Throwable if !ctx.run.enrichedErrorMessage => - println(ctx.run.enrichErrorMessage(s"unhandled exception while running $phaseName on $unit")) - throw ex + buf += unitCtx.compilationUnit + catch + case _: CompilationUnit.SuspendException => // this unit will be run again in `Run#compileSuspendedUnits` + case ex: Throwable if !ctx.run.enrichedErrorMessage => + println(ctx.run.enrichErrorMessage(s"unhandled exception while running $phaseName on $unit")) + throw ex finally ctx.run.advanceUnit() - buf += unitCtx.compilationUnit end if end for - buf.result() + val res = buf.result() + ctx.run.nn.checkSuspendedUnits(res) + res end runOn /** Convert a compilation unit's tree to a string; can be overridden */ @@ -532,7 +536,7 @@ object Phases { def sbtExtractAPIPhase(using Context): Phase = ctx.base.sbtExtractAPIPhase def picklerPhase(using Context): Phase = ctx.base.picklerPhase def inliningPhase(using Context): Phase = ctx.base.inliningPhase - def stagingPhase(using Context): Phase = ctx.base.stagingPhase + def stagingPhase(using Context): Phase = ctx.base.stagingPhase def splicingPhase(using Context): Phase = ctx.base.splicingPhase def firstTransformPhase(using Context): Phase = ctx.base.firstTransformPhase def refchecksPhase(using Context): Phase = ctx.base.refchecksPhase diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index b935488695e0..56d71c7fb57e 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -294,6 +294,7 @@ object StdNames { val EVT2U: N = "evt2u$" val EQEQ_LOCAL_VAR: N = "eqEqTemp$" val LAZY_FIELD_OFFSET: N = "OFFSET$" + val OOS_EXISTENTIAL: N = "" val OUTER: N = "$outer" val REFINE_CLASS: N = "" val ROOTPKG: N = "_root_" @@ -357,6 +358,7 @@ object StdNames { val AppliedTypeTree: N = "AppliedTypeTree" val ArrayAnnotArg: N = "ArrayAnnotArg" val CAP: N = "CAP" + val CapSet: N = "CapSet" val Constant: N = "Constant" val ConstantType: N = "ConstantType" val Eql: N = "Eql" @@ -440,8 +442,8 @@ object StdNames { val bytes: N = "bytes" val canEqual_ : N = "canEqual" val canEqualAny : N = "canEqualAny" - val capIn: N = "capIn" val caps: N = "caps" + val capsOf: N = "capsOf" val captureChecking: N = "captureChecking" val checkInitialized: N = "checkInitialized" val classOf: N = "classOf" @@ -509,6 +511,7 @@ object StdNames { val _hashCode_ : N = "_hashCode" val hash_ : N = "hash" val head: N = "head" + val help: N = "help" val higherKinds: N = "higherKinds" val idx: N = "idx" val identity: N = "identity" @@ -665,6 +668,7 @@ object StdNames { val readResolve: N = "readResolve" val zero: N = "zero" val zip: N = "zip" + val `++` : N = "++" val nothingRuntimeClass: N = "scala.runtime.Nothing$" val nullRuntimeClass: N = "scala.runtime.Null$" diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 3904228756a0..f54b8a62fa25 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -1162,10 +1162,10 @@ object SymDenotations { final def enclosingClass(using Context): Symbol = { def 
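The runOn change above catches CompilationUnit.SuspendException per unit and hands suspended units to Run#checkSuspendedUnits / Run#compileSuspendedUnits instead of treating the exception as a crash. A common source-level cause of suspension is calling a macro defined in the same compilation run; a hedged two-file sketch (file and object names are illustrative):

    // Macros.scala
    import scala.quoted.*
    object Macros:
      inline def twice(inline n: Int): Int = ${ twiceImpl('n) }
      def twiceImpl(n: Expr[Int])(using Quotes): Expr[Int] = '{ $n + $n }

    // Use.scala: when compiled in the same run as Macros.scala, this unit is
    // suspended and retried once the macro implementation has been compiled.
    object Use:
      val four: Int = Macros.twice(2)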
enclClass(sym: Symbol, skip: Boolean): Symbol = { def newSkip = sym.is(JavaStaticTerm) - if (!sym.exists) + if !sym.exists then NoSymbol - else if (sym.isClass) - if (skip) enclClass(sym.owner, newSkip) else sym + else if sym.isClass then + if skip || sym.isRefinementClass then enclClass(sym.owner, newSkip) else sym else enclClass(sym.owner, skip || newSkip) } @@ -2680,6 +2680,10 @@ object SymDenotations { stillValidInOwner(denot) } + def movedToCompanionClass(denot: SymDenotation)(using Context): Boolean = + val ownerCompanion = denot.maybeOwner.companionClass + stillValid(ownerCompanion) && ownerCompanion.unforcedDecls.contains(denot.name, denot.symbol) + private[SymDenotations] def stillValidInOwner(denot: SymDenotation)(using Context): Boolean = try val owner = denot.maybeOwner.denot stillValid(owner) diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala index 51e6a5e6138a..68f2e350c3e4 100644 --- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala +++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala @@ -8,7 +8,7 @@ import java.nio.channels.ClosedByInterruptException import scala.util.control.NonFatal import dotty.tools.dotc.classpath.FileUtils.{hasTastyExtension, hasBetastyExtension} -import dotty.tools.io.{ ClassPath, ClassRepresentation, AbstractFile } +import dotty.tools.io.{ ClassPath, ClassRepresentation, AbstractFile, NoAbstractFile } import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions import Contexts.*, Symbols.*, Flags.*, SymDenotations.*, Types.*, Scopes.*, Names.* @@ -51,8 +51,9 @@ object SymbolLoaders { */ def enterClass( owner: Symbol, name: PreName, completer: SymbolLoader, - flags: FlagSet = EmptyFlags, scope: Scope = EmptyScope)(using Context): Symbol = { - val cls = newClassSymbol(owner, name.toTypeName.unmangleClassName.decode, flags, completer, compUnitInfo = completer.compilationUnitInfo) + flags: FlagSet = EmptyFlags, scope: Scope = EmptyScope, privateWithin: Symbol = NoSymbol, + )(using Context): Symbol = { + val cls = newClassSymbol(owner, name.toTypeName.unmangleClassName.decode, flags, completer, privateWithin, compUnitInfo = completer.compilationUnitInfo) enterNew(owner, cls, completer, scope) } @@ -60,10 +61,13 @@ object SymbolLoaders { */ def enterModule( owner: Symbol, name: PreName, completer: SymbolLoader, - modFlags: FlagSet = EmptyFlags, clsFlags: FlagSet = EmptyFlags, scope: Scope = EmptyScope)(using Context): Symbol = { + modFlags: FlagSet = EmptyFlags, clsFlags: FlagSet = EmptyFlags, + scope: Scope = EmptyScope, privateWithin: Symbol = NoSymbol, + )(using Context): Symbol = { val module = newModuleSymbol( owner, name.toTermName.decode, modFlags, clsFlags, (module, _) => completer.proxy.withDecls(newScope).withSourceModule(module), + privateWithin, compUnitInfo = completer.compilationUnitInfo) enterNew(owner, module, completer, scope) enterNew(owner, module.moduleClass, completer, scope) @@ -103,13 +107,16 @@ object SymbolLoaders { */ def enterClassAndModule( owner: Symbol, name: PreName, completer: SymbolLoader, - flags: FlagSet = EmptyFlags, scope: Scope = EmptyScope)(using Context): Unit = { - val clazz = enterClass(owner, name, completer, flags, scope) + flags: FlagSet = EmptyFlags, scope: Scope = EmptyScope, privateWithin: Symbol = NoSymbol, + )(using Context): Unit = { + val clazz = enterClass(owner, name, completer, flags, scope, privateWithin) val module = enterModule( owner, name, completer, modFlags = flags.toTermFlags & RetainedModuleValFlags, 
clsFlags = flags.toTypeFlags & RetainedModuleClassFlags, - scope = scope) + scope = scope, + privateWithin = privateWithin, + ) } /** Enter all toplevel classes and objects in file `src` into package `owner`, provided @@ -333,7 +340,15 @@ abstract class SymbolLoader extends LazyType { self => def description(using Context): String = s"proxy to ${self.description}" } - override def complete(root: SymDenotation)(using Context): Unit = { + private inline def profileCompletion[T](root: SymDenotation)(inline body: T)(using Context): T = { + val sym = root.symbol + def associatedFile = root.symbol.associatedFile match + case file: AbstractFile => file + case null => NoAbstractFile + ctx.profiler.onCompletion(sym, associatedFile)(body) + } + + override def complete(root: SymDenotation)(using Context): Unit = profileCompletion(root) { def signalError(ex: Exception): Unit = { if (ctx.debug) ex.printStackTrace() val msg = ex.getMessage() diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala index da0ecac47b7d..7de75e371752 100644 --- a/compiler/src/dotty/tools/dotc/core/Symbols.scala +++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala @@ -84,7 +84,7 @@ object Symbols extends SymUtils { ctx.settings.YretainTrees.value || denot.owner.isTerm || // no risk of leaking memory after a run for these denot.isOneOf(InlineOrProxy) || // need to keep inline info - ctx.settings.WcheckInit.value || // initialization check + ctx.settings.Whas.checkInit || // initialization check ctx.settings.YcheckInitGlobal.value /** The last denotation of this symbol */ @@ -846,7 +846,8 @@ object Symbols extends SymUtils { /** Map given symbols, subjecting their attributes to the mappings * defined in the given TreeTypeMap `ttmap`. * Cross symbol references are brought over from originals to copies. - * Do not copy any symbols if all attributes of all symbols stay the same. + * Do not copy any symbols if all attributes of all symbols stay the same + * and mapAlways is false. 
*/ def mapSymbols(originals: List[Symbol], ttmap: TreeTypeMap, mapAlways: Boolean = false)(using Context): List[Symbol] = if (originals.forall(sym => diff --git a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala index 54636ff4ad58..136384413810 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala @@ -267,7 +267,9 @@ class TypeApplications(val self: Type) extends AnyVal { */ def hkResult(using Context): Type = self.dealias match { case self: TypeRef => - if (self.symbol == defn.AnyKindClass) self else self.info.hkResult + if self.symbol == defn.AnyKindClass then self + else if self.symbol.isClass then NoType // avoid forcing symbol if it's a class, not an alias to a HK type lambda + else self.info.hkResult case self: AppliedType => if (self.tycon.typeSymbol.isClass) NoType else self.superType.hkResult case self: HKTypeLambda => self.resultType diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 93ed6e7d03a5..17d427513e58 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -10,8 +10,8 @@ import TypeOps.refineUsingParent import collection.mutable import util.{Stats, NoSourcePosition, EqHashMap} import config.Config -import config.Feature.{betterMatchTypeExtractorsEnabled, migrateTo3, sourceVersion} -import config.Printers.{subtyping, gadts, matchTypes, noPrinter} +import config.Feature.{migrateTo3, sourceVersion} +import config.Printers.{subtyping, gadts, matchTypes, capt, noPrinter} import config.SourceVersion import TypeErasure.{erasedLub, erasedGlb} import TypeApplications.* @@ -24,6 +24,7 @@ import reporting.trace import annotation.constructorOnly import cc.* import NameKinds.WildcardParamName +import MatchTypes.isConcrete /** Provides methods to compare types. */ @@ -46,6 +47,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling monitored = false GADTused = false opaquesUsed = false + openedExistentials = Nil + assocExistentials = Nil recCount = 0 needsGc = false if Config.checkTypeComparerReset then checkReset() @@ -64,6 +67,18 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling /** Indicates whether the subtype check used opaque types */ private var opaquesUsed: Boolean = false + /** In capture checking: The existential types that are open because they + * appear in an existential type on the left in an enclosing comparison. + */ + private var openedExistentials: List[TermParamRef] = Nil + + /** In capture checking: A map from existential types that are appear + * in an existential type on the right in an enclosing comparison. + * Each existential gets mapped to the opened existentials to which it + * may resolve at this point. 
+ */ + private var assocExistentials: ExAssoc = Nil + private var myInstance: TypeComparer = this def currentInstance: TypeComparer = myInstance @@ -325,14 +340,13 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling isSubPrefix(tp1.prefix, tp2.prefix) || thirdTryNamed(tp2) else - ( (tp1.name eq tp2.name) + (tp1.name eq tp2.name) && !sym1.is(Private) && tp2.isPrefixDependentMemberRef && isSubPrefix(tp1.prefix, tp2.prefix) && tp1.signature == tp2.signature && !(sym1.isClass && sym2.isClass) // class types don't subtype each other - ) || - thirdTryNamed(tp2) + || thirdTryNamed(tp2) case _ => secondTry end compareNamed @@ -344,7 +358,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case tp2: ProtoType => isMatchedByProto(tp2, tp1) case tp2: BoundType => - tp2 == tp1 || secondTry + tp2 == tp1 + || secondTry case tp2: TypeVar => recur(tp1, typeVarInstance(tp2)) case tp2: WildcardType => @@ -546,6 +561,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling if reduced.exists then recur(reduced, tp2) && recordGadtUsageIf { MatchType.thatReducesUsingGadt(tp1) } else thirdTry + case Existential(boundVar, tp1unpacked) => + compareExistentialLeft(boundVar, tp1unpacked, tp2) case _: FlexType => true case _ => @@ -627,6 +644,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling thirdTryNamed(tp2) case tp2: TypeParamRef => compareTypeParamRef(tp2) + case Existential(boundVar, tp2unpacked) => + compareExistentialRight(tp1, boundVar, tp2unpacked) case tp2: RefinedType => def compareRefinedSlow: Boolean = val name2 = tp2.refinedName @@ -842,13 +861,27 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling val refs1 = tp1.captureSet try if refs1.isAlwaysEmpty then recur(tp1, parent2) - else subCaptures(refs1, refs2, frozenConstraint).isOK - && sameBoxed(tp1, tp2, refs1) - && (recur(tp1.widen.stripCapturing, parent2) - || tp1.isInstanceOf[SingletonType] && recur(tp1, parent2) - // this alternative is needed in case the right hand side is a - // capturing type that contains the lhs as an alternative of a union type. - ) + else + // The singletonOK branch is because we sometimes have a larger capture set in a singleton + // than in its underlying type. An example is `f: () -> () ->{x} T`, which might be + // the type of a closure. In that case the capture set of `f.type` is `{x}` but the + // capture set of the underlying type is `{}`. So without the `singletonOK` test, a singleton + // might not be a subtype of its underlying type. Examples where this arises is + // capt-capibility.scala and function-combinators.scala + val singletonOK = tp1 match + case tp1: SingletonType + if subCaptures(tp1.underlying.captureSet, refs2, frozen = true).isOK => + recur(tp1.widen, tp2) + case _ => + false + singletonOK + || subCaptures(refs1, refs2, frozenConstraint).isOK + && sameBoxed(tp1, tp2, refs1) + && (recur(tp1.widen.stripCapturing, parent2) + || tp1.isInstanceOf[SingletonType] && recur(tp1, parent2) + // this alternative is needed in case the right hand side is a + // capturing type that contains the lhs as an alternative of a union type. 
+ ) catch case ex: AssertionError => println(i"assertion failed while compare captured $tp1 <:< $tp2") throw ex @@ -970,12 +1003,15 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling compareAppliedType1(tp1, tycon1, args1) case tp1: SingletonType => def comparePaths = tp2 match - case tp2: TermRef => + case tp2: (TermRef | ThisType) => compareAtoms(tp1, tp2, knownSingletons = true).getOrElse(false) - || { // needed to make from-tasty work. test cases: pos/i1753.scala, pos/t839.scala - tp2.info.widenExpr.dealias match - case tp2i: SingletonType => recur(tp1, tp2i) - case _ => false + || { + // If tp2's underlying type tp2super is also effectively a singleton, compare + // against that. The idea is that if tp1 <: tp2super and tp2 <: tp2super and + // tp2super is also singleton, then tp1 and tp2 must be the same singleton. + // Needed to make from-tasty work. test cases: pos/i1753.scala, pos/t839.scala + val tp2super = tp2.superType.widenExpr + tp2super.isEffectivelySingleton && recur(tp1, tp2super) } case _ => false @@ -1402,20 +1438,21 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling canConstrain(param2) && canInstantiate(param2) || compareLower(bounds(param2), tyconIsTypeRef = false) case tycon2: TypeRef => - isMatchingApply(tp1) || - byGadtBounds || - defn.isCompiletimeAppliedType(tycon2.symbol) && compareCompiletimeAppliedType(tp2, tp1, fromBelow = true) || { - tycon2.info match { - case info2: TypeBounds => - compareLower(info2, tyconIsTypeRef = true) - case info2: ClassInfo => - tycon2.name.startsWith("Tuple") && - defn.isTupleNType(tp2) && recur(tp1, tp2.toNestedPairs) || - tryBaseType(info2.cls) - case _ => - fourthTry - } - } || tryLiftedToThis2 + isMatchingApply(tp1) + || byGadtBounds + || defn.isCompiletimeAppliedType(tycon2.symbol) + && compareCompiletimeAppliedType(tp2, tp1, fromBelow = true) + || tycon2.info.match + case info2: TypeBounds => + compareLower(info2, tyconIsTypeRef = true) + case info2: ClassInfo => + tycon2.name.startsWith("Tuple") + && defn.isTupleNType(tp2) + && recur(tp1, tp2.toNestedPairs) + || tryBaseType(info2.cls) + case _ => + fourthTry + || tryLiftedToThis2 case tv: TypeVar => if tv.isInstantiated then @@ -1452,12 +1489,12 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling inFrozenGadt { isSubType(bounds1.hi.applyIfParameterized(args1), tp2, approx.addLow) } } && recordGadtUsageIf(true) - !sym.isClass && { defn.isCompiletimeAppliedType(sym) && compareCompiletimeAppliedType(tp1, tp2, fromBelow = false) || { recur(tp1.superTypeNormalized, tp2) && recordGadtUsageIf(MatchType.thatReducesUsingGadt(tp1)) } || tryLiftedToThis1 - } || byGadtBounds + } + || byGadtBounds case tycon1: TypeProxy => recur(tp1.superTypeNormalized, tp2) case _ => @@ -1579,7 +1616,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * Note: It would be legal to do the lifting also if M does not contain opaque types, * but in this case the retries in tryLiftedToThis would be redundant. 
*/ - private def liftToThis(tp: Type): Type = { + def liftToThis(tp: Type): Type = { def findEnclosingThis(moduleClass: Symbol, from: Symbol): Type = if ((from.owner eq moduleClass) && from.isPackageObject && from.is(Opaque)) from.thisType @@ -1936,7 +1973,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling // check whether `op2` generates a weaker constraint than `op1` val leftConstraint = constraint constraint = preConstraint - if !(op && subsumes(leftConstraint, constraint, preConstraint)) then + val res = try op catch case _: TypeError => false + if !(res && subsumes(leftConstraint, constraint, preConstraint)) then if constr != noPrinter && !subsumes(constraint, leftConstraint, preConstraint) then constr.println(i"CUT - prefer $leftConstraint over $constraint") constraint = leftConstraint @@ -2365,7 +2403,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling else def mergedGlb(tp1: Type, tp2: Type): Type = val tp1a = dropIfSuper(tp1, tp2) - if tp1a ne tp1 then glb(tp1a, tp2) + if tp1a ne tp1 then + glb(tp1a, tp2) else val tp2a = dropIfSuper(tp2, tp1) if tp2a ne tp2 then glb(tp1, tp2a) @@ -2518,36 +2557,28 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling /** If some (&-operand of) `tp` is a supertype of `sub` replace it with `NoType`. */ private def dropIfSuper(tp: Type, sub: Type): Type = - - def isSuperOf(sub: Type): Boolean = sub match - case AndType(sub1, sub2) => isSuperOf(sub1) || isSuperOf(sub2) - case sub: TypeVar if sub.isInstantiated => isSuperOf(sub.instanceOpt) - case _ => isSubTypeWhenFrozen(sub, tp) - + // We need to be careful to check branches of AndTypes and OrTypes in correct order, + // see discussion in issue #20516. tp match case tp @ AndType(tp1, tp2) => recombine(dropIfSuper(tp1, sub), dropIfSuper(tp2, sub), tp) case tp: TypeVar if tp.isInstantiated => dropIfSuper(tp.instanceOpt, sub) case _ => - if isSuperOf(sub) then NoType else tp + if isSubTypeWhenFrozen(sub, tp) then NoType else tp end dropIfSuper /** If some (|-operand of) `tp` is a subtype of `sup` replace it with `NoType`. */ private def dropIfSub(tp: Type, sup: Type, canConstrain: Boolean): Type = - - def isSubOf(sup: Type): Boolean = sup match - case OrType(sup1, sup2) => isSubOf(sup1) || isSubOf(sup2) - case sup: TypeVar if sup.isInstantiated => isSubOf(sup.instanceOpt) - case _ => isSubType(tp, sup, whenFrozen = !canConstrain) - + // We need to be careful to check branches of AndTypes and OrTypes in correct order, + // see discussion in issue #20516. 
tp match case tp @ OrType(tp1, tp2) => recombine(dropIfSub(tp1, sup, canConstrain), dropIfSub(tp2, sup, canConstrain), tp) case tp: TypeVar if tp.isInstantiated => dropIfSub(tp.instanceOpt, sup, canConstrain) case _ => - if isSubOf(sup) then NoType else tp + if isSubType(tp, sup, whenFrozen = !canConstrain) then NoType else tp end dropIfSub /** There's a window of vulnerability between ElimByName and Erasure where some @@ -2691,11 +2722,11 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case tp1: TypeVar if tp1.isInstantiated => tp1.underlying & tp2 case CapturingType(parent1, refs1) => - val refs2 = tp2.captureSet - if subCaptures(refs2, refs1, frozen = true).isOK - && tp1.isBoxedCapturing == tp2.isBoxedCapturing - then (parent1 & tp2).capturing(refs2) - else tp1.derivedCapturingType(parent1 & tp2, refs1) + val jointRefs = refs1 ** tp2.captureSet + if jointRefs.isAlwaysEmpty then parent1 & tp2 + else if tp1.isBoxCompatibleWith(tp2) then + tp1.derivedCapturingType(parent1 & tp2, jointRefs) + else NoType case tp1: AnnotatedType if !tp1.isRefining => tp1.underlying & tp2 case _ => @@ -2720,6 +2751,11 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling } case tp1: TypeVar if tp1.isInstantiated => lub(tp1.underlying, tp2, isSoft = isSoft) + case CapturingType(parent1, refs1) => + if tp1.isBoxCompatibleWith(tp2) then + tp1.derivedCapturingType(lub(parent1, tp2, isSoft = isSoft), refs1) + else // TODO: Analyze cases where they are not box compatible + NoType case tp1: AnnotatedType if !tp1.isRefining => lub(tp1.underlying, tp2, isSoft = isSoft) case _ => @@ -2758,7 +2794,109 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling false } + // ----------- Capture checking ----------------------------------------------- + + /** A type associating instantiatable existentials on the right of a comparison + * with the existentials they can be instantiated with. + */ + type ExAssoc = List[(TermParamRef, List[TermParamRef])] + + private def compareExistentialLeft(boundVar: TermParamRef, tp1unpacked: Type, tp2: Type)(using Context): Boolean = + val saved = openedExistentials + try + openedExistentials = boundVar :: openedExistentials + recur(tp1unpacked, tp2) + finally + openedExistentials = saved + + private def compareExistentialRight(tp1: Type, boundVar: TermParamRef, tp2unpacked: Type)(using Context): Boolean = + val saved = assocExistentials + try + assocExistentials = (boundVar, openedExistentials) :: assocExistentials + recur(tp1, tp2unpacked) + finally + assocExistentials = saved + + /** Is `tp1` an existential var that subsumes `tp2`? This is the case if `tp1` is + * instantiatable (i.e. it's a key in `assocExistentials`) and one of the + * following is true: + * - `tp2` is not an existential var, + * - `tp1` is associated via `assocExistentials` with `tp2`, + * - `tp2` appears as key in `assocExistentials` further out than `tp1`. + * The third condition allows to instantiate c2 to c1 in + * EX c1: A -> Ex c2. 
B + */ + def subsumesExistentially(tp1: TermParamRef, tp2: CaptureRef)(using Context): Boolean = + def canInstantiateWith(assoc: ExAssoc): Boolean = assoc match + case (bv, bvs) :: assoc1 => + if bv == tp1 then + !Existential.isExistentialVar(tp2) + || bvs.contains(tp2) + || assoc1.exists(_._1 == tp2) + else + canInstantiateWith(assoc1) + case Nil => + false + Existential.isExistentialVar(tp1) && canInstantiateWith(assocExistentials) + + /** bi-map taking existentials to the left of a comparison to matching + * existentials on the right. This is not a bijection. However + * we have `forwards(backwards(bv)) == bv` for an existentially bound `bv`. + * That's enough to qualify as a BiTypeMap. + */ + private class MapExistentials(assoc: ExAssoc)(using Context) extends BiTypeMap: + + private def bad(t: Type) = + Existential.badExistential + .showing(i"existential match not found for $t in $assoc", capt) + + def apply(t: Type) = t match + case t: TermParamRef if Existential.isExistentialVar(t) => + // Find outermost existential on the right that can be instantiated to `t`, + // or `badExistential` if none exists. + def findMapped(assoc: ExAssoc): CaptureRef = assoc match + case (bv, assocBvs) :: assoc1 => + val outer = findMapped(assoc1) + if !Existential.isBadExistential(outer) then outer + else if assocBvs.contains(t) then bv + else bad(t) + case Nil => + bad(t) + findMapped(assoc) + case _ => + mapOver(t) + + /** The inverse takes existentials on the right to the innermost existential + * on the left to which they can be instantiated. + */ + lazy val inverse = new BiTypeMap: + def apply(t: Type) = t match + case t: TermParamRef if Existential.isExistentialVar(t) => + assoc.find(_._1 == t) match + case Some((_, bvs)) if bvs.nonEmpty => bvs.head + case _ => bad(t) + case _ => + mapOver(t) + + def inverse = MapExistentials.this + override def toString = "MapExistentials.inverse" + end inverse + end MapExistentials + protected def subCaptures(refs1: CaptureSet, refs2: CaptureSet, frozen: Boolean)(using Context): CaptureSet.CompareResult = + try + if assocExistentials.isEmpty then + refs1.subCaptures(refs2, frozen) + else + val mapped = refs1.map(MapExistentials(assocExistentials)) + if mapped.elems.exists(Existential.isBadExistential) + then CaptureSet.CompareResult.Fail(refs2 :: Nil) + else subCapturesMapped(mapped, refs2, frozen) + catch case ex: AssertionError => + println(i"fail while subCaptures $refs1 <:< $refs2") + throw ex + + protected def subCapturesMapped(refs1: CaptureSet, refs2: CaptureSet, frozen: Boolean)(using Context): CaptureSet.CompareResult = refs1.subCaptures(refs2, frozen) /** Is the boxing status of tp1 and tp2 the same, or alternatively, is @@ -2902,8 +3040,17 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling tp case tp: ConstantType => tp + case tp: AppliedType if tp.tryCompiletimeConstantFold.exists => + tp.tryCompiletimeConstantFold case tp: HKTypeLambda => tp + case tp: ParamRef => + val st = tp.superTypeNormalized + if st.exists then + disjointnessBoundary(st) + else + // workaround for when ParamRef#underlying returns NoType + defn.AnyType case tp: TypeProxy => disjointnessBoundary(tp.superTypeNormalized) case tp: WildcardType => @@ -2917,6 +3064,13 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case pair if pending != null && pending.contains(pair) => false + /* Nothing is not a class type in the spec but dotc represents it as if it were one. 
+ * Get it out of the way early to avoid mistakes (see for example #20897). + * Nothing ⋔ T and T ⋔ Nothing for all T. + */ + case (tp1, tp2) if tp1.isExactlyNothing || tp2.isExactlyNothing => + true + // Cases where there is an intersection or union on the right case (tp1, tp2: OrType) => provablyDisjoint(tp1, tp2.tp1, pending) && provablyDisjoint(tp1, tp2.tp2, pending) @@ -2929,14 +3083,21 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case (tp1: AndType, tp2) => provablyDisjoint(tp1.tp1, tp2, pending) || provablyDisjoint(tp1.tp2, tp2, pending) + /* Handle AnyKind now for the same reason as Nothing above: it is not a real class type. + * Other than the rules with Nothing, unions and intersections, there is structurally + * no rule such that AnyKind ⋔ T or T ⋔ AnyKind for any T. + */ + case (tp1, tp2) if tp1.isDirectRef(AnyKindClass) || tp2.isDirectRef(AnyKindClass) => + false + // Cases involving type lambdas case (tp1: HKTypeLambda, tp2: HKTypeLambda) => tp1.paramNames.sizeCompare(tp2.paramNames) != 0 || provablyDisjoint(tp1.resultType, tp2.resultType, pending) case (tp1: HKTypeLambda, tp2) => - !tp2.isDirectRef(defn.AnyKindClass) + true case (tp1, tp2: HKTypeLambda) => - !tp1.isDirectRef(defn.AnyKindClass) + true /* Cases where both are unique values (enum cases or constant types) * @@ -3035,22 +3196,19 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling cls.is(Sealed) && !cls.hasAnonymousChild def decompose(cls: Symbol): List[Symbol] = - cls.children.map { child => - if child.isTerm then child.info.classSymbol - else child + cls.children.flatMap { child => + if child.isTerm then + child.info.classSymbols // allow enum vals to be decomposed to their enum class (then filtered out) and any mixins + else child :: Nil }.filter(child => child.exists && child != cls) - // TODO? Special-case for Nothing and Null? We probably need Nothing/Null disjoint from Nothing/Null def eitherDerivesFromOther(cls1: Symbol, cls2: Symbol): Boolean = cls1.derivesFrom(cls2) || cls2.derivesFrom(cls1) def smallestNonTraitBase(cls: Symbol): Symbol = cls.asClass.baseClasses.find(!_.is(Trait)).get - if cls1 == defn.AnyKindClass || cls2 == defn.AnyKindClass then - // For some reason, A.derivesFrom(AnyKind) returns false, so we have to handle it specially - false - else if (eitherDerivesFromOther(cls1, cls2)) + if (eitherDerivesFromOther(cls1, cls2)) false else if (cls1.is(Final) || cls2.is(Final)) @@ -3074,6 +3232,12 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling end provablyDisjointClasses private def provablyDisjointTypeArgs(cls: ClassSymbol, args1: List[Type], args2: List[Type], pending: util.HashSet[(Type, Type)])(using Context): Boolean = + // sjrd: I will not be surprised when this causes further issues in the future. + // This is a compromise to be able to fix #21295 without breaking the world. + def cannotBeNothing(tp: Type): Boolean = tp match + case tp: TypeParamRef => cannotBeNothing(tp.paramInfo) + case _ => !(tp.loBound.stripTypeVar <:< defn.NothingType) + // It is possible to conclude that two types applied are disjoint by // looking at covariant type parameters if the said type parameters // are disjoint and correspond to fields. 
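Among the disjointnessBoundary additions above, the new AppliedType case folds applied compiletime operations to constants before disjointness is computed. A hedged source-level sketch of where that matters, using scala.compiletime.ops.int:

    import scala.compiletime.ops.int.+

    type IsZero[N <: Int] = N match
      case 0 => true
      case _ => false

    // 1 + 1 is an applied compiletime op; folding it to the constant 2 lets the
    // reducer prove the scrutinee disjoint from 0 and select the second case.
    val notZero: IsZero[1 + 1] = false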
@@ -3082,9 +3246,20 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling def covariantDisjoint(tp1: Type, tp2: Type, tparam: TypeParamInfo): Boolean = provablyDisjoint(tp1, tp2, pending) && typeparamCorrespondsToField(cls.appliedRef, tparam) - // In the invariant case, direct type parameter disjointness is enough. + // In the invariant case, we have more ways to prove disjointness: + // - either the type param corresponds to a field, like in the covariant case, or + // - one of the two actual args can never be `Nothing`. + // The latter condition, as tested by `cannotBeNothing`, is ad hoc and was + // not carefully evaluated to be sound. We have it because we had to + // reintroduce the former condition to fix #21295, and alone, that broke a + // lot of existing test cases. + // Having either one of the two conditions be true is better than not requiring + // any, which was the status quo before #21295. def invariantDisjoint(tp1: Type, tp2: Type, tparam: TypeParamInfo): Boolean = - provablyDisjoint(tp1, tp2, pending) + provablyDisjoint(tp1, tp2, pending) && { + typeparamCorrespondsToField(cls.appliedRef, tparam) + || (cannotBeNothing(tp1) || cannotBeNothing(tp2)) + } args1.lazyZip(args2).lazyZip(cls.typeParams).exists { (arg1, arg2, tparam) => @@ -3111,9 +3286,10 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling /** The trace of comparison operations when performing `op` */ def explained[T](op: ExplainingTypeComparer => T, header: String = "Subtype trace:", short: Boolean)(using Context): String = - val cmp = explainingTypeComparer(short) - inSubComparer(cmp)(op) - cmp.lastTrace(header) + explaining(cmp => { op(cmp); cmp.lastTrace(header) }, short) + + def explaining[T](op: ExplainingTypeComparer => T, short: Boolean)(using Context): T = + inSubComparer(explainingTypeComparer(short))(op) def reduceMatchWith[T](op: MatchReducer => T)(using Context): T = inSubComparer(matchReducer)(op) @@ -3283,11 +3459,17 @@ object TypeComparer { def explained[T](op: ExplainingTypeComparer => T, header: String = "Subtype trace:", short: Boolean = false)(using Context): String = comparing(_.explained(op, header, short)) + def explaining[T](op: ExplainingTypeComparer => T, short: Boolean = false)(using Context): T = + comparing(_.explaining(op, short)) + def reduceMatchWith[T](op: MatchReducer => T)(using Context): T = comparing(_.reduceMatchWith(op)) def subCaptures(refs1: CaptureSet, refs2: CaptureSet, frozen: Boolean)(using Context): CaptureSet.CompareResult = comparing(_.subCaptures(refs1, refs2, frozen)) + + def subsumesExistentially(tp1: TermParamRef, tp2: CaptureRef)(using Context) = + comparing(_.subsumesExistentially(tp1, tp2)) } object MatchReducer: @@ -3391,58 +3573,6 @@ class MatchReducer(initctx: Context) extends TypeComparer(initctx) { // See https://docs.scala-lang.org/sips/match-types-spec.html#matching def matchSpeccedPatMat(spec: MatchTypeCaseSpec.SpeccedPatMat): MatchResult = - /* Concreteness checking - * - * When following a baseType and reaching a non-wildcard, in-variant-pos type capture, - * we have to make sure that the scrutinee is concrete enough to uniquely determine - * the values of the captures. This comes down to checking that we do not follow any - * upper bound of an abstract type. - * - * See notably neg/wildcard-match.scala for examples of this. - * - * See neg/i13780.scala, neg/i13780-1.scala and neg/i19746.scala for - * ClassCastException reproducers if we disable this check. 
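For the invariant case discussed in this hunk, a hedged source-level sketch of why the extra conditions still let common match types reduce (Box and Elem are illustrative names): Box is invariant, but its parameter is field-backed, so Box[Int] and Box[String] can be proven disjoint.

    class Box[T](val value: T)   // T corresponds to the field `value`

    type Elem[X] = X match
      case Box[Int]    => Int
      case Box[String] => String

    // Reducing Elem[Box[String]] requires skipping the first case, i.e. proving
    // Box[String] disjoint from Box[Int]; Int and String are disjoint and the
    // invariant parameter is field-backed, so the reduction succeeds.
    val s: Elem[Box[String]] = "ok"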
- */ - - def isConcrete(tp: Type): Boolean = - val tp1 = tp.normalized - - tp1 match - case tp1: TypeRef => - if tp1.symbol.isClass then true - else - tp1.info match - case info: AliasingBounds => isConcrete(info.alias) - case _ => false - case tp1: AppliedType => - isConcrete(tp1.tycon) && isConcrete(tp1.superType) - case tp1: HKTypeLambda => - true - case tp1: TermRef => - !tp1.symbol.is(Param) && isConcrete(tp1.underlying) - case tp1: TermParamRef => - false - case tp1: SingletonType => - isConcrete(tp1.underlying) - case tp1: ExprType => - isConcrete(tp1.underlying) - case tp1: AnnotatedType => - isConcrete(tp1.parent) - case tp1: RefinedType => - isConcrete(tp1.underlying) - case tp1: RecType => - isConcrete(tp1.underlying) - case tp1: AndOrType => - isConcrete(tp1.tp1) && isConcrete(tp1.tp2) - case tp1: FlexibleType => - isConcrete(tp1.hi) - case _ => - val tp2 = tp1.stripped.stripLazyRef - (tp2 ne tp) && isConcrete(tp2) - end isConcrete - - // Actual matching logic - val instances = Array.fill[Type](spec.captureCount)(NoType) val noInstances = mutable.ListBuffer.empty[(TypeName, TypeBounds)] @@ -3513,10 +3643,8 @@ class MatchReducer(initctx: Context) extends TypeComparer(initctx) { case MatchTypeCasePattern.TypeMemberExtractor(typeMemberName, capture) => /** Try to remove references to `skolem` from a type in accordance with the spec. * - * If `betterMatchTypeExtractorsEnabled` is enabled then references - * to `skolem` occuring are avoided by following aliases and - * singletons, otherwise no attempt made to avoid references to - * `skolem`. + * References to `skolem` occuring are avoided by following aliases and + * singletons. * * If any reference to `skolem` remains in the result type, * `refersToSkolem` is set to true. @@ -3530,7 +3658,7 @@ class MatchReducer(initctx: Context) extends TypeComparer(initctx) { case `skolem` => refersToSkolem = true tp - case tp: NamedType if betterMatchTypeExtractorsEnabled => + case tp: NamedType => val pre1 = apply(tp.prefix) if refersToSkolem then tp match @@ -3548,7 +3676,7 @@ class MatchReducer(initctx: Context) extends TypeComparer(initctx) { tp.derivedSelect(pre1) else tp.derivedSelect(pre1) - case tp: LazyRef if betterMatchTypeExtractorsEnabled => + case tp: LazyRef => // By default, TypeMap maps LazyRefs lazily. We need to // force it for `refersToSkolem` to be correctly set. apply(tp.ref) @@ -3571,19 +3699,37 @@ class MatchReducer(initctx: Context) extends TypeComparer(initctx) { stableScrut.member(typeMemberName) match case denot: SingleDenotation if denot.exists => - val info = denot.info match - case alias: AliasingBounds => alias.alias // Extract the alias - case ClassInfo(prefix, cls, _, _, _) => prefix.select(cls) // Re-select the class from the prefix - case info => info // Notably, RealTypeBounds, which will eventually give a MatchResult.NoInstances - val info1 = stableScrut match + val info = stableScrut match case skolem: SkolemType => - dropSkolem(info, skolem).orElse: - info match - case info: TypeBounds => info // Will already trigger a MatchResult.NoInstances - case _ => RealTypeBounds(info, info) // Explicitly trigger a MatchResult.NoInstances - case _ => info - rec(capture, info1, variance = 0, scrutIsWidenedAbstract) + /* If it is a skolem type, we cannot have class selections nor + * abstract type selections. If it is an alias, we try to remove + * any reference to the skolem from the right-hand-side. If that + * succeeds, we take the result, otherwise we fail as not-specific. 
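The skolem handling above concerns match-type cases that extract an abstract type member from the scrutinee. A hedged sketch of such an extractor (Wrapper, InnerOf and IntWrapper are made-up names); the extracted member must be recoverable without leaking a reference to the skolemized scrutinee:

    trait Wrapper:
      type Inner

    type InnerOf[W] = W match
      case Wrapper { type Inner = t } => t

    object IntWrapper extends Wrapper:
      type Inner = Int

    // IntWrapper.type is stable and its Inner member is an alias of Int,
    // so InnerOf[IntWrapper.type] reduces to Int.
    val i: InnerOf[IntWrapper.type] = 42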
+ */ + + def adaptToTriggerNotSpecific(info: Type): Type = info match + case info: TypeBounds => info + case _ => RealTypeBounds(info, info) + + denot.info match + case denotInfo: AliasingBounds => + val alias = denotInfo.alias + dropSkolem(alias, skolem).orElse(adaptToTriggerNotSpecific(alias)) + case ClassInfo(prefix, cls, _, _, _) => + // for clean error messages + adaptToTriggerNotSpecific(prefix.select(cls)) + case denotInfo => + adaptToTriggerNotSpecific(denotInfo) + + case _ => + // The scrutinee type is truly stable. We select the type member directly on it. + stableScrut.select(typeMemberName) + end info + + rec(capture, info, variance = 0, scrutIsWidenedAbstract) + case _ => + // The type member was not found; no match false end rec @@ -3728,6 +3874,11 @@ class ExplainingTypeComparer(initctx: Context, short: Boolean) extends TypeCompa private val b = new StringBuilder private var lastForwardGoal: String | Null = null + private def appendFailure(x: String) = + if lastForwardGoal != null then // last was deepest goal that failed + b.append(s" = $x") + lastForwardGoal = null + override def traceIndented[T](str: String)(op: => T): T = val str1 = str.replace('\n', ' ') if short && str1 == lastForwardGoal then @@ -3739,12 +3890,13 @@ class ExplainingTypeComparer(initctx: Context, short: Boolean) extends TypeCompa b.append("\n").append(" " * indent).append("==> ").append(str1) val res = op if short then - if res == false then - if lastForwardGoal != null then // last was deepest goal that failed - b.append(" = false") - lastForwardGoal = null - else - b.length = curLength // don't show successful subtraces + res match + case false => + appendFailure("false") + case res: CaptureSet.CompareResult if res != CaptureSet.CompareResult.OK => + appendFailure(show(res)) + case _ => + b.length = curLength // don't show successful subtraces else b.append("\n").append(" " * indent).append("<== ").append(str1).append(" = ").append(show(res)) indent -= 2 @@ -3759,7 +3911,7 @@ class ExplainingTypeComparer(initctx: Context, short: Boolean) extends TypeCompa override def recur(tp1: Type, tp2: Type): Boolean = def moreInfo = if Config.verboseExplainSubtype || ctx.settings.verbose.value - then s" ${tp1.getClass} ${tp2.getClass}" + then s" ${tp1.className} ${tp2.className}" else "" val approx = approxState def approxStr = if short then "" else approx.show @@ -3797,5 +3949,10 @@ class ExplainingTypeComparer(initctx: Context, short: Boolean) extends TypeCompa super.subCaptures(refs1, refs2, frozen) } + override def subCapturesMapped(refs1: CaptureSet, refs2: CaptureSet, frozen: Boolean)(using Context): CaptureSet.CompareResult = + traceIndented(i"subcaptures mapped $refs1 <:< $refs2 ${if frozen then "frozen" else ""}") { + super.subCapturesMapped(refs1, refs2, frozen) + } + def lastTrace(header: String): String = header + { try b.toString finally b.clear() } } diff --git a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala index ce4956e6e847..33a1b6ae789e 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala @@ -5,7 +5,6 @@ package core import Symbols.*, Types.*, Contexts.*, Flags.*, Names.*, StdNames.*, Phases.* import Flags.JavaDefined import Uniques.unique -import TypeOps.makePackageObjPrefixExplicit import backend.sjs.JSDefinitions import transform.ExplicitOuter.* import transform.ValueClasses.* @@ -24,11 +23,16 @@ enum SourceLanguage: object SourceLanguage: /** The language in 
which `sym` was defined. */ def apply(sym: Symbol)(using Context): SourceLanguage = - if sym.is(JavaDefined) then + // We might be using this method while recalculating the denotation, + // so let's use `lastKnownDenotation`. + // This is ok as the source of the symbol and whether it is inline should + // not change between runs/phases. + val denot = sym.lastKnownDenotation + if denot.is(JavaDefined) then SourceLanguage.Java // Scala 2 methods don't have Inline set, except for the ones injected with `patchStdlibClass` // which are really Scala 3 methods. - else if sym.isClass && sym.is(Scala2x) || (sym.maybeOwner.is(Scala2x) && !sym.is(Inline)) then + else if denot.isClass && denot.is(Scala2x) || (denot.maybeOwner.lastKnownDenotation.is(Scala2x) && !denot.is(Inline)) then SourceLanguage.Scala2 else SourceLanguage.Scala3 diff --git a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala index 5b19fe0e7bdd..1c9696da67d1 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala @@ -57,17 +57,32 @@ end TypeError class MalformedType(pre: Type, denot: Denotation, absMembers: Set[Name])(using Context) extends TypeError: def toMessage(using Context) = em"malformed type: $pre is not a legal prefix for $denot because it contains abstract type member${if (absMembers.size == 1) "" else "s"} ${absMembers.mkString(", ")}" -class MissingType(pre: Type, name: Name)(using Context) extends TypeError: - private def otherReason(pre: Type)(using Context): String = pre match { - case pre: ThisType if pre.cls.givenSelfType.exists => - i"\nor the self type of $pre might not contain all transitive dependencies" - case _ => "" - } +class MissingType(val pre: Type, val name: Name)(using Context) extends TypeError: + + def reason(using Context): String = + def missingClassFile = + "The classfile defining the type might be missing from the classpath" + val cls = pre.classSymbol + val givenSelf = cls match + case cls: ClassSymbol => cls.givenSelfType + case _ => NoType + pre match + case pre: ThisType if pre.cls.givenSelfType.exists => + i"""$missingClassFile + |or the self type of $pre might not contain all transitive dependencies""" + case _ if givenSelf.exists && givenSelf.member(name).exists => + i"""$name exists as a member of the self type $givenSelf of $cls + |but it cannot be called on a receiver whose type does not extend $cls""" + case _ if pre.baseClasses.exists(_.findMember(name, pre, Private, EmptyFlags).exists) => + i"$name is a private member in a base class" + case _ => + missingClassFile + override def toMessage(using Context): Message = if ctx.debug then printStackTrace() - em"""cannot resolve reference to type $pre.$name - |the classfile defining the type might be missing from the classpath${otherReason(pre)}""" + em"""Cannot resolve reference to type $pre.$name. 
+ |$reason.""" end MissingType class RecursionOverflow(val op: String, details: => String, val previous: Throwable, val weight: Int)(using Context) diff --git a/compiler/src/dotty/tools/dotc/core/TypeEval.scala b/compiler/src/dotty/tools/dotc/core/TypeEval.scala index af4f1e0153dd..4d5496cff880 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeEval.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeEval.scala @@ -101,7 +101,7 @@ object TypeEval: expectArgsNum(1) val arg = tp.args.head val cls = arg.classSymbol - if cls.is(CaseClass) then + if MatchTypes.isConcrete(arg) && cls.is(CaseClass) then val fields = cls.caseAccessors val fieldLabels = fields.map: field => ConstantType(Constant(field.name.toString)) diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index 1282b77f013e..1106ba68fb97 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -18,7 +18,7 @@ import typer.ForceDegree import typer.Inferencing.* import typer.IfBottom import reporting.TestingReporter -import cc.{CapturingType, derivedCapturingType, CaptureSet, isBoxed, isBoxedCapturing} +import cc.{CapturingType, derivedCapturingType, CaptureSet, captureSet, isBoxed, isBoxedCapturing} import CaptureSet.{CompareResult, IdempotentCaptRefMap, IdentityCaptRefMap} import scala.annotation.internal.sharable @@ -190,6 +190,10 @@ object TypeOps: // Mapping over a skolem creates a new skolem which by definition won't // be =:= to the original one. tp + case tp: SuperType => + // Mapping a supertype might re-balance an AndType which is not permitted since + // we need the original order of parents for current super resolution. + tp case _ => mapOver } @@ -556,36 +560,6 @@ object TypeOps: widenMap(tp) } - /** If `tpe` is of the form `p.x` where `p` refers to a package - * but `x` is not owned by a package, expand it to - * - * p.package.x - */ - def makePackageObjPrefixExplicit(tpe: NamedType)(using Context): Type = { - def tryInsert(pkgClass: SymDenotation): Type = pkgClass match { - case pkg: PackageClassDenotation => - var sym = tpe.symbol - if !sym.exists && tpe.denot.isOverloaded then - // we know that all alternatives must come from the same package object, since - // otherwise we would get "is already defined" errors. So we can take the first - // symbol we see. 
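The enriched MissingType.reason above distinguishes a missing classfile, a member that only comes from a self type, and a private member in a base class. A hedged source-level sketch of the self-type branch (Deps and Component are illustrative names): the type member is provided by the declared self type, not by the class itself, so it can only be selected through a reference that is known to also have the self type.

    trait Deps:
      type Config

    trait Component:
      self: Deps =>
      def current: self.Config   // Config is visible here only via the self type

    // A bare `Component` reference has no Config member; only an intersection
    // with Deps (or a subclass that mixes Deps in) can name it.
    def use(c: Component & Deps): c.Config = c.current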
- sym = tpe.denot.alternatives.head.symbol - val pobj = pkg.packageObjFor(sym) - if (pobj.exists) tpe.derivedSelect(pobj.termRef) - else tpe - case _ => - tpe - } - if (tpe.symbol.isRoot) - tpe - else - tpe.prefix match { - case pre: ThisType if pre.cls.is(Package) => tryInsert(pre.cls) - case pre: TermRef if pre.symbol.is(Package) => tryInsert(pre.symbol.moduleClass) - case _ => tpe - } - } - /** An argument bounds violation is a triple consisting of * - the argument tree * - a string "upper" or "lower" indicating which bound is violated @@ -687,11 +661,20 @@ object TypeOps: val hiBound = instantiate(bounds.hi, skolemizedArgTypes) val loBound = instantiate(bounds.lo, skolemizedArgTypes) - def check(using Context) = { - if (!(lo <:< hiBound)) violations += ((arg, "upper", hiBound)) - if (!(loBound <:< hi)) violations += ((arg, "lower", loBound)) - } - check(using checkCtx) + def check(tp1: Type, tp2: Type, which: String, bound: Type)(using Context) = + val isSub = TypeComparer.isSubType(tp1, tp2) + if !isSub then + // inContext(ctx.fresh.setSetting(ctx.settings.verbose, true)): // uncomment to enable moreInfo in ExplainingTypeComparer + TypeComparer.explaining: cmp => + if !ctx.typerState.constraint.domainLambdas.isEmpty then + typr.println(i"${ctx.typerState.constraint}") + if !ctx.gadt.symbols.isEmpty then + typr.println(i"${ctx.gadt}") + typr.println(cmp.lastTrace(i"checkOverlapsBounds($lo, $hi, $arg, $bounds)($which)")) + violations += ((arg, which, bound)) + + check(lo, hiBound, "upper", hiBound)(using checkCtx) + check(loBound, hi, "lower", loBound)(using checkCtx) } def loop(args: List[Tree], boundss: List[TypeBounds]): Unit = args match @@ -754,6 +737,67 @@ object TypeOps: * Otherwise, return NoType. */ private def instantiateToSubType(tp1: NamedType, tp2: Type, mixins: List[Type])(using Context): Type = trace(i"instantiateToSubType($tp1, $tp2, $mixins)", typr) { + /** Gather GADT symbols and singletons found in `tp2`, ie. the scrutinee. */ + object TraverseTp2 extends TypeTraverser: + val singletons = util.HashMap[Symbol, SingletonType]() + val gadtSyms = new mutable.ListBuffer[Symbol] + + def traverse(tp: Type) = try + val tpd = tp.dealias + if tpd ne tp then traverse(tpd) + else tp match + case tp: ThisType if !singletons.contains(tp.tref.symbol) && !tp.tref.symbol.isStaticOwner => + singletons(tp.tref.symbol) = tp + traverseChildren(tp.tref) + case tp: TermRef => + singletons(tp.typeSymbol) = tp + traverseChildren(tp) + case tp: TypeRef if !gadtSyms.contains(tp.symbol) && tp.symbol.isAbstractOrParamType => + gadtSyms += tp.symbol + traverseChildren(tp) + // traverse abstract type infos, to add any singletons + // for example, i16451.CanForward.scala, add `Namer.this`, from the info of the type parameter `A1` + // also, i19031.ci-reg2.scala, add `out`, from the info of the type parameter `A1` (from synthetic applyOrElse) + traverseChildren(tp.info) + case _ => + traverseChildren(tp) + catch case ex: Throwable => handleRecursive("traverseTp2", tp.show, ex) + TraverseTp2.traverse(tp2) + val singletons = TraverseTp2.singletons + val gadtSyms = TraverseTp2.gadtSyms.toList + + // Prefix inference, given `p.C.this.Child`: + // 1. return it as is, if `C.this` is found in `tp`, i.e. the scrutinee; or + // 2. replace it with `X.Child` where `X <: p.C`, stripping ThisType in `p` recursively. 
+ // + // See tests/patmat/i3938.scala, tests/pos/i15029.more.scala, tests/pos/i16785.scala + class InferPrefixMap extends TypeMap { + var prefixTVar: Type | Null = null + def apply(tp: Type): Type = tp match { + case tp: TermRef if singletons.contains(tp.symbol) => + prefixTVar = singletons(tp.symbol) // e.g. tests/pos/i19031.ci-reg2.scala, keep out + prefixTVar.uncheckedNN + case ThisType(tref) if !tref.symbol.isStaticOwner => + val symbol = tref.symbol + if singletons.contains(symbol) then + prefixTVar = singletons(symbol) // e.g. tests/pos/i16785.scala, keep Outer.this + prefixTVar.uncheckedNN + else if symbol.is(Module) then + TermRef(this(tref.prefix), symbol.sourceModule) + else if (prefixTVar != null) + this(tref.applyIfParameterized(tref.typeParams.map(_ => WildcardType))) + else { + prefixTVar = WildcardType // prevent recursive call from assigning it + // e.g. tests/pos/i15029.more.scala, create a TypeVar for `Instances`' B, so we can disregard `Ints` + val tvars = tref.typeParams.map { tparam => newTypeVar(tparam.paramInfo.bounds, DepParamName.fresh(tparam.paramName)) } + val tref2 = this(tref.applyIfParameterized(tvars)) + prefixTVar = newTypeVar(TypeBounds.upper(tref2), DepParamName.fresh(tref.name)) + prefixTVar.uncheckedNN + } + case tp => mapOver(tp) + } + } + // In order for a child type S to qualify as a valid subtype of the parent // T, we need to test whether it is possible S <: T. // @@ -775,8 +819,15 @@ object TypeOps: // then to avoid it failing the <:< // we'll approximate by widening to its bounds + case tp: TermRef if singletons.contains(tp.symbol) => + singletons(tp.symbol) + case ThisType(tref: TypeRef) if !tref.symbol.isStaticOwner => - tref + val symbol = tref.symbol + if singletons.contains(symbol) then + singletons(symbol) + else + tref case tp: TypeRef if !tp.symbol.isClass => val lookup = boundTypeParams.lookup(tp) @@ -827,69 +878,10 @@ object TypeOps: } } - /** Gather GADT symbols and singletons found in `tp2`, ie. the scrutinee. */ - object TraverseTp2 extends TypeTraverser: - val singletons = util.HashMap[Symbol, SingletonType]() - val gadtSyms = new mutable.ListBuffer[Symbol] - - def traverse(tp: Type) = try - val tpd = tp.dealias - if tpd ne tp then traverse(tpd) - else tp match - case tp: ThisType if !singletons.contains(tp.tref.symbol) && !tp.tref.symbol.isStaticOwner => - singletons(tp.tref.symbol) = tp - traverseChildren(tp.tref) - case tp: TermRef if tp.symbol.is(Param) => - singletons(tp.typeSymbol) = tp - traverseChildren(tp) - case tp: TypeRef if !gadtSyms.contains(tp.symbol) && tp.symbol.isAbstractOrParamType => - gadtSyms += tp.symbol - traverseChildren(tp) - // traverse abstract type infos, to add any singletons - // for example, i16451.CanForward.scala, add `Namer.this`, from the info of the type parameter `A1` - // also, i19031.ci-reg2.scala, add `out`, from the info of the type parameter `A1` (from synthetic applyOrElse) - traverseChildren(tp.info) - case _ => - traverseChildren(tp) - catch case ex: Throwable => handleRecursive("traverseTp2", tp.show, ex) - TraverseTp2.traverse(tp2) - val singletons = TraverseTp2.singletons - val gadtSyms = TraverseTp2.gadtSyms.toList - - // Prefix inference, given `p.C.this.Child`: - // 1. return it as is, if `C.this` is found in `tp`, i.e. the scrutinee; or - // 2. replace it with `X.Child` where `X <: p.C`, stripping ThisType in `p` recursively. 
- // - // See tests/patmat/i3938.scala, tests/pos/i15029.more.scala, tests/pos/i16785.scala - class InferPrefixMap extends TypeMap { - var prefixTVar: Type | Null = null - def apply(tp: Type): Type = tp match { - case tp: TermRef if singletons.contains(tp.symbol) => - prefixTVar = singletons(tp.symbol) // e.g. tests/pos/i19031.ci-reg2.scala, keep out - prefixTVar.uncheckedNN - case ThisType(tref) if !tref.symbol.isStaticOwner => - val symbol = tref.symbol - if singletons.contains(symbol) then - prefixTVar = singletons(symbol) // e.g. tests/pos/i16785.scala, keep Outer.this - prefixTVar.uncheckedNN - else if symbol.is(Module) then - TermRef(this(tref.prefix), symbol.sourceModule) - else if (prefixTVar != null) - this(tref.applyIfParameterized(tref.typeParams.map(_ => WildcardType))) - else { - prefixTVar = WildcardType // prevent recursive call from assigning it - // e.g. tests/pos/i15029.more.scala, create a TypeVar for `Instances`' B, so we can disregard `Ints` - val tvars = tref.typeParams.map { tparam => newTypeVar(tparam.paramInfo.bounds, DepParamName.fresh(tparam.paramName)) } - val tref2 = this(tref.applyIfParameterized(tvars)) - prefixTVar = newTypeVar(TypeBounds.upper(tref2), DepParamName.fresh(tref.name)) - prefixTVar.uncheckedNN - } - case tp => mapOver(tp) - } - } - val inferThisMap = new InferPrefixMap - val tvars = tp1.typeParams.map { tparam => newTypeVar(tparam.paramInfo.bounds, DepParamName.fresh(tparam.paramName)) } + val tvars = tp1.etaExpand match + case eta: TypeLambda => constrained(eta) + case _ => Nil val protoTp1 = inferThisMap.apply(tp1).appliedTo(tvars) if gadtSyms.nonEmpty then @@ -908,7 +900,11 @@ object TypeOps: for tp <- mixins.reverseIterator do protoTp1 <:< tp maximizeType(protoTp1, NoSpan) - wildApprox(protoTp1) + val inst = wildApprox(protoTp1) + if inst.classSymbols.isEmpty then + // E.g. 
i21790, can't instantiate S#CA as a subtype of O.A, because O.CA isn't accessible + NoType + else inst } if (protoTp1 <:< tp2) instantiate() diff --git a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala index afc2cc39f9cf..f343d7227bf8 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala @@ -3,11 +3,12 @@ package dotc package core import TypeErasure.ErasedValueType -import Types.*, Contexts.*, Symbols.*, Flags.*, Decorators.* +import Types.*, Contexts.*, Symbols.*, Flags.*, Decorators.*, SymDenotations.* import Names.{Name, TermName} import Constants.Constant import Names.Name +import StdNames.nme import config.Feature class TypeUtils: @@ -129,7 +130,7 @@ class TypeUtils: def namedTupleElementTypesUpTo(bound: Int, normalize: Boolean = true)(using Context): List[(TermName, Type)] = (if normalize then self.normalized else self).dealias match case defn.NamedTuple(nmes, vals) => - val names = nmes.tupleElementTypesUpTo(bound, normalize).getOrElse(Nil).map: + val names = nmes.tupleElementTypesUpTo(bound, normalize).getOrElse(Nil).map(_.dealias).map: case ConstantType(Constant(str: String)) => str.toTermName case t => throw TypeError(em"Malformed NamedTuple: names must be string types, but $t was found.") val values = vals.tupleElementTypesUpTo(bound, normalize).getOrElse(Nil) @@ -185,10 +186,64 @@ class TypeUtils: case self: Types.ThisType => self.cls == cls case _ => false + /** If `self` is of the form `p.x` where `p` refers to a package + * but `x` is not owned by a package, expand it to + * + * p.package.x + */ + def makePackageObjPrefixExplicit(using Context): Type = + def tryInsert(tpe: NamedType, pkgClass: SymDenotation): Type = pkgClass match + case pkg: PackageClassDenotation => + var sym = tpe.symbol + if !sym.exists && tpe.denot.isOverloaded then + // we know that all alternatives must come from the same package object, since + // otherwise we would get "is already defined" errors. So we can take the first + // symbol we see. + sym = tpe.denot.alternatives.head.symbol + val pobj = pkg.packageObjFor(sym) + if pobj.exists then tpe.derivedSelect(pobj.termRef) + else tpe + case _ => + tpe + self match + case tpe: NamedType => + if tpe.symbol.isRoot then + tpe + else + tpe.prefix match + case pre: ThisType if pre.cls.is(Package) => tryInsert(tpe, pre.cls) + case pre: TermRef if pre.symbol.is(Package) => tryInsert(tpe, pre.symbol.moduleClass) + case _ => tpe + case tpe => tpe + /** Strip all outer refinements off this type */ def stripRefinement: Type = self match case self: RefinedOrRecType => self.parent.stripRefinement case seld => self + /** The constructors of this type that are applicable to `argTypes`, without needing + * an implicit conversion. Curried constructors are always excluded. + * @param adaptVarargs if true, allow a constructor with just a varargs argument to + * match an empty argument list. 
+ */ + def applicableConstructors(argTypes: List[Type], adaptVarargs: Boolean)(using Context): List[Symbol] = + def isApplicable(constr: Symbol): Boolean = + def recur(ctpe: Type): Boolean = ctpe match + case ctpe: PolyType => + if argTypes.isEmpty then recur(ctpe.resultType) // no need to know instances + else recur(ctpe.instantiate(self.argTypes)) + case ctpe: MethodType => + var paramInfos = ctpe.paramInfos + if adaptVarargs && paramInfos.length == argTypes.length + 1 + && atPhaseNoLater(Phases.elimRepeatedPhase)(constr.info.isVarArgsMethod) + then // accept missing argument for varargs parameter + paramInfos = paramInfos.init + argTypes.corresponds(paramInfos)(_ <:< _) && !ctpe.resultType.isInstanceOf[MethodType] + case _ => + false + recur(constr.info) + + self.decl(nme.CONSTRUCTOR).altsWith(isApplicable).map(_.symbol) + end TypeUtils diff --git a/compiler/src/dotty/tools/dotc/core/TyperState.scala b/compiler/src/dotty/tools/dotc/core/TyperState.scala index 160d7749de61..d4345916ba77 100644 --- a/compiler/src/dotty/tools/dotc/core/TyperState.scala +++ b/compiler/src/dotty/tools/dotc/core/TyperState.scala @@ -139,14 +139,15 @@ class TyperState() { def uncommittedAncestor: TyperState = if (isCommitted && previous != null) previous.uncheckedNN.uncommittedAncestor else this - /** Commit typer state so that its information is copied into current typer state + /** Commit `this` typer state by copying information into the current typer state, + * where "current" means contextual, so meaning `ctx.typerState`. * In addition (1) the owning state of undetermined or temporarily instantiated * type variables changes from this typer state to the current one. (2) Variables * that were temporarily instantiated in the current typer state are permanently * instantiated instead. * * A note on merging: An interesting test case is isApplicableSafe.scala. It turns out that this - * requires a context merge using the new `&' operator. Sequence of actions: + * requires a context merge using the new `&` operator. Sequence of actions: * 1) Typecheck argument in typerstate 1. * 2) Cache argument. 
* 3) Evolve same typer state (to typecheck other arguments, say) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index eeffc41d4159..31e11487ae38 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -38,11 +38,13 @@ import config.Printers.{core, typr, matchTypes} import reporting.{trace, Message} import java.lang.ref.WeakReference import compiletime.uninitialized -import cc.{CapturingType, CaptureSet, derivedCapturingType, isBoxedCapturing, isCaptureChecking, isRetains, isRetainsLike} +import cc.{CapturingType, CaptureRef, CaptureSet, SingletonCaptureRef, isTrackableRef, + derivedCapturingType, isBoxedCapturing, isCaptureChecking, isRetains, isRetainsLike} import CaptureSet.{CompareResult, IdempotentCaptRefMap, IdentityCaptRefMap} import scala.annotation.internal.sharable import scala.annotation.threadUnsafe +import dotty.tools.dotc.cc.ccConfig object Types extends TypeUtils { @@ -76,6 +78,7 @@ object Types extends TypeUtils { * | +- HKTypeLambda * | +- MatchType * | +- FlexibleType + * | +- LazyRef * | * +- GroundType -+- AndType * +- OrType @@ -141,6 +144,9 @@ object Types extends TypeUtils { !t.isPermanentlyInstantiated || test(t.permanentInst, theAcc) case t: LazyRef => !t.completed || test(t.ref, theAcc) + case t: ParamRef => + (t: Type).mightBeProvisional = false // break cycles + test(t.underlying, theAcc) case _ => (if theAcc != null then theAcc else ProAcc()).foldOver(false, t) end if @@ -196,7 +202,9 @@ object Types extends TypeUtils { */ def isRef(sym: Symbol, skipRefined: Boolean = true)(using Context): Boolean = this match { case this1: TypeRef => - this1.info match { // see comment in Namer#TypeDefCompleter#typeSig + // avoid forcing symbol if it's a class, not a type alias (see i15177.FakeEnum.scala) + if this1.symbol.isClass then this1.symbol eq sym + else this1.info match { // see comment in Namer#TypeDefCompleter#typeSig case TypeAlias(tp) => tp.isRef(sym, skipRefined) case _ => this1.symbol eq sym } @@ -328,6 +336,15 @@ object Types extends TypeUtils { /** Is this type a (possibly aliased) singleton type? */ def isSingleton(using Context): Boolean = dealias.isInstanceOf[SingletonType] + /** Is this type a (possibly aliased) singleton type or a type proxy + * or an AndType where one operand is effectively a singleton? + */ + def isEffectivelySingleton(using Context): Boolean = dealias match + case tp: SingletonType => true + case tp: TypeProxy => tp.superType.isEffectivelySingleton + case AndType(tpL, tpR) => tpL.isEffectivelySingleton || tpR.isEffectivelySingleton + case _ => false + /** Is this upper-bounded by a (possibly aliased) singleton type? * Overridden in TypeVar */ @@ -480,14 +497,7 @@ object Types extends TypeUtils { case _ => false /** Does this application expand to a match type? */ - def isMatchAlias(using Context): Boolean = underlyingMatchType.exists - - def underlyingMatchType(using Context): Type = stripped match { - case tp: MatchType => tp - case tp: HKTypeLambda => tp.resType.underlyingMatchType - case tp: AppliedType => tp.underlyingMatchType - case _ => NoType - } + def isMatchAlias(using Context): Boolean = underlyingNormalizable.isMatch /** Is this a higher-kinded type lambda with given parameter variances? 
* These lambdas are used as the RHS of higher-kinded abstract types or @@ -513,11 +523,6 @@ object Types extends TypeUtils { */ def isDeclaredVarianceLambda: Boolean = false - /** Is this type a CaptureRef that can be tracked? - * This is true for all ThisTypes or ParamRefs but only for some NamedTypes. - */ - def isTrackableRef(using Context): Boolean = false - /** Does this type contain wildcard types? */ final def containsWildcardTypes(using Context) = existsPart(_.isInstanceOf[WildcardType], StopAt.Static, forceLazy = false) @@ -862,19 +867,23 @@ object Types extends TypeUtils { } else val isRefinedMethod = rinfo.isInstanceOf[MethodOrPoly] - val joint = pdenot.meet( - new JointRefDenotation(NoSymbol, rinfo, Period.allInRun(ctx.runId), pre, isRefinedMethod), - pre, - safeIntersection = ctx.base.pendingMemberSearches.contains(name)) - joint match - case joint: SingleDenotation - if isRefinedMethod - && (rinfo <:< joint.info - || name == nme.apply && defn.isFunctionType(tp.parent)) => - // use `rinfo` to keep the right parameter names for named args. See i8516.scala. - joint.derivedSingleDenotation(joint.symbol, rinfo, pre, isRefinedMethod) + rinfo match + case CapturingType(_, refs: CaptureSet.RefiningVar) if ccConfig.optimizedRefinements => + pdenot.asSingleDenotation.derivedSingleDenotation(pdenot.symbol, rinfo) case _ => - joint + val joint = pdenot.meet( + new JointRefDenotation(NoSymbol, rinfo, Period.allInRun(ctx.runId), pre, isRefinedMethod), + pre, + safeIntersection = ctx.base.pendingMemberSearches.contains(name)) + joint match + case joint: SingleDenotation + if isRefinedMethod + && (rinfo <:< joint.info + || name == nme.apply && defn.isFunctionType(tp.parent)) => + // use `rinfo` to keep the right parameter names for named args. See i8516.scala. + joint.derivedSingleDenotation(joint.symbol, rinfo, pre, isRefinedMethod) + case _ => + joint } def goApplied(tp: AppliedType, tycon: HKTypeLambda) = @@ -1305,7 +1314,8 @@ object Types extends TypeUtils { final def widen(using Context): Type = this match case _: TypeRef | _: MethodOrPoly => this // fast path for most frequent cases case tp: TermRef => // fast path for next most frequent case - if tp.isOverloaded then tp else tp.underlying.widen + val denot = tp.denot + if denot.isOverloaded then tp else denot.info.widen case tp: SingletonType => tp.underlying.widen case tp: ExprType => tp.resultType.widen case tp => @@ -1319,7 +1329,10 @@ object Types extends TypeUtils { * base type by applying one or more `underlying` dereferences. */ final def widenSingleton(using Context): Type = stripped match { - case tp: SingletonType if !tp.isOverloaded => tp.underlying.widenSingleton + case tp: TermRef => + val denot = tp.denot + if denot.isOverloaded then this else denot.info.widenSingleton + case tp: SingletonType => tp.underlying.widenSingleton case _ => this } @@ -1327,7 +1340,9 @@ object Types extends TypeUtils { * base type, while also skipping Expr types. */ final def widenTermRefExpr(using Context): Type = stripTypeVar match { - case tp: TermRef if !tp.isOverloaded => tp.underlying.widenExpr.widenTermRefExpr + case tp: TermRef => + val denot = tp.denot + if denot.isOverloaded then this else denot.info.widenExpr.widenTermRefExpr case _ => this } @@ -1540,19 +1555,24 @@ object Types extends TypeUtils { } deskolemizer(this) - /** The result of normalization using `tryNormalize`, or the type itself if - * tryNormlize yields NoType + /** The result of normalization, or the type itself if none apply. 
*/ + final def normalized(using Context): Type = tryNormalize.orElse(this) + + /** If this type has an underlying match type or applied compiletime.ops, + * then the result after applying all toplevel normalizations, otherwise NoType. */ - final def normalized(using Context): Type = { - val normed = tryNormalize - if (normed.exists) normed else this - } + def tryNormalize(using Context): Type = underlyingNormalizable match + case mt: MatchType => mt.reduced.normalized + case tp: AppliedType => tp.tryCompiletimeConstantFold + case _ => NoType - /** If this type can be normalized at the top-level by rewriting match types - * of S[n] types, the result after applying all toplevel normalizations, - * otherwise NoType + /** Perform successive strippings, and beta-reductions of applied types until + * a match type or applied compiletime.ops is reached, if any, otherwise NoType. */ - def tryNormalize(using Context): Type = NoType + def underlyingNormalizable(using Context): Type = stripped.stripLazyRef match + case tp: MatchType => tp + case tp: AppliedType => tp.underlyingNormalizable + case _ => NoType private def widenDealias1(keep: AnnotatedType => Context ?=> Boolean)(using Context): Type = { val res = this.widen.dealias1(keep, keepOpaques = false) @@ -1644,9 +1664,6 @@ object Types extends TypeUtils { case _ => if (isRepeatedParam) this.argTypesHi.head else this } - /** The capture set of this type. Overridden and cached in CaptureRef */ - def captureSet(using Context): CaptureSet = CaptureSet.ofType(this, followResult = false) - // ----- Normalizing typerefs over refined types ---------------------------- /** If this normalizes* to a refinement type that has a refinement for `name` (which might be followed @@ -2037,20 +2054,6 @@ object Types extends TypeUtils { case _ => this - /** A type capturing `ref` */ - def capturing(ref: CaptureRef)(using Context): Type = - if captureSet.accountsFor(ref) then this - else CapturingType(this, ref.singletonCaptureSet) - - /** A type capturing the capture set `cs`. If this type is already a capturing type - * the two capture sets are combined. - */ - def capturing(cs: CaptureSet)(using Context): Type = - if cs.isAlwaysEmpty || cs.isConst && cs.subCaptures(captureSet, frozen = true).isOK then this - else this match - case CapturingType(parent, cs1) => parent.capturing(cs1 ++ cs) - case _ => CapturingType(this, cs) - /** The set of distinct symbols referred to by this type, after all aliases are expanded */ def coveringSet(using Context): Set[Symbol] = (new CoveringSetAccumulator).apply(Set.empty[Symbol], this) @@ -2249,64 +2252,6 @@ object Types extends TypeUtils { def isOverloaded(using Context): Boolean = false } - /** A trait for references in CaptureSets. These can be NamedTypes, ThisTypes or ParamRefs */ - trait CaptureRef extends TypeProxy, ValueType: - private var myCaptureSet: CaptureSet | Null = uninitialized - private var myCaptureSetRunId: Int = NoRunId - private var mySingletonCaptureSet: CaptureSet.Const | Null = null - - /** Is the reference tracked? This is true if it can be tracked and the capture - * set of the underlying type is not always empty. - */ - final def isTracked(using Context): Boolean = - isTrackableRef && (isRootCapability || !captureSetOfInfo.isAlwaysEmpty) - - /** Is this a reach reference of the form `x*`? */ - def isReach(using Context): Boolean = false // overridden in AnnotatedType - - /** Is this a maybe reference of the form `x?`? 
*/ - def isMaybe(using Context): Boolean = false // overridden in AnnotatedType - - def stripReach(using Context): CaptureRef = this // overridden in AnnotatedType - def stripMaybe(using Context): CaptureRef = this // overridden in AnnotatedType - - /** Is this reference the generic root capability `cap` ? */ - def isRootCapability(using Context): Boolean = false - - /** Normalize reference so that it can be compared with `eq` for equality */ - def normalizedRef(using Context): CaptureRef = this - - /** The capture set consisting of exactly this reference */ - def singletonCaptureSet(using Context): CaptureSet.Const = - if mySingletonCaptureSet == null then - mySingletonCaptureSet = CaptureSet(this.normalizedRef) - mySingletonCaptureSet.uncheckedNN - - /** The capture set of the type underlying this reference */ - def captureSetOfInfo(using Context): CaptureSet = - if ctx.runId == myCaptureSetRunId then myCaptureSet.nn - else if myCaptureSet.asInstanceOf[AnyRef] eq CaptureSet.Pending then CaptureSet.empty - else - myCaptureSet = CaptureSet.Pending - val computed = CaptureSet.ofInfo(this) - if !isCaptureChecking || underlying.isProvisional then - myCaptureSet = null - else - myCaptureSet = computed - myCaptureSetRunId = ctx.runId - computed - - def invalidateCaches() = - myCaptureSetRunId = NoRunId - - override def captureSet(using Context): CaptureSet = - val cs = captureSetOfInfo - if isTrackableRef && !cs.isAlwaysEmpty then singletonCaptureSet else cs - - end CaptureRef - - trait SingletonCaptureRef extends SingletonType, CaptureRef - /** A trait for types that bind other types that refer to them. * Instances are: LambdaType, RecType. */ @@ -2549,7 +2494,10 @@ object Types extends TypeUtils { } private def disambiguate(d: Denotation)(using Context): Denotation = - disambiguate(d, currentSignature, currentSymbol.targetName) + // this method might be triggered while the denotation is already being recomputed + // in NamedType, so it's better to use lastKnownDenotation instead, as targetName + // should not change between phases/runs + disambiguate(d, currentSignature, currentSymbol.lastKnownDenotation.targetName) private def disambiguate(d: Denotation, sig: Signature | Null, target: Name)(using Context): Denotation = if (sig != null) @@ -2995,28 +2943,11 @@ object Types extends TypeUtils { def implicitName(using Context): TermName = name def underlyingRef: TermRef = this - /** A term reference can be tracked if it is a local term ref to a value - * or a method term parameter. References to term parameters of classes - * cannot be tracked individually. - * They are subsumed in the capture sets of the enclosing class. - * TODO: ^^^ What about call-by-name? 
- */ - override def isTrackableRef(using Context) = - ((prefix eq NoPrefix) - || symbol.is(ParamAccessor) && prefix.isThisTypeOf(symbol.owner) - || isRootCapability - ) && !symbol.isOneOf(UnstableValueFlags) - - override def isRootCapability(using Context): Boolean = - name == nme.CAPTURE_ROOT && symbol == defn.captureRoot - - override def normalizedRef(using Context): CaptureRef = - if isTrackableRef then symbol.termRef else this } abstract case class TypeRef(override val prefix: Type, private var myDesignator: Designator) - extends NamedType { + extends NamedType, CaptureRef { type ThisType = TypeRef type ThisName = TypeName @@ -3065,6 +2996,7 @@ object Types extends TypeUtils { /** Hook that can be called from creation methods in TermRef and TypeRef */ def validated(using Context): this.type = this + } final class CachedTermRef(prefix: Type, designator: Designator, hc: Int) extends TermRef(prefix, designator) { @@ -3166,8 +3098,6 @@ object Types extends TypeUtils { // can happen in IDE if `cls` is stale } - override def isTrackableRef(using Context) = true - override def computeHash(bs: Binders): Int = doHash(bs, tref) override def eql(that: Type): Boolean = that match { @@ -3241,8 +3171,6 @@ object Types extends TypeUtils { private var myRef: Type | Null = null private var computed = false - override def tryNormalize(using Context): Type = ref.tryNormalize - def ref(using Context): Type = if computed then if myRef == null then @@ -3527,6 +3455,8 @@ object Types extends TypeUtils { else this match case tp: OrType => OrType.make(tp1, tp2, tp.isSoft) case tp: AndType => AndType.make(tp1, tp2, checkValid = true) + + override def hashIsStable: Boolean = tp1.hashIsStable && tp2.hashIsStable } abstract case class AndType(tp1: Type, tp2: Type) extends AndOrType { @@ -3572,6 +3502,10 @@ object Types extends TypeUtils { case that: AndType => tp1.eq(that.tp1) && tp2.eq(that.tp2) case _ => false } + + override protected def iso(that: Any, bs: BinderPairs) = that match + case that: AndType => tp1.equals(that.tp1, bs) && tp2.equals(that.tp2, bs) + case _ => false } final class CachedAndType(tp1: Type, tp2: Type) extends AndType(tp1, tp2) @@ -3682,6 +3616,7 @@ object Types extends TypeUtils { myUnion private var atomsRunId: RunId = NoRunId + private var widenedRunId: RunId = NoRunId private var myAtoms: Atoms = uninitialized private var myWidened: Type = uninitialized @@ -3697,20 +3632,18 @@ object Types extends TypeUtils { val tp2w = tp2.widenSingletons() if ((tp1 eq tp1w) && (tp2 eq tp2w)) this else TypeComparer.lub(tp1w, tp2w, isSoft = isSoft) - private def ensureAtomsComputed()(using Context): Unit = + override def atoms(using Context): Atoms = if atomsRunId != ctx.runId then myAtoms = computeAtoms() - myWidened = computeWidenSingletons() if !isProvisional then atomsRunId = ctx.runId - - override def atoms(using Context): Atoms = - ensureAtomsComputed() myAtoms override def widenSingletons(skipSoftUnions: Boolean)(using Context): Type = if isSoft && skipSoftUnions then this else - ensureAtomsComputed() + if widenedRunId != ctx.runId then + myWidened = computeWidenSingletons() + if !isProvisional then widenedRunId = ctx.runId myWidened def derivedOrType(tp1: Type, tp2: Type, soft: Boolean = isSoft)(using Context): Type = @@ -3724,6 +3657,10 @@ object Types extends TypeUtils { case that: OrType => tp1.eq(that.tp1) && tp2.eq(that.tp2) && isSoft == that.isSoft case _ => false } + + override protected def iso(that: Any, bs: BinderPairs) = that match + case that: OrType => tp1.equals(that.tp1, bs) && 
tp2.equals(that.tp2, bs) && isSoft == that.isSoft + case _ => false } final class CachedOrType(tp1: Type, tp2: Type, override val isSoft: Boolean) extends OrType(tp1, tp2) @@ -4187,6 +4124,7 @@ object Types extends TypeUtils { protected def prefixString: String = companion.prefixString } + // Actually.. not cached. MethodOrPoly are `UncachedGroundType`s. final class CachedMethodType(paramNames: List[TermName])(paramInfosExp: MethodType => List[Type], resultTypeExp: MethodType => Type, val companion: MethodTypeCompanion) extends MethodType(paramNames)(paramInfosExp, resultTypeExp) @@ -4461,9 +4399,11 @@ object Types extends TypeUtils { /** Distributes Lambda inside type bounds. Examples: * - * type T[X] = U becomes type T = [X] -> U - * type T[X] <: U becomes type T >: Nothing <: ([X] -> U) - * type T[X] >: L <: U becomes type T >: ([X] -> L) <: ([X] -> U) + * {{{ + * type T[X] = U becomes type T = [X] =>> U + * type T[X] <: U becomes type T >: Nothing <: ([X] =>> U) + * type T[X] >: L <: U becomes type T >: ([X] =>> L) <: ([X] =>> U) + * }}} * * The variances of regular TypeBounds types, as well as of match aliases * and of opaque aliases are always determined from the given parameters @@ -4475,6 +4415,7 @@ object Types extends TypeUtils { * * Examples: * + * {{{ * type T[X] >: A // X is invariant * type T[X] <: List[X] // X is invariant * type T[X] = List[X] // X is covariant (determined structurally) @@ -4482,6 +4423,7 @@ object Types extends TypeUtils { * opaque type T[+X] = List[X] // X is covariant * type T[A, B] = A => B // A is contravariant, B is covariant (determined structurally) * type T[A, +B] = A => B // A is invariant, B is covariant + * }}} */ def boundsFromParams[PI <: ParamInfo.Of[TypeName]](params: List[PI], bounds: TypeBounds)(using Context): TypeBounds = { def expand(tp: Type, useVariances: Boolean) = @@ -4598,8 +4540,8 @@ object Types extends TypeUtils { private var myEvalRunId: RunId = NoRunId private var myEvalued: Type = uninitialized - private var validUnderlyingMatch: Period = Nowhere - private var cachedUnderlyingMatch: Type = uninitialized + private var validUnderlyingNormalizable: Period = Nowhere + private var cachedUnderlyingNormalizable: Type = uninitialized def isGround(acc: TypeAccumulator[Boolean])(using Context): Boolean = if myGround == 0 then myGround = if acc.foldOver(true, this) then 1 else -1 @@ -4630,12 +4572,18 @@ object Types extends TypeUtils { override def superType(using Context): Type = if ctx.period != validSuper then - validSuper = if (tycon.isProvisional) Nowhere else ctx.period + var superIsProvisional = tycon.isProvisional cachedSuper = tycon match case tycon: HKTypeLambda => defn.AnyType case tycon: TypeRef if tycon.symbol.isClass => tycon - case tycon: TypeProxy => tycon.superType.applyIfParameterized(args) + case tycon: TypeProxy => + superIsProvisional ||= args.exists(_.isProvisional) + // applyIfParameterized may perform eta-reduction leading to different + // variance annotations depending on the instantiation of type params + // see tests/pos/typeclass-encoding3b.scala:348 for an example + tycon.superType.applyIfParameterized(args) case _ => defn.AnyType + validSuper = if superIsProvisional then Nowhere else ctx.period cachedSuper override def translucentSuperType(using Context): Type = tycon match { @@ -4657,37 +4605,25 @@ object Types extends TypeUtils { case nil => x foldArgs(op(x, tycon), args) - /** Exists if the tycon is a TypeRef of an alias with an underlying match type. 
- * Anything else should have already been reduced in `appliedTo` by the TypeAssigner. + /** Exists if the tycon is a TypeRef of an alias with an underlying match type, + * or a compiletime applied type. Anything else should have already been + * reduced in `appliedTo` by the TypeAssigner. This may reduce several + * HKTypeLambda applications before the underlying normalizable type is reached. */ - override def underlyingMatchType(using Context): Type = - if ctx.period != validUnderlyingMatch then - validUnderlyingMatch = if tycon.isProvisional then Nowhere else ctx.period - cachedUnderlyingMatch = superType.underlyingMatchType - cachedUnderlyingMatch + override def underlyingNormalizable(using Context): Type = + if ctx.period != validUnderlyingNormalizable then tycon match + case tycon: TypeRef if defn.isCompiletimeAppliedType(tycon.symbol) => + cachedUnderlyingNormalizable = this + validUnderlyingNormalizable = ctx.period + case _ => + cachedUnderlyingNormalizable = superType.underlyingNormalizable + validUnderlyingNormalizable = validSuper + cachedUnderlyingNormalizable - override def tryNormalize(using Context): Type = tycon.stripTypeVar match { - case tycon: TypeRef => - def tryMatchAlias = tycon.info match - case AliasingBounds(alias) if isMatchAlias => - trace(i"normalize $this", typr, show = true) { - MatchTypeTrace.recurseWith(this) { - alias.applyIfParameterized(args.map(_.normalized)).tryNormalize - /* `applyIfParameterized` may reduce several HKTypeLambda applications - * before the underlying MatchType is reached. - * Even if they do not involve any match type normalizations yet, - * we still want to record these reductions in the MatchTypeTrace. - * They should however only be attempted if they eventually expand - * to a match type, which is ensured by the `isMatchAlias` guard. - */ - } - } - case _ => - NoType - tryCompiletimeConstantFold.orElse(tryMatchAlias) - case _ => - NoType - } + override def tryNormalize(using Context): Type = + if isMatchAlias && MatchTypeTrace.isRecording then + MatchTypeTrace.recurseWith(this)(superType.tryNormalize) + else super.tryNormalize /** Is this an unreducible application to wildcard arguments? * This is the case if tycon is higher-kinded. This means @@ -4774,6 +4710,7 @@ object Types extends TypeUtils { type BT <: LambdaType def paramNum: Int def paramName: binder.ThisName = binder.paramNames(paramNum) + def paramInfo: binder.PInfo = binder.paramInfos(paramNum) override def underlying(using Context): Type = { // TODO: update paramInfos's type to nullable @@ -4808,7 +4745,6 @@ object Types extends TypeUtils { type BT = TermLambda def kindString: String = "Term" def copyBoundType(bt: BT): Type = bt.paramRefs(paramNum) - override def isTrackableRef(using Context) = true } private final class TermParamRefImpl(binder: TermLambda, paramNum: Int) extends TermParamRef(binder, paramNum) @@ -4816,7 +4752,8 @@ object Types extends TypeUtils { /** Only created in `binder.paramRefs`. Use `binder.paramRefs(paramNum)` to * refer to `TypeParamRef(binder, paramNum)`. */ - abstract case class TypeParamRef(binder: TypeLambda, paramNum: Int) extends ParamRef { + abstract case class TypeParamRef(binder: TypeLambda, paramNum: Int) + extends ParamRef, CaptureRef { type BT = TypeLambda def kindString: String = "Type" def copyBoundType(bt: BT): Type = bt.paramRefs(paramNum) @@ -4948,7 +4885,7 @@ object Types extends TypeUtils { def origin: TypeParamRef = currentOrigin /** Set origin to new parameter. Called if we merge two conflicting constraints. 
- * See OrderingConstraint#merge, OrderingConstraint#rename + * See OrderingConstraint#merge */ def setOrigin(p: TypeParamRef) = currentOrigin = p @@ -5147,13 +5084,6 @@ object Types extends TypeUtils { private var myReduced: Type | Null = null private var reductionContext: util.MutableMap[Type, Type] | Null = null - override def tryNormalize(using Context): Type = - try - reduced.normalized - catch - case ex: Throwable => - handleRecursive("normalizing", s"${scrutinee.show} match ..." , ex) - private def thisMatchType = this def reduced(using Context): Type = atPhaseNoLater(elimOpaquePhase) { @@ -5256,7 +5186,7 @@ object Types extends TypeUtils { def apply(bound: Type, scrutinee: Type, cases: List[Type])(using Context): MatchType = unique(new CachedMatchType(bound, scrutinee, cases)) - def thatReducesUsingGadt(tp: Type)(using Context): Boolean = tp.underlyingMatchType match + def thatReducesUsingGadt(tp: Type)(using Context): Boolean = tp.underlyingNormalizable match case mt: MatchType => mt.reducesUsingGadt case _ => false @@ -5282,10 +5212,25 @@ object Types extends TypeUtils { case _ => true end MatchTypeCasePattern + enum MatchTypeCaseError: + case Alias(sym: Symbol) + case RefiningBounds(name: TypeName) + case StructuralType(name: TypeName) + case UnaccountedTypeParam(name: TypeName) + + def explanation(using Context) = this match + case Alias(sym) => i"a type alias `${sym.name}`" + case RefiningBounds(name) => i"an abstract type member `$name` with bounds that need verification" + case StructuralType(name) => i"an abstract type member `$name` that does not refine a member in its parent" + case UnaccountedTypeParam(name) => i"an unaccounted type parameter `$name`" + end MatchTypeCaseError + + type MatchTypeCaseResult = MatchTypeCasePattern | MatchTypeCaseError + enum MatchTypeCaseSpec: case SubTypeTest(origMatchCase: Type, pattern: Type, body: Type) case SpeccedPatMat(origMatchCase: HKTypeLambda, captureCount: Int, pattern: MatchTypeCasePattern, body: Type) - case LegacyPatMat(origMatchCase: HKTypeLambda) + case LegacyPatMat(origMatchCase: HKTypeLambda, err: MatchTypeCaseError | Null) case MissingCaptures(origMatchCase: HKTypeLambda, missing: collection.BitSet) def origMatchCase: Type @@ -5296,18 +5241,18 @@ object Types extends TypeUtils { cas match case cas: HKTypeLambda if !sourceVersion.isAtLeast(SourceVersion.`3.4`) => // Always apply the legacy algorithm under -source:3.3 and below - LegacyPatMat(cas) + LegacyPatMat(cas, null) case cas: HKTypeLambda => val defn.MatchCase(pat, body) = cas.resultType: @unchecked val missing = checkCapturesPresent(cas, pat) if !missing.isEmpty then MissingCaptures(cas, missing) else - val specPattern = tryConvertToSpecPattern(cas, pat) - if specPattern != null then - SpeccedPatMat(cas, cas.paramNames.size, specPattern, body) - else - LegacyPatMat(cas) + tryConvertToSpecPattern(cas, pat) match + case specPattern: MatchTypeCasePattern => + SpeccedPatMat(cas, cas.paramNames.size, specPattern, body) + case err: MatchTypeCaseError => + LegacyPatMat(cas, err) case _ => val defn.MatchCase(pat, body) = cas: @unchecked SubTypeTest(cas, pat, body) @@ -5345,15 +5290,15 @@ object Types extends TypeUtils { * It must adhere to the specification of legal patterns defined at * https://docs.scala-lang.org/sips/match-types-spec.html#legal-patterns * - * Returns `null` if the pattern in `caseLambda` is a not a legal pattern. + * Returns a MatchTypeCaseError if the pattern in `caseLambda` is a not a legal pattern. 
*/ - private def tryConvertToSpecPattern(caseLambda: HKTypeLambda, pat: Type)(using Context): MatchTypeCasePattern | Null = - var typeParamRefsAccountedFor: Int = 0 + private def tryConvertToSpecPattern(caseLambda: HKTypeLambda, pat: Type)(using Context): MatchTypeCaseResult = + var typeParamRefsUnaccountedFor = (0 until caseLambda.paramNames.length).to(mutable.BitSet) - def rec(pat: Type, variance: Int): MatchTypeCasePattern | Null = + def rec(pat: Type, variance: Int): MatchTypeCaseResult = pat match case pat @ TypeParamRef(binder, num) if binder eq caseLambda => - typeParamRefsAccountedFor += 1 + typeParamRefsUnaccountedFor -= num MatchTypeCasePattern.Capture(num, isWildcard = pat.paramName.is(WildcardParamName)) case pat @ AppliedType(tycon: TypeRef, args) if variance == 1 => @@ -5369,13 +5314,13 @@ object Types extends TypeUtils { MatchTypeCasePattern.BaseTypeTest(tycon, argPatterns, needsConcreteScrut) } else if defn.isCompiletime_S(tyconSym) && args.sizeIs == 1 then - val argPattern = rec(args.head, variance) - if argPattern == null then - null - else if argPattern.isTypeTest then - MatchTypeCasePattern.TypeTest(pat) - else - MatchTypeCasePattern.CompileTimeS(argPattern) + rec(args.head, variance) match + case err: MatchTypeCaseError => + err + case argPattern: MatchTypeCasePattern => + if argPattern.isTypeTest + then MatchTypeCasePattern.TypeTest(pat) + else MatchTypeCasePattern.CompileTimeS(argPattern) else tycon.info match case _: RealTypeBounds => @@ -5391,7 +5336,7 @@ object Types extends TypeUtils { */ rec(pat.superType, variance) case _ => - null + MatchTypeCaseError.Alias(tyconSym) case pat @ AppliedType(tycon: TypeParamRef, _) if variance == 1 => recAbstractTypeConstructor(pat) @@ -5412,40 +5357,40 @@ object Types extends TypeUtils { MatchTypeCasePattern.TypeMemberExtractor(refinedName, capture) else // Otherwise, a type-test + capture combo might be necessary, and we are out of spec - null + MatchTypeCaseError.RefiningBounds(refinedName) case _ => // If the member does not refine a member of the `parent`, we are out of spec - null + MatchTypeCaseError.StructuralType(refinedName) case _ => MatchTypeCasePattern.TypeTest(pat) end rec - def recAbstractTypeConstructor(pat: AppliedType): MatchTypeCasePattern | Null = + def recAbstractTypeConstructor(pat: AppliedType): MatchTypeCaseResult = recArgPatterns(pat) { argPatterns => MatchTypeCasePattern.AbstractTypeConstructor(pat.tycon, argPatterns) } end recAbstractTypeConstructor - def recArgPatterns(pat: AppliedType)(whenNotTypeTest: List[MatchTypeCasePattern] => MatchTypeCasePattern | Null): MatchTypeCasePattern | Null = + def recArgPatterns(pat: AppliedType)(whenNotTypeTest: List[MatchTypeCasePattern] => MatchTypeCaseResult): MatchTypeCaseResult = val AppliedType(tycon, args) = pat val tparams = tycon.typeParams val argPatterns = args.zip(tparams).map { (arg, tparam) => rec(arg, tparam.paramVarianceSign) } - if argPatterns.exists(_ == null) then - null - else - val argPatterns1 = argPatterns.asInstanceOf[List[MatchTypeCasePattern]] // they are not null + argPatterns.find(_.isInstanceOf[MatchTypeCaseError]).getOrElse: + val argPatterns1 = argPatterns.asInstanceOf[List[MatchTypeCasePattern]] // they are not errors if argPatterns1.forall(_.isTypeTest) then MatchTypeCasePattern.TypeTest(pat) else whenNotTypeTest(argPatterns1) end recArgPatterns - val result = rec(pat, variance = 1) - if typeParamRefsAccountedFor == caseLambda.paramNames.size then result - else null + rec(pat, variance = 1) match + case err: MatchTypeCaseError => err 
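To make the spec-pattern conversion concrete, the following standard match type (a sketch, not taken from the patch) exercises two of the recognized shapes: the literal case becomes a plain type test, while `S[m]` becomes a CompileTimeS pattern wrapping a capture of `m`.

  import scala.compiletime.ops.int.S

  type Pred[N <: Int] <: Int = N match
    case 0    => 0
    case S[m] => m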
+ case ok if typeParamRefsUnaccountedFor.isEmpty => ok + case _ => + MatchTypeCaseError.UnaccountedTypeParam(caseLambda.paramNames(typeParamRefsUnaccountedFor.head)) end tryConvertToSpecPattern end MatchTypeCaseSpec @@ -5690,7 +5635,8 @@ object Types extends TypeUtils { /** Common supertype of `TypeAlias` and `MatchAlias` */ abstract class AliasingBounds(val alias: Type) extends TypeBounds(alias, alias) { - def derivedAlias(alias: Type)(using Context): AliasingBounds + def derivedAlias(alias: Type)(using Context): AliasingBounds = + if alias eq this.alias then this else AliasingBounds(alias) override def computeHash(bs: Binders): Int = doHash(bs, alias) override def hashIsStable: Boolean = alias.hashIsStable @@ -5712,10 +5658,7 @@ object Types extends TypeUtils { /** = T */ - class TypeAlias(alias: Type) extends AliasingBounds(alias) { - def derivedAlias(alias: Type)(using Context): AliasingBounds = - if (alias eq this.alias) this else TypeAlias(alias) - } + class TypeAlias(alias: Type) extends AliasingBounds(alias) /** = T where `T` is a `MatchType` * @@ -5724,10 +5667,7 @@ object Types extends TypeUtils { * If we assumed full substitutivity, we would have to reject all recursive match * aliases (or else take the jump and allow full recursive types). */ - class MatchAlias(alias: Type) extends AliasingBounds(alias) { - def derivedAlias(alias: Type)(using Context): AliasingBounds = - if (alias eq this.alias) this else MatchAlias(alias) - } + class MatchAlias(alias: Type) extends AliasingBounds(alias) object TypeBounds { def apply(lo: Type, hi: Type)(using Context): TypeBounds = @@ -5798,30 +5738,6 @@ object Types extends TypeUtils { isRefiningCache } - override def isTrackableRef(using Context) = - (isReach || isMaybe) && parent.isTrackableRef - - /** Is this a reach reference of the form `x*`? */ - override def isReach(using Context): Boolean = - annot.symbol == defn.ReachCapabilityAnnot - - /** Is this a reach reference of the form `x*`? */ - override def isMaybe(using Context): Boolean = - annot.symbol == defn.MaybeCapabilityAnnot - - override def stripReach(using Context): CaptureRef = - if isReach then parent.asInstanceOf[CaptureRef] else this - - override def stripMaybe(using Context): CaptureRef = - if isMaybe then parent.asInstanceOf[CaptureRef] else this - - override def normalizedRef(using Context): CaptureRef = - if isReach then AnnotatedType(stripReach.normalizedRef, annot) else this - - override def captureSet(using Context): CaptureSet = - if isReach then super.captureSet - else CaptureSet.ofType(this, followResult = false) - // equals comes from case class; no matching override is needed override def computeHash(bs: Binders): Int = @@ -6046,17 +5962,18 @@ object Types extends TypeUtils { def samClass(tp: Type)(using Context): Symbol = tp match case tp: ClassInfo => - def zeroParams(tp: Type): Boolean = tp.stripPoly match - case mt: MethodType => mt.paramInfos.isEmpty && !mt.resultType.isInstanceOf[MethodType] - case et: ExprType => true - case _ => false val cls = tp.cls - val validCtor = - val ctor = cls.primaryConstructor - // `ContextFunctionN` does not have constructors - !ctor.exists || zeroParams(ctor.info) - val isInstantiable = !cls.isOneOf(FinalOrSealed) && (tp.appliedRef <:< tp.selfType) - if validCtor && isInstantiable then tp.cls + def takesNoArgs(tp: Type) = + !tp.classSymbol.primaryConstructor.exists + // e.g. 
`ContextFunctionN` does not have constructors + || tp.applicableConstructors(Nil, adaptVarargs = true).lengthCompare(1) == 0 + // we require a unique constructor so that SAM expansion is deterministic + val noArgsNeeded: Boolean = + takesNoArgs(tp) + && (!tp.cls.is(Trait) || takesNoArgs(tp.parents.head)) + def isInstantiable = + !tp.cls.isOneOf(FinalOrSealed) && (tp.appliedRef <:< tp.selfType) + if noArgsNeeded && isInstantiable then tp.cls else NoSymbol case tp: AppliedType => samClass(tp.superType) @@ -6155,8 +6072,15 @@ object Types extends TypeUtils { def inverse: BiTypeMap /** A restriction of this map to a function on tracked CaptureRefs */ - def forward(ref: CaptureRef): CaptureRef = this(ref) match - case result: CaptureRef if result.isTrackableRef => result + def forward(ref: CaptureRef): CaptureRef = + val result = this(ref) + def ensureTrackable(tp: Type): CaptureRef = tp match + case tp: CaptureRef => + if tp.isTrackableRef then tp + else ensureTrackable(tp.underlying) + case _ => + assert(false, i"not a trackable captureRef ref: $result, ${result.underlyingIterator.toList}") + ensureTrackable(result) /** A restriction of the inverse to a function on tracked CaptureRefs */ def backward(ref: CaptureRef): CaptureRef = inverse(ref) match @@ -6241,6 +6165,15 @@ object Types extends TypeUtils { try derivedCapturingType(tp, this(parent), refs.map(this)) finally variance = saved + /** Utility method. Maps the supertype of a type proxy. Returns the + * type proxy itself if the mapping leaves the supertype unchanged. + * This avoids needless changes in mapped types. + */ + protected def mapConserveSuper(t: TypeProxy): Type = + val t1 = t.superType + val t2 = apply(t1) + if t2 ne t1 then t2 else t + /** Map this function over given type */ def mapOver(tp: Type): Type = { record(s"TypeMap mapOver ${getClass}") @@ -6248,6 +6181,12 @@ object Types extends TypeUtils { val ctx = this.mapCtx // optimization for performance given Context = ctx tp match { + case tp: TermRef if tp.symbol.isImport => + // see tests/pos/i19493.scala for examples requiring mapping over imports + val ImportType(e) = tp.info: @unchecked + val e1 = singleton(apply(e.tpe)) + newImportSymbol(tp.symbol.owner, e1).termRef + case tp: NamedType => if stopBecauseStaticOrLocal(tp) then tp else @@ -6352,7 +6291,14 @@ object Types extends TypeUtils { } } - private def treeTypeMap = new TreeTypeMap(typeMap = this) + private def treeTypeMap = new TreeTypeMap( + typeMap = this, + // Using `ConservativeTreeCopier` is needed when copying dependent annoted + // types, where we can refer to a previous parameter represented as + // `TermParamRef` that has no underlying type yet. + // See tests/pos/annot-17242.scala. 
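The "dependent annotated types" mentioned in the comment above arise when a later parameter's type carries an annotation whose argument refers to an earlier parameter, so the copied annotation tree can mention a `TermParamRef` whose underlying type is not yet set. A hedged sketch of that shape (the `within` annotation and the `clamp` method are invented for illustration and are not claimed to reproduce tests/pos/annot-17242.scala):

  import scala.annotation.StaticAnnotation

  class within(bound: Int) extends StaticAnnotation

  def clamp(limit: Int, value: Int @within(limit)): Int =
    value.min(limit)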
+ cpy = ConservativeTreeCopier() + ) def mapOver(syms: List[Symbol]): List[Symbol] = mapSymbols(syms, treeTypeMap) diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala index 22a43dd524e1..3af0fc6603d5 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala @@ -403,9 +403,10 @@ class ClassfileParser( val privateWithin = getPrivateWithin(jflags) - classRoot.setPrivateWithin(privateWithin) - moduleRoot.setPrivateWithin(privateWithin) - moduleRoot.sourceModule.setPrivateWithin(privateWithin) + if privateWithin.exists then + classRoot.setPrivateWithin(privateWithin) + moduleRoot.setPrivateWithin(privateWithin) + moduleRoot.sourceModule.setPrivateWithin(privateWithin) for (i <- 0 until in.nextChar) parseMember(method = false) for (i <- 0 until in.nextChar) parseMember(method = true) @@ -1059,11 +1060,13 @@ class ClassfileParser( private def enterOwnInnerClasses()(using Context, DataReader): Unit = { def enterClassAndModule(entry: InnerClassEntry, file: AbstractFile, jflags: Int) = SymbolLoaders.enterClassAndModule( - getOwner(jflags), - entry.originalName, - new ClassfileLoader(file), - classTranslation.flags(jflags), - getScope(jflags)) + getOwner(jflags), + entry.originalName, + new ClassfileLoader(file), + classTranslation.flags(jflags), + getScope(jflags), + getPrivateWithin(jflags), + ) for entry <- innerClasses.valuesIterator do // create a new class member for immediate inner classes @@ -1163,7 +1166,10 @@ class ClassfileParser( // attribute isn't, this classfile is a compilation artifact. return Some(NoEmbedded) - if (scan(tpnme.ScalaSignatureATTR) && scan(tpnme.RuntimeVisibleAnnotationATTR)) { + if (scan(tpnme.ScalaSignatureATTR)) { + if !scan(tpnme.RuntimeVisibleAnnotationATTR) then + report.error(em"No RuntimeVisibleAnnotations in classfile with ScalaSignature attribute: ${classRoot.fullName}") + return None val attrLen = in.nextInt val nAnnots = in.nextChar var i = 0 diff --git a/compiler/src/dotty/tools/dotc/core/tasty/BestEffortTastyWriter.scala b/compiler/src/dotty/tools/dotc/core/tasty/BestEffortTastyWriter.scala index 9cdfb042b8fb..13a6a274ed96 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/BestEffortTastyWriter.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/BestEffortTastyWriter.scala @@ -18,8 +18,8 @@ object BestEffortTastyWriter: unit.pickled.foreach { (clz, binary) => val parts = clz.fullName.mangledString.split('.') val outPath = outputPath(parts.toList, dir) - val outTastyFile = new PlainFile(new File(outPath)) - val outstream = new DataOutputStream(outTastyFile.bufferedOutput) + val outTastyFile = new File(outPath) + val outstream = new DataOutputStream(new PlainFile(outTastyFile).bufferedOutput) try outstream.write(binary()) catch case ex: ClosedByInterruptException => try diff --git a/compiler/src/dotty/tools/dotc/core/tasty/CommentPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/CommentPickler.scala index 10df2a437af6..39293b947326 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/CommentPickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/CommentPickler.scala @@ -29,7 +29,7 @@ object CommentPickler: def traverse(x: Any): Unit = x match case x: untpd.Tree @unchecked => x match - case x: tpd.MemberDef @unchecked => // at this point all MembderDefs are t(y)p(e)d. + case x: tpd.MemberDef @unchecked => // at this point all MemberDefs are t(y)p(e)d. 
for comment <- docString(x) do pickleComment(addrOfTree(x), comment) case _ => val limit = x.productArity diff --git a/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala index 86076517021a..3d8080e72a29 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala @@ -33,6 +33,7 @@ object PositionPickler: pickler: TastyPickler, addrOfTree: TreeToAddr, treeAnnots: untpd.MemberDef => List[tpd.Tree], + typeAnnots: List[tpd.Tree], relativePathReference: String, source: SourceFile, roots: List[Tree], @@ -136,6 +137,9 @@ object PositionPickler: } for (root <- roots) traverse(root, NoSource) + + for annotTree <- typeAnnots do + traverse(annotTree, NoSource) end picklePositions end PositionPickler diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 8d1eca8fb5f0..7fd6444746ce 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -41,6 +41,10 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { */ private val annotTrees = util.EqHashMap[untpd.MemberDef, mutable.ListBuffer[Tree]]() + /** A set of annotation trees appearing in annotated types. + */ + private val annotatedTypeTrees = mutable.ListBuffer[Tree]() + /** A map from member definitions to their doc comments, so that later * parallel comment pickling does not need to access symbols of trees (which * would involve accessing symbols of named types and possibly changing phases @@ -57,6 +61,8 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { val ts = annotTrees.lookup(tree) if ts == null then Nil else ts.toList + def typeAnnots: List[Tree] = annotatedTypeTrees.toList + def docString(tree: untpd.MemberDef): Option[Comment] = Option(docStrings.lookup(tree)) @@ -278,6 +284,7 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { case tpe: AnnotatedType => writeByte(ANNOTATEDtype) withLength { pickleType(tpe.parent, richTypes); pickleTree(tpe.annot.tree) } + annotatedTypeTrees += tpe.annot.tree case tpe: AndType => writeByte(ANDtype) withLength { pickleType(tpe.tp1, richTypes); pickleType(tpe.tp2, richTypes) } @@ -466,7 +473,10 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { } case _ => if passesConditionForErroringBestEffortCode(tree.hasType) then - val sig = tree.tpe.signature + // #19951 The signature of a constructor of a Java annotation is irrelevant + val sig = + if name == nme.CONSTRUCTOR && tree.symbol.exists && tree.symbol.owner.is(JavaAnnotation) then Signature.NotAMethod + else tree.tpe.signature var ename = tree.symbol.targetName val selectFromQualifier = name.isTypeName @@ -507,7 +517,14 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { writeByte(APPLY) withLength { pickleTree(fun) - args.foreach(pickleTree) + // #19951 Do not pickle default arguments to Java annotation constructors + if fun.symbol.isClassConstructor && fun.symbol.owner.is(JavaAnnotation) then + for arg <- args do + arg match + case NamedArg(_, Ident(nme.WILDCARD)) => () + case _ => pickleTree(arg) + else + args.foreach(pickleTree) } } case TypeApply(fun, args) => @@ -766,8 +783,7 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { pickleType(tree.tpe) bindings.foreach(pickleTree) } - case SplicePattern(pat, args) => - val targs = Nil 
// SplicePattern `targs` will be added with #18271 + case SplicePattern(pat, targs, args) => writeByte(SPLICEPATTERN) withLength { pickleTree(pat) @@ -797,10 +813,10 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { report.error(ex.toMessage, tree.srcPos.focus) pickleErrorType() case ex: AssertionError => - println(i"error when pickling tree $tree") + println(i"error when pickling tree $tree of class ${tree.getClass}") throw ex case ex: MatchError => - println(i"error when pickling tree $tree") + println(i"error when pickling tree $tree of class ${tree.getClass}") throw ex } } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 91a5899146cc..de99ce0105ea 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -999,6 +999,7 @@ class TreeUnpickler(reader: TastyReader, } } + tree.ensureHasSym(sym) tree.setDefTree } @@ -1272,7 +1273,7 @@ class TreeUnpickler(reader: TastyReader, val tpe0 = name match case name: TypeName => TypeRef(qualType, name, denot) case name: TermName => TermRef(qualType, name, denot) - val tpe = TypeOps.makePackageObjPrefixExplicit(tpe0) + val tpe = tpe0.makePackageObjPrefixExplicit ConstFold.Select(untpd.Select(qual, name).withType(tpe)) def completeSelect(name: Name, sig: Signature, target: Name): Select = @@ -1281,7 +1282,14 @@ class TreeUnpickler(reader: TastyReader, if unpicklingJava && name == tpnme.Object && qual.symbol == defn.JavaLangPackageVal then defn.FromJavaObjectSymbol.denot else - accessibleDenot(qual.tpe.widenIfUnstable, name, sig, target) + val qualType = qual.tpe.widenIfUnstable + if name == nme.CONSTRUCTOR && qualType.classSymbol.is(JavaAnnotation) then + // #19951 Disregard the signature (or the absence thereof) for constructors of Java annotations + // Note that Java annotations always have a single public constructor + // They may have a PrivateLocal constructor if compiled from source in mixed compilation + qualType.findMember(name, qualType, excluded = Private) + else + accessibleDenot(qualType, name, sig, target) makeSelect(qual, name, denot) def readQualId(): (untpd.Ident, TypeRef) = @@ -1335,7 +1343,16 @@ class TreeUnpickler(reader: TastyReader, readPathTree() } - /** Adapt constructor calls where class has only using clauses from old to new scheme. + /** Adapt constructor calls for Java annot constructors and for the new scheme of `using` clauses. + * + * #19951 If the `fn` is the constructor of a Java annotation, reorder and refill + * arguments against the constructor signature. Only reorder if all the arguments + * are `NamedArg`s, which is always the case if the TASTy was produced by 3.5+. + * If some arguments are positional, only *add* missing arguments to the right + * and hope for the best; this will at least fix #19951 after the fact if the new + * annotation fields are added after all the existing ones. + * + * Otherwise, adapt calls where class has only using clauses from old to new scheme. * or class has mixed using clauses and other clauses. * Old: leading (), new: nothing, or trailing () if all clauses are using clauses. * This is neccessary so that we can read pre-3.2 Tasty correctly. There, @@ -1343,7 +1360,9 @@ class TreeUnpickler(reader: TastyReader, * use the new scheme, since they are reconstituted with normalizeIfConstructor. 
*/ def constructorApply(fn: Tree, args: List[Tree]): Tree = - if fn.tpe.widen.isContextualMethod && args.isEmpty then + if fn.symbol.owner.is(JavaAnnotation) then + tpd.Apply(fn, fixArgsToJavaAnnotConstructor(fn.tpe.widen, args)) + else if fn.tpe.widen.isContextualMethod && args.isEmpty then fn.withAttachment(SuppressedApplyToNone, ()) else val fn1 = fn match @@ -1365,6 +1384,68 @@ class TreeUnpickler(reader: TastyReader, res.withAttachment(SuppressedApplyToNone, ()) else res + def fixArgsToJavaAnnotConstructor(methType: Type, args: List[Tree]): List[Tree] = + methType match + case methType: MethodType => + val formalNames = methType.paramNames + val sizeCmp = args.sizeCompare(formalNames) + + def makeDefault(name: TermName, tpe: Type): NamedArg = + NamedArg(name, Underscore(tpe)) + + def extendOnly(args: List[NamedArg]): List[NamedArg] = + if sizeCmp < 0 then + val argsSize = args.size + val additionalArgs: List[NamedArg] = + formalNames.drop(argsSize).lazyZip(methType.paramInfos.drop(argsSize)).map(makeDefault(_, _)) + args ::: additionalArgs + else + args // fast path + + if formalNames.isEmpty then + // fast path + args + else if sizeCmp > 0 then + // Something's wrong anyway; don't touch anything + args + else if args.exists(!_.isInstanceOf[NamedArg]) then + // Pre 3.5 TASTy -- do our best, assuming that args match as a prefix of the formals + val prefixMatch = args.lazyZip(formalNames).forall { + case (NamedArg(actualName, _), formalName) => actualName == formalName + case _ => true + } + // If the prefix does not match, something's wrong; don't touch anything + if !prefixMatch then + args + else + // Turn non-named args to named and extend with defaults + extendOnly(args.lazyZip(formalNames).map { + case (arg: NamedArg, _) => arg + case (arg, formalName) => NamedArg(formalName, arg) + }) + else + // Good TASTy where all the arguments are named; reorder and extend if needed + val namedArgs = args.asInstanceOf[List[NamedArg]] + val prefixMatch = namedArgs.lazyZip(formalNames).forall((arg, formalName) => arg.name == formalName) + if prefixMatch then + // fast path, extend only + extendOnly(namedArgs) + else + // needs reordering, and possibly fill in holes for default arguments + val argsByName = mutable.AnyRefMap.from(namedArgs.map(arg => arg.name -> arg)) + val reconstructedArgs = formalNames.lazyZip(methType.paramInfos).map { (name, tpe) => + argsByName.remove(name).getOrElse(makeDefault(name, tpe)) + } + if argsByName.nonEmpty then + // something's wrong; don't touch anything + args + else + reconstructedArgs + + case _ => + args + end fixArgsToJavaAnnotConstructor + def quotedExpr(fn: Tree, args: List[Tree]): Tree = val TypeApply(_, targs) = fn: @unchecked untpd.Quote(args.head, Nil).withBodyType(targs.head.tpe) @@ -1491,8 +1572,12 @@ class TreeUnpickler(reader: TastyReader, NoDenotation val denot = - val d = ownerTpe.decl(name).atSignature(sig, target) - (if !d.exists then lookupInSuper else d).asSeenFrom(prefix) + if owner.is(JavaAnnotation) && name == nme.CONSTRUCTOR then + // #19951 Fix up to read TASTy produced before 3.5.0 -- ignore the signature + ownerTpe.nonPrivateDecl(name).asSeenFrom(prefix) + else + val d = ownerTpe.decl(name).atSignature(sig, target) + (if !d.exists then lookupInSuper else d).asSeenFrom(prefix) makeSelect(qual, name, denot) case REPEATED => @@ -1584,8 +1669,7 @@ class TreeUnpickler(reader: TastyReader, val pat = readTree() val patType = readType() val (targs, args) = until(end)(readTree()).span(_.isType) - assert(targs.isEmpty, "unexpected type 
arguments in SPLICEPATTERN") // `targs` will be needed for #18271. Until this fearure is added they should be empty. - SplicePattern(pat, args, patType) + SplicePattern(pat, targs, args, patType) case HOLE => readHole(end, isTerm = true) case _ => diff --git a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala index 7c79e972c126..e06e6b3e1615 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala @@ -96,15 +96,6 @@ object Inliner: } end isElideableExpr - // InlineCopier is a more fault-tolerant copier that does not cause errors when - // function types in applications are undefined. This is necessary since we copy at - // the same time as establishing the proper context in which the copied tree should - // be evaluated. This matters for opaque types, see neg/i14653.scala. - private class InlineCopier() extends TypedTreeCopier: - override def Apply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): Apply = - if fun.tpe.widen.exists then super.Apply(tree)(fun, args) - else untpd.cpy.Apply(tree)(fun, args).withTypeUnchecked(tree.tpe) - // InlinerMap is a TreeTypeMap with special treatment for inlined arguments: // They are generally left alone (not mapped further, and if they wrap a type // the type Inlined wrapper gets dropped @@ -116,7 +107,13 @@ object Inliner: substFrom: List[Symbol], substTo: List[Symbol])(using Context) extends TreeTypeMap( - typeMap, treeMap, oldOwners, newOwners, substFrom, substTo, InlineCopier()): + typeMap, treeMap, oldOwners, newOwners, substFrom, substTo, + // It is necessary to use the `ConservativeTreeCopier` since we copy at + // the same time as establishing the proper context in which the copied + // tree should be evaluated. This matters for opaque types, see + // neg/i14653.scala. + ConservativeTreeCopier() + ): override def copy( typeMap: Type => Type, @@ -957,6 +954,12 @@ class Inliner(val call: tpd.Tree)(using Context): case None => tree case _ => tree + + /** For inlining only: Given `(x: T)` with expected type `x.type`, replace the tree with `x`. + */ + override def healAdapt(tree: Tree, pt: Type)(using Context): Tree = (tree, pt) match + case (Typed(tree1, _), pt: SingletonType) if tree1.tpe <:< pt => tree1 + case _ => tree end InlineTyper /** Drop any side-effect-free bindings that are unused in expansion or other reachable bindings. 
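The reorder-and-fill behaviour described in the comment on fixArgsToJavaAnnotConstructor above can be pictured with a small self-contained sketch. This is a simplified toy model, not compiler code: it only shows the happy path of matching named arguments against the formal parameter order and completing missing ones with a stand-in for the annotation's default, whereas the real method also converts positional (pre-3.5) arguments and leaves the call untouched when anything looks inconsistent.

  // Toy model: `formals` are the constructor's parameter names in declaration
  // order, `actuals` the named arguments that were actually pickled.
  def reorderAndFill[A](formals: List[String], actuals: Map[String, A], default: String => A): List[A] =
    formals.map(name => actuals.getOrElse(name, default(name)))

  @main def demoReorder(): Unit =
    val formals = List("times", "delayMs") // the annotation later gained `delayMs`
    val actuals = Map("times" -> "5")      // old TASTy only carried `times`
    println(reorderAndFill(formals, actuals, n => s"<default $n>"))
    // prints: List(5, <default delayMs>)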
@@ -1077,9 +1080,7 @@ class Inliner(val call: tpd.Tree)(using Context): hints.nn += i"suspension triggered by macro call to ${sym.showLocated} in ${sym.associatedFile}" if suspendable then if ctx.settings.YnoSuspendedUnits.value then - return ref(defn.Predef_undefined) - .withType(ErrorType(em"could not expand macro, suspended units are disabled by -Yno-suspended-units")) - .withSpan(splicePos.span) + return errorTree(ref(defn.Predef_undefined), em"could not expand macro, suspended units are disabled by -Yno-suspended-units", splicePos) else ctx.compilationUnit.suspend(hints.nn.toList.mkString(", ")) // this throws a SuspendException @@ -1104,6 +1105,8 @@ class Inliner(val call: tpd.Tree)(using Context): new TreeAccumulator[List[Symbol]] { override def apply(syms: List[Symbol], tree: tpd.Tree)(using Context): List[Symbol] = tree match { + case Closure(env, meth, tpt) if meth.symbol.isAnonymousFunction => + this(syms, tpt :: env) case tree: RefTree if tree.isTerm && level == -1 && tree.symbol.isDefinedInCurrentRun && !tree.symbol.isLocal => foldOver(tree.symbol :: syms, tree) case _: This if level == -1 && tree.symbol.isDefinedInCurrentRun => diff --git a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala index fffe87c3f57a..aeecd9c376e3 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala @@ -342,10 +342,13 @@ object Inlines: if Inlines.isInlineable(codeArg1.symbol) then stripTyped(Inlines.inlineCall(codeArg1)) else codeArg1 + // We should not be rewriting tested strings + val noRewriteSettings = ctx.settings.rewrite.updateIn(ctx.settingsState.reinitializedCopy(), None) + ConstFold(underlyingCodeArg).tpe.widenTermRefExpr match { case ConstantType(Constant(code: String)) => val source2 = SourceFile.virtual("tasty-reflect", code) - inContext(ctx.fresh.setNewTyperState().setTyper(new Typer(ctx.nestingLevel + 1)).setSource(source2)) { + inContext(ctx.fresh.setSettings(noRewriteSettings).setNewTyperState().setTyper(new Typer(ctx.nestingLevel + 1)).setSource(source2)) { val tree2 = new Parser(source2).block() if ctx.reporter.allErrors.nonEmpty then ctx.reporter.allErrors.map((ErrorKind.Parser, _)) @@ -429,6 +432,7 @@ object Inlines: val constVal = tryConstValue(tpe) if constVal.isEmpty then val msg = NotConstant("cannot take constValue", tpe) + report.error(msg, callTypeArgs.head.srcPos) ref(defn.Predef_undefined).withSpan(callTypeArgs.head.span).withType(ErrorType(msg)) else constVal diff --git a/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala b/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala index 1acc6a1c8317..bb950fbe43cd 100644 --- a/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala +++ b/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala @@ -91,7 +91,7 @@ object PrepareInlineable { postTransform(super.transform(preTransform(tree))) protected def checkUnstableAccessor(accessedTree: Tree, accessor: Symbol)(using Context): Unit = - if ctx.settings.WunstableInlineAccessors.value then + if ctx.settings.Whas.unstableInlineAccessors then val accessorTree = accessorDef(accessor, accessedTree.symbol) report.warning(reporting.UnstableInlineAccessor(accessedTree.symbol, accessorTree), accessedTree) } diff --git a/compiler/src/dotty/tools/dotc/interactive/Completion.scala b/compiler/src/dotty/tools/dotc/interactive/Completion.scala index 7882d635f84a..7a0a19552f48 100644 --- 
a/compiler/src/dotty/tools/dotc/interactive/Completion.scala +++ b/compiler/src/dotty/tools/dotc/interactive/Completion.scala @@ -32,6 +32,8 @@ import dotty.tools.dotc.core.Names import dotty.tools.dotc.core.Types import dotty.tools.dotc.core.Symbols import dotty.tools.dotc.core.Constants +import dotty.tools.dotc.core.TypeOps +import dotty.tools.dotc.core.StdNames /** * One of the results of a completion query. @@ -119,16 +121,17 @@ object Completion: case _ => "" + def naiveCompletionPrefix(text: String, offset: Int): String = + var i = offset - 1 + while i >= 0 && text(i).isUnicodeIdentifierPart do i -= 1 + i += 1 // move to first character + text.slice(i, offset) + /** * Inspect `path` to determine the completion prefix. Only symbols whose name start with the * returned prefix should be considered. */ def completionPrefix(path: List[untpd.Tree], pos: SourcePosition)(using Context): String = - def fallback: Int = - var i = pos.point - 1 - while i >= 0 && Character.isUnicodeIdentifierPart(pos.source.content()(i)) do i -= 1 - i + 1 - path match case GenericImportSelector(sel) => if sel.isGiven then completionPrefix(sel.bound :: Nil, pos) @@ -146,7 +149,7 @@ object Completion: case (tree: untpd.RefTree) :: _ if tree.name != nme.ERROR => tree.name.toString.take(pos.span.point - tree.span.point) - case _ => pos.source.content.slice(fallback, pos.point).mkString + case _ => naiveCompletionPrefix(pos.source.content().mkString, pos.point) end completionPrefix @@ -200,7 +203,8 @@ object Completion: private def computeCompletions( pos: SourcePosition, - mode: Mode, rawPrefix: String, + mode: Mode, + rawPrefix: String, adjustedPath: List[tpd.Tree], untpdPath: List[untpd.Tree], matches: Option[Name => Boolean] @@ -283,7 +287,6 @@ object Completion: if denot.isType then denot.symbol.showFullName else denot.info.widenTermRefExpr.show - def isInNewContext(untpdPath: List[untpd.Tree]): Boolean = untpdPath match case _ :: untpd.New(selectOrIdent: (untpd.Select | untpd.Ident)) :: _ => true @@ -442,9 +445,17 @@ object Completion: def selectionCompletions(qual: tpd.Tree)(using Context): CompletionMap = val adjustedQual = widenQualifier(qual) - implicitConversionMemberCompletions(adjustedQual) ++ - extensionCompletions(adjustedQual) ++ - directMemberCompletions(adjustedQual) + val implicitConversionMembers = implicitConversionMemberCompletions(adjustedQual) + val extensionMembers = extensionCompletions(adjustedQual) + val directMembers = directMemberCompletions(adjustedQual) + val namedTupleMembers = namedTupleCompletions(adjustedQual) + + List( + implicitConversionMembers, + extensionMembers, + directMembers, + namedTupleMembers + ).reduce(_ ++ _) /** Completions for members of `qual`'s type. 
* These include inherited definitions but not members added by extensions or implicit conversions @@ -516,6 +527,30 @@ object Completion: .toSeq .groupByName + /** Completions for named tuples */ + private def namedTupleCompletions(qual: tpd.Tree)(using Context): CompletionMap = + def namedTupleCompletionsFromType(tpe: Type): CompletionMap = + val freshCtx = ctx.fresh.setExploreTyperState() + inContext(freshCtx): + tpe.namedTupleElementTypes + .map { (name, tpe) => + val symbol = newSymbol(owner = NoSymbol, name, EmptyFlags, tpe) + val denot = SymDenotation(symbol, NoSymbol, name, EmptyFlags, tpe) + name -> denot + } + .toSeq + .filter((name, denot) => include(denot, name)) + .groupByName + + val qualTpe = qual.typeOpt + if qualTpe.isNamedTupleType then + namedTupleCompletionsFromType(qualTpe) + else if qualTpe.derivesFrom(defn.SelectableClass) then + val pre = if !TypeOps.isLegalPrefix(qualTpe) then Types.SkolemType(qualTpe) else qualTpe + val fieldsType = pre.select(StdNames.tpnme.Fields).dealias.simplified + namedTupleCompletionsFromType(fieldsType) + else Map.empty + /** Completions from extension methods */ private def extensionCompletions(qual: tpd.Tree)(using Context): CompletionMap = def asDefLikeType(tpe: Type): Type = tpe match diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala index 79282b0e5223..fe797c66d104 100644 --- a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala @@ -298,6 +298,7 @@ object JavaParsers { } while (in.token == DOT) { in.nextToken() + annotations() t = typeArgs(atSpan(t.span.start, in.offset)(typeSelect(t, ident()))) } convertToTypeId(t) @@ -369,8 +370,8 @@ object JavaParsers { def annotation(): Option[Tree] = { def classOrId(): Tree = val id = qualId() - if in.lookaheadToken == CLASS then - in.nextToken() + if in.token == DOT && in.lookaheadToken == CLASS then + accept(DOT) accept(CLASS) TypeApply( Select( diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index e28ba5fd669e..220053e277a5 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -54,9 +54,9 @@ object Parsers { enum ParamOwner: case Class // class or trait or enum case CaseClass // case class or enum case - case Type // type alias or abstract type - case TypeParam // type parameter case Def // method + case Type // type alias or abstract type or polyfunction type/expr + case Hk // type parameter (i.e. 
current parameter is higher-kinded) case Given // given definition case ExtensionPrefix // extension clause, up to and including extension parameter case ExtensionFollow // extension clause, following extension parameter @@ -66,7 +66,11 @@ object Parsers { def takesOnlyUsingClauses = // only using clauses allowed for this owner this == Given || this == ExtensionFollow def acceptsVariance = - this == Class || this == CaseClass || this == Type + this == Class || this == CaseClass || this == Hk + def acceptsCtxBounds = + !(this == Type || this == Hk) + def acceptsWildcard = + this == Type || this == Hk end ParamOwner @@ -402,7 +406,7 @@ object Parsers { false } - def errorTermTree(start: Offset): Tree = atSpan(start, in.offset, in.offset) { unimplementedExpr } + def errorTermTree(start: Offset): Tree = atSpan(Span(start, in.offset)) { unimplementedExpr } private var inFunReturnType = false private def fromWithinReturnType[T](body: => T): T = { @@ -514,6 +518,22 @@ object Parsers { tree } + def makePolyFunction(tparams: List[Tree], body: Tree, + kind: String, errorTree: => Tree, + start: Offset, arrowOffset: Offset): Tree = + atSpan(start, arrowOffset): + getFunction(body) match + case None => + syntaxError(em"Implementation restriction: polymorphic function ${kind}s must have a value parameter", arrowOffset) + errorTree + case Some(Function(_, _: CapturesAndResult)) => + // A function tree like this will be desugared + // into a capturing type in the typer. + syntaxError(em"Implementation restriction: polymorphic function types cannot wrap function types that have capture sets", arrowOffset) + errorTree + case Some(f) => + PolyFunction(tparams, body) + /* --------------- PLACEHOLDERS ------------------------------------------- */ /** The implicit parameters introduced by `_` in the current expression. @@ -972,18 +992,16 @@ object Parsers { followedByToken(LARROW) // `<-` comes before possible statement starts } - /** Are the next token the "GivenSig" part of a given definition, - * i.e. an identifier followed by type and value parameters, followed by `:`? + /** Are the next tokens a valid continuation of a named given def? + * i.e. an identifier, possibly followed by type and value parameters, followed by `:`? * @pre The current token is an identifier */ - def followingIsOldStyleGivenSig() = + def followingIsGivenDefWithColon() = val lookahead = in.LookaheadScanner() if lookahead.isIdent then lookahead.nextToken() - var paramsSeen = false def skipParams(): Unit = if lookahead.token == LPAREN || lookahead.token == LBRACKET then - paramsSeen = true lookahead.skipParens() skipParams() else if lookahead.isNewLine then @@ -992,16 +1010,33 @@ object Parsers { skipParams() lookahead.isColon && { - !in.featureEnabled(Feature.modularity) - || { // with modularity language import, a `:` at EOL after an identifier represents a single identifier given + !sourceVersion.isAtLeast(`3.6`) + || { // in the new given syntax, a `:` at EOL after an identifier represents a single identifier given // Example: // given C: // def f = ... 
lookahead.nextToken() !lookahead.isAfterLineEnd + } || { + // Support for for pre-3.6 syntax where type is put on the next line + // Examples: + // given namedGiven: + // X[T] with {} + // given otherGiven: + // X[T] = new X[T]{} + lookahead.isIdent && { + lookahead.nextToken() + skipParams() + lookahead.token == WITH || lookahead.token == EQUALS + } } } + def followingIsArrow() = + val lookahead = in.LookaheadScanner() + lookahead.skipParens() + lookahead.token == ARROW + def followingIsExtension() = val next = in.lookahead.token next == LBRACKET || next == LPAREN @@ -1097,9 +1132,9 @@ object Parsers { if (prec < opPrec || leftAssoc && prec == opPrec) { opStack = opStack.tail recur { - atSpan(opInfo.operator.span union opInfo.operand.span union top.span) { - InfixOp(opInfo.operand, opInfo.operator, top) - } + migrateInfixOp(opInfo, isType): + atSpan(opInfo.operator.span union opInfo.operand.span union top.span): + InfixOp(opInfo.operand, opInfo.operator, top) } } else top @@ -1107,6 +1142,22 @@ object Parsers { recur(top) } + private def migrateInfixOp(opInfo: OpInfo, isType: Boolean)(infixOp: InfixOp): Tree = { + def isNamedTupleOperator = opInfo.operator.name match + case nme.EQ | nme.NE | nme.eq | nme.ne | nme.`++` | nme.zip => true + case _ => false + if isType then infixOp + else infixOp.right match + case Tuple(args) if args.exists(_.isInstanceOf[NamedArg]) && !isNamedTupleOperator => + report.errorOrMigrationWarning(DeprecatedInfixNamedArgumentSyntax(), infixOp.right.srcPos, MigrationVersion.AmbiguousNamedTupleSyntax) + if MigrationVersion.AmbiguousNamedTupleSyntax.needsPatch then + val asApply = cpy.Apply(infixOp)(Select(opInfo.operand, opInfo.operator.name), args) + patch(source, infixOp.span, asApply.show(using ctx.withoutColors)) + asApply // allow to use pre-3.6 syntax in migration mode + else infixOp + case _ => infixOp + } + /** True if we are seeing a lambda argument after a colon of the form: * : (params) => * body @@ -1527,31 +1578,32 @@ object Parsers { /** Same as [[typ]], but if this results in a wildcard it emits a syntax error and * returns a tree for type `Any` instead. */ - def toplevelTyp(intoOK: IntoOK = IntoOK.No): Tree = rejectWildcardType(typ(intoOK)) + def toplevelTyp(intoOK: IntoOK = IntoOK.No, inContextBound: Boolean = false): Tree = + rejectWildcardType(typ(intoOK, inContextBound)) private def getFunction(tree: Tree): Option[Function] = tree match { case Parens(tree1) => getFunction(tree1) case Block(Nil, tree1) => getFunction(tree1) - case Function(_, _: CapturesAndResult) => - // A function tree like this will be desugared - // into a capturing type in the typer, - // so None is returned. 
- None case t: Function => Some(t) case _ => None } - /** CaptureRef ::= ident | `this` | `cap` [`[` ident `]`] + /** CaptureRef ::= { SimpleRef `.` } SimpleRef [`*`] + * | [ { SimpleRef `.` } SimpleRef `.` ] id `^` */ def captureRef(): Tree = - if in.token == THIS then simpleRef() - else - val id = termIdent() - if isIdent(nme.raw.STAR) then - in.nextToken() - atSpan(startOffset(id)): - PostfixOp(id, Ident(nme.CC_REACH)) - else id + val ref = dotSelectors(simpleRef()) + if isIdent(nme.raw.STAR) then + in.nextToken() + atSpan(startOffset(ref)): + PostfixOp(ref, Ident(nme.CC_REACH)) + else if isIdent(nme.UPARROW) then + in.nextToken() + atSpan(startOffset(ref)): + convertToTypeId(ref) match + case ref: RefTree => makeCapsOf(ref) + case ref => ref + else ref /** CaptureSet ::= `{` CaptureRef {`,` CaptureRef} `}` -- under captureChecking */ @@ -1565,15 +1617,15 @@ object Parsers { else core() /** Type ::= FunType - * | HkTypeParamClause ‘=>>’ Type + * | TypTypeParamClause ‘=>>’ Type * | FunParamClause ‘=>>’ Type * | MatchType * | InfixType * FunType ::= (MonoFunType | PolyFunType) * MonoFunType ::= FunTypeArgs (‘=>’ | ‘?=>’) Type - * | (‘->’ | ‘?->’ ) [CaptureSet] Type -- under pureFunctions - * PolyFunType ::= HKTypeParamClause '=>' Type - * | HKTypeParamClause ‘->’ [CaptureSet] Type -- under pureFunctions + * | (‘->’ | ‘?->’ ) [CaptureSet] Type -- under pureFunctions + * PolyFunType ::= TypTypeParamClause '=>' Type + * | TypTypeParamClause ‘->’ [CaptureSet] Type -- under pureFunctions * FunTypeArgs ::= InfixType * | `(' [ FunArgType {`,' FunArgType } ] `)' * | '(' [ TypedFunParam {',' TypedFunParam } ')' @@ -1583,7 +1635,7 @@ object Parsers { * IntoTargetType ::= Type * | FunTypeArgs (‘=>’ | ‘?=>’) IntoType */ - def typ(intoOK: IntoOK = IntoOK.No): Tree = + def typ(intoOK: IntoOK = IntoOK.No, inContextBound: Boolean = false): Tree = val start = in.offset var imods = Modifiers() val erasedArgs: ListBuffer[Boolean] = ListBuffer() @@ -1725,34 +1777,28 @@ object Parsers { case arg => arg val args1 = args.mapConserve(sanitize) - + if in.isArrow || isPureArrow || erasedArgs.contains(true) then functionRest(args) else val tuple = atSpan(start): makeTupleOrParens(args.mapConserve(convertToElem)) typeRest: - infixTypeRest: + infixTypeRest(inContextBound): refinedTypeRest: withTypeRest: annotTypeRest: simpleTypeRest(tuple) else if in.token == LBRACKET then val start = in.offset - val tparams = typeParamClause(ParamOwner.TypeParam) + val tparams = typeParamClause(ParamOwner.Type) if in.token == TLARROW then atSpan(start, in.skipToken()): LambdaTypeTree(tparams, toplevelTyp()) else if in.token == ARROW || isPureArrow(nme.PUREARROW) then val arrowOffset = in.skipToken() val body = toplevelTyp(nestedIntoOK(in.token)) - atSpan(start, arrowOffset): - getFunction(body) match - case Some(f) => - PolyFunction(tparams, body) - case None => - syntaxError(em"Implementation restriction: polymorphic function types must have a value parameter", arrowOffset) - Ident(nme.ERROR.toTypeName) + makePolyFunction(tparams, body, "type", Ident(nme.ERROR.toTypeName), start, arrowOffset) else accept(TLARROW) typ() @@ -1761,7 +1807,7 @@ object Parsers { else if isIntoPrefix then PrefixOp(typeIdent(), typ(IntoOK.Nested)) else - typeRest(infixType()) + typeRest(infixType(inContextBound)) end typ private def makeKindProjectorTypeDef(name: TypeName): TypeDef = { @@ -1816,13 +1862,13 @@ object Parsers { /** InfixType ::= RefinedType {id [nl] RefinedType} * | RefinedType `^` // under capture checking */ - def infixType(): Tree = 
infixTypeRest(refinedType()) + def infixType(inContextBound: Boolean = false): Tree = infixTypeRest(inContextBound)(refinedType()) - def infixTypeRest(t: Tree, operand: Location => Tree = refinedTypeFn): Tree = + def infixTypeRest(inContextBound: Boolean = false)(t: Tree, operand: Location => Tree = refinedTypeFn): Tree = infixOps(t, canStartInfixTypeTokens, operand, Location.ElseWhere, ParseKind.Type, isOperator = !followingIsVararg() && !isPureArrow - && !(isIdent(nme.as) && in.featureEnabled(Feature.modularity)) + && !(isIdent(nme.as) && sourceVersion.isAtLeast(`3.6`) && inContextBound) && nextCanFollowOperator(canStartInfixTypeTokens)) /** RefinedType ::= WithType {[nl] Refinement} [`^` CaptureSet] @@ -1962,13 +2008,13 @@ object Parsers { syntaxError(em"$msg\n\nHint: $hint", Span(start, in.lastOffset)) Ident(nme.ERROR.toTypeName) else if inPattern then - SplicePattern(expr, Nil) + SplicePattern(expr, Nil, Nil) else Splice(expr) } /** SimpleType ::= SimpleLiteral - * | ‘?’ SubtypeBounds + * | ‘?’ TypeBounds * | SimpleType1 * | SimpleType ‘(’ Singletons ‘)’ -- under language.experimental.dependent, checked in Typer * Singletons ::= Singleton {‘,’ Singleton} @@ -1977,7 +2023,7 @@ object Parsers { if isSimpleLiteral then SingletonTypeTree(simpleLiteral()) else if in.token == USCORE then - if ctx.settings.XkindProjector.value == "underscores" then + if ctx.settings.XkindProjector.value == "underscores" && !inMatchPattern then val start = in.skipToken() Ident(tpnme.USCOREkw).withSpan(Span(start, in.lastOffset, start)) else @@ -2188,9 +2234,15 @@ object Parsers { inBraces(refineStatSeq()) /** TypeBounds ::= [`>:' Type] [`<:' Type] + * | `^` -- under captureChecking */ def typeBounds(): TypeBoundsTree = - atSpan(in.offset) { TypeBoundsTree(bound(SUPERTYPE), bound(SUBTYPE)) } + atSpan(in.offset): + if in.isIdent(nme.UPARROW) && Feature.ccEnabled then + in.nextToken() + TypeBoundsTree(EmptyTree, makeCapsBound()) + else + TypeBoundsTree(bound(SUPERTYPE), bound(SUBTYPE)) private def bound(tok: Int): Tree = if (in.token == tok) { in.nextToken(); toplevelTyp() } @@ -2207,22 +2259,32 @@ object Parsers { /** ContextBound ::= Type [`as` id] */ def contextBound(pname: TypeName): Tree = - val t = toplevelTyp() + val t = toplevelTyp(inContextBound = true) val ownName = - if isIdent(nme.as) && in.featureEnabled(Feature.modularity) then + if isIdent(nme.as) && sourceVersion.isAtLeast(`3.6`) then in.nextToken() ident() else EmptyTermName ContextBoundTypeTree(t, pname, ownName) - /** ContextBounds ::= ContextBound | `{` ContextBound {`,` ContextBound} `}` + /** ContextBounds ::= ContextBound [`:` ContextBounds] + * | `{` ContextBound {`,` ContextBound} `}` */ def contextBounds(pname: TypeName): List[Tree] = if in.isColon then in.nextToken() - if in.token == LBRACE && in.featureEnabled(Feature.modularity) + if in.token == LBRACE && sourceVersion.isAtLeast(`3.6`) then inBraces(commaSeparated(() => contextBound(pname))) - else contextBound(pname) :: contextBounds(pname) + else + val bound = contextBound(pname) + val rest = + if in.isColon then + report.errorOrMigrationWarning( + em"Multiple context bounds should be enclosed in `{ ... 
}`", + in.sourcePos(), MigrationVersion.GivenSyntax) + contextBounds(pname) + else Nil + bound :: rest else if in.token == VIEWBOUND then report.errorOrMigrationWarning( em"view bounds `<%' are no longer supported, use a context bound `:' instead", @@ -2286,7 +2348,7 @@ object Parsers { t /** Expr ::= [`implicit'] FunParams (‘=>’ | ‘?=>’) Expr - * | HkTypeParamClause ‘=>’ Expr + * | TypTypeParamClause ‘=>’ Expr * | Expr1 * FunParams ::= Bindings * | id @@ -2294,7 +2356,7 @@ object Parsers { * ExprInParens ::= PostfixExpr `:' Type * | Expr * BlockResult ::= [‘implicit’] FunParams (‘=>’ | ‘?=>’) Block - * | HkTypeParamClause ‘=>’ Block + * | TypTypeParamClause ‘=>’ Block * | Expr1 * Expr1 ::= [‘inline’] `if' `(' Expr `)' {nl} Expr [[semi] else Expr] * | [‘inline’] `if' Expr `then' Expr [[semi] else Expr] @@ -2330,17 +2392,10 @@ object Parsers { closure(start, location, modifiers(BitSet(IMPLICIT))) case LBRACKET => val start = in.offset - val tparams = typeParamClause(ParamOwner.TypeParam) + val tparams = typeParamClause(ParamOwner.Type) val arrowOffset = accept(ARROW) val body = expr(location) - atSpan(start, arrowOffset) { - getFunction(body) match - case Some(f) => - PolyFunction(tparams, f) - case None => - syntaxError(em"Implementation restriction: polymorphic function literals must have a value parameter", arrowOffset) - errorTermTree(arrowOffset) - } + makePolyFunction(tparams, body, "literal", errorTermTree(arrowOffset), start, arrowOffset) case _ => val saved = placeholderParams placeholderParams = Nil @@ -2424,7 +2479,7 @@ object Parsers { in.nextToken(); val expr = subExpr() if expr.span.exists then expr - else unitLiteral // finally without an expression + else syntheticUnitLiteral // finally without an expression } else { if handler.isEmpty then @@ -2663,7 +2718,7 @@ object Parsers { * ColonArgument ::= colon [LambdaStart] * indent (CaseClauses | Block) outdent * LambdaStart ::= FunParams (‘=>’ | ‘?=>’) - * | HkTypeParamClause ‘=>’ + * | TypTypeParamClause ‘=>’ * ColonArgBody ::= indent (CaseClauses | Block) outdent * Quoted ::= ‘'’ ‘{’ Block ‘}’ * | ‘'’ ‘[’ Type ‘]’ @@ -2881,7 +2936,11 @@ object Parsers { /** Enumerators ::= Generator {semi Enumerator | Guard} */ - def enumerators(): List[Tree] = generator() :: enumeratorsRest() + def enumerators(): List[Tree] = + if in.featureEnabled(Feature.betterFors) then + aliasesUntilGenerator() ++ enumeratorsRest() + else + generator() :: enumeratorsRest() def enumeratorsRest(): List[Tree] = if (isStatSep) { @@ -2923,6 +2982,18 @@ object Parsers { GenFrom(pat, subExpr(), checkMode) } + def aliasesUntilGenerator(): List[Tree] = + if in.token == CASE then generator() :: Nil + else { + val pat = pattern1() + if in.token == EQUALS then + atSpan(startOffset(pat), in.skipToken()) { GenAlias(pat, subExpr()) } :: { + if (isStatSep) in.nextToken() + aliasesUntilGenerator() + } + else generatorRest(pat, casePat = false) :: Nil + } + /** ForExpr ::= ‘for’ ‘(’ Enumerators ‘)’ {nl} [‘do‘ | ‘yield’] Expr * | ‘for’ ‘{’ Enumerators ‘}’ {nl} [‘do‘ | ‘yield’] Expr * | ‘for’ Enumerators (‘do‘ | ‘yield’) Expr @@ -3165,7 +3236,7 @@ object Parsers { else { val start = in.lastOffset syntaxErrorOrIncomplete(IllegalStartOfSimplePattern(), expectedOffset) - errorTermTree(start) + atSpan(Span(start, in.offset)) { Ident(nme.WILDCARD) } } } @@ -3380,18 +3451,19 @@ object Parsers { /** ClsTypeParamClause::= ‘[’ ClsTypeParam {‘,’ ClsTypeParam} ‘]’ * ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] - * id [HkTypeParamClause] TypeParamBounds + * id [HkTypeParamClause] 
TypeAndCtxBounds * * DefTypeParamClause::= ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ * DefTypeParam ::= {Annotation} - * [`sealed`] -- under captureChecking - * id [HkTypeParamClause] TypeParamBounds + * id [HkTypeParamClause] TypeAndCtxBounds * * TypTypeParamClause::= ‘[’ TypTypeParam {‘,’ TypTypeParam} ‘]’ - * TypTypeParam ::= {Annotation} id [HkTypePamClause] TypeBounds + * TypTypeParam ::= {Annotation} + * (id | ‘_’) [HkTypeParamClause] TypeAndCtxBounds * * HkTypeParamClause ::= ‘[’ HkTypeParam {‘,’ HkTypeParam} ‘]’ - * HkTypeParam ::= {Annotation} [‘+’ | ‘-’] (id [HkTypePamClause] | ‘_’) TypeBounds + * HkTypeParam ::= {Annotation} [‘+’ | ‘-’] + * (id | ‘_’) [HkTypePamClause] TypeBounds */ def typeParamClause(paramOwner: ParamOwner): List[TypeDef] = inBracketsWithCommas { @@ -3402,7 +3474,6 @@ object Parsers { ok def typeParam(): TypeDef = { - val isAbstractOwner = paramOwner == ParamOwner.Type || paramOwner == ParamOwner.TypeParam val start = in.offset var mods = annotsAsMods() | Param if paramOwner.isClass then @@ -3413,13 +3484,15 @@ object Parsers { mods |= Contravariant atSpan(start, nameStart) { val name = - if (isAbstractOwner && in.token == USCORE) { + if paramOwner.acceptsWildcard && in.token == USCORE then in.nextToken() WildcardParamName.fresh().toTypeName - } else ident().toTypeName - val hkparams = typeParamClauseOpt(ParamOwner.Type) - val bounds = if (isAbstractOwner) typeBounds() else typeAndCtxBounds(name) + val hkparams = typeParamClauseOpt(ParamOwner.Hk) + val bounds = + if paramOwner.acceptsCtxBounds then typeAndCtxBounds(name) + else if sourceVersion.isAtLeast(`3.6`) && paramOwner == ParamOwner.Type then typeAndCtxBounds(name) + else typeBounds() TypeDef(name, lambdaAbstract(hkparams, bounds)).withMods(mods) } } @@ -3432,7 +3505,11 @@ object Parsers { /** ContextTypes ::= FunArgType {‘,’ FunArgType} */ def contextTypes(paramOwner: ParamOwner, numLeadParams: Int, impliedMods: Modifiers): List[ValDef] = - val tps = commaSeparated(() => paramTypeOf(() => toplevelTyp())) + typesToParams( + commaSeparated(() => paramTypeOf(() => toplevelTyp())), + paramOwner, numLeadParams, impliedMods) + + def typesToParams(tps: List[Tree], paramOwner: ParamOwner, numLeadParams: Int, impliedMods: Modifiers): List[ValDef] = var counter = numLeadParams def nextIdx = { counter += 1; counter } val paramFlags = if paramOwner.isClass then LocalParamAccessor else Param @@ -3459,18 +3536,20 @@ object Parsers { def termParamClause( paramOwner: ParamOwner, numLeadParams: Int, // number of parameters preceding this clause - firstClause: Boolean = false // clause is the first in regular list of clauses + firstClause: Boolean = false, // clause is the first in regular list of clauses + initialMods: Modifiers = EmptyModifiers ): List[ValDef] = { - var impliedMods: Modifiers = EmptyModifiers + var impliedMods: Modifiers = initialMods def addParamMod(mod: () => Mod) = impliedMods = addMod(impliedMods, atSpan(in.skipToken()) { mod() }) def paramMods() = if in.token == IMPLICIT then addParamMod(() => Mod.Implicit()) - else - if isIdent(nme.using) then - addParamMod(() => Mod.Given()) + else if isIdent(nme.using) then + if initialMods.is(Given) then + syntaxError(em"`using` is already implied here, should not be given explicitly", in.offset) + addParamMod(() => Mod.Given()) def param(): ValDef = { val start = in.offset @@ -3836,9 +3915,6 @@ object Parsers { /** DefDef ::= DefSig [‘:’ Type] [‘=’ Expr] * | this TypelessClauses [DefImplicitClause] `=' ConstrExpr - * DefSig ::= id [DefTypeParamClause] 
DefTermParamClauses - * - * if clauseInterleaving is enabled: * DefSig ::= id [DefParamClauses] [DefImplicitClause] */ def defDefOrDcl(start: Offset, mods: Modifiers, numLeadParams: Int = 0): DefDef = atSpan(start, nameStart) { @@ -3878,13 +3954,11 @@ object Parsers { val ident = termIdent() var name = ident.name.asTermName val paramss = - if in.featureEnabled(Feature.clauseInterleaving) then - // If you are making interleaving stable manually, please refer to the PR introducing it instead, section "How to make non-experimental" + if Feature.clauseInterleavingEnabled(using in.languageImportContext) then typeOrTermParamClauses(ParamOwner.Def, numLeadParams) else val tparams = typeParamClauseOpt(ParamOwner.Def) val vparamss = termParamClauses(ParamOwner.Def, numLeadParams) - joinParams(tparams, vparamss) var tpt = fromWithinReturnType { typedOpt() } @@ -3921,10 +3995,10 @@ object Parsers { val stats = selfInvocation() :: ( if (isStatSep) { in.nextToken(); blockStatSeq() } else Nil) - Block(stats, unitLiteral) + Block(stats, syntheticUnitLiteral) } } - else Block(selfInvocation() :: Nil, unitLiteral) + else Block(selfInvocation() :: Nil, syntheticUnitLiteral) /** SelfInvocation ::= this ArgumentExprs {ArgumentExprs} */ @@ -3934,14 +4008,14 @@ object Parsers { argumentExprss(mkApply(Ident(nme.CONSTRUCTOR), argumentExprs())) } - /** TypeDef ::= id [TypeParamClause] {FunParamClause} TypeAndCtxBounds [‘=’ Type] + /** TypeDef ::= id [HkTypeParamClause] {FunParamClause} TypeAndCtxBounds [‘=’ Type] */ def typeDefOrDcl(start: Offset, mods: Modifiers): Tree = { newLinesOpt() atSpan(start, nameStart) { val nameIdent = typeIdent() val tname = nameIdent.name.asTypeName - val tparams = typeParamClauseOpt(ParamOwner.Type) + val tparams = typeParamClauseOpt(ParamOwner.Hk) val vparamss = funParamClauses() def makeTypeDef(rhs: Tree): Tree = { @@ -3980,7 +4054,7 @@ object Parsers { case SEMI | NEWLINE | NEWLINES | COMMA | RBRACE | OUTDENT | EOF => makeTypeDef(typeAndCtxBounds(tname)) case _ if (staged & StageKind.QuotedPattern) != 0 - || in.featureEnabled(Feature.modularity) && in.isColon => + || sourceVersion.isAtLeast(`3.6`) && in.isColon => makeTypeDef(typeAndCtxBounds(tname)) case _ => syntaxErrorOrIncomplete(ExpectedTypeBoundOrEquals(in.token)) @@ -4083,7 +4157,7 @@ object Parsers { if (in.token == COMMA) { in.nextToken() val ids = commaSeparated(() => termIdent()) - if ctx.settings.WenumCommentDiscard.value then + if ctx.settings.Whas.enumCommentDiscard then in.getDocComment(start).foreach: comm => warning( em"""Ambiguous Scaladoc comment on multiple cases is ignored. 
@@ -4140,18 +4214,67 @@ object Parsers { * OldGivenSig ::= [id] [DefTypeParamClause] {UsingParamClauses} ‘:’ * StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] * - * NewGivenDef ::= [GivenConditional '=>'] NewGivenSig - * GivenConditional ::= [DefTypeParamClause | UsingParamClause] {UsingParamClause} - * NewGivenSig ::= GivenType ['as' id] ([‘=’ Expr] | TemplateBody) - * | ConstrApps ['as' id] TemplateBody - * + * NewGivenDef ::= [id ':'] GivenSig + * GivenSig ::= GivenImpl + * | '(' ')' '=>' GivenImpl + * | GivenConditional '=>' GivenSig + * GivenImpl ::= GivenType ([‘=’ Expr] | TemplateBody) + * | ConstrApps TemplateBody + * GivenConditional ::= DefTypeParamClause + * | DefTermParamClause + * | '(' FunArgTypes ')' + * | GivenType * GivenType ::= AnnotType1 {id [nl] AnnotType1} */ def givenDef(start: Offset, mods: Modifiers, givenMod: Mod) = atSpan(start, nameStart) { var mods1 = addMod(mods, givenMod) val nameStart = in.offset - var name = if isIdent && followingIsOldStyleGivenSig() then ident() else EmptyTermName - var newSyntaxAllowed = in.featureEnabled(Feature.modularity) + var newSyntaxAllowed = sourceVersion.isAtLeast(`3.6`) + val hasEmbeddedColon = !in.isColon && followingIsGivenDefWithColon() + val name = if isIdent && hasEmbeddedColon then ident() else EmptyTermName + + def implemented(): List[Tree] = + if isSimpleLiteral then + rejectWildcardType(annotType()) :: Nil + else constrApp() match + case parent: Apply => parent :: moreConstrApps() + case parent if in.isIdent && newSyntaxAllowed => + infixTypeRest()(parent, _ => annotType1()) :: Nil + case parent => parent :: moreConstrApps() + + // The term parameters and parent references */ + def newTermParamssAndParents(numLeadParams: Int): (List[List[ValDef]], List[Tree]) = + if in.token == LPAREN && followingIsArrow() then + val params = + if in.lookahead.token == RPAREN && numLeadParams == 0 then + in.nextToken() + in.nextToken() + Nil + else + termParamClause( + ParamOwner.Given, numLeadParams, firstClause = true, initialMods = Modifiers(Given)) + accept(ARROW) + if params.isEmpty then (params :: Nil, implemented()) + else + val (paramss, parents) = newTermParamssAndParents(numLeadParams + params.length) + (params :: paramss, parents) + else + val parents = implemented() + if in.token == ARROW && parents.length == 1 && parents.head.isType then + in.nextToken() + val (paramss, parents1) = newTermParamssAndParents(numLeadParams + parents.length) + (typesToParams(parents, ParamOwner.Given, numLeadParams, Modifiers(Given)) :: paramss, parents1) + else + (Nil, parents) + + /** Type parameters, term parameters and parent clauses */ + def newSignature(): (List[TypeDef], (List[List[ValDef]], List[Tree])) = + val tparams = + if in.token == LBRACKET then + try typeParamClause(ParamOwner.Given) + finally accept(ARROW) + else Nil + (tparams, newTermParamssAndParents(numLeadParams = 0)) def moreConstrApps() = if newSyntaxAllowed && in.token == COMMA then @@ -4172,47 +4295,52 @@ object Parsers { .asInstanceOf[List[ParamClause]] val gdef = - val tparams = typeParamClauseOpt(ParamOwner.Given) - newLineOpt() - val vparamss = - if in.token == LPAREN && (in.lookahead.isIdent(nme.using) || name != EmptyTermName) - then termParamClauses(ParamOwner.Given) - else Nil - newLinesOpt() - val noParams = tparams.isEmpty && vparamss.isEmpty - val hasParamsOrId = !name.isEmpty || !noParams - if hasParamsOrId then - if in.isColon then - newSyntaxAllowed = false + val (tparams, (vparamss0, parents)) = + if in.isColon && 
!name.isEmpty then in.nextToken() - else if newSyntaxAllowed then accept(ARROW) - else acceptColon() - val parents = - if isSimpleLiteral then - rejectWildcardType(annotType()) :: Nil - else constrApp() match - case parent: Apply => parent :: moreConstrApps() - case parent if in.isIdent && newSyntaxAllowed => - infixTypeRest(parent, _ => annotType1()) :: Nil - case parent => parent :: moreConstrApps() - if newSyntaxAllowed && in.isIdent(nme.as) then - in.nextToken() - name = ident() - + newSignature() + else if hasEmbeddedColon then + report.errorOrMigrationWarning( + em"This old given syntax is no longer supported; use `=>` instead of `:`", + in.sourcePos(), MigrationVersion.GivenSyntax) + newSyntaxAllowed = false + val tparamsOld = typeParamClauseOpt(ParamOwner.Given) + newLineOpt() + val vparamssOld = + if in.token == LPAREN && (in.lookahead.isIdent(nme.using) || name != EmptyTermName) + then termParamClauses(ParamOwner.Given) + else Nil + acceptColon() + (tparamsOld, (vparamssOld, implemented())) + else + newSignature() + val hasParams = tparams.nonEmpty || vparamss0.nonEmpty + val vparamss = vparamss0 match + case Nil :: Nil => Nil + case _ => vparamss0 val parentsIsType = parents.length == 1 && parents.head.isType if in.token == EQUALS && parentsIsType then // given alias accept(EQUALS) mods1 |= Final - if noParams && !mods.is(Inline) then + if !hasParams && !mods.is(Inline) then mods1 |= Lazy ValDef(name, parents.head, subExpr()) else DefDef(name, adjustDefParams(joinParams(tparams, vparamss)), parents.head, subExpr()) - else if (isStatSep || isStatSeqEnd) && parentsIsType && !newSyntaxAllowed then + else if (isStatSep || isStatSeqEnd) && parentsIsType + && !(name.isEmpty && newSyntaxAllowed) + // under new syntax, anonymous givens are translated to concrete classes, + // so it's treated as a structural instance. + then // old-style abstract given if name.isEmpty then - syntaxError(em"anonymous given cannot be abstract") + syntaxError(em"Anonymous given cannot be abstract, or maybe you want to define a concrete given and are missing a `()` argument?", in.lastOffset) + if newSyntaxAllowed then + report.errorOrMigrationWarning( + em"""This defines an abstract given, which is no longer supported. Use a `deferred` given instead. + |Or, if you intend to define a concrete given, follow the type with `()` arguments.""", + in.sourcePos(in.lastOffset), MigrationVersion.GivenSyntax) DefDef(name, adjustDefParams(joinParams(tparams, vparamss)), parents.head, EmptyTree) else // structural instance @@ -4224,12 +4352,16 @@ val templ = if isStatSep || isStatSeqEnd then Template(constr, parents, Nil, EmptyValDef, Nil) - else if !newSyntaxAllowed || in.token == WITH then + else if !newSyntaxAllowed + || in.token == WITH && tparams.isEmpty && vparamss.isEmpty + // if new syntax is still allowed and there are parameters, they must be new-style conditions, + // so old with-style syntax would not be allowed.
+ then withTemplate(constr, parents) else possibleTemplateStart() templateBodyOpt(constr, parents, Nil) - if noParams && !mods.is(Inline) then ModuleDef(name, templ) + if !hasParams && !mods.is(Inline) then ModuleDef(name, templ) else TypeDef(name.toTypeName, templ) end gdef finalizeDef(gdef, mods1, start) @@ -4398,6 +4530,9 @@ object Parsers { /** with Template, with EOL interpreted */ def withTemplate(constr: DefDef, parents: List[Tree]): Template = + report.errorOrMigrationWarning( + em"Given member definitions starting with `with` are no longer supported; use `{...}` or `:` followed by newline instead", + in.sourcePos(), MigrationVersion.GivenSyntax) accept(WITH) val (self, stats) = templateBody(parents, rewriteWithColon = false) Template(constr, parents, Nil, self, stats) @@ -4560,6 +4695,12 @@ object Parsers { for (imod <- implicitMods.mods) mods = addMod(mods, imod) if (mods.is(Final)) // A final modifier means the local definition is "class-like". // FIXME: Deal with modifiers separately + + // See test 17579. We allow `final` on `given` because these can be + // translated to class definitions, for which `final` is allowed but + // redundant--there is a seperate warning for this. + if isDclIntro && in.token != GIVEN then syntaxError(FinalLocalDef()) + tmplDef(start, mods) else defOrDcl(start, mods) @@ -4573,7 +4714,7 @@ object Parsers { * | Expr1 * | */ - def blockStatSeq(): List[Tree] = checkNoEscapingPlaceholders { + def blockStatSeq(outermost: Boolean = false): List[Tree] = checkNoEscapingPlaceholders { val stats = new ListBuffer[Tree] while var empty = false @@ -4585,7 +4726,11 @@ object Parsers { stats += closure(in.offset, Location.InBlock, modifiers(BitSet(IMPLICIT))) else if isIdent(nme.extension) && followingIsExtension() then stats += extension() - else if isDefIntro(localModifierTokens, excludedSoftModifiers = Set(nme.`opaque`)) then + else if isDefIntro(localModifierTokens, + excludedSoftModifiers = + // Allow opaque definitions at outermost level in REPL. + if outermost && ctx.mode.is(Mode.Interactive) + then Set.empty else Set(nme.`opaque`)) then stats +++= localDef(in.offset) else empty = true diff --git a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala index 831d31d6fa6e..2dc0a1a8d805 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala @@ -306,11 +306,15 @@ object Scanners { println(s"\nSTART SKIP AT ${sourcePos().line + 1}, $this in $currentRegion") var noProgress = 0 // Defensive measure to ensure we always get out of the following while loop - // even if source file is weirly formatted (i.e. we never reach EOF + // even if source file is weirly formatted (i.e. we never reach EOF) + var prevOffset = offset while !atStop && noProgress < 3 do - val prevOffset = offset nextToken() - if offset == prevOffset then noProgress += 1 else noProgress = 0 + if offset <= prevOffset then + noProgress += 1 + else + prevOffset = offset + noProgress = 0 if debugTokenStream then println(s"\nSTOP SKIP AT ${sourcePos().line + 1}, $this in $currentRegion") if token == OUTDENT then dropUntil(_.isInstanceOf[Indented]) @@ -684,7 +688,7 @@ object Scanners { if !r.isOutermost && closingRegionTokens.contains(token) && !(token == CASE && r.prefix == MATCH) - && next.token == EMPTY // can be violated for ill-formed programs, e.g. neg/i12605.sala + && next.token == EMPTY // can be violated for ill-formed programs, e.g. 
neg/i12605.scala => insert(OUTDENT, offset) case _ => @@ -736,7 +740,10 @@ object Scanners { && currentRegion.commasExpected && (token == RPAREN || token == RBRACKET || token == RBRACE || token == OUTDENT) then - () /* skip the trailing comma */ + // encountered a trailing comma + // reset only the lastOffset + // so that the tree's span is correct + lastOffset = prev.lastOffset else reset() case END => diff --git a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala index b0a533b2f1df..c78a336ecdf5 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala @@ -297,7 +297,7 @@ object Tokens extends TokensCommon { final val closingParens = BitSet(RPAREN, RBRACKET, RBRACE) - final val softModifierNames = Set(nme.inline, nme.into, nme.opaque, nme.open, nme.transparent, nme.infix) + final val softModifierNames = Set(nme.inline, nme.opaque, nme.open, nme.transparent, nme.infix) def showTokenDetailed(token: Int): String = debugString(token) diff --git a/compiler/src/dotty/tools/dotc/printing/Formatting.scala b/compiler/src/dotty/tools/dotc/printing/Formatting.scala index 43cac17e6318..ccd7b4e4e282 100644 --- a/compiler/src/dotty/tools/dotc/printing/Formatting.scala +++ b/compiler/src/dotty/tools/dotc/printing/Formatting.scala @@ -76,6 +76,9 @@ object Formatting { given [X: Show]: Show[Seq[X]] with def show(x: Seq[X]) = CtxShow(x.map(toStr)) + given Show[Seq[Nothing]] with + def show(x: Seq[Nothing]) = CtxShow(x) + given [K: Show, V: Show]: Show[Map[K, V]] with def show(x: Map[K, V]) = CtxShow(x.map((k, v) => s"${toStr(k)} => ${toStr(v)}")) @@ -106,12 +109,20 @@ object Formatting { case Atoms.Range(lo, hi) => CtxShow(s"Range(${toStr(lo.toList)}, ${toStr(hi.toList)})") end given + given Show[ast.untpd.Modifiers] with + def show(x: ast.untpd.Modifiers) = + CtxShow(s"Modifiers(${toStr(x.flags)}, ${toStr(x.privateWithin)}, ${toStr(x.annotations)}, ${toStr(x.mods)})") + + given Show[ast.untpd.Mod] with + def show(x: ast.untpd.Mod) = CtxShow(s"Mod(${toStr(x.flags)})") + given Show[Showable] = ShowAny given Show[Shown] = ShowAny given Show[Int] = ShowAny given Show[Char] = ShowAny given Show[Boolean] = ShowAny given Show[Integer] = ShowAny + given Show[Long] = ShowAny given Show[String] = ShowAny given Show[Class[?]] = ShowAny given Show[Throwable] = ShowAny @@ -119,6 +130,7 @@ object Formatting { given Show[CompilationUnit] = ShowAny given Show[Phases.Phase] = ShowAny given Show[TyperState] = ShowAny + given Show[Unit] = ShowAny given Show[config.ScalaVersion] = ShowAny given Show[io.AbstractFile] = ShowAny given Show[parsing.Scanners.Scanner] = ShowAny @@ -172,7 +184,7 @@ object Formatting { * The idea is to do this for known cases that are useful and then fall back * on regular syntax highlighting for the cases which are unhandled. * - * Please not that if used in combination with `disambiguateTypes` the + * Please note that if used in combination with `disambiguateTypes` the * correct `Context` for printing should also be passed when calling the * method. 
* diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index c06b43cafe17..cac82eb0c4bd 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -15,7 +15,7 @@ import util.SourcePosition import scala.util.control.NonFatal import scala.annotation.switch import config.{Config, Feature} -import cc.{CapturingType, RetainingType, CaptureSet, ReachCapability, MaybeCapability, isBoxed, levelOwner, retainedElems, isRetainsLike} +import cc.{CapturingType, RetainingType, CaptureSet, ReachCapability, MaybeCapability, isBoxed, retainedElems, isRetainsLike} class PlainPrinter(_ctx: Context) extends Printer { @@ -165,6 +165,8 @@ class PlainPrinter(_ctx: Context) extends Printer { private def toTextRetainedElem[T <: Untyped](ref: Tree[T]): Text = ref match case ref: RefTree[?] if ref.typeOpt.exists => toTextCaptureRef(ref.typeOpt) + case TypeApply(fn, arg :: Nil) if fn.symbol == defn.Caps_capsOf => + toTextRetainedElem(arg) case _ => toText(ref) @@ -251,7 +253,7 @@ class PlainPrinter(_ctx: Context) extends Printer { toTextCapturing(parent, refsText, "") ~ Str("R").provided(printDebug) else toText(parent) case tp: PreviousErrorType if ctx.settings.XprintTypes.value => - "" // do not print previously reported error message because they may try to print this error type again recuresevely + "" // do not print previously reported error message because they may try to print this error type again recursively case tp: ErrorType => s"" case tp: WildcardType => @@ -414,9 +416,10 @@ class PlainPrinter(_ctx: Context) extends Printer { homogenize(tp) match case tp: TermRef if tp.symbol == defn.captureRoot => Str("cap") case tp: SingletonType => toTextRef(tp) - case ReachCapability(tp1) => toTextRef(tp1) ~ "*" - case MaybeCapability(tp1) => toTextRef(tp1) ~ "?" - case _ => toText(tp) + case tp: (TypeRef | TypeParamRef) => toText(tp) ~ "^" + case ReachCapability(tp1) => toTextCaptureRef(tp1) ~ "*" + case MaybeCapability(tp1) => toTextCaptureRef(tp1) ~ "?" 
+ case tp => toText(tp) protected def isOmittablePrefix(sym: Symbol): Boolean = defn.unqualifiedOwnerTypes.exists(_.symbol == sym) || isEmptyPrefix(sym) diff --git a/compiler/src/dotty/tools/dotc/printing/Printer.scala b/compiler/src/dotty/tools/dotc/printing/Printer.scala index 297dc31ea94a..9f485ee84cda 100644 --- a/compiler/src/dotty/tools/dotc/printing/Printer.scala +++ b/compiler/src/dotty/tools/dotc/printing/Printer.scala @@ -71,7 +71,7 @@ abstract class Printer { def changePrec(prec: Precedence)(op: => Text): Text = if (prec < this.prec) atPrec(prec) ("(" ~ op ~ ")") else atPrec(prec)(op) - /** The name, possibly with with namespace suffix if debugNames is set: + /** The name, possibly with namespace suffix if debugNames is set: * /L for local names, /V for other term names, /T for type names */ def nameString(name: Name): String diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 1ff4c8cae339..3caba59a091f 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -17,6 +17,7 @@ import Denotations.* import SymDenotations.* import StdNames.{nme, tpnme} import ast.{Trees, tpd, untpd} +import tpd.closureDef import typer.{Implicits, Namer, Applications} import typer.ProtoTypes.* import Trees.* @@ -24,10 +25,12 @@ import TypeApplications.* import NameKinds.{WildcardParamName, DefaultGetterName} import util.Chars.isOperatorPart import config.{Config, Feature} +import config.Feature.sourceVersion +import config.SourceVersion.* import dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.ast.untpd.{MemberDef, Modifiers, PackageDef, RefTree, Template, TypeDef, ValOrDefDef} -import cc.{CaptureSet, CapturingType, toCaptureSet, IllegalCaptureRef, isRetains} +import cc.{CaptureSet, CapturingType, toCaptureSet, IllegalCaptureRef, isRetains, ReachCapability, MaybeCapability} import dotty.tools.dotc.parsing.JavaParsers class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { @@ -330,6 +333,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { "?" ~ (("(ignored: " ~ toText(ignored) ~ ")") provided printDebug) case tp @ PolyProto(targs, resType) => "[applied to [" ~ toTextGlobal(targs, ", ") ~ "] returning " ~ toText(resType) + case ReachCapability(_) | MaybeCapability(_) => + toTextCaptureRef(tp) case _ => super.toText(tp) } @@ -506,6 +511,9 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { toText(name) ~ (if name.isTermName && arg.isType then " : " else " = ") ~ toText(arg) case Assign(lhs, rhs) => changePrec(GlobalPrec) { toTextLocal(lhs) ~ " = " ~ toText(rhs) } + case closureDef(meth) if !printDebug => + withEnclosingDef(meth): + meth.paramss.map(paramsText).foldRight(toText(meth.rhs))(_ ~ " => " ~ _) case block: Block => blockToText(block) case If(cond, thenp, elsep) => @@ -558,13 +566,15 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { else keywordText("{{") ~ keywordText("/* inlined from ") ~ toText(call) ~ keywordText(" */") ~ bodyText ~ keywordText("}}") case tpt: untpd.DerivedTypeTree => "" - case TypeTree() => + case tree: TypeTree => typeText(toText(tree.typeOpt)) - ~ Str("(inf)").provided(tree.isInstanceOf[InferredTypeTree] && printDebug) + ~ Str("(inf)").provided(tree.isInferred && printDebug) case SingletonTypeTree(ref) => toTextLocal(ref) ~ "." 
~ keywordStr("type") case RefinedTypeTree(tpt, refines) => - toTextLocal(tpt) ~ " " ~ blockText(refines) + if defn.isFunctionSymbol(tpt.symbol) && tree.hasType && !printDebug + then changePrec(GlobalPrec) { toText(tree.typeOpt) } + else toTextLocal(tpt) ~ blockText(refines) case AppliedTypeTree(tpt, args) => if (tpt.symbol == defn.orType && args.length == 2) changePrec(OrTypePrec) { toText(args(0)) ~ " | " ~ atPrec(OrTypePrec + 1) { toText(args(1)) } } @@ -747,7 +757,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case GenAlias(pat, expr) => toText(pat) ~ " = " ~ toText(expr) case ContextBounds(bounds, cxBounds) => - if Feature.enabled(Feature.modularity) then + if sourceVersion.isAtLeast(`3.6`) then def boundsText(bounds: Tree) = bounds match case ContextBoundTypeTree(tpt, _, ownName) => toText(tpt) ~ (" as " ~ toText(ownName) `provided` !ownName.isEmpty) @@ -789,11 +799,12 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { val open = if (body.isTerm) keywordStr("{") else keywordStr("[") val close = if (body.isTerm) keywordStr("}") else keywordStr("]") keywordStr("'") ~ quotesText ~ open ~ bindingsText ~ toTextGlobal(body) ~ close - case SplicePattern(pattern, args) => + case SplicePattern(pattern, typeargs, args) => val spliceTypeText = (keywordStr("[") ~ toTextGlobal(tree.typeOpt) ~ keywordStr("]")).provided(printDebug && tree.typeOpt.exists) keywordStr("$") ~ spliceTypeText ~ { - if args.isEmpty then keywordStr("{") ~ inPattern(toText(pattern)) ~ keywordStr("}") - else toText(pattern) ~ "(" ~ toTextGlobal(args, ", ") ~ ")" + if typeargs.isEmpty && args.isEmpty then keywordStr("{") ~ inPattern(toText(pattern)) ~ keywordStr("}") + else if typeargs.isEmpty then toText(pattern) ~ "(" ~ toTextGlobal(args, ", ") ~ ")" + else toText(pattern) ~ "[" ~ toTextGlobal(typeargs, ", ")~ "]" ~ "(" ~ toTextGlobal(args, ", ") ~ ")" } case Hole(isTerm, idx, args, content) => val (prefix, postfix) = if isTerm then ("{{{", "}}}") else ("[[[", "]]]") @@ -988,7 +999,9 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { // - trailingUsing = List(`(using D)`) // - rest = List(`(g: G)`, `(using H)`) // we need to swap (rightTyParams ++ rightParam) with (leftParam ++ trailingUsing) - val (leftTyParams, rest1) = tree.paramss.span(isTypeParamClause) + val (leftTyParams, rest1) = tree.paramss match + case fst :: tail if isTypeParamClause(fst) => (List(fst), tail) + case other => (List(), other) val (leadingUsing, rest2) = rest1.span(isUsingClause) val (rightTyParams, rest3) = rest2.span(isTypeParamClause) val (rightParam, rest4) = rest3.splitAt(1) @@ -1112,6 +1125,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { recur(fn) ~ "(" ~ toTextGlobal(explicitArgs, ", ") ~ ")" case TypeApply(fn, args) => recur(fn) ~ "[" ~ toTextGlobal(args, ", ") ~ "]" case Select(qual, nme.CONSTRUCTOR) => recur(qual) + case id @ Ident(tpnme.BOUNDTYPE_ANNOT) => "@" ~ toText(id.symbol.name) case New(tpt) => recur(tpt) case _ => val annotSym = sym.orElse(tree.symbol.enclosingClass) diff --git a/compiler/src/dotty/tools/dotc/profile/ChromeTrace.scala b/compiler/src/dotty/tools/dotc/profile/ChromeTrace.scala new file mode 100644 index 000000000000..4950f439640f --- /dev/null +++ b/compiler/src/dotty/tools/dotc/profile/ChromeTrace.scala @@ -0,0 +1,191 @@ +// Scala 2 compiler backport of https://github.com/scala/scala/pull/7364 +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package dotty.tools.dotc.profile + +import scala.language.unsafeNulls + +import java.io.Closeable +import java.lang.management.ManagementFactory +import java.nio.file.{Files, Path} +import java.util +import java.util.concurrent.TimeUnit + +import scala.collection.mutable + +object ChromeTrace { + private object EventType { + final val Start = "B" + final val Instant = "I" + final val End = "E" + final val Complete = "X" + + final val Counter = "C" + + final val AsyncStart = "b" + final val AsyncInstant = "n" + final val AsyncEnd = "e" + } +} + +/** Allows writing a subset of capture traces based on https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/preview# + * Can be visualized using https://ui.perfetto.dev/, Chrome's about://tracing (outdated) or the tooling in https://www.google.com.au/search?q=catapult+tracing&oq=catapult+tracing+&aqs=chrome..69i57.3974j0j4&sourceid=chrome&ie=UTF-8 */ +final class ChromeTrace(f: Path) extends Closeable { + import ChromeTrace.EventType + private val traceWriter = FileUtils.newAsyncBufferedWriter(f) + private val context = mutable.Stack[JsonContext](TopContext) + private val tidCache = new ThreadLocal[String]() { + @annotation.nowarn("cat=deprecation") + override def initialValue(): String = "%05d".format(Thread.currentThread().getId()) + } + objStart() + fld("traceEvents") + context.push(ValueContext) + arrStart() + traceWriter.newLine() + + private val pid = ManagementFactory.getRuntimeMXBean().getName().replaceAll("@.*", "") + + override def close(): Unit = { + arrEnd() + objEnd() + context.pop() + tidCache.remove() + traceWriter.close() + } + + def traceDurationEvent(name: String, startNanos: Long, durationNanos: Long, tid: String = this.tid(), pidSuffix: String = ""): Unit = { + val durationMicros = nanosToMicros(durationNanos) + val startMicros = nanosToMicros(startNanos) + objStart() + str("cat", "scalac") + str("name", name) + str("ph", EventType.Complete) + str("tid", tid) + writePid(pidSuffix) + lng("ts", startMicros) + lng("dur", durationMicros) + objEnd() + traceWriter.newLine() + } + + private def writePid(pidSuffix: String) = { + if (pidSuffix == "") + str("pid", pid) + else + str2("pid", pid, "-", pidSuffix) + } + + def traceCounterEvent(name: String, counterName: String, count: Long, processWide: Boolean): Unit = { + objStart() + str("cat", "scalac") + str("name", name) + str("ph", EventType.Counter) + str("tid", tid()) + writePid(pidSuffix = if (processWide) "" else tid()) + lng("ts", microTime()) + fld("args") + objStart() + lng(counterName, count) + objEnd() + objEnd() + traceWriter.newLine() + } + + def traceDurationEventStart(cat: String, name: String, colour: String = "", pidSuffix: String = tid()): Unit = traceDurationEventStartEnd(EventType.Start, cat, name, colour, pidSuffix) + def traceDurationEventEnd(cat: String, name: String, colour: String = "", pidSuffix: String = tid()): Unit = traceDurationEventStartEnd(EventType.End, cat, name, colour, pidSuffix) + + private def traceDurationEventStartEnd(eventType: String, cat: String, name: String, colour: String, pidSuffix: String = ""): Unit = { + objStart() + str("cat", cat) + str("name", name) + str("ph", eventType) + writePid(pidSuffix) + str("tid", tid()) + lng("ts", microTime()) + if (colour != "") { + str("cname", colour) + } + objEnd()
+ traceWriter.newLine() + } + + private def tid(): String = tidCache.get() + + private def nanosToMicros(t: Long): Long = TimeUnit.NANOSECONDS.toMicros(t) + + private def microTime(): Long = nanosToMicros(System.nanoTime()) + + private sealed abstract class JsonContext + private case class ArrayContext(var first: Boolean) extends JsonContext + private case class ObjectContext(var first: Boolean) extends JsonContext + private case object ValueContext extends JsonContext + private case object TopContext extends JsonContext + + private def str(name: String, value: String): Unit = { + fld(name) + traceWriter.write("\"") + traceWriter.write(value) // This assumes no escaping is needed + traceWriter.write("\"") + } + private def str2(name: String, value: String, valueContinued1: String, valueContinued2: String): Unit = { + fld(name) + traceWriter.write("\"") + traceWriter.write(value) // This assumes no escaping is needed + traceWriter.write(valueContinued1) // This assumes no escaping is needed + traceWriter.write(valueContinued2) // This assumes no escaping is needed + traceWriter.write("\"") + } + private def lng(name: String, value: Long): Unit = { + fld(name) + traceWriter.write(String.valueOf(value)) + traceWriter.write("") + } + private def objStart(): Unit = { + context.top match { + case ac @ ArrayContext(first) => + if (first) ac.first = false + else traceWriter.write(",") + case _ => + } + context.push(ObjectContext(true)) + traceWriter.write("{") + } + private def objEnd(): Unit = { + traceWriter.write("}") + context.pop() + } + private def arrStart(): Unit = { + traceWriter.write("[") + context.push(ArrayContext(true)) + } + private def arrEnd(): Unit = { + traceWriter.write("]") + context.pop() + } + + private def fld(name: String) = { + val topContext = context.top + topContext match { + case oc @ ObjectContext(first) => + if (first) oc.first = false + else traceWriter.write(",") + case context => + throw new IllegalStateException("Wrong context: " + context) + } + traceWriter.write("\"") + traceWriter.write(name) + traceWriter.write("\"") + traceWriter.write(":") + } +} \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/profile/FileUtils.scala b/compiler/src/dotty/tools/dotc/profile/FileUtils.scala new file mode 100644 index 000000000000..4aec428c05bf --- /dev/null +++ b/compiler/src/dotty/tools/dotc/profile/FileUtils.scala @@ -0,0 +1,204 @@ +// Scala 2 compiler backport of https://github.com/scala/scala/pull/7364 + +/* +* Scala (https://www.scala-lang.org) +* +* Copyright EPFL and Lightbend, Inc. +* +* Licensed under Apache License 2.0 +* (http://www.apache.org/licenses/LICENSE-2.0). +* +* See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package dotty.tools.dotc.profile + +import scala.language.unsafeNulls + +import java.io.{BufferedWriter, IOException, OutputStreamWriter, Writer} +import java.nio.CharBuffer +import java.nio.charset.{Charset, CharsetEncoder, StandardCharsets} +import java.nio.file.{Files, OpenOption, Path} +import java.util.concurrent.LinkedBlockingQueue +import java.util.concurrent.atomic.AtomicBoolean + + +import scala.concurrent.duration.Duration +import scala.concurrent.{Await, Promise} +import scala.util.{Failure, Success} +import scala.annotation.internal.sharable + +object FileUtils { + def newAsyncBufferedWriter(path: Path, charset: Charset = StandardCharsets.UTF_8.nn, options: Array[OpenOption] = NO_OPTIONS, threadsafe: Boolean = false): LineWriter = { + val encoder: CharsetEncoder = charset.newEncoder + val writer = new OutputStreamWriter(Files.newOutputStream(path, options: _*), encoder) + newAsyncBufferedWriter(new BufferedWriter(writer), threadsafe) + } + def newAsyncBufferedWriter(underlying: Writer, threadsafe: Boolean): LineWriter = { + val async = new AsyncBufferedWriter(underlying) + if (threadsafe) new ThreadsafeWriter(async) else async + } + private val NO_OPTIONS = new Array[OpenOption](0) + + sealed abstract class LineWriter extends Writer { + def newLine(): Unit + } + private class ThreadsafeWriter(val underlying: AsyncBufferedWriter) extends LineWriter { + lock = underlying + override def write(c: Int): Unit = + lock.synchronized (underlying.write(c)) + + override def write(cbuf: Array[Char]): Unit = + lock.synchronized (underlying.write(cbuf)) + + override def write(cbuf: Array[Char], off: Int, len: Int): Unit = + lock.synchronized (underlying.write(cbuf, off, len)) + + override def write(str: String): Unit = + lock.synchronized (underlying.write(str)) + + override def write(str: String, off: Int, len: Int): Unit = + lock.synchronized (underlying.write(str, off, len)) + + override def flush(): Unit = + lock.synchronized (underlying.flush()) + + override def close(): Unit = + lock.synchronized (underlying.close()) + + override def newLine(): Unit = + lock.synchronized (underlying.newLine()) + + } + + private object AsyncBufferedWriter { + @sharable private val Close = CharBuffer.allocate(0) + @sharable private val Flush = CharBuffer.allocate(0) + } + private class AsyncBufferedWriter(val underlying: Writer, bufferSize : Int = 4096) extends LineWriter { + private var current: CharBuffer = allocate + override def write(c: Int): Unit = super.write(c) + private def flushAsync(): Unit = { + background.ensureProcessed(current) + current = allocate + } +// allocate or reuse a CharArray which is guaranteed to have a backing array + private def allocate: CharBuffer = { + val reused = background.reuseBuffer + if (reused eq null) CharBuffer.allocate(bufferSize) + else { + //we don't care about race conditions + background.reuseBuffer = null + reused.clear() + reused + } + } + + override def write(cbuf: Array[Char], initialOffset: Int, initialLength: Int): Unit = { + var offset = initialOffset + var length = initialLength + while (length > 0) { + val capacity = current.remaining() + if (length <= capacity) { + current.put(cbuf, offset, length) + length = 0 + } else { + current.put(cbuf, offset, capacity) + flushAsync() + length -= capacity + offset += capacity + } + } + } + + override def write(s: String, initialOffset: Int, initialLength: Int): Unit = { + var offset = initialOffset + var length = initialLength + while (length > 0) { + val capacity = current.remaining() + if (length <= 
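// Minimal usage sketch for the factory above (the file name is an arbitrary example): writes are
// buffered and handed to a background task, and close() waits for the queue to drain before
// closing the underlying stream.
//   val out = FileUtils.newAsyncBufferedWriter(java.nio.file.Paths.get("profiler.log"))
//   out.write("phase: typer")
//   out.newLine()
//   out.close()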
capacity) { + current.put(s, offset, offset + length) + length = 0 + } else { + current.put(s, offset, offset + capacity) + flushAsync() + length -= capacity + offset += capacity + } + } + } + + def newLine(): Unit = write(scala.util.Properties.lineSeparator) + + /** slightly breaks the flush contract in that the flush is not complete when the method returns */ + override def flush(): Unit = { + flushAsync() + } + + override def close(): Unit = { + background.ensureProcessed(current) + background.ensureProcessed(AsyncBufferedWriter.Close) + current = null + Await.result(background.asyncStatus.future, Duration.Inf) + underlying.close() + } + private object background extends Runnable{ + + import scala.concurrent.ExecutionContext.Implicits.global + + private val pending = new LinkedBlockingQueue[CharBuffer] + //a failure detected will case an Failure, Success indicates a close + val asyncStatus = Promise[Unit]() + private val scheduled = new AtomicBoolean + @volatile var reuseBuffer: CharBuffer = _ + + def ensureProcessed(buffer: CharBuffer): Unit = { + if (asyncStatus.isCompleted) { + asyncStatus.future.value.get match { + case Success(()) => throw new IllegalStateException("closed") + case Failure(t) => throw new IOException("async failure", t) + } + } + + //order is essential - add to the queue before the CAS + pending.add(buffer) + if (scheduled.compareAndSet(false, true)) { + global.execute(background) + } + } + + def run(): Unit = { + try { + while (!pending.isEmpty) { + val next = pending.poll() + if (next eq AsyncBufferedWriter.Flush) { + underlying.flush() + } else if (next eq AsyncBufferedWriter.Close) { + underlying.flush() + underlying.close() + asyncStatus.trySuccess(()) + } else { + val array = next.array() + next.flip() + underlying.write(array, next.arrayOffset() + next.position(), next.limit()) + reuseBuffer = next + } + } + } catch { + case t: Throwable => + asyncStatus.tryFailure(t) + throw t + } + finally scheduled.set(false) + + //we are not scheduled any more + //as a last check ensure that we didnt race with an addition to the queue + //order is essential - queue is checked before CAS + if ((!pending.isEmpty) && scheduled.compareAndSet(false, true)) { + global.execute(background) + } + } + } + } +} \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/profile/JsonNameTransformer.scala b/compiler/src/dotty/tools/dotc/profile/JsonNameTransformer.scala new file mode 100644 index 000000000000..8777a95c33cf --- /dev/null +++ b/compiler/src/dotty/tools/dotc/profile/JsonNameTransformer.scala @@ -0,0 +1,46 @@ +package dotty.tools.dotc.profile + +import scala.annotation.internal.sharable + +// Based on NameTransformer but dedicated for JSON encoding rules +object JsonNameTransformer { + private val nops = 128 + + @sharable private val op2code = new Array[String](nops) + private def enterOp(op: Char, code: String) = op2code(op.toInt) = code + + enterOp('\"', "\\\"") + enterOp('\\', "\\\\") + // enterOp('/', "\\/") // optional, no need for escaping outside of html context + enterOp('\b', "\\b") + enterOp('\f', "\\f") + enterOp('\n', "\\n") + enterOp('\r', "\\r") + enterOp('\t', "\\t") + + def encode(name: String): String = { + var buf: StringBuilder = null.asInstanceOf + val len = name.length + var i = 0 + while (i < len) { + val c = name(i) + if (c < nops && (op2code(c.toInt) ne null)) { + if (buf eq null) { + buf = new StringBuilder() + buf.append(name.subSequence(0, i)) + } + buf.append(op2code(c.toInt)) + } else if (c <= 0x1F || c >= 0x7F) { + if (buf eq 
null) { + buf = new StringBuilder() + buf.append(name.subSequence(0, i)) + } + buf.append("\\u%04X".format(c.toInt)) + } else if (buf ne null) { + buf.append(c) + } + i += 1 + } + if (buf eq null) name else buf.toString + } +} \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/profile/Profiler.scala b/compiler/src/dotty/tools/dotc/profile/Profiler.scala index a13c9d41b529..ab3e73468385 100644 --- a/compiler/src/dotty/tools/dotc/profile/Profiler.scala +++ b/compiler/src/dotty/tools/dotc/profile/Profiler.scala @@ -4,6 +4,7 @@ import scala.annotation.* import scala.language.unsafeNulls import java.io.{FileWriter, PrintWriter} +import java.nio.file.Paths import java.lang.management.{ManagementFactory, GarbageCollectorMXBean, RuntimeMXBean, MemoryMXBean, ClassLoadingMXBean, CompilationMXBean} import java.util.concurrent.TimeUnit import java.util.concurrent.atomic.AtomicInteger @@ -12,8 +13,15 @@ import javax.management.{Notification, NotificationEmitter, NotificationListener import dotty.tools.dotc.core.Phases.Phase import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.CompilationUnit +import dotty.tools.dotc.core.Types.Type +import dotty.tools.dotc.core.Symbols.{Symbol, NoSymbol} +import dotty.tools.dotc.core.Flags +import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions import dotty.tools.io.AbstractFile import annotation.internal.sharable +import dotty.tools.dotc.core.Periods.InitialRunId +import scala.collection.mutable.UnrolledBuffer object Profiler { def apply()(using Context): Profiler = @@ -25,14 +33,19 @@ object Profiler { new RealProfiler(reporter) } - private[profile] val emptySnap: ProfileSnap = ProfileSnap(0, "", 0, 0, 0, 0, 0, 0) + final def NoOp: Profiler = NoOpProfiler + + private[profile] val emptySnap: ProfileSnap = ProfileSnap(0, "", 0, 0, 0, 0, 0, 0, 0, 0) } -case class GcEventData(pool:String, reportTimeNs: Long, gcStartMillis:Long, gcEndMillis:Long, name:String, action:String, cause:String, threads:Long) +case class GcEventData(pool:String, reportTimeNs: Long, gcStartMillis:Long, gcEndMillis:Long, durationMillis: Long, name:String, action:String, cause:String, threads:Long){ + val endNanos = System.nanoTime() +} case class ProfileSnap(threadId: Long, threadName: String, snapTimeNanos : Long, idleTimeNanos:Long, cpuTimeNanos: Long, userTimeNanos: Long, - allocatedBytes:Long, heapBytes:Long) { + allocatedBytes:Long, heapBytes:Long, + totalClassesLoaded: Long, totalJITCompilationTime: Long) { def updateHeap(heapBytes:Long): ProfileSnap = copy(heapBytes = heapBytes) } @@ -66,22 +79,63 @@ case class ProfileRange(start: ProfileSnap, end:ProfileSnap, phase:Phase, purpos def retainedHeapMB: Double = toMegaBytes(end.heapBytes - start.heapBytes) } +private opaque type TracedEventId <: String = String +private object TracedEventId: + def apply(stringValue: String): TracedEventId = stringValue + final val Empty: TracedEventId = "" + sealed trait Profiler { def finished(): Unit - def beforePhase(phase: Phase): ProfileSnap - - def afterPhase(phase: Phase, profileBefore: ProfileSnap): Unit + inline def onPhase[T](phase: Phase)(inline body: T): T = + val (event, snapshot) = beforePhase(phase) + try body + finally afterPhase(event, phase, snapshot) + protected final val EmptyPhaseEvent = (TracedEventId.Empty, Profiler.emptySnap) + protected def beforePhase(phase: Phase): (TracedEventId, ProfileSnap) = EmptyPhaseEvent + protected def afterPhase(event: TracedEventId, phase: Phase, profileBefore: ProfileSnap): Unit = () + + inline def onUnit[T](phase: 
Phase, unit: CompilationUnit)(inline body: T): T = + val event = beforeUnit(phase, unit) + try body + finally afterUnit(event) + protected def beforeUnit(phase: Phase, unit: CompilationUnit): TracedEventId = TracedEventId.Empty + protected def afterUnit(event: TracedEventId): Unit = () + + inline def onTypedDef[T](sym: Symbol)(inline body: T): T = + val event = beforeTypedDef(sym) + try body + finally afterTypedDef(event) + protected def beforeTypedDef(sym: Symbol): TracedEventId = TracedEventId.Empty + protected def afterTypedDef(token: TracedEventId): Unit = () + + inline def onImplicitSearch[T](pt: Type)(inline body: T): T = + val event = beforeImplicitSearch(pt) + try body + finally afterImplicitSearch(event) + protected def beforeImplicitSearch(pt: Type): TracedEventId = TracedEventId.Empty + protected def afterImplicitSearch(event: TracedEventId): Unit = () + + inline def onMacroSplice[T](macroSym: Symbol)(inline body: T): T = + val event = beforeMacroSplice(macroSym) + try body + finally afterMacroSplice(event) + protected def beforeMacroSplice(macroSym: Symbol): TracedEventId = TracedEventId.Empty + protected def afterMacroSplice(event: TracedEventId): Unit = () + + inline def onCompletion[T](root: Symbol, associatedFile: => AbstractFile)(inline body: T): T = + val (event, completionName) = beforeCompletion(root, associatedFile) + try body + finally afterCompletion(event, completionName) + protected final val EmptyCompletionEvent = (TracedEventId.Empty, "") + protected def beforeCompletion(root: Symbol, associatedFile: => AbstractFile): (TracedEventId, String) = EmptyCompletionEvent + protected def afterCompletion(event: TracedEventId, completionName: String): Unit = () } private [profile] object NoOpProfiler extends Profiler { - - override def beforePhase(phase: Phase): ProfileSnap = Profiler.emptySnap - - override def afterPhase(phase: Phase, profileBefore: ProfileSnap): Unit = () - override def finished(): Unit = () } + private [profile] object RealProfiler { import scala.jdk.CollectionConverters.* val runtimeMx: RuntimeMXBean = ManagementFactory.getRuntimeMXBean @@ -92,17 +146,6 @@ private [profile] object RealProfiler { val threadMx: ExtendedThreadMxBean = ExtendedThreadMxBean.proxy if (threadMx.isThreadCpuTimeSupported) threadMx.setThreadCpuTimeEnabled(true) private val idGen = new AtomicInteger() -} - -private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) extends Profiler with NotificationListener { - def completeBackground(threadRange: ProfileRange): Unit = - reporter.reportBackground(this, threadRange) - - def outDir: AbstractFile = ctx.settings.outputDir.value - - val id: Int = RealProfiler.idGen.incrementAndGet() - - private val mainThread = Thread.currentThread() @nowarn("cat=deprecation") private[profile] def snapThread(idleTimeNanos: Long): ProfileSnap = { @@ -117,13 +160,47 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) cpuTimeNanos = threadMx.getCurrentThreadCpuTime, userTimeNanos = threadMx.getCurrentThreadUserTime, allocatedBytes = threadMx.getThreadAllocatedBytes(Thread.currentThread().getId), - heapBytes = readHeapUsage() + heapBytes = readHeapUsage(), + totalClassesLoaded = classLoaderMx.getTotalLoadedClassCount, + totalJITCompilationTime = compileMx.getTotalCompilationTime ) } private def readHeapUsage() = RealProfiler.memoryMx.getHeapMemoryUsage.getUsed +} + +private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) extends Profiler with NotificationListener { + val id: Int = 
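// Illustrative call-site sketch for the inline hooks of the Profiler trait above; `profiler`,
// `phase`, `unit` and `typeCheckUnit` stand in for values a caller would already have in scope.
// Each wrapper effectively runs only `body` when NoOpProfiler is installed.
//   profiler.onPhase(phase) {
//     profiler.onUnit(phase, unit) {
//       typeCheckUnit(unit)
//     }
//   }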
RealProfiler.idGen.incrementAndGet() + private val mainThread = Thread.currentThread() + private val gcEvents = UnrolledBuffer[GcEventData]() + private var nextAfterUnitSnap: Long = System.nanoTime() + + private final val GcThreadId = "GC" + + enum Category: + def name: String = this.toString().toLowerCase() + case Run, Phase, File, TypeCheck, Implicit, Macro, Completion + private [profile] val chromeTrace = + if ctx.settings.YprofileTrace.isDefault + then null + else + val filename = ctx.settings.YprofileTrace.value + // Compilation units requiring multi-stage compilation (macros) would create new profiler instances. + // We need to store the traces in separate files to prevent overwriting their content. + // Alternatives: sharing a ChromeTrace instance between all runs / manual concatenation after all runs are done + // FIXME: The first assigned runId is equal to 2 instead of 1 (InitialRunId). + // Fix this when the bug described in Compiler.runId is resolved, by removing the +/- 1 adjustments + val suffix = if ctx.runId > InitialRunId + 1 then s".${ctx.runId - 1}" else "" + ChromeTrace(Paths.get(s"$filename$suffix")) + + private val compilerRunEvent: TracedEventId = traceDurationStart(Category.Run, s"scalac-$id") + + def completeBackground(threadRange: ProfileRange): Unit = + reporter.reportBackground(this, threadRange) + + def outDir: AbstractFile = ctx.settings.outputDir.value @nowarn - private def doGC: Unit = { + private def doGC(): Unit = { System.gc() System.runFinalization() } @@ -142,6 +219,15 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) case gc => } reporter.close(this) + if chromeTrace != null then + traceDurationEnd(Category.Run, compilerRunEvent) + for gcEvent <- gcEvents + do { + val durationNanos = TimeUnit.MILLISECONDS.toNanos(gcEvent.durationMillis) + val startNanos = gcEvent.endNanos - durationNanos + chromeTrace.traceDurationEvent(gcEvent.name, startNanos, durationNanos, tid = GcThreadId) + } + chromeTrace.close() } @@ -150,10 +236,7 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) import java.lang.{Integer => jInt} val reportNs = System.nanoTime() val data = notification.getUserData - val seq = notification.getSequenceNumber - val message = notification.getMessage val tpe = notification.getType - val time= notification.getTimeStamp data match { case cd: CompositeData if tpe == "com.sun.management.gc.notification" => val name = cd.get("gcName").toString @@ -164,49 +247,132 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) val startTime = info.get("startTime").asInstanceOf[jLong].longValue() val endTime = info.get("endTime").asInstanceOf[jLong].longValue() val threads = info.get("GcThreadCount").asInstanceOf[jInt].longValue() - reporter.reportGc(GcEventData("", reportNs, startTime, endTime, name, action, cause, threads)) + val gcEvent = GcEventData("", reportNs, startTime, endTime, duration, name, action, cause, threads) + synchronized { gcEvents += gcEvent } + reporter.reportGc(gcEvent) } } - override def afterPhase(phase: Phase, snapBefore: ProfileSnap): Unit = { + override def afterPhase(event: TracedEventId, phase: Phase, snapBefore: ProfileSnap): Unit = { assert(mainThread eq Thread.currentThread()) - val initialSnap = snapThread(0) + val initialSnap = RealProfiler.snapThread(0) if (ctx.settings.YprofileExternalTool.value.contains(phase.toString)) { println("Profile hook stop") ExternalToolHook.after() } val finalSnap = if 
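// Illustrative sketch of how the trace is enabled from the command line, assuming the setting
// above surfaces under the usual -Y spelling:
//   scalac -Yprofile-trace:compiler.trace Foo.scala
// The main run writes `compiler.trace`; nested runs triggered by macro evaluation get the
// runId-based suffix computed above (e.g. `compiler.trace.2`).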
(ctx.settings.YprofileRunGcBetweenPhases.value.contains(phase.toString)) { - doGC - initialSnap.updateHeap(readHeapUsage()) + doGC() + initialSnap.updateHeap(RealProfiler.readHeapUsage()) } else initialSnap - + traceDurationEnd(Category.Phase, event) + traceThreadSnapshotCounters() reporter.reportForeground(this, ProfileRange(snapBefore, finalSnap, phase, "", 0, Thread.currentThread)) } - override def beforePhase(phase: Phase): ProfileSnap = { + override def beforePhase(phase: Phase): (TracedEventId, ProfileSnap) = { assert(mainThread eq Thread.currentThread()) + traceThreadSnapshotCounters() + val eventId = traceDurationStart(Category.Phase, escapeSpecialChars(phase.phaseName)) if (ctx.settings.YprofileRunGcBetweenPhases.value.contains(phase.toString)) - doGC + doGC() if (ctx.settings.YprofileExternalTool.value.contains(phase.toString)) { println("Profile hook start") ExternalToolHook.before() } - snapThread(0) + (eventId, RealProfiler.snapThread(0)) + } + + override def beforeUnit(phase: Phase, unit: CompilationUnit): TracedEventId = { + assert(mainThread eq Thread.currentThread()) + if chromeTrace != null then + traceThreadSnapshotCounters() + traceDurationStart(Category.File, escapeSpecialChars(unit.source.name)) + else TracedEventId.Empty + } + + override def afterUnit(event: TracedEventId): Unit = { + assert(mainThread eq Thread.currentThread()) + if chromeTrace != null then + traceDurationEnd(Category.File, event) + traceThreadSnapshotCounters() } -} + private def traceThreadSnapshotCounters(initialSnap: => ProfileSnap = RealProfiler.snapThread(0)) = + if chromeTrace != null && System.nanoTime() > nextAfterUnitSnap then { + val snap = initialSnap + chromeTrace.traceCounterEvent("allocBytes", "allocBytes", snap.allocatedBytes, processWide = false) + chromeTrace.traceCounterEvent("heapBytes", "heapBytes", snap.heapBytes, processWide = true) + chromeTrace.traceCounterEvent("classesLoaded", "classesLoaded", snap.totalClassesLoaded, processWide = true) + chromeTrace.traceCounterEvent("jitCompilationTime", "jitCompilationTime", snap.totalJITCompilationTime, processWide = true) + chromeTrace.traceCounterEvent("userTime", "userTime", snap.userTimeNanos, processWide = false) + chromeTrace.traceCounterEvent("cpuTime", "cpuTime", snap.cpuTimeNanos, processWide = false) + chromeTrace.traceCounterEvent("idleTime", "idleTime", snap.idleTimeNanos, processWide = false) + nextAfterUnitSnap = System.nanoTime() + 10 * 1000 * 1000 + } -case class EventType(name: String) -object EventType { - //main thread with other tasks - val MAIN: EventType = EventType("main") - //other task ( background thread) - val BACKGROUND: EventType = EventType("background") - //total for compile - val GC: EventType = EventType("GC") + override def beforeTypedDef(sym: Symbol): TracedEventId = traceDurationStart(Category.TypeCheck, symbolName(sym)) + override def afterTypedDef(event: TracedEventId): Unit = traceDurationEnd(Category.TypeCheck, event) + + override def beforeImplicitSearch(pt: Type): TracedEventId = traceDurationStart(Category.Implicit, s"?[${symbolName(pt.typeSymbol)}]", colour = "yellow") + override def afterImplicitSearch(event: TracedEventId): Unit = traceDurationEnd(Category.Implicit, event, colour = "yellow") + + override def beforeMacroSplice(macroSym: Symbol): TracedEventId = traceDurationStart(Category.Macro, s"«${symbolName(macroSym)}»", colour = "olive") + override def afterMacroSplice(event: TracedEventId): Unit = traceDurationEnd(Category.Macro, event, colour = "olive") + + override def 
beforeCompletion(root: Symbol, associatedFile: => AbstractFile): (TracedEventId, String) = + if chromeTrace == null + then EmptyCompletionEvent + else + val completionName = this.completionName(root, associatedFile) + val event = TracedEventId(escapeSpecialChars(associatedFile.name)) + chromeTrace.traceDurationEventStart(Category.Completion.name, "↯", colour = "thread_state_sleeping") + chromeTrace.traceDurationEventStart(Category.File.name, event) + chromeTrace.traceDurationEventStart(Category.Completion.name, completionName) + (event, completionName) + + override def afterCompletion(event: TracedEventId, completionName: String): Unit = + if chromeTrace != null + then + chromeTrace.traceDurationEventEnd(Category.Completion.name, completionName) + chromeTrace.traceDurationEventEnd(Category.File.name, event) + chromeTrace.traceDurationEventEnd(Category.Completion.name, "↯", colour = "thread_state_sleeping") + + private inline def traceDurationStart(category: Category, inline eventName: String, colour: String = ""): TracedEventId = + if chromeTrace == null + then TracedEventId.Empty + else + val event = TracedEventId(eventName) + chromeTrace.traceDurationEventStart(category.name, event, colour) + event + + private inline def traceDurationEnd(category: Category, event: TracedEventId, colour: String = ""): Unit = + if chromeTrace != null then + chromeTrace.traceDurationEventEnd(category.name, event, colour) + + private inline def escapeSpecialChars(value: String): String = + JsonNameTransformer.encode(value) + + private def symbolName(sym: Symbol): String = escapeSpecialChars: + s"${sym.showKind} ${sym.showName}" + + private def completionName(root: Symbol, associatedFile: AbstractFile): String = escapeSpecialChars: + def isTopLevel = root.owner != NoSymbol && root.owner.is(Flags.Package) + if root.is(Flags.Package) || isTopLevel + then root.javaBinaryName + else + val enclosing = root.enclosingClass + s"${enclosing.javaBinaryName}::${root.name}" } +enum EventType(name: String): + // main thread with other tasks + case MAIN extends EventType("main") + // other task ( background thread) + case BACKGROUND extends EventType("background") + // total for compile + case GC extends EventType("GC") + sealed trait ProfileReporter { def reportBackground(profiler: RealProfiler, threadRange: ProfileRange): Unit def reportForeground(profiler: RealProfiler, threadRange: ProfileRange): Unit @@ -259,9 +425,8 @@ class StreamProfileReporter(out:PrintWriter) extends ProfileReporter { out.println(s"${EventType.GC},$start,${data.reportTimeNs},${data.gcStartMillis}, ${data.gcEndMillis},${data.name},${data.action},${data.cause},${data.threads}") } - override def close(profiler: RealProfiler): Unit = { - out.flush - out.close + out.flush() + out.close() } } diff --git a/compiler/src/dotty/tools/dotc/profile/ThreadPoolFactory.scala b/compiler/src/dotty/tools/dotc/profile/ThreadPoolFactory.scala index e3ea69d9be06..1a81153b9b08 100644 --- a/compiler/src/dotty/tools/dotc/profile/ThreadPoolFactory.scala +++ b/compiler/src/dotty/tools/dotc/profile/ThreadPoolFactory.scala @@ -94,9 +94,9 @@ object ThreadPoolFactory { val data = new ThreadProfileData localData.set(data) - val profileStart = profiler.snapThread(0) + val profileStart = RealProfiler.snapThread(0) try worker.run finally { - val snap = profiler.snapThread(data.idleNs) + val snap = RealProfiler.snapThread(data.idleNs) val threadRange = ProfileRange(profileStart, snap, phase, shortId, data.taskCount, Thread.currentThread()) profiler.completeBackground(threadRange) 
} diff --git a/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala b/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala index 6d6e2ff01ad4..3ee52624710e 100644 --- a/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala +++ b/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala @@ -224,7 +224,7 @@ object PickledQuotes { if tree.span.exists then val positionWarnings = new mutable.ListBuffer[Message]() val reference = ctx.settings.sourceroot.value - PositionPickler.picklePositions(pickler, treePkl.buf.addrOfTree, treePkl.treeAnnots, reference, + PositionPickler.picklePositions(pickler, treePkl.buf.addrOfTree, treePkl.treeAnnots, treePkl.typeAnnots, reference, ctx.compilationUnit.source, tree :: Nil, positionWarnings) positionWarnings.foreach(report.warning(_)) @@ -241,7 +241,9 @@ object PickledQuotes { treeOwner(tree) match case Some(owner) => // Copy the cached tree to make sure the all definitions are unique. - TreeTypeMap(oldOwners = List(owner), newOwners = List(owner)).apply(tree) + val treeCpy = TreeTypeMap(oldOwners = List(owner), newOwners = List(owner)).apply(tree) + // Then replace the symbol owner with the one pointed by the quote context. + treeCpy.changeNonLocalOwners(ctx.owner) case _ => tree diff --git a/compiler/src/dotty/tools/dotc/quoted/QuotePatterns.scala b/compiler/src/dotty/tools/dotc/quoted/QuotePatterns.scala index 1ebf2ae5714b..82701dafd2c9 100644 --- a/compiler/src/dotty/tools/dotc/quoted/QuotePatterns.scala +++ b/compiler/src/dotty/tools/dotc/quoted/QuotePatterns.scala @@ -26,33 +26,93 @@ object QuotePatterns: import tpd.* /** Check for restricted patterns */ - def checkPattern(quotePattern: QuotePattern)(using Context): Unit = new tpd.TreeTraverser { - def traverse(tree: Tree)(using Context): Unit = tree match { - case tree: SplicePattern => - if !tree.body.typeOpt.derivesFrom(defn.QuotedExprClass) then - report.error(i"Splice pattern must match an Expr[...]", tree.body.srcPos) - case tdef: TypeDef if tdef.symbol.isClass => - val kind = if tdef.symbol.is(Module) then "objects" else "classes" - report.error(em"Implementation restriction: cannot match $kind", tree.srcPos) - case tree: NamedDefTree => - if tree.name.is(NameKinds.WildcardParamName) then - report.warning( - "Use of `_` for lambda in quoted pattern. 
Use explicit lambda instead or use `$_` to match any term.", - tree.srcPos) - if tree.name.isTermName && !tree.nameSpan.isSynthetic && tree.name != nme.ANON_FUN && tree.name.startsWith("$") then - report.error("Names cannot start with $ quote pattern", tree.namePos) - traverseChildren(tree) - case _: Match => - report.error("Implementation restriction: cannot match `match` expressions", tree.srcPos) - case _: Try => - report.error("Implementation restriction: cannot match `try` expressions", tree.srcPos) - case _: Return => - report.error("Implementation restriction: cannot match `return` statements", tree.srcPos) - case _ => - traverseChildren(tree) - } + def checkPattern(quotePattern: QuotePattern)(using Context): Unit = + def validatePatternAndCollectTypeVars(): Set[Symbol] = new tpd.TreeAccumulator[Set[Symbol]] { + override def apply(typevars: Set[Symbol], tree: tpd.Tree)(using Context): Set[Symbol] = + // Collect type variables + val typevars1 = tree match + case tree @ DefDef(_, paramss, _, _) => + typevars union paramss.flatMap{ params => params match + case TypeDefs(tdefs) => tdefs.map(_.symbol) + case _ => List.empty + }.toSet union typevars + case _ => typevars + + // Validate pattern + tree match + case tree: SplicePattern => + if !tree.body.typeOpt.derivesFrom(defn.QuotedExprClass) then + report.error(i"Splice pattern must match an Expr[...]", tree.body.srcPos) + typevars1 + case tdef: TypeDef if tdef.symbol.isClass => + val kind = if tdef.symbol.is(Module) then "objects" else "classes" + report.error(em"Implementation restriction: cannot match $kind", tree.srcPos) + typevars1 + case tree: NamedDefTree => + if tree.name.is(NameKinds.WildcardParamName) then + report.warning( + "Use of `_` for lambda in quoted pattern. Use explicit lambda instead or use `$_` to match any term.", + tree.srcPos) + if tree.name.isTermName && !tree.nameSpan.isSynthetic && tree.name != nme.ANON_FUN && tree.name.startsWith("$") then + report.error("Names cannot start with $ quote pattern", tree.namePos) + foldOver(typevars1, tree) + case _: Match => + report.error("Implementation restriction: cannot match `match` expressions", tree.srcPos) + typevars1 + case _: Try => + report.error("Implementation restriction: cannot match `try` expressions", tree.srcPos) + typevars1 + case _: Return => + report.error("Implementation restriction: cannot match `return` statements", tree.srcPos) + typevars1 + case _ => + foldOver(typevars1, tree) + }.apply(Set.empty, quotePattern.body) + + val boundTypeVars = validatePatternAndCollectTypeVars() - }.traverse(quotePattern.body) + /* + * This part checks well-formedness of arguments to hoas patterns. 
+ * (1) Type arguments of a hoas patterns must be introduced in the quote pattern.ctxShow + * Examples + * well-formed: '{ [A] => (x : A) => $a[A](x) } // A is introduced in the quote pattern + * ill-formed: '{ (x : Int) => $a[Int](x) } // Int is defined outside of the quote pattern + * (2) If value arguments of a hoas pattern has a type with type variables that are introduced in + * the quote pattern, those type variables should be in type arguments to the hoas patternHole + * Examples + * well-formed: '{ [A] => (x : A) => $a[A](x) } // a : [A] => (x:A) => A + * ill-formed: '{ [A] => (x : A) => $a(x) } // a : (x:A) => A ...but A is undefined; hence ill-formed + */ + new tpd.TreeTraverser { + override def traverse(tree: tpd.Tree)(using Context): Unit = tree match { + case tree: SplicePattern => + def uncapturedTypeVars(arg: tpd.Tree, capturedTypeVars: List[tpd.Tree]): Set[Type] = + /* Sometimes arg is untyped when a splice pattern is ill-formed. + * Return early in such case. + * Refer to QuoteAndSplices::typedSplicePattern + */ + if !arg.hasType then return Set.empty + + val capturedTypeVarsSet = capturedTypeVars.map(_.symbol).toSet + new TypeAccumulator[Set[Type]] { + def apply(x: Set[Type], tp: Type): Set[Type] = + if boundTypeVars.contains(tp.typeSymbol) && !capturedTypeVarsSet.contains(tp.typeSymbol) then + foldOver(x + tp, tp) + else + foldOver(x, tp) + }.apply(Set.empty, arg.tpe) + + for (typearg <- tree.typeargs) // case (1) + do + if !boundTypeVars.contains(typearg.symbol) then + report.error("Type arguments of a hoas pattern needs to be defined inside the quoted pattern", typearg.srcPos) + for (arg <- tree.args) // case (2) + do + if !uncapturedTypeVars(arg, tree.typeargs).isEmpty then + report.error("Type variables that this argument depends on are not captured in this hoas pattern", arg.srcPos) + case _ => traverseChildren(tree) + } + }.traverse(quotePattern.body) /** Encode the quote pattern into an `unapply` that the pattern matcher can handle. * @@ -76,7 +136,7 @@ object QuotePatterns: * .ExprMatch // or TypeMatch * .unapply[ * KCons[t1 >: l1 <: b1, ...KCons[tn >: ln <: bn, KNil]...], // scala.quoted.runtime.{KCons, KNil} - * (T1, T2, (A1, ..., An) => T3, ...) + * (Expr[T1], Expr[T2], Expr[(A1, ..., An) => T3], ...) 
* ]( * '{ * type t1' >: l1' <: b1' @@ -199,16 +259,24 @@ object QuotePatterns: val patBuf = new mutable.ListBuffer[Tree] val shape = new tpd.TreeMap { override def transform(tree: Tree)(using Context) = tree match { - case Typed(splice @ SplicePattern(pat, Nil), tpt) if !tpt.tpe.derivesFrom(defn.RepeatedParamClass) => + case Typed(splice @ SplicePattern(pat, Nil, Nil), tpt) if !tpt.tpe.derivesFrom(defn.RepeatedParamClass) => transform(tpt) // Collect type bindings transform(splice) - case SplicePattern(pat, args) => + case SplicePattern(pat, typeargs, args) => val patType = pat.tpe.widen val patType1 = patType.translateFromRepeated(toArray = false) val pat1 = if (patType eq patType1) pat else pat.withType(patType1) patBuf += pat1 - if args.isEmpty then ref(defn.QuotedRuntimePatterns_patternHole.termRef).appliedToType(tree.tpe).withSpan(tree.span) - else ref(defn.QuotedRuntimePatterns_higherOrderHole.termRef).appliedToType(tree.tpe).appliedTo(SeqLiteral(args, TypeTree(defn.AnyType))).withSpan(tree.span) + if typeargs.isEmpty && args.isEmpty then ref(defn.QuotedRuntimePatterns_patternHole.termRef).appliedToType(tree.tpe).withSpan(tree.span) + else if typeargs.isEmpty then + ref(defn.QuotedRuntimePatterns_higherOrderHole.termRef) + .appliedToType(tree.tpe) + .appliedTo(SeqLiteral(args, TypeTree(defn.AnyType))) + .withSpan(tree.span) + else ref(defn.QuotedRuntimePatterns_higherOrderHoleWithTypes.termRef) + .appliedToTypeTrees(List(TypeTree(tree.tpe), tpd.hkNestedPairsTypeTree(typeargs))) + .appliedTo(SeqLiteral(args, TypeTree(defn.AnyType))) + .withSpan(tree.span) case _ => super.transform(tree) } @@ -234,7 +302,7 @@ object QuotePatterns: fun match // .asInstanceOf[QuoteMatching].{ExprMatch,TypeMatch}.unapply[, ] case TypeApply(Select(Select(TypeApply(Select(quotes, _), _), _), _), typeBindings :: resTypes :: Nil) => - val bindings = unrollBindings(typeBindings) + val bindings = unrollHkNestedPairsTypeTree(typeBindings) val addPattenSplice = new TreeMap { private val patternIterator = patterns.iterator.filter { case pat: Bind => !pat.symbol.name.is(PatMatGivenVarName) @@ -242,9 +310,11 @@ object QuotePatterns: } override def transform(tree: tpd.Tree)(using Context): tpd.Tree = tree match case TypeApply(patternHole, _) if patternHole.symbol == defn.QuotedRuntimePatterns_patternHole => - cpy.SplicePattern(tree)(patternIterator.next(), Nil) + cpy.SplicePattern(tree)(patternIterator.next(), Nil, Nil) case Apply(patternHole, SeqLiteral(args, _) :: Nil) if patternHole.symbol == defn.QuotedRuntimePatterns_higherOrderHole => - cpy.SplicePattern(tree)(patternIterator.next(), args) + cpy.SplicePattern(tree)(patternIterator.next(), Nil, args) + case Apply(TypeApply(patternHole, List(_, targsTpe)), SeqLiteral(args, _) :: Nil) if patternHole.symbol == defn.QuotedRuntimePatterns_higherOrderHoleWithTypes => + cpy.SplicePattern(tree)(patternIterator.next(), unrollHkNestedPairsTypeTree(targsTpe), args) case _ => super.transform(tree) } val body = addPattenSplice.transform(shape) match @@ -262,7 +332,7 @@ object QuotePatterns: case body => body cpy.QuotePattern(tree)(bindings, body, quotes) - private def unrollBindings(tree: Tree)(using Context): List[Tree] = tree match + private def unrollHkNestedPairsTypeTree(tree: Tree)(using Context): List[Tree] = tree match case AppliedTypeTree(tupleN, bindings) if defn.isTupleClass(tupleN.symbol) => bindings // TupleN, 1 <= N <= 22 - case AppliedTypeTree(_, head :: tail :: Nil) => head :: unrollBindings(tail) // KCons or *: + case AppliedTypeTree(_, head :: tail :: Nil) 
=> head :: unrollHkNestedPairsTypeTree(tail) // KCons or *: case _ => Nil // KNil or EmptyTuple diff --git a/compiler/src/dotty/tools/dotc/report.scala b/compiler/src/dotty/tools/dotc/report.scala index 1d8ca5f208fa..2ccf918e12fa 100644 --- a/compiler/src/dotty/tools/dotc/report.scala +++ b/compiler/src/dotty/tools/dotc/report.scala @@ -23,8 +23,8 @@ object report: private def issueWarning(warning: Warning)(using Context): Unit = ctx.reporter.report(warning) - def deprecationWarning(msg: Message, pos: SrcPos)(using Context): Unit = - issueWarning(new DeprecationWarning(msg, pos.sourcePos)) + def deprecationWarning(msg: Message, pos: SrcPos, origin: String = "")(using Context): Unit = + issueWarning(new DeprecationWarning(msg, pos.sourcePos, origin)) def migrationWarning(msg: Message, pos: SrcPos)(using Context): Unit = issueWarning(new MigrationWarning(msg, pos.sourcePos)) @@ -99,7 +99,7 @@ object report: def errorOrMigrationWarning(msg: Message, pos: SrcPos, migrationVersion: MigrationVersion)(using Context): Unit = if sourceVersion.isAtLeast(migrationVersion.errorFrom) then - if !sourceVersion.isMigrating then error(msg, pos) + if sourceVersion != migrationVersion.errorFrom.prevMigrating then error(msg, pos) else if ctx.settings.rewrite.value.isEmpty then migrationWarning(msg, pos) else if sourceVersion.isAtLeast(migrationVersion.warnFrom) then warning(msg, pos) @@ -165,13 +165,23 @@ object report: "compiler version" -> dotty.tools.dotc.config.Properties.versionString, "settings" -> settings.map(showSetting).mkString(" "), )) + val fileAReportMsg = + if ctx.phase.isInstanceOf[plugins.PluginPhase] + then + s"""| An unhandled exception was thrown in the compiler plugin named "${ctx.phase.megaPhase}". + | Please report the issue to the plugin's maintainers. + | For non-enriched exceptions, compile with -Xno-enrich-error-messages. + |""".stripMargin + else + s"""| An unhandled exception was thrown in the compiler. + | Please file a crash report here: + | https://github.com/scala/scala3/issues/new/choose + | For non-enriched exceptions, compile with -Xno-enrich-error-messages. + |""".stripMargin s""" | $errorMessage | - | An unhandled exception was thrown in the compiler. - | Please file a crash report here: - | https://github.com/scala/scala3/issues/new/choose - | For non-enriched exceptions, compile with -Xno-enrich-error-messages. 
+ |$fileAReportMsg | |$info1 |""".stripMargin diff --git a/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala b/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala index 7a8edb233aee..6a2d88f4e82f 100644 --- a/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala +++ b/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala @@ -75,7 +75,8 @@ object Diagnostic: class DeprecationWarning( msg: Message, - pos: SourcePosition + pos: SourcePosition, + val origin: String ) extends ConditionalWarning(msg, pos) { def enablingOption(using Context): Setting[Boolean] = ctx.settings.deprecation } diff --git a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala index 04380a7b8e4a..d3467fe70c52 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala @@ -211,6 +211,14 @@ enum ErrorMessageID(val isActive: Boolean = true) extends java.lang.Enum[ErrorMe case ConstructorProxyNotValueID // errorNumber: 195 case ContextBoundCompanionNotValueID // errorNumber: 196 case InlinedAnonClassWarningID // errorNumber: 197 + case UnusedSymbolID // errorNumber: 198 + case TailrecNestedCallID //errorNumber: 199 + case FinalLocalDefID // errorNumber: 200 + case NonNamedArgumentInJavaAnnotationID // errorNumber: 201 + case QuotedTypeMissingID // errorNumber: 202 + case DeprecatedAssignmentSyntaxID // errorNumber: 203 + case DeprecatedInfixNamedArgumentSyntaxID // errorNumber: 204 + case GivenSearchPriorityID // errorNumber: 205 def errorNumber = ordinal - 1 diff --git a/compiler/src/dotty/tools/dotc/reporting/MessageKind.scala b/compiler/src/dotty/tools/dotc/reporting/MessageKind.scala index f039ed900a76..bb02a08d2e46 100644 --- a/compiler/src/dotty/tools/dotc/reporting/MessageKind.scala +++ b/compiler/src/dotty/tools/dotc/reporting/MessageKind.scala @@ -21,6 +21,8 @@ enum MessageKind: case MatchCaseUnreachable case Compatibility case PotentialIssue + case UnusedSymbol + case Staging /** Human readable message that will end up being shown to the user. 
* NOTE: This is only used in the situation where you have multiple words @@ -37,5 +39,7 @@ enum MessageKind: case PatternMatchExhaustivity => "Pattern Match Exhaustivity" case MatchCaseUnreachable => "Match case Unreachable" case PotentialIssue => "Potential Issue" + case UnusedSymbol => "Unused Symbol" + case Staging => "Staging Issue" case kind => kind.toString end MessageKind diff --git a/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala b/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala index 6881235e3dc1..7db5112b6674 100644 --- a/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala +++ b/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala @@ -15,7 +15,7 @@ import util.{ SourcePosition, NoSourcePosition } import util.Chars.{ LF, CR, FF, SU } import scala.annotation.switch -import scala.collection.mutable +import scala.collection.mutable.StringBuilder trait MessageRendering { import Highlight.* @@ -209,22 +209,27 @@ trait MessageRendering { sb.toString } - private def appendFilterHelp(dia: Diagnostic, sb: mutable.StringBuilder): Unit = - import dia.* + private def appendFilterHelp(dia: Diagnostic, sb: StringBuilder): Unit = + import dia.msg val hasId = msg.errorId.errorNumber >= 0 - val category = dia match { - case _: UncheckedWarning => "unchecked" - case _: DeprecationWarning => "deprecation" - case _: FeatureWarning => "feature" - case _ => "" - } - if (hasId || category.nonEmpty) - sb.append(EOL).append("Matching filters for @nowarn or -Wconf:") - if (hasId) - sb.append(EOL).append(" - id=E").append(msg.errorId.errorNumber) - sb.append(EOL).append(" - name=").append(msg.errorId.productPrefix.stripSuffix("ID")) - if (category.nonEmpty) - sb.append(EOL).append(" - cat=").append(category) + val (category, origin) = dia match + case _: UncheckedWarning => ("unchecked", "") + case w: DeprecationWarning => ("deprecation", w.origin) + case _: FeatureWarning => ("feature", "") + case _ => ("", "") + var entitled = false + def addHelp(what: String)(value: String): Unit = + if !entitled then + sb.append(EOL).append("Matching filters for @nowarn or -Wconf:") + entitled = true + sb.append(EOL).append(" - ").append(what).append(value) + if hasId then + addHelp("id=E")(msg.errorId.errorNumber.toString) + addHelp("name=")(msg.errorId.productPrefix.stripSuffix("ID")) + if category.nonEmpty then + addHelp("cat=")(category) + if origin.nonEmpty then + addHelp("origin=")(origin) /** The whole message rendered from `msg` */ def messageAndPos(dia: Diagnostic)(using Context): String = { @@ -236,7 +241,7 @@ trait MessageRendering { else 0 given Level = Level(level) given Offset = Offset(maxLineNumber.toString.length + 2) - val sb = mutable.StringBuilder() + val sb = StringBuilder() val posString = posStr(pos, msg, diagnosticLevel(dia)) if (posString.nonEmpty) sb.append(posString).append(EOL) if (pos.exists) { diff --git a/compiler/src/dotty/tools/dotc/reporting/WConf.scala b/compiler/src/dotty/tools/dotc/reporting/WConf.scala index 54a6fc14e054..1896e5269d6c 100644 --- a/compiler/src/dotty/tools/dotc/reporting/WConf.scala +++ b/compiler/src/dotty/tools/dotc/reporting/WConf.scala @@ -19,23 +19,27 @@ enum MessageFilter: case Deprecated => message.isInstanceOf[Diagnostic.DeprecationWarning] case Feature => message.isInstanceOf[Diagnostic.FeatureWarning] case Unchecked => message.isInstanceOf[Diagnostic.UncheckedWarning] + case MessageID(errorId) => message.msg.errorId == errorId case MessagePattern(pattern) => val noHighlight = 
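// Illustrative sketch of the new `origin` filter in use, assuming the origin is the
// fully-qualified name of the deprecated symbol (the pattern below is an arbitrary example):
//   scalac "-Wconf:cat=deprecation&origin=java\.lang\.Thread\.getId:s" ...
// or at the use site:
//   @annotation.nowarn("cat=deprecation&origin=java\.lang\.Thread\.getId")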
message.msg.message.replaceAll("\\e\\[[\\d;]*[^\\d;]","") pattern.findFirstIn(noHighlight).nonEmpty - case MessageID(errorId) => message.msg.errorId == errorId case SourcePattern(pattern) => val source = message.position.orElse(NoSourcePosition).source() val path = source.jfile() .map(_.toPath.toAbsolutePath.toUri.normalize().getRawPath) .orElse(source.path()) pattern.findFirstIn(path).nonEmpty - + case Origin(pattern) => + message match + case message: Diagnostic.DeprecationWarning => pattern.findFirstIn(message.origin).nonEmpty + case _ => false case None => false case Any, Deprecated, Feature, Unchecked, None case MessagePattern(pattern: Regex) case MessageID(errorId: ErrorMessageID) case SourcePattern(pattern: Regex) + case Origin(pattern: Regex) enum Action: case Error, Warning, Verbose, Info, Silent @@ -96,6 +100,7 @@ object WConf: case _ => Left(s"unknown category: $conf") case "src" => regex(conf).map(SourcePattern.apply) + case "origin" => regex(conf).map(Origin.apply) case _ => Left(s"unknown filter: $filter") case _ => Left(s"unknown filter: $s") diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index 9a20f149a6d1..bb3194558cae 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -15,6 +15,8 @@ import printing.Formatting import ErrorMessageID.* import ast.Trees import config.{Feature, ScalaVersion} +import transform.patmat.Space +import transform.patmat.SpaceEngine import typer.ErrorReporting.{err, matchReductionAddendum, substitutableTypeSymbolsInScope} import typer.ProtoTypes.{ViewProto, SelectionProto, FunProto} import typer.Implicits.* @@ -33,6 +35,7 @@ import dotty.tools.dotc.util.Spans.Span import dotty.tools.dotc.util.SourcePosition import scala.jdk.CollectionConverters.* import dotty.tools.dotc.util.SourceFile +import dotty.tools.dotc.config.SourceVersion import DidYouMean.* /** Messages @@ -105,6 +108,9 @@ end CyclicMsg abstract class ReferenceMsg(errorId: ErrorMessageID)(using Context) extends Message(errorId): def kind = MessageKind.Reference +abstract class StagingMessage(errorId: ErrorMessageID)(using Context) extends Message(errorId): + override final def kind = MessageKind.Staging + abstract class EmptyCatchOrFinallyBlock(tryBody: untpd.Tree, errNo: ErrorMessageID)(using Context) extends SyntaxMsg(errNo) { def explain(using Context) = { @@ -301,6 +307,7 @@ class TypeMismatch(val found: Type, expected: Type, val inTree: Option[untpd.Tre // these are usually easier to analyze. We exclude F-bounds since these would // lead to a recursive infinite expansion. object reported extends TypeMap, IdentityCaptRefMap: + var notes: String = "" def setVariance(v: Int) = variance = v val constraint = mapCtx.typerState.constraint var fbounded = false @@ -318,6 +325,15 @@ class TypeMismatch(val found: Type, expected: Type, val inTree: Option[untpd.Tre case tp: LazyRef => fbounded = true tp + case tp @ TypeRef(pre, _) => + if pre != NoPrefix && !pre.member(tp.name).exists then + notes ++= + i""" + | + |Note that I could not resolve reference $tp. 
+ |${MissingType(pre, tp.name).reason} + """ + mapOver(tp) case _ => mapOver(tp) @@ -329,7 +345,7 @@ class TypeMismatch(val found: Type, expected: Type, val inTree: Option[untpd.Tre else (found1, expected1) val (foundStr, expectedStr) = Formatting.typeDiff(found2, expected2) i"""|Found: $foundStr - |Required: $expectedStr""" + |Required: $expectedStr${reported.notes}""" end msg override def msgPostscript(using Context) = @@ -846,12 +862,13 @@ extends Message(LossyWideningConstantConversionID): |Write `.to$targetType` instead.""" def explain(using Context) = "" -class PatternMatchExhaustivity(uncoveredCases: Seq[String], tree: untpd.Match)(using Context) +class PatternMatchExhaustivity(uncoveredCases: Seq[Space], tree: untpd.Match)(using Context) extends Message(PatternMatchExhaustivityID) { def kind = MessageKind.PatternMatchExhaustivity private val hasMore = uncoveredCases.lengthCompare(6) > 0 - val uncovered = uncoveredCases.take(6).mkString(", ") + val uncovered = uncoveredCases.take(6).map(SpaceEngine.display).mkString(", ") + private val casesWithoutColor = inContext(ctx.withoutColors)(uncoveredCases.map(SpaceEngine.display)) def msg(using Context) = val addendum = if hasMore then "(More unmatched cases are elided)" else "" @@ -879,12 +896,12 @@ extends Message(PatternMatchExhaustivityID) { val pathes = List( ActionPatch( srcPos = endPos, - replacement = uncoveredCases.map(c => indent(s"case $c => ???", startColumn)) + replacement = casesWithoutColor.map(c => indent(s"case $c => ???", startColumn)) .mkString("\n", "\n", "") ), ) List( - CodeAction(title = s"Insert missing cases (${uncoveredCases.size})", + CodeAction(title = s"Insert missing cases (${casesWithoutColor.size})", description = None, patches = pathes ) @@ -1803,13 +1820,24 @@ class SuperCallsNotAllowedInlineable(symbol: Symbol)(using Context) } class NotAPath(tp: Type, usage: String)(using Context) extends TypeMsg(NotAPathID): - def msg(using Context) = i"$tp is not a valid $usage, since it is not an immutable path" + def msg(using Context) = i"$tp is not a valid $usage, since it is not an immutable path" + inlineParamAddendum def explain(using Context) = i"""An immutable path is | - a reference to an immutable value, or | - a reference to `this`, or | - a selection of an immutable path with an immutable value.""" + def inlineParamAddendum(using Context) = + val sym = tp.termSymbol + if sym.isAllOf(Flags.InlineParam) then + i""" + |Inline parameters are not considered immutable paths and cannot be used as + |singleton types. 
+ | + |Hint: Removing the `inline` qualifier from the `${sym.name}` parameter + |may help resolve this issue.""" + else "" + class WrongNumberOfParameters(tree: untpd.Tree, foundCount: Int, pt: Type, expectedCount: Int)(using Context) extends SyntaxMsg(WrongNumberOfParametersID) { def msg(using Context) = s"Wrong number of parameters, expected: $expectedCount" @@ -1828,7 +1856,7 @@ class WrongNumberOfParameters(tree: untpd.Tree, foundCount: Int, pt: Type, expec class DuplicatePrivateProtectedQualifier()(using Context) extends SyntaxMsg(DuplicatePrivateProtectedQualifierID) { - def msg(using Context) = "Duplicate private/protected qualifier" + def msg(using Context) = "Duplicate private/protected modifier" def explain(using Context) = i"It is not allowed to combine `private` and `protected` modifiers even if they are qualified to different scopes" } @@ -1837,7 +1865,13 @@ class ExpectedStartOfTopLevelDefinition()(using Context) extends SyntaxMsg(ExpectedStartOfTopLevelDefinitionID) { def msg(using Context) = "Expected start of definition" def explain(using Context) = - i"You have to provide either ${hl("class")}, ${hl("trait")}, ${hl("object")}, or ${hl("enum")} definitions after qualifiers" + i"You have to provide either ${hl("class")}, ${hl("trait")}, ${hl("object")}, or ${hl("enum")} definitions after modifiers" +} + +class FinalLocalDef()(using Context) + extends SyntaxMsg(FinalLocalDefID) { + def msg(using Context) = i"The ${hl("final")} modifier is not allowed on local definitions" + def explain(using Context) = "" } class NoReturnFromInlineable(owner: Symbol)(using Context) @@ -1908,6 +1942,20 @@ class TailrecNotApplicable(symbol: Symbol)(using Context) def explain(using Context) = "" } +class TailrecNestedCall(definition: Symbol, innerDef: Symbol)(using Context) + extends SyntaxMsg(TailrecNestedCallID) { + def msg(using Context) = { + s"The tail recursive def ${definition.name} contains a recursive call inside the non-inlined inner def ${innerDef.name}" + } + + def explain(using Context) = + """Tail recursion is only validated and optimised directly in the definition. + |Any calls to the recursive method via an inner def cannot be validated as + |tail recursive, nor optimised if they are. To enable tail recursion from + |inner calls, mark the inner def as inline. + |""".stripMargin +} + class FailureToEliminateExistential(tp: Type, tp1: Type, tp2: Type, boundSyms: List[Symbol], classRoot: Symbol)(using Context) extends Message(FailureToEliminateExistentialID) { def kind = MessageKind.Compatibility @@ -3239,3 +3287,115 @@ extends TypeMsg(ConstructorProxyNotValueID): |companion value with the (term-)name `A`. 
However, these context bound companions |are not values themselves, they can only be referred to in selections.""" +class UnusedSymbol(errorText: String)(using Context) +extends Message(UnusedSymbolID) { + def kind = MessageKind.UnusedSymbol + + override def msg(using Context) = errorText + override def explain(using Context) = "" +} + +object UnusedSymbol { + def imports(using Context): UnusedSymbol = new UnusedSymbol(i"unused import") + def localDefs(using Context): UnusedSymbol = new UnusedSymbol(i"unused local definition") + def explicitParams(using Context): UnusedSymbol = new UnusedSymbol(i"unused explicit parameter") + def implicitParams(using Context): UnusedSymbol = new UnusedSymbol(i"unused implicit parameter") + def privateMembers(using Context): UnusedSymbol = new UnusedSymbol(i"unused private member") + def patVars(using Context): UnusedSymbol = new UnusedSymbol(i"unused pattern variable") +} + +class NonNamedArgumentInJavaAnnotation(using Context) extends SyntaxMsg(NonNamedArgumentInJavaAnnotationID): + + override protected def msg(using Context): String = + "Named arguments are required for Java defined annotations" + + Message.rewriteNotice("This", version = SourceVersion.`3.6-migration`) + + override protected def explain(using Context): String = + i"""Starting from Scala 3.6.0, named arguments are required for Java defined annotations. + |Java defined annotations don't have an exact constructor representation + |and we previously relied on the order of the fields to create one. + |One possible issue with this representation is the reordering of the fields. + |Lets take the following example: + | + | public @interface Annotation { + | int a() default 41; + | int b() default 42; + | } + | + |Reordering the fields is binary-compatible but it might affect the meaning of @Annotation(1) + """ + +end NonNamedArgumentInJavaAnnotation + +final class QuotedTypeMissing(tpe: Type)(using Context) extends StagingMessage(QuotedTypeMissingID): + + private def witness = defn.QuotedTypeClass.typeRef.appliedTo(tpe) + + override protected def msg(using Context): String = + i"Reference to $tpe within quotes requires a given ${witness} in scope" + + override protected def explain(using Context): String = + i"""Referencing `$tpe` inside a quoted expression requires a `${witness}` to be in scope. + |Since Scala is subject to erasure at runtime, the type information will be missing during the execution of the code. + |`${witness}` is therefore needed to carry `$tpe`'s type information into the quoted code. + |Without an implicit `${witness}`, the type `$tpe` cannot be properly referenced within the expression. + |To resolve this, ensure that a `${witness}` is available, either through a context-bound or explicitly. + |""" + +end QuotedTypeMissing + +final class DeprecatedAssignmentSyntax(key: Name, value: untpd.Tree)(using Context) extends SyntaxMsg(DeprecatedAssignmentSyntaxID): + override protected def msg(using Context): String = + i"""Deprecated syntax: in the future it would be interpreted as a named tuple with one element, + |not as an assignment. 
+ | + |To assign a value, use curly braces: `{${key} = ${value}}`.""" + + Message.rewriteNotice("This", version = SourceVersion.`3.6-migration`) + + override protected def explain(using Context): String = "" + +class DeprecatedInfixNamedArgumentSyntax()(using Context) extends SyntaxMsg(DeprecatedInfixNamedArgumentSyntaxID): + def msg(using Context) = + i"""Deprecated syntax: infix named arguments lists are deprecated; in the future it would be interpreted as a single name tuple argument. + |To avoid this warning, either remove the argument names or use dotted selection.""" + + Message.rewriteNotice("This", version = SourceVersion.`3.6-migration`) + + def explain(using Context) = "" + +class GivenSearchPriorityWarning( + pt: Type, + cmp: Int, + prev: Int, + winner: TermRef, + loser: TermRef, + isLastOldVersion: Boolean +)(using Context) extends Message(GivenSearchPriorityID): + def kind = MessageKind.PotentialIssue + def choice(nth: String, c: Int) = + if c == 0 then "none - it's ambiguous" + else s"the $nth alternative" + val (change, whichChoice) = + if isLastOldVersion + then ("will change in the future release", "Current choice ") + else ("has changed", "Previous choice") + def warningMessage: String = + i"""Given search preference for $pt between alternatives + | ${loser} + |and + | ${winner} + |$change. + |$whichChoice : ${choice("first", prev)} + |Choice from Scala 3.7 : ${choice("second", cmp)}""" + def migrationHints: String = + i"""Suppress this warning by choosing -source 3.5, -source 3.7, or + |by using @annotation.nowarn("id=205")""" + def ambiguousNote: String = + i""" + | + |Note: $warningMessage""" + def msg(using Context) = + i"""$warningMessage + | + |$migrationHints""" + + def explain(using Context) = "" diff --git a/compiler/src/dotty/tools/dotc/reporting/trace.scala b/compiler/src/dotty/tools/dotc/reporting/trace.scala index fbbc3d990969..7f05cffb422a 100644 --- a/compiler/src/dotty/tools/dotc/reporting/trace.scala +++ b/compiler/src/dotty/tools/dotc/reporting/trace.scala @@ -27,6 +27,18 @@ object trace extends TraceSyntax: object log extends TraceSyntax: inline def isEnabled: true = true protected val isForced = false + + def dumpStack(limit: Int = -1): Unit = { + val out = Console.out + val exc = new Exception("Dump Stack") + var stack = exc.getStackTrace + .filter(e => !e.getClassName.startsWith("dotty.tools.dotc.reporting.TraceSyntax")) + .filter(e => !e.getClassName.startsWith("dotty.tools.dotc.reporting.trace")) + if limit >= 0 then + stack = stack.take(limit) + exc.setStackTrace(stack) + exc.printStackTrace(out) + } end trace /** This module is carefully optimized to give zero overhead if Config.tracingEnabled @@ -84,6 +96,7 @@ trait TraceSyntax: (op: => T)(using Context): T = if ctx.mode.is(Mode.Printing) || !isForced && (printer eq Printers.noPrinter) then op else + val start = System.nanoTime // Avoid evaluating question multiple time, since each evaluation // may cause some extra logging output. 
val q = question @@ -97,7 +110,13 @@ trait TraceSyntax: def finalize(msg: String) = if !finalized then ctx.base.indent -= 1 - doLog(s"$margin$msg") + val stop = System.nanoTime + val diffNs = stop - start + val diffS = (diffNs / 1000 / 1000).toInt / 1000.0 + if diffS > 0.1 then + doLog(s"$margin$msg (${"%.2f".format(diffS)} s)") + else + doLog(s"$margin$msg") finalized = true try doLog(s"$margin$leading") diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index 75f04908ac55..75e859111932 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -295,7 +295,7 @@ private class ExtractAPICollector(nonLocalClassSymbols: mutable.HashSet[Symbol]) val selfType = apiType(sym.givenSelfType) - val name = sym.fullName.stripModuleClassSuffix.toString + val name = ExtractDependencies.classNameAsString(sym) // We strip module class suffix. Zinc relies on a class and its companion having the same name val tparams = sym.typeParams.map(apiTypeParameter).toArray diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index dfff5971889e..154d50f8ebc2 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -105,8 +105,24 @@ object ExtractDependencies { val name: String = "sbt-deps" val description: String = "sends information on classes' dependencies to sbt" + /** Construct String name for the given sym. + * See https://github.com/sbt/zinc/blob/v1.9.6/internal/zinc-apiinfo/src/main/scala/sbt/internal/inc/ClassToAPI.scala#L86-L99 + * + * For a Java nested class M of a class C returns C's canonical name + "." + M's simple name. + */ def classNameAsString(sym: Symbol)(using Context): String = - sym.fullName.stripModuleClassSuffix.toString + def isJava(sym: Symbol)(using Context): Boolean = + Option(sym.source) match + case Some(src) => src.toString.endsWith(".java") + case None => false + def classNameAsString0(sym: Symbol)(using Context): String = + sym.fullName.stripModuleClassSuffix.toString + def javaClassNameAsString(sym: Symbol)(using Context): String = + if sym.owner.isClass && !sym.owner.isRoot then + javaClassNameAsString(sym.owner) + "." + sym.name.stripModuleClassSuffix.toString + else classNameAsString0(sym) + if isJava(sym) then javaClassNameAsString(sym) + else classNameAsString0(sym) /** Report an internal error in incremental compilation. */ def internalError(msg: => String, pos: SrcPos = NoSourcePosition)(using Context): Unit = diff --git a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala index 357202229e50..05dff8ffadbc 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala @@ -286,6 +286,14 @@ object ExtractSemanticDB: || sym.owner == defn.OpsPackageClass || qualifier.exists(excludeQual) + /** This block is created by lifting i.e. 
EtaExpansion */ + private def isProbablyLifted(block: Block)(using Context) = + def isSyntheticDef(t: Tree) = + t match + case t: (ValDef | DefDef) => t.symbol.isSyntheticWithIdent + case _ => false + block.stats.forall(isSyntheticDef) + private def traverseAnnotsOfDefinition(sym: Symbol)(using Context): Unit = for annot <- sym.annotations do if annot.tree.span.exists @@ -438,6 +446,12 @@ object ExtractSemanticDB: registerUseGuarded(None, sym, tree.span, tree.source) case _ => () + // If tree is lifted, ignore Synthetic status on all the definitions and traverse all childrens + case tree: Block if isProbablyLifted(tree) => + tree.stats.foreach: + case t: (ValDef | DefDef) if !excludeChildren(t.symbol) => traverseChildren(t) + case _ => () + traverse(tree.expr) case _ => traverseChildren(tree) @@ -458,14 +472,15 @@ object ExtractSemanticDB: def unapply(tree: ValDef)(using Context): Option[(Tree, Tree)] = tree.rhs match case Match(Typed(selected: Tree, tpt: TypeTree), CaseDef(pat: Tree, _, _) :: Nil) - if tpt.span.exists && !tpt.span.hasLength && tpt.tpe.isAnnotatedByUnchecked => + if tpt.span.exists && !tpt.span.hasLength && tpt.tpe.isAnnotatedByUncheckedOrRuntimeChecked => Some((pat, selected)) case _ => None extension (tpe: Types.Type) - private inline def isAnnotatedByUnchecked(using Context) = tpe match - case Types.AnnotatedType(_, annot) => annot.symbol == defn.UncheckedAnnot + private inline def isAnnotatedByUncheckedOrRuntimeChecked(using Context) = tpe match + case Types.AnnotatedType(_, annot) => + annot.symbol == defn.UncheckedAnnot || annot.symbol == defn.RuntimeCheckedAnnot case _ => false def collectPats(pat: Tree): List[Tree] = diff --git a/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala b/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala index 50ea6ec48510..81f5d37f443f 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala @@ -91,7 +91,8 @@ class SemanticSymbolBuilder: case _ => end find val sig = sym.signature - find(_.signature == sig) + val targetName = sym.targetName + find(sym => sym.signature == sig && sym.targetName == targetName) def addDescriptor(sym: Symbol): Unit = if sym.is(ModuleClass) then diff --git a/compiler/src/dotty/tools/dotc/staging/HealType.scala b/compiler/src/dotty/tools/dotc/staging/HealType.scala index 5a26803c8137..a73f884fbac9 100644 --- a/compiler/src/dotty/tools/dotc/staging/HealType.scala +++ b/compiler/src/dotty/tools/dotc/staging/HealType.scala @@ -1,17 +1,19 @@ package dotty.tools.dotc package staging -import dotty.tools.dotc.core.Contexts.* -import dotty.tools.dotc.core.Decorators.* -import dotty.tools.dotc.core.Flags.* -import dotty.tools.dotc.core.StdNames.* -import dotty.tools.dotc.core.Symbols.* -import dotty.tools.dotc.core.Types.* -import dotty.tools.dotc.staging.StagingLevel.* -import dotty.tools.dotc.staging.QuoteTypeTags.* +import reporting.* -import dotty.tools.dotc.typer.Implicits.SearchFailureType -import dotty.tools.dotc.util.SrcPos +import core.Contexts.* +import core.Decorators.* +import core.Flags.* +import core.StdNames.* +import core.Symbols.* +import core.Types.* +import StagingLevel.* +import QuoteTypeTags.* + +import typer.Implicits.SearchFailureType +import util.SrcPos class HealType(pos: SrcPos)(using Context) extends TypeMap { @@ -98,9 +100,7 @@ class HealType(pos: SrcPos)(using Context) extends TypeMap { pos) tp case _ => - report.error(em"""Reference to $tp within quotes 
requires a given $reqType in scope. - | - |""", pos) + report.error(QuotedTypeMissing(tp), pos) tp } diff --git a/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala b/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala index 98ca8f2e2b5b..1a6ec307e289 100644 --- a/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala +++ b/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala @@ -76,7 +76,7 @@ class ArrayApply extends MiniPhase { tree.args match // (a, b, c) ~> new ::(a, new ::(b, new ::(c, Nil))) but only for reference types case StripAscription(Apply(wrapArrayMeth, List(StripAscription(rest: JavaSeqLiteral)))) :: Nil - if defn.WrapArrayMethods().contains(wrapArrayMeth.symbol) => + if rest.elems.isEmpty || defn.WrapArrayMethods().contains(wrapArrayMeth.symbol) => Some(rest.elems) case _ => None else None diff --git a/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala b/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala index 60c1bc7c61bb..16219055b8c0 100644 --- a/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala +++ b/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala @@ -76,10 +76,10 @@ object BetaReduce: val bindingsBuf = new ListBuffer[DefTree] def recur(fn: Tree, argss: List[List[Tree]]): Option[Tree] = fn match case Block((ddef : DefDef) :: Nil, closure: Closure) if ddef.symbol == closure.meth.symbol => - Some(reduceApplication(ddef, argss, bindingsBuf)) + reduceApplication(ddef, argss, bindingsBuf) case Block((TypeDef(_, template: Template)) :: Nil, Typed(Apply(Select(New(_), _), _), _)) if template.constr.rhs.isEmpty => template.body match - case (ddef: DefDef) :: Nil => Some(reduceApplication(ddef, argss, bindingsBuf)) + case (ddef: DefDef) :: Nil => reduceApplication(ddef, argss, bindingsBuf) case _ => None case Block(stats, expr) if stats.forall(isPureBinding) => recur(expr, argss).map(cpy.Block(fn)(stats, _)) @@ -106,12 +106,22 @@ object BetaReduce: case _ => tree - /** Beta-reduces a call to `ddef` with arguments `args` and registers new bindings */ - def reduceApplication(ddef: DefDef, argss: List[List[Tree]], bindings: ListBuffer[DefTree])(using Context): Tree = + /** Beta-reduces a call to `ddef` with arguments `args` and registers new bindings. 
+ * @return optionally, the expanded call, or none if the actual argument + * lists do not match in shape the formal parameters + */ + def reduceApplication(ddef: DefDef, argss: List[List[Tree]], bindings: ListBuffer[DefTree]) + (using Context): Option[Tree] = val (targs, args) = argss.flatten.partition(_.isType) val tparams = ddef.leadingTypeParams val vparams = ddef.termParamss.flatten + def shapeMatch(paramss: List[ParamClause], argss: List[List[Tree]]): Boolean = (paramss, argss) match + case (params :: paramss1, args :: argss1) if params.length == args.length => + shapeMatch(paramss1, argss1) + case (Nil, Nil) => true + case _ => false + val targSyms = for (targ, tparam) <- targs.zip(tparams) yield targ.tpe.dealias match @@ -143,19 +153,26 @@ object BetaReduce: bindings += binding.withSpan(arg.span) bindingSymbol - val expansion = TreeTypeMap( - oldOwners = ddef.symbol :: Nil, - newOwners = ctx.owner :: Nil, - substFrom = (tparams ::: vparams).map(_.symbol), - substTo = targSyms ::: argSyms - ).transform(ddef.rhs) - - val expansion1 = new TreeMap { - override def transform(tree: Tree)(using Context) = tree.tpe.widenTermRefExpr match - case ConstantType(const) if isPureExpr(tree) => cpy.Literal(tree)(const) - case tpe: TypeRef if tree.isTerm && tpe.derivesFrom(defn.UnitClass) && isPureExpr(tree) => - cpy.Literal(tree)(Constant(())) - case _ => super.transform(tree) - }.transform(expansion) - - expansion1 + if shapeMatch(ddef.paramss, argss) then + // We can't assume arguments always match. It's possible to construct a + // function with wrong apply method by hand which causes `shapeMatch` to fail. + // See neg/i21952.scala + val expansion = TreeTypeMap( + oldOwners = ddef.symbol :: Nil, + newOwners = ctx.owner :: Nil, + substFrom = (tparams ::: vparams).map(_.symbol), + substTo = targSyms ::: argSyms + ).transform(ddef.rhs) + + val expansion1 = new TreeMap { + override def transform(tree: Tree)(using Context) = tree.tpe.widenTermRefExpr match + case ConstantType(const) if isPureExpr(tree) => cpy.Literal(tree)(const) + case tpe: TypeRef if tree.isTerm && tpe.derivesFrom(defn.UnitClass) && isPureExpr(tree) => + cpy.Literal(tree)(Constant(())) + case _ => super.transform(tree) + }.transform(expansion) + + Some(expansion1) + else None + end reduceApplication +end BetaReduce \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala b/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala index 073086ac5e2c..e8a402068bfc 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala @@ -43,7 +43,7 @@ class CheckReentrant extends MiniPhase { requiredClass("scala.annotation.internal.unshared")) private val scalaJSIRPackageClass = new CtxLazy( - getPackageClassIfDefined("org.scalajs.ir")) + getPackageClassIfDefined("dotty.tools.sjs.ir")) def isIgnored(sym: Symbol)(using Context): Boolean = sym.hasAnnotation(sharableAnnot()) || diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala index ba77167de736..d647d50560d3 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -17,6 +17,7 @@ import dotty.tools.dotc.core.Phases.Phase import dotty.tools.dotc.core.StdNames import dotty.tools.dotc.report import dotty.tools.dotc.reporting.Message +import dotty.tools.dotc.reporting.UnusedSymbol as UnusedSymbolMessage import 
dotty.tools.dotc.typer.ImportInfo import dotty.tools.dotc.util.{Property, SrcPos} import dotty.tools.dotc.core.Mode @@ -57,7 +58,7 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke override def isRunnable(using Context): Boolean = super.isRunnable && - ctx.settings.Wunused.value.nonEmpty && + ctx.settings.WunusedHas.any && !ctx.isJava // ========== SETUP ============ @@ -134,25 +135,22 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke } override def prepareForDefDef(tree: tpd.DefDef)(using Context): Context = - unusedDataApply{ ud => + unusedDataApply: ud => if !tree.symbol.is(Private) then tree.termParamss.flatten.foreach { p => ud.addIgnoredParam(p.symbol) } - import ud.registerTrivial - tree.registerTrivial + ud.registerTrivial(tree) traverseAnnotations(tree.symbol) ud.registerDef(tree) ud.addIgnoredUsage(tree.symbol) - } override def prepareForTypeDef(tree: tpd.TypeDef)(using Context): Context = - unusedDataApply{ ud => + unusedDataApply: ud => + traverseAnnotations(tree.symbol) if !tree.symbol.is(Param) then // Ignore type parameter (as Scala 2) - traverseAnnotations(tree.symbol) ud.registerDef(tree) ud.addIgnoredUsage(tree.symbol) - } override def prepareForBind(tree: tpd.Bind)(using Context): Context = traverseAnnotations(tree.symbol) @@ -295,21 +293,21 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke res.warnings.toList.sortBy(_.pos.span.point)(using Ordering[Int]).foreach { s => s match case UnusedSymbol(t, _, WarnTypes.Imports) => - report.warning(s"unused import", t) + report.warning(UnusedSymbolMessage.imports, t) case UnusedSymbol(t, _, WarnTypes.LocalDefs) => - report.warning(s"unused local definition", t) + report.warning(UnusedSymbolMessage.localDefs, t) case UnusedSymbol(t, _, WarnTypes.ExplicitParams) => - report.warning(s"unused explicit parameter", t) + report.warning(UnusedSymbolMessage.explicitParams, t) case UnusedSymbol(t, _, WarnTypes.ImplicitParams) => - report.warning(s"unused implicit parameter", t) + report.warning(UnusedSymbolMessage.implicitParams, t) case UnusedSymbol(t, _, WarnTypes.PrivateMembers) => - report.warning(s"unused private member", t) + report.warning(UnusedSymbolMessage.privateMembers, t) case UnusedSymbol(t, _, WarnTypes.PatVars) => - report.warning(s"unused pattern variable", t) + report.warning(UnusedSymbolMessage.patVars, t) case UnusedSymbol(t, _, WarnTypes.UnsetLocals) => - report.warning(s"unset local variable, consider using an immutable val instead", t) + report.warning("unset local variable, consider using an immutable val instead", t) case UnusedSymbol(t, _, WarnTypes.UnsetPrivates) => - report.warning(s"unset private variable, consider using an immutable val instead", t) + report.warning("unset private variable, consider using an immutable val instead", t) } end CheckUnused @@ -624,7 +622,7 @@ object CheckUnused: symbol.name.mangledString.contains("$") /** - * Is the the constructor of synthetic package object + * Is the constructor of synthetic package object * Should be ignored as it is always imported/used in package * Trigger false negative on used import * diff --git a/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala b/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala index 89161cc8c013..58040c4ef89f 100644 --- a/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala +++ b/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala @@ -11,7 +11,7 @@ import scala.compiletime.uninitialized * with a different context. 
* * A typical use case is a lazy val in a phase object which exists once per root context where - * the expression intiializing the lazy val depends only on the root context, but not any changes afterwards. + * the expression initializing the lazy val depends only on the root context, but not any changes afterwards. */ class CtxLazy[T](expr: Context ?=> T) { private var myValue: T = uninitialized diff --git a/compiler/src/dotty/tools/dotc/transform/DropParentRefinements.scala b/compiler/src/dotty/tools/dotc/transform/DropParentRefinements.scala new file mode 100644 index 000000000000..1960568dc505 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/transform/DropParentRefinements.scala @@ -0,0 +1,35 @@ +package dotty.tools.dotc.transform + +import dotty.tools.dotc.transform.MegaPhase.MiniPhase +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.core.Contexts.Context +import dotty.tools.dotc.core.DenotTransformers.IdentityDenotTransformer +import dotty.tools.dotc.typer.Typer + +object DropParentRefinements: + val name: String = "dropParentRefinements" + val description: String = "drop parent refinements from a template" + +/** Drop parent refinements from a template, as they are generated without + * an implementation. These refinements are unusally required for tracked + * members with more specific types. + */ +class DropParentRefinements extends MiniPhase with IdentityDenotTransformer: + thisPhase => + import tpd.* + + override def phaseName: String = DropParentRefinements.name + + override def description: String = DropParentRefinements.description + + override def runsAfterGroupsOf: Set[String] = Set(CountOuterAccesses.name) + + override def changesMembers: Boolean = true // the phase drops parent refinements + + override def transformTemplate(tree: tpd.Template)(using Context): tpd.Tree = + val newBody = tree.body.filter(!_.hasAttachment(Typer.RefinementFromParent)) + tree.body.foreach { member => + if member.hasAttachment(Typer.RefinementFromParent) then + member.symbol.dropAfter(thisPhase) + } + cpy.Template(tree)(body = newBody) diff --git a/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala b/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala index 0b0906148ba1..2deb50956537 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala @@ -13,7 +13,7 @@ import NameKinds.SuperAccessorName object ElimErasedValueType { val name: String = "elimErasedValueType" - val description: String = "expand erased value types to their underlying implmementation types" + val description: String = "expand erased value types to their underlying implementation types" def elimEVT(tp: Type)(using Context): Type = tp match { case ErasedValueType(_, underlying) => diff --git a/compiler/src/dotty/tools/dotc/transform/Erasure.scala b/compiler/src/dotty/tools/dotc/transform/Erasure.scala index a25a2fcb5c6d..7414ca7e69c6 100644 --- a/compiler/src/dotty/tools/dotc/transform/Erasure.scala +++ b/compiler/src/dotty/tools/dotc/transform/Erasure.scala @@ -36,6 +36,7 @@ import ExplicitOuter.* import core.Mode import util.Property import reporting.* +import scala.annotation.tailrec class Erasure extends Phase with DenotTransformer { @@ -764,7 +765,8 @@ object Erasure { (ctx.owner.enclosingPackageClass eq boundary) } - def recur(qual: Tree): Tree = { + @tailrec + def recur(qual: Tree): Tree = val qualIsPrimitive = qual.tpe.widen.isPrimitiveValueType val symIsPrimitive = 
sym.owner.isPrimitiveValueClass @@ -773,33 +775,34 @@ object Erasure { inContext(preErasureCtx): tree.qualifier.typeOpt.widen.finalResultType) - if (qualIsPrimitive && !symIsPrimitive || qual.tpe.widenDealias.isErasedValueType) + if qualIsPrimitive && !symIsPrimitive || qual.tpe.widenDealias.isErasedValueType then recur(box(qual)) - else if (!qualIsPrimitive && symIsPrimitive) + else if !qualIsPrimitive && symIsPrimitive then recur(unbox(qual, sym.owner.typeRef)) - else if (sym.owner eq defn.ArrayClass) + else if sym.owner eq defn.ArrayClass then selectArrayMember(qual, originalQual) - else { - val qual1 = adaptIfSuper(qual) - if (qual1.tpe.derivesFrom(sym.owner) || qual1.isInstanceOf[Super]) - select(qual1, sym) - else - val castTarget = // Avoid inaccessible cast targets, see i8661 - if isJvmAccessible(sym.owner) && sym.owner.isType - then - sym.owner.typeRef - else - // If the owner is inaccessible, try going through the qualifier, - // but be careful to not go in an infinite loop in case that doesn't - // work either. - val tp = originalQual - if tp =:= qual1.tpe.widen then - return errorTree(qual1, - em"Unable to emit reference to ${sym.showLocated}, ${sym.owner} is not accessible in ${ctx.owner.enclosingClass}") - tp - recur(cast(qual1, castTarget)) - } - } + else + adaptIfSuper(qual) match + case qual1: Super => + select(qual1, sym) + case qual1 if !isJvmAccessible(qual1.tpe.typeSymbol) + || !qual1.tpe.derivesFrom(sym.owner) => + val castTarget = // Avoid inaccessible cast targets, see i8661 + if isJvmAccessible(sym.owner) && sym.owner.isType then + sym.owner.typeRef + else + // If the owner is inaccessible, try going through the qualifier, + // but be careful to not go in an infinite loop in case that doesn't + // work either. + val tp = originalQual + if tp =:= qual1.tpe.widen then + return errorTree(qual1, + em"Unable to emit reference to ${sym.showLocated}, ${sym.owner} is not accessible in ${ctx.owner.enclosingClass}") + tp + recur(cast(qual1, castTarget)) + case qual1 => + select(qual1, sym) + end recur checkNotErased(recur(qual1)) } @@ -945,6 +948,8 @@ object Erasure { vparams = vparams :+ param if crCount == 1 then meth.rhs.changeOwnerAfter(meth.symbol, sym, erasurePhase) else skipContextClosures(meth.rhs, crCount - 1) + case inlined: Inlined => + skipContextClosures(Inlines.dropInlined(inlined), crCount) var rhs1 = skipContextClosures(ddef.rhs.asInstanceOf[Tree], contextResultCount(sym)) diff --git a/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala index d0e90566f333..67bf1bebed87 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala @@ -69,10 +69,12 @@ class ExpandSAMs extends MiniPhase: val tpe1 = collectAndStripRefinements(tpe) val Seq(samDenot) = tpe1.possibleSamMethods cpy.Block(tree)(stats, - AnonClass(List(tpe1), - List(samDenot.symbol.asTerm.name -> fn.symbol.asTerm), - refinements.toList - ) + transformFollowingDeep: + AnonClass(List(tpe1), + List(samDenot.symbol.asTerm.name -> fn.symbol.asTerm), + refinements.toList, + adaptVarargs = true + ) ) } case _ => @@ -94,7 +96,7 @@ class ExpandSAMs extends MiniPhase: * } * ``` * - * is expanded to an anomymous class: + * is expanded to an anonymous class: * * ``` * val x: PartialFunction[A, B] = { diff --git a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala index 15dfda845389..0db1ddc5750c 100644 --- 
a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala @@ -101,7 +101,7 @@ class ExplicitOuter extends MiniPhase with InfoTransformer { thisPhase => val parentCls = parent.tpe.classSymbol.asClass parent match // if we are in a regular class and first parent is also a regular class, - // make sure we have a contructor + // make sure we have a constructor case parent: TypeTree if !cls.is(Trait) && !parentCls.is(Trait) && !defn.NotRuntimeClasses.contains(parentCls) => New(parent.tpe, Nil).withSpan(impl.span) @@ -454,7 +454,7 @@ object ExplicitOuter { val enclClass = ctx.owner.lexicallyEnclosingClass.asClass val outerAcc = atPhaseNoLater(lambdaLiftPhase) { // lambdalift mangles local class names, which means we cannot - // reliably find outer acessors anymore + // reliably find outer accessors anymore tree match case tree: This if tree.symbol == enclClass && !enclClass.is(Trait) => outerParamAccessor(enclClass) diff --git a/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala index b5bc43ee762c..c66e6b9471cb 100644 --- a/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala +++ b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala @@ -14,6 +14,7 @@ import Decorators.* import scala.collection.mutable import DenotTransformers.* import NameOps.* +import SymDenotations.SymDenotation import NameKinds.OuterSelectName import StdNames.* import config.Feature @@ -35,22 +36,26 @@ object FirstTransform { * if (true) A else B ==> A * if (false) A else B ==> B */ -class FirstTransform extends MiniPhase with InfoTransformer { thisPhase => +class FirstTransform extends MiniPhase with SymTransformer { thisPhase => import ast.tpd.* override def phaseName: String = FirstTransform.name override def description: String = FirstTransform.description - /** eliminate self symbol in ClassInfo */ - override def transformInfo(tp: Type, sym: Symbol)(using Context): Type = tp match { - case tp @ ClassInfo(_, _, _, _, self: Symbol) => - tp.derivedClassInfo(selfInfo = self.info) - case _ => - tp - } - - override protected def infoMayChange(sym: Symbol)(using Context): Boolean = sym.isClass + /** eliminate self symbol in ClassInfo, reset Deferred for @native methods */ + override def transformSym(sym: SymDenotation)(using Context): SymDenotation = + if sym.isClass then + sym.info match + case tp @ ClassInfo(_, _, _, _, self: Symbol) => + val info1 = tp.derivedClassInfo(selfInfo = self.info) + sym.copySymDenotation(info = info1).copyCaches(sym, ctx.phase.next) + case _ => + sym + else if sym.isAllOf(DeferredMethod) && sym.hasAnnotation(defn.NativeAnnot) then + sym.copySymDenotation(initFlags = sym.flags &~ Deferred) + else + sym override def checkPostCondition(tree: Tree)(using Context): Unit = tree match { @@ -121,7 +126,6 @@ class FirstTransform extends MiniPhase with InfoTransformer { thisPhase => override def transformDefDef(ddef: DefDef)(using Context): Tree = val meth = ddef.symbol.asTerm if meth.hasAnnotation(defn.NativeAnnot) then - meth.resetFlag(Deferred) DefDef(meth, _ => ref(defn.Sys_error.termRef).withSpan(ddef.span) .appliedTo(Literal(Constant(s"native method stub")))) diff --git a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala index 217c843c4e50..1798d938272c 100644 --- a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala +++ 
b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala @@ -19,6 +19,7 @@ import config.Printers.transforms import reporting.trace import java.lang.StringBuilder +import scala.annotation.tailrec import scala.collection.mutable.ListBuffer /** Helper object to generate generic java signatures, as defined in @@ -64,7 +65,7 @@ object GenericSignatures { ps.foreach(boxedSig) } - def boxedSig(tp: Type): Unit = jsig(tp.widenDealias, primitiveOK = false) + def boxedSig(tp: Type): Unit = jsig(tp.widenDealias, unboxedVCs = false) /** The signature of the upper-bound of a type parameter. * @@ -232,7 +233,7 @@ object GenericSignatures { } @noinline - def jsig(tp0: Type, toplevel: Boolean = false, primitiveOK: Boolean = true): Unit = { + def jsig(tp0: Type, toplevel: Boolean = false, unboxedVCs: Boolean = true): Unit = { val tp = tp0.dealias tp match { @@ -241,7 +242,7 @@ object GenericSignatures { val erasedUnderlying = fullErasure(ref.underlying.bounds.hi) // don't emit type param name if the param is upper-bounded by a primitive type (including via a value class) if erasedUnderlying.isPrimitiveValueType then - jsig(erasedUnderlying, toplevel, primitiveOK) + jsig(erasedUnderlying, toplevel, unboxedVCs) else typeParamSig(ref.paramName.lastPart) case defn.ArrayOf(elemtp) => @@ -269,15 +270,14 @@ object GenericSignatures { else if (sym == defn.NullClass) builder.append("Lscala/runtime/Null$;") else if (sym.isPrimitiveValueClass) - if (!primitiveOK) jsig(defn.ObjectType) + if (!unboxedVCs) jsig(defn.ObjectType) else if (sym == defn.UnitClass) jsig(defn.BoxedUnitClass.typeRef) else builder.append(defn.typeTag(sym.info)) else if (sym.isDerivedValueClass) { - val erasedUnderlying = fullErasure(tp) - if (erasedUnderlying.isPrimitiveValueType && !primitiveOK) - classSig(sym, pre, args) - else - jsig(erasedUnderlying, toplevel, primitiveOK) + if (unboxedVCs) { + val erasedUnderlying = fullErasure(tp) + jsig(erasedUnderlying, toplevel) + } else classSig(sym, pre, args) } else if (defn.isSyntheticFunctionClass(sym)) { val erasedSym = defn.functionTypeErasure(sym).typeSymbol @@ -286,7 +286,7 @@ object GenericSignatures { else if sym.isClass then classSig(sym, pre, args) else - jsig(erasure(tp), toplevel, primitiveOK) + jsig(erasure(tp), toplevel, unboxedVCs) case ExprType(restpe) if toplevel => builder.append("()") @@ -295,36 +295,13 @@ object GenericSignatures { case ExprType(restpe) => jsig(defn.FunctionType(0).appliedTo(restpe)) - case PolyType(tparams, mtpe: MethodType) => - assert(tparams.nonEmpty) + case mtd: MethodOrPoly => + val (tparams, vparams, rte) = collectMethodParams(mtd) if (toplevel && !sym0.isConstructor) polyParamSig(tparams) - jsig(mtpe) - - // Nullary polymorphic method - case PolyType(tparams, restpe) => - assert(tparams.nonEmpty) - if (toplevel) polyParamSig(tparams) - builder.append("()") - methodResultSig(restpe) - - case mtpe: MethodType => - // erased method parameters do not make it to the bytecode. 
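// Background for the `unboxedVCs` flag used above: for a derived value class the
// generic signature either uses the erased underlying type (unboxed) or the class
// itself (boxed), depending on the position. A small standalone illustration of
// that distinction, not taken from the patch (`Meters`/`ErasureSketch` are made up):
final class Meters(val value: Double) extends AnyVal

object ErasureSketch:
  def plain(m: Meters): Double = m.value          // direct parameter: erases to a raw double
  def boxed(ms: List[Meters]): List[Meters] = ms  // type-argument position: the Meters reference is kept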
- def effectiveParamInfoss(t: Type)(using Context): List[List[Type]] = t match { - case t: MethodType if t.hasErasedParams => - t.paramInfos.zip(t.erasedParams).collect{ case (i, false) => i } - :: effectiveParamInfoss(t.resType) - case t: MethodType => t.paramInfos :: effectiveParamInfoss(t.resType) - case _ => Nil - } - val params = effectiveParamInfoss(mtpe).flatten - val restpe = mtpe.finalResultType builder.append('(') - // TODO: Update once we support varargs - params.foreach { tp => - jsig(tp) - } + for vparam <- vparams do jsig(vparam) builder.append(')') - methodResultSig(restpe) + methodResultSig(rte) case tp: AndType => // Only intersections appearing as the upper-bound of a type parameter @@ -339,7 +316,7 @@ object GenericSignatures { val (reprParents, _) = splitIntersection(parents) val repr = reprParents.find(_.typeSymbol.is(TypeParam)).getOrElse(reprParents.head) - jsig(repr, primitiveOK = primitiveOK) + jsig(repr, unboxedVCs = unboxedVCs) case ci: ClassInfo => val tParams = tp.typeParams @@ -347,15 +324,15 @@ object GenericSignatures { superSig(ci.typeSymbol, ci.parents) case AnnotatedType(atp, _) => - jsig(atp, toplevel, primitiveOK) + jsig(atp, toplevel, unboxedVCs) case hktl: HKTypeLambda => - jsig(hktl.finalResultType, toplevel, primitiveOK) + jsig(hktl.finalResultType, toplevel, unboxedVCs) case _ => val etp = erasure(tp) if (etp eq tp) throw new UnknownSig - else jsig(etp, toplevel, primitiveOK) + else jsig(etp, toplevel, unboxedVCs) } } val throwsArgs = sym0.annotations flatMap ThrownException.unapply @@ -476,4 +453,23 @@ object GenericSignatures { } else x } + + private def collectMethodParams(mtd: MethodOrPoly)(using Context): (List[TypeParamInfo], List[Type], Type) = + val tparams = ListBuffer.empty[TypeParamInfo] + val vparams = ListBuffer.empty[Type] + + @tailrec def recur(tpe: Type): Type = tpe match + case mtd: MethodType => + vparams ++= mtd.paramInfos.filterNot(_.hasAnnotation(defn.ErasedParamAnnot)) + recur(mtd.resType) + case PolyType(tps, tpe) => + tparams ++= tps + recur(tpe) + case _ => + tpe + end recur + + val rte = recur(mtd) + (tparams.toList, vparams.toList, rte) + end collectMethodParams } diff --git a/compiler/src/dotty/tools/dotc/transform/Getters.scala b/compiler/src/dotty/tools/dotc/transform/Getters.scala index 43289209d146..a58dffa04223 100644 --- a/compiler/src/dotty/tools/dotc/transform/Getters.scala +++ b/compiler/src/dotty/tools/dotc/transform/Getters.scala @@ -103,7 +103,7 @@ class Getters extends MiniPhase with SymTransformer { thisPhase => override def transformValDef(tree: ValDef)(using Context): Tree = val sym = tree.symbol if !sym.is(Method) then return tree - val getterDef = DefDef(sym.asTerm, tree.rhs).withSpan(tree.span) + val getterDef = DefDef(sym.asTerm, tree.rhs).withSpan(tree.span).withAttachmentsFrom(tree) if !sym.is(Mutable) then return getterDef ensureSetter(sym.asTerm) if !newSetters.contains(sym.setter) then return getterDef diff --git a/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala b/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala index 18333ae506fd..d2a72e10fcfc 100644 --- a/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala +++ b/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala @@ -60,9 +60,11 @@ class InlinePatterns extends MiniPhase: template.body match case List(ddef @ DefDef(`name`, _, _, _)) => val bindings = new ListBuffer[DefTree]() - val expansion1 = BetaReduce.reduceApplication(ddef, argss, bindings) - val bindings1 = bindings.result() - seq(bindings1, 
expansion1) + BetaReduce.reduceApplication(ddef, argss, bindings) match + case Some(expansion1) => + val bindings1 = bindings.result() + seq(bindings1, expansion1) + case None => tree case _ => tree case _ => tree diff --git a/compiler/src/dotty/tools/dotc/transform/Inlining.scala b/compiler/src/dotty/tools/dotc/transform/Inlining.scala index 335d5a38931a..751636c7d806 100644 --- a/compiler/src/dotty/tools/dotc/transform/Inlining.scala +++ b/compiler/src/dotty/tools/dotc/transform/Inlining.scala @@ -36,13 +36,7 @@ class Inlining extends MacroTransform, IdentityDenotTransformer { override def run(using Context): Unit = if ctx.compilationUnit.needsInlining || ctx.compilationUnit.hasMacroAnnotations then - try super.run - catch case _: CompilationUnit.SuspendException => () - - override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = - val newUnits = super.runOn(units).filterNot(_.suspended) - ctx.run.nn.checkSuspendedUnits(newUnits) - newUnits + super.run override def checkPostCondition(tree: Tree)(using Context): Unit = tree match { diff --git a/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala b/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala index 6625190661e3..fd901032de4b 100644 --- a/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala +++ b/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala @@ -127,6 +127,8 @@ object MacroAnnotations: // Replace this case with the nested cases. case ex0: InvocationTargetException => ex0.getCause match + case ex: CompilationUnit.SuspendException => + throw ex case ex: scala.quoted.runtime.StopMacroExpansion => if !ctx.reporter.hasErrors then report.error("Macro expansion was aborted by the macro without any errors reported. Macros should issue errors to end-users when aborting a macro expansion with StopMacroExpansion.", annot.tree) @@ -137,7 +139,7 @@ object MacroAnnotations: val stack0 = ex.getStackTrace.takeWhile(_.getClassName != this.getClass().getName()) val stack = stack0.take(1 + stack0.lastIndexWhere(_.getMethodName == "transform")) val msg = - em"""Failed to evaluate macro. + em"""Failed to evaluate macro annotation '$annot'. | Caused by ${ex.getClass}: ${if (ex.getMessage == null) "" else ex.getMessage} | ${stack.mkString("\n ")} |""" diff --git a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala index 4020291dded0..6529eed77fa0 100644 --- a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala +++ b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala @@ -34,7 +34,9 @@ object OverridingPairs: */ protected def exclude(sym: Symbol): Boolean = !sym.memberCanMatchInheritedSymbols - || isCaptureChecking && sym.is(Recheck.ResetPrivate) + || isCaptureChecking && atPhase(ctx.phase.prev)(sym.is(Private)) + // for capture checking we drop the private flag of certain parameter accessors + // but these still need no overriding checks /** The parents of base that are checked when deciding whether an overriding * pair has already been treated in a parent class. 
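// The `reduceApplication` change above makes beta reduction (and its caller in
// InlinePatterns) bail out with `None` when the actual argument lists do not line
// up with the formal parameter lists. A simplified, self-contained version of that
// shape check, over plain lists instead of compiler trees:
object ShapeMatchSketch:
  def shapeMatch[A, B](paramss: List[List[A]], argss: List[List[B]]): Boolean =
    (paramss, argss) match
      case (params :: paramss1, args :: argss1) if params.length == args.length =>
        shapeMatch(paramss1, argss1)
      case (Nil, Nil) => true
      case _          => false

  // e.g. a method (x: Int)(y: Int, z: Int) applied as f(1)(2, 3) matches,
  // while f(1)(2) does not:
  val ok  = shapeMatch(List(List("x"), List("y", "z")), List(List(1), List(2, 3))) // true
  val bad = shapeMatch(List(List("x"), List("y", "z")), List(List(1), List(2)))    // false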
diff --git a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala index 0b8507f3b6c7..9750c41b7252 100644 --- a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala +++ b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala @@ -35,13 +35,6 @@ class PatternMatcher extends MiniPhase { override def runsAfter: Set[String] = Set(ElimRepeated.name) - private val InInlinedCode = new util.Property.Key[Boolean] - private def inInlinedCode(using Context) = ctx.property(InInlinedCode).getOrElse(false) - - override def prepareForInlined(tree: Inlined)(using Context): Context = - if inInlinedCode then ctx - else ctx.fresh.setProperty(InInlinedCode, true) - override def transformMatch(tree: Match)(using Context): Tree = if (tree.isInstanceOf[InlineMatch]) tree else { @@ -53,7 +46,8 @@ class PatternMatcher extends MiniPhase { case rt => tree.tpe val translated = new Translator(matchType, this).translateMatch(tree) - if !inInlinedCode then + // Skip analysis on inlined code (eg pos/i19157) + if !tpd.enclosingInlineds.nonEmpty then // check exhaustivity and unreachability SpaceEngine.checkMatch(tree) @@ -814,11 +808,11 @@ object PatternMatcher { */ private def collectSwitchCases(scrutinee: Tree, plan: SeqPlan): List[(List[Tree], Plan)] = { def isSwitchableType(tpe: Type): Boolean = - (tpe isRef defn.IntClass) || - (tpe isRef defn.ByteClass) || - (tpe isRef defn.ShortClass) || - (tpe isRef defn.CharClass) || - (tpe isRef defn.StringClass) + (tpe <:< defn.IntType) || + (tpe <:< defn.ByteType) || + (tpe <:< defn.ShortType) || + (tpe <:< defn.CharType) || + (tpe <:< defn.StringType) val seen = mutable.Set[Any]() @@ -868,7 +862,7 @@ object PatternMatcher { (Nil, plan) :: Nil } - if (isSwitchableType(scrutinee.tpe.widen)) recur(plan) + if (isSwitchableType(scrutinee.tpe)) recur(plan) else Nil } @@ -889,8 +883,8 @@ object PatternMatcher { */ val (primScrutinee, scrutineeTpe) = - if (scrutinee.tpe.widen.isRef(defn.IntClass)) (scrutinee, defn.IntType) - else if (scrutinee.tpe.widen.isRef(defn.StringClass)) (scrutinee, defn.StringType) + if (scrutinee.tpe <:< defn.IntType) (scrutinee, defn.IntType) + else if (scrutinee.tpe <:< defn.StringType) (scrutinee, defn.StringType) else (scrutinee.select(nme.toInt), defn.IntType) def primLiteral(lit: Tree): Tree = diff --git a/compiler/src/dotty/tools/dotc/transform/Pickler.scala b/compiler/src/dotty/tools/dotc/transform/Pickler.scala index 6c3dcc669877..c8c071064ab8 100644 --- a/compiler/src/dotty/tools/dotc/transform/Pickler.scala +++ b/compiler/src/dotty/tools/dotc/transform/Pickler.scala @@ -44,7 +44,7 @@ object Pickler { */ inline val ParallelPickling = true - /**A holder for syncronization points and reports when writing TASTy asynchronously. + /**A holder for synchronization points and reports when writing TASTy asynchronously. * The callbacks should only be called once. 
*/ class AsyncTastyHolder private ( @@ -322,7 +322,7 @@ class Pickler extends Phase { if tree.span.exists then val reference = ctx.settings.sourceroot.value PositionPickler.picklePositions( - pickler, treePkl.buf.addrOfTree, treePkl.treeAnnots, reference, + pickler, treePkl.buf.addrOfTree, treePkl.treeAnnots, treePkl.typeAnnots, reference, unit.source, tree :: Nil, positionWarnings, scratch.positionBuffer, scratch.pickledIndices) diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index c6ad1bb860e8..0feee53ca50f 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -303,20 +303,19 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => if !tree.symbol.is(Package) then tree else errorTree(tree, em"${tree.symbol} cannot be used as a type") - // Cleans up retains annotations in inferred type trees. This is needed because - // during the typer, it is infeasible to correctly infer the capture sets in most - // cases, resulting ill-formed capture sets that could crash the pickler later on. - // See #20035. - private def cleanupRetainsAnnot(symbol: Symbol, tpt: Tree)(using Context): Tree = + /** Make result types of ValDefs and DefDefs that override some other definitions + * declared types rather than InferredTypes. This is necessary since we otherwise + * clean retains annotations from such types. But for an overriding symbol the + * retains annotations come from the explicitly declared parent types, so should + * be kept. + */ + private def makeOverrideTypeDeclared(symbol: Symbol, tpt: Tree)(using Context): Tree = tpt match case tpt: InferredTypeTree - if !symbol.allOverriddenSymbols.hasNext => - // if there are overridden symbols, the annotation comes from an explicit type of the overridden symbol - // and should be retained. 
- val tm = new CleanupRetains - val tpe1 = tm(tpt.tpe) - tpt.withType(tpe1) - case _ => tpt + if symbol.allOverriddenSymbols.hasNext => + TypeTree(tpt.tpe, inferred = false).withSpan(tpt.span).withAttachmentsFrom(tpt) + case _ => + tpt override def transform(tree: Tree)(using Context): Tree = try tree match { @@ -432,7 +431,7 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => registerIfHasMacroAnnotations(tree) checkErasedDef(tree) Checking.checkPolyFunctionType(tree.tpt) - val tree1 = cpy.ValDef(tree)(tpt = cleanupRetainsAnnot(tree.symbol, tree.tpt), rhs = normalizeErasedRhs(tree.rhs, tree.symbol)) + val tree1 = cpy.ValDef(tree)(tpt = makeOverrideTypeDeclared(tree.symbol, tree.tpt), rhs = normalizeErasedRhs(tree.rhs, tree.symbol)) if tree1.removeAttachment(desugar.UntupledParam).isDefined then checkStableSelection(tree.rhs) processValOrDefDef(super.transform(tree1)) @@ -441,7 +440,7 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => checkErasedDef(tree) Checking.checkPolyFunctionType(tree.tpt) annotateContextResults(tree) - val tree1 = cpy.DefDef(tree)(tpt = cleanupRetainsAnnot(tree.symbol, tree.tpt), rhs = normalizeErasedRhs(tree.rhs, tree.symbol)) + val tree1 = cpy.DefDef(tree)(tpt = makeOverrideTypeDeclared(tree.symbol, tree.tpt), rhs = normalizeErasedRhs(tree.rhs, tree.symbol)) processValOrDefDef(superAcc.wrapDefDef(tree1)(super.transform(tree1).asInstanceOf[DefDef])) case tree: TypeDef => registerIfHasMacroAnnotations(tree) @@ -524,12 +523,12 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => report.error(em"type ${alias.tpe} outside bounds $bounds", tree.srcPos) super.transform(tree) case tree: TypeTree => - tree.withType( - tree.tpe match { - case AnnotatedType(tpe, annot) => AnnotatedType(tpe, transformAnnot(annot)) - case tpe => tpe - } - ) + val tpe = if tree.isInferred then CleanupRetains()(tree.tpe) else tree.tpe + tree.withType: + tpe match + case AnnotatedType(parent, annot) => + AnnotatedType(parent, transformAnnot(annot)) // TODO: Also map annotations embedded in type? + case _ => tpe case Typed(Ident(nme.WILDCARD), _) => withMode(Mode.Pattern)(super.transform(tree)) // The added mode signals that bounds in a pattern need not diff --git a/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala b/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala index 482da0edb82b..359ec701d164 100644 --- a/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala +++ b/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala @@ -44,7 +44,10 @@ object ProtectedAccessors { /** Do we need a protected accessor for accessing sym from the current context's owner? 
*/ def needsAccessor(sym: Symbol)(using Context): Boolean = needsAccessorIfNotInSubclass(sym) && - !ctx.owner.enclosingClass.derivesFrom(sym.owner) + !needsAccessorIsSubclass(sym) + + def needsAccessorIsSubclass(sym: Symbol)(using Context): Boolean = + ctx.owner.enclosingClass.derivesFrom(sym.owner) } class ProtectedAccessors extends MiniPhase { diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala index f809fbd176ce..8df9e5966920 100644 --- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala +++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala @@ -12,7 +12,7 @@ import DenotTransformers.{DenotTransformer, IdentityDenotTransformer, SymTransfo import NamerOps.linkConstructorParams import NullOpsDecorator.stripNull import typer.ErrorReporting.err -import typer.ProtoTypes.* +import typer.ProtoTypes.{AnySelectionProto, LhsProto} import typer.TypeAssigner.seqLitType import typer.ConstFold import typer.ErrorReporting.{Addenda, NothingToAdd} @@ -23,27 +23,23 @@ import reporting.trace import annotation.constructorOnly import cc.CaptureSet.IdempotentCaptRefMap import annotation.tailrec +import dotty.tools.dotc.cc.boxed object Recheck: import tpd.* - /** A flag used to indicate that a ParamAccessor has been temporarily made not-private - * Only used at the start of the Recheck phase, reset at its end. - * The flag repurposes the Scala2ModuleVar flag. No confusion is possible since - * Scala2ModuleVar cannot be also ParamAccessors. - */ - val ResetPrivate = Scala2ModuleVar - val ResetPrivateParamAccessor = ResetPrivate | ParamAccessor - /** Attachment key for rechecked types of TypeTrees */ val RecheckedType = Property.Key[Type] val addRecheckedTypes = new TreeMap: override def transform(tree: Tree)(using Context): Tree = - val tree1 = super.transform(tree) - tree.getAttachment(RecheckedType) match - case Some(tpe) => tree1.withType(tpe) - case None => tree1 + try + val tree1 = super.transform(tree) + tree.getAttachment(RecheckedType) match + case Some(tpe) => tree1.withType(tpe) + case None => tree1 + catch + case _:TypeError => tree extension (sym: Symbol)(using Context) @@ -210,11 +206,12 @@ abstract class Recheck extends Phase, SymTransformer: tree.tpe def recheckSelect(tree: Select, pt: Type)(using Context): Type = - val Select(qual, name) = tree - val proto = - if tree.symbol == defn.Any_asInstanceOf then WildcardType - else AnySelectionProto - recheckSelection(tree, recheck(qual, proto).widenIfUnstable, name, pt) + recheckSelection(tree, + recheck(tree.qualifier, selectionProto(tree, pt)).widenIfUnstable, + tree.name, pt) + + def selectionProto(tree: Select, pt: Type)(using Context): Type = + if tree.symbol == defn.Any_asInstanceOf then WildcardType else AnySelectionProto def recheckSelection(tree: Select, qualType: Type, name: Name, sharpen: Denotation => Denotation)(using Context): Type = @@ -272,7 +269,7 @@ abstract class Recheck extends Phase, SymTransformer: def recheckClassDef(tree: TypeDef, impl: Template, sym: ClassSymbol)(using Context): Type = recheck(impl.constr) - impl.parentsOrDerived.foreach(recheck(_)) + impl.parents.foreach(recheck(_)) recheck(impl.self) recheckStats(impl.body) sym.typeRef @@ -297,8 +294,26 @@ abstract class Recheck extends Phase, SymTransformer: /** A hook to massage the type of an applied method; currently not overridden */ protected def prepareFunction(funtpe: MethodType, meth: Symbol)(using Context): MethodType = funtpe + protected def recheckArg(arg: Tree, formal: Type)(using 
Context): Type = + recheck(arg, formal) + + /** A hook to check all the parts of an application: + * @param tree the application `fn(args)` + * @param qualType if the `fn` is a select `q.m`, the type of the qualifier `q`, + * otherwise NoType + * @param funType the method type of `fn` + * @param argTypes the types of the arguments + */ + protected def recheckApplication(tree: Apply, qualType: Type, funType: MethodType, argTypes: List[Type])(using Context): Type = + constFold(tree, instantiate(funType, argTypes, tree.fun.symbol)) + def recheckApply(tree: Apply, pt: Type)(using Context): Type = - val funtpe0 = recheck(tree.fun) + val (funtpe0, qualType) = tree.fun match + case fun: Select => + val qualType = recheck(fun.qualifier, selectionProto(fun, WildcardType)).widenIfUnstable + (recheckSelection(fun, qualType, fun.name, WildcardType), qualType) + case _ => + (recheck(tree.fun), NoType) // reuse the tree's type on signature polymorphic methods, instead of using the (wrong) rechecked one val funtpe1 = if tree.fun.symbol.originalSignaturePolymorphic.exists then tree.fun.tpe else funtpe0 funtpe1.widen match @@ -311,7 +326,7 @@ abstract class Recheck extends Phase, SymTransformer: else fntpe.paramInfos def recheckArgs(args: List[Tree], formals: List[Type], prefs: List[ParamRef]): List[Type] = args match case arg :: args1 => - val argType = recheck(arg, normalizeByName(formals.head)) + val argType = recheckArg(arg, normalizeByName(formals.head)) val formals1 = if fntpe.isParamDependent then formals.tail.map(_.substParam(prefs.head, argType)) @@ -321,7 +336,7 @@ abstract class Recheck extends Phase, SymTransformer: assert(formals.isEmpty) Nil val argTypes = recheckArgs(tree.args, formals, fntpe.paramRefs) - constFold(tree, instantiate(fntpe, argTypes, tree.fun.symbol)) + recheckApplication(tree, qualType, fntpe1, argTypes) //.showing(i"typed app $tree : $fntpe with ${tree.args}%, % : $argTypes%, % = $result") case tp => assert(false, i"unexpected type of ${tree.fun}: $tp") @@ -426,12 +441,16 @@ abstract class Recheck extends Phase, SymTransformer: val finalizerType = recheck(tree.finalizer, defn.UnitType) TypeComparer.lub(bodyType :: casesTypes) + def seqLiteralElemProto(tree: SeqLiteral, pt: Type, declared: Type)(using Context): Type = + declared.orElse: + pt.stripNull().elemType match + case NoType => WildcardType + case bounds: TypeBounds => WildcardType(bounds) + case elemtp => elemtp + def recheckSeqLiteral(tree: SeqLiteral, pt: Type)(using Context): Type = - val elemProto = pt.stripNull().elemType match - case NoType => WildcardType - case bounds: TypeBounds => WildcardType(bounds) - case elemtp => elemtp val declaredElemType = recheck(tree.elemtpt) + val elemProto = seqLiteralElemProto(tree, pt, declaredElemType) val elemTypes = tree.elems.map(recheck(_, elemProto)) seqLitType(tree, TypeComparer.lub(declaredElemType :: elemTypes)) @@ -462,12 +481,16 @@ abstract class Recheck extends Phase, SymTransformer: case _ => traverse(stats) + /** A hook to prevent rechecking a ValDef or DefDef. + * Typycally used when definitions are completed on first use. + */ + def skipRecheck(sym: Symbol)(using Context) = false + def recheckDef(tree: ValOrDefDef, sym: Symbol)(using Context): Type = - inContext(ctx.localContext(tree, sym)) { + inContext(ctx.localContext(tree, sym)): tree match case tree: ValDef => recheckValDef(tree, sym) case tree: DefDef => recheckDefDef(tree, sym) - } /** Recheck tree without adapting it, returning its new type. 
* @param tree the original tree @@ -484,10 +507,8 @@ abstract class Recheck extends Phase, SymTransformer: case tree: ValOrDefDef => if tree.isEmpty then NoType else - if sym.isUpdatedAfter(preRecheckPhase) then - sym.ensureCompleted() // in this case the symbol's completer should recheck the right hand side - else - recheckDef(tree, sym) + sym.ensureCompleted() + if !skipRecheck(sym) then recheckDef(tree, sym) sym.termRef case tree: TypeDef => // TODO: Should we allow for completers as for ValDefs or DefDefs? diff --git a/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala b/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala index 90c5ac85167c..f1603db0e5a0 100644 --- a/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala +++ b/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala @@ -51,7 +51,7 @@ trait ReifiedReflect: .select(defn.Quotes_reflect_TypeApply_apply) .appliedTo(fn, argTrees) - /** Create tree for `quotes.reflect.Assing(, )` */ + /** Create tree for `quotes.reflect.Assign(, )` */ def Assign(lhs: Tree, rhs: Tree)(using Context) = self.select(defn.Quotes_reflect_Assign) .select(defn.Quotes_reflect_Assign_apply) diff --git a/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala b/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala index 6dc718ef526b..36a40658ffa5 100644 --- a/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala +++ b/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala @@ -15,7 +15,7 @@ import dotty.tools.dotc.transform.MegaPhase.* * Otherwise, the backend needs to be aware that some qualifiers need to be * dropped. * - * A tranformation similar to what this phase does seems to be performed by + * A transformation similar to what this phase does seems to be performed by * flatten in nsc. 
* * The side effects of the qualifier of a dropped `Select` is normally diff --git a/compiler/src/dotty/tools/dotc/transform/Splicer.scala b/compiler/src/dotty/tools/dotc/transform/Splicer.scala index e42f997e7265..b5386d5bd1df 100644 --- a/compiler/src/dotty/tools/dotc/transform/Splicer.scala +++ b/compiler/src/dotty/tools/dotc/transform/Splicer.scala @@ -47,13 +47,14 @@ object Splicer { def splice(tree: Tree, splicePos: SrcPos, spliceExpansionPos: SrcPos, classLoader: ClassLoader)(using Context): Tree = tree match { case Quote(quotedTree, Nil) => quotedTree case _ => - val macroOwner = newSymbol(ctx.owner, nme.MACROkw, Macro | Synthetic, defn.AnyType, coord = tree.span) + val owner = ctx.owner + val macroOwner = newSymbol(owner, nme.MACROkw, Macro | Synthetic, defn.AnyType, coord = tree.span) try val sliceContext = SpliceScope.contextWithNewSpliceScope(splicePos.sourcePos).withOwner(macroOwner) inContext(sliceContext) { val oldContextClassLoader = Thread.currentThread().getContextClassLoader Thread.currentThread().setContextClassLoader(classLoader) - try { + try ctx.profiler.onMacroSplice(owner){ val interpreter = new SpliceInterpreter(splicePos, classLoader) // Some parts of the macro are evaluated during the unpickling performed in quotedExprToTree diff --git a/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala index ce2b8fa591d8..5a63235fc3c0 100644 --- a/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala +++ b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala @@ -195,7 +195,7 @@ class SuperAccessors(thisPhase: DenotTransformer) { * Otherwise, we need to go through an accessor, * which the implementing class will provide an implementation for. */ - if ctx.owner.enclosingClass.derivesFrom(sym.owner) then + if ProtectedAccessors.needsAccessorIsSubclass(sym) then if sym.is(JavaDefined) then report.error(em"${ctx.owner} accesses protected $sym inside a concrete trait method: use super.${sel.name} instead", sel.srcPos) sel diff --git a/compiler/src/dotty/tools/dotc/transform/TailRec.scala b/compiler/src/dotty/tools/dotc/transform/TailRec.scala index 43c740ce7d38..b8052721ff27 100644 --- a/compiler/src/dotty/tools/dotc/transform/TailRec.scala +++ b/compiler/src/dotty/tools/dotc/transform/TailRec.scala @@ -429,8 +429,21 @@ class TailRec extends MiniPhase { assert(false, "We should never have gotten inside a pattern") tree - case tree: ValOrDefDef => - if (isMandatory) noTailTransform(tree.rhs) + case tree: ValDef => + // This could contain a return statement in a code block, so we do have to go into it. 
+ cpy.ValDef(tree)(rhs = noTailTransform(tree.rhs)) + + case tree: DefDef => + if (isMandatory) + if (tree.symbol.is(Synthetic)) + noTailTransform(tree.rhs) + else + // We can't tail recurse through nested definitions, so don't want to propagate to child nodes + // We don't want to fail if there is a call that would recurse (as this would be a non self recurse), so don't + // want to call noTailTransform + // We can however warn in this case, as its likely in this situation that someone would expect a tail + // recursion optimization and enabling this to optimise would be a simple case of inlining the inner method + new NestedTailRecAlerter(method, tree.symbol).traverse(tree) tree case _: Super | _: This | _: Literal | _: TypeTree | _: TypeDef | EmptyTree => @@ -446,7 +459,8 @@ class TailRec extends MiniPhase { case Return(expr, from) => val fromSym = from.symbol - val inTailPosition = !fromSym.is(Label) || tailPositionLabeledSyms.contains(fromSym) + val inTailPosition = tailPositionLabeledSyms.contains(fromSym) // Label returns are only tail if the label is in tail position + || (fromSym eq method) // Method returns are only tail if we are looking at the original method cpy.Return(tree)(transform(expr, inTailPosition), from) case _ => @@ -454,6 +468,19 @@ class TailRec extends MiniPhase { } } } + + class NestedTailRecAlerter(method: Symbol, inner: Symbol) extends TreeTraverser { + override def traverse(tree: tpd.Tree)(using Context): Unit = + tree match { + case a: Apply => + if (a.fun.symbol eq method) { + report.warning(new TailrecNestedCall(method, inner), a.srcPos) + } + traverseChildren(tree) + case _ => + traverseChildren(tree) + } + } } object TailRec { diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala index c4e1c7892e8d..c35dc80c04a5 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -236,7 +236,7 @@ object TreeChecker { private[TreeChecker] def isValidJVMMethodName(name: Name): Boolean = name.toString.forall(isValidJVMMethodChar) - class Checker(phasesToCheck: Seq[Phase]) extends ReTyper with Checking { + class Checker(phasesToCheck: Seq[Phase]) extends ReTyper { import ast.tpd.* protected val nowDefinedSyms = util.HashSet[Symbol]() @@ -432,19 +432,8 @@ object TreeChecker { promote(tree) case _ => val tree1 = super.typedUnadapted(tree, pt, locked) - def isSubType(tp1: Type, tp2: Type) = - (tp1 eq tp2) || // accept NoType / NoType - (tp1 <:< tp2) - def divergenceMsg(tp1: Type, tp2: Type) = - s"""Types differ - |Original type : ${tree.typeOpt.show} - |After checking: ${tree1.tpe.show} - |Original tree : ${tree.show} - |After checking: ${tree1.show} - |Why different : - """.stripMargin + core.TypeComparer.explained(_.isSubType(tp1, tp2)) - if (tree.hasType) // it might not be typed because Typer sometimes constructs new untyped trees and resubmits them to typedUnadapted - assert(isSubType(tree1.tpe, tree.typeOpt), divergenceMsg(tree1.tpe, tree.typeOpt)) + if tree.hasType then // it might not be typed because Typer sometimes constructs new untyped trees and resubmits them to typedUnadapted + checkType(tree1.tpe, tree.typeOpt, tree, "typedUnadapted") tree1 checkNoOrphans(res.tpe) phasesToCheck.foreach(_.checkPostCondition(res)) @@ -824,16 +813,20 @@ object TreeChecker { && !isPrimaryConstructorReturn && !pt.isInstanceOf[FunOrPolyProto] then - assert(tree.tpe <:< pt, { - val mismatch = TypeMismatch(tree.tpe, pt, 
Some(tree)) - i"""|Type Mismatch: - |${mismatch.message} - |tree = $tree ${tree.className}""".stripMargin - }) + checkType(tree.tpe, pt, tree, "adapt") tree } override def simplify(tree: Tree, pt: Type, locked: TypeVars)(using Context): tree.type = tree + + private def checkType(tp1: Type, tp2: Type, tree: untpd.Tree, step: String)(using Context) = + // Accept NoType <:< NoType as true + assert((tp1 eq tp2) || (tp1 <:< tp2), { + val mismatch = TypeMismatch(tp1, tp2, None) + i"""|Type Mismatch (while checking $step): + |${mismatch.message}${mismatch.explanation} + |tree = $tree ${tree.className}""".stripMargin + }) } /** Tree checker that can be applied to a local tree. */ diff --git a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala index 082c239c6443..c1dd6bc6509e 100644 --- a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala +++ b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala @@ -256,7 +256,8 @@ object TypeTestsCasts { else foundClasses.exists(check) end checkSensical - if (expr.tpe <:< testType) && inMatch then + val tp = if expr.tpe.isPrimitiveValueType then defn.boxedType(expr.tpe) else expr.tpe + if tp <:< testType && inMatch then if expr.tpe.isNotNull then constant(expr, Literal(Constant(true))) else expr.testNotNull else { diff --git a/compiler/src/dotty/tools/dotc/transform/init/Checker.scala b/compiler/src/dotty/tools/dotc/transform/init/Checker.scala index 9e78bd5474a3..4d5c467cf4fe 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Checker.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Checker.scala @@ -29,7 +29,7 @@ class Checker extends Phase: override val runsAfter = Set(Pickler.name) override def isEnabled(using Context): Boolean = - super.isEnabled && (ctx.settings.WcheckInit.value || ctx.settings.YcheckInitGlobal.value) + super.isEnabled && (ctx.settings.Whas.checkInit || ctx.settings.YcheckInitGlobal.value) def traverse(traverser: InitTreeTraverser)(using Context): Boolean = monitor(phaseName): val unit = ctx.compilationUnit @@ -50,7 +50,7 @@ class Checker extends Phase: cancellable { val classes = traverser.getClasses() - if ctx.settings.WcheckInit.value then + if ctx.settings.Whas.checkInit then Semantic.checkClasses(classes)(using checkCtx) if ctx.settings.YcheckInitGlobal.value then diff --git a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala index 52e90c0857ed..52760cf8b6c7 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala @@ -29,7 +29,6 @@ import scala.collection.mutable import scala.annotation.tailrec import scala.annotation.constructorOnly import dotty.tools.dotc.core.Flags.AbstractOrTrait -import Decorators.* /** Check initialization safety of static objects * @@ -524,6 +523,8 @@ class Objects(using Context @constructorOnly): def getHeapData()(using mutable: MutableData): Data = mutable.heap + def setHeap(newHeap: Data)(using mutable: MutableData): Unit = mutable.heap = newHeap + /** Cache used to terminate the check */ object Cache: case class Config(thisV: Value, env: Env.Data, heap: Heap.Data) @@ -539,6 +540,7 @@ class Objects(using Context @constructorOnly): val result = super.cachedEval(config, expr, cacheResult, default = Res(Bottom, Heap.getHeapData())) { expr => Res(fun(expr), Heap.getHeapData()) } + Heap.setHeap(result.heap) result.value end Cache @@ -703,6 +705,9 @@ class 
Objects(using Context @constructorOnly): val arr = OfArray(State.currentObject, summon[Regions.Data]) Heap.writeJoin(arr.addr, args.map(_.value).join) arr + else if target.equals(defn.Predef_classOf) then + // Predef.classOf is a stub method in tasty and is replaced in backend + Bottom else if target.hasSource then val cls = target.owner.enclosingClass.asClass val ddef = target.defTree.asInstanceOf[DefDef] @@ -865,7 +870,7 @@ class Objects(using Context @constructorOnly): Bottom case Bottom => - if field.isStaticObject then ObjectRef(field.moduleClass.asClass) + if field.isStaticObject then accessObject(field.moduleClass.asClass) else Bottom case ValueSet(values) => @@ -909,7 +914,10 @@ class Objects(using Context @constructorOnly): Bottom } - /** Handle new expression `new p.C(args)`. + /** + * Handle new expression `new p.C(args)`. + * The actual instance might be cached without running the constructor. + * See tests/init-global/pos/cache-constructor.scala * * @param outer The value for `p`. * @param klass The symbol of the class `C`. @@ -951,7 +959,6 @@ class Objects(using Context @constructorOnly): val instance = OfClass(klass, outerWidened, ctor, args.map(_.value), envWidened) callConstructor(instance, ctor, args) - instance case ValueSet(values) => values.map(ref => instantiate(ref, klass, ctor, args)).join @@ -1225,11 +1232,12 @@ class Objects(using Context @constructorOnly): extendTrace(id) { evalType(prefix, thisV, klass) } val value = eval(rhs, thisV, klass) + val widened = widenEscapedValue(value, rhs) if isLocal then - writeLocal(thisV, lhs.symbol, value) + writeLocal(thisV, lhs.symbol, widened) else - withTrace(trace2) { assign(receiver, lhs.symbol, value, rhs.tpe) } + withTrace(trace2) { assign(receiver, lhs.symbol, widened, rhs.tpe) } case closureDef(ddef) => Fun(ddef, thisV, klass, summon[Env.Data]) @@ -1486,12 +1494,12 @@ class Objects(using Context @constructorOnly): if isWildcardStarArgList(pats) then if pats.size == 1 then // call .toSeq - val toSeqDenot = scrutineeType.member(nme.toSeq).suchThat(_.info.isParameterless) + val toSeqDenot = getMemberMethod(scrutineeType, nme.toSeq, toSeqType(elemType)) val toSeqRes = call(scrutinee, toSeqDenot.symbol, Nil, scrutineeType, superType = NoType, needResolve = true) evalPattern(toSeqRes, pats.head) else // call .drop - val dropDenot = getMemberMethod(scrutineeType, nme.drop, applyType(elemType)) + val dropDenot = getMemberMethod(scrutineeType, nme.drop, dropType(elemType)) val dropRes = call(scrutinee, dropDenot.symbol, ArgInfo(Bottom, summon[Trace], EmptyTree) :: Nil, scrutineeType, superType = NoType, needResolve = true) for pat <- pats.init do evalPattern(applyRes, pat) evalPattern(dropRes, pats.last) @@ -1567,6 +1575,36 @@ class Objects(using Context @constructorOnly): throw new Exception("unexpected type: " + tp + ", Trace:\n" + Trace.show) } + /** Widen the escaped value (a method argument or rhs of an assignment) + * + * The default widening is 1 for most values, 2 for function values. + * User-specified widening annotations are respected. 
+ */ + def widenEscapedValue(value: Value, annotatedTree: Tree): Contextual[Value] = + def parseAnnotation: Option[Int] = + annotatedTree.tpe.getAnnotation(defn.InitWidenAnnot).flatMap: annot => + annot.argument(0).get match + case arg @ Literal(c: Constants.Constant) => + val height = c.intValue + if height < 0 then + report.warning("The argument should be positive", arg) + None + else + Some(height) + case arg => + report.warning("The argument should be a constant integer value", arg) + None + end parseAnnotation + + parseAnnotation match + case Some(i) => + value.widen(i) + + case None => + if value.isInstanceOf[Fun] + then value.widen(2) + else value.widen(1) + /** Evaluate arguments of methods and constructors */ def evalArgs(args: List[Arg], thisV: ThisValue, klass: ClassSymbol): Contextual[List[ArgInfo]] = val argInfos = new mutable.ArrayBuffer[ArgInfo] @@ -1577,23 +1615,7 @@ class Objects(using Context @constructorOnly): else eval(arg.tree, thisV, klass) - val widened = - arg.tree.tpe.getAnnotation(defn.InitWidenAnnot) match - case Some(annot) => - annot.argument(0).get match - case arg @ Literal(c: Constants.Constant) => - val height = c.intValue - if height < 0 then - report.warning("The argument should be positive", arg) - res.widen(1) - else - res.widen(c.intValue) - case arg => - report.warning("The argument should be a constant integer value", arg) - res.widen(1) - case _ => - if res.isInstanceOf[Fun] then res.widen(2) else res.widen(1) - + val widened = widenEscapedValue(res, arg.tree) argInfos += ArgInfo(widened, trace.add(arg.tree), arg.tree) } argInfos.toList diff --git a/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala b/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala index caf3435608d2..85b2764ff0f3 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala @@ -548,9 +548,23 @@ object Semantic: value.promote(msg) value + def filterClass(sym: Symbol)(using Context): Value = + if !sym.isClass then value + else + val klass = sym.asClass + value match + case Cold => Cold + case Hot => Hot + case ref: Ref => if ref.klass.isSubClass(klass) then ref else Hot + case RefSet(values) => values.map(v => v.filterClass(klass)).join + case fun: Fun => + if klass.isOneOf(Flags.AbstractOrTrait) && klass.baseClasses.exists(defn.isFunctionClass) + then fun + else Hot + def select(field: Symbol, receiver: Type, needResolve: Boolean = true): Contextual[Value] = log("select " + field.show + ", this = " + value, printer, (_: Value).show) { if promoted.isCurrentObjectPromoted then Hot - else value match + else value.filterClass(field.owner) match case Hot => Hot @@ -588,13 +602,8 @@ object Semantic: reporter.report(error) Hot else - if ref.klass.isSubClass(receiver.widenSingleton.classSymbol) then - report.warning("[Internal error] Unexpected resolution failure: ref.klass = " + ref.klass.show + ", field = " + field.show + Trace.show, Trace.position) - Hot - else - // This is possible due to incorrect type cast. 
- // See tests/init/pos/Type.scala - Hot + report.warning("[Internal error] Unexpected resolution failure: ref.klass = " + ref.klass.show + ", field = " + field.show + Trace.show, Trace.position) + Hot case fun: Fun => report.warning("[Internal error] unexpected tree in selecting a function, fun = " + fun.expr.show + Trace.show, fun.expr) @@ -645,11 +654,16 @@ object Semantic: } (errors, allArgsHot) + def filterValue(value: Value): Value = + // methods of polyfun does not have denotation + if !meth.exists then value + else value.filterClass(meth.owner) + // fast track if the current object is already initialized if promoted.isCurrentObjectPromoted then Hot else if isAlwaysSafe(meth) then Hot else if meth eq defn.Any_asInstanceOf then value - else value match { + else filterValue(value) match { case Hot => if isSyntheticApply(meth) && meth.hasSource then val klass = meth.owner.companionClass.asClass @@ -724,13 +738,8 @@ object Semantic: else value.select(target, receiver, needResolve = false) else - if ref.klass.isSubClass(receiver.widenSingleton.classSymbol) then - report.warning("[Internal error] Unexpected resolution failure: ref.klass = " + ref.klass.show + ", meth = " + meth.show + Trace.show, Trace.position) - Hot - else - // This is possible due to incorrect type cast. - // See tests/init/pos/Type.scala - Hot + report.warning("[Internal error] Unexpected resolution failure: ref.klass = " + ref.klass.show + ", meth = " + meth.show + Trace.show, Trace.position) + Hot case Fun(body, thisV, klass) => // meth == NoSymbol for poly functions @@ -822,7 +831,7 @@ object Semantic: warm if promoted.isCurrentObjectPromoted then Hot - else value match { + else value.filterClass(klass.owner) match { case Hot => var allHot = true val args2 = args.map { arg => diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index e1603761f08b..1ee402deded0 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -3,15 +3,18 @@ package dotc package transform package patmat -import core.*, Constants.*, Contexts.*, Decorators.*, Flags.*, Names.*, NameOps.*, StdNames.*, Symbols.*, Types.* +import core.* +import Constants.*, Contexts.*, Decorators.*, Flags.*, NullOpsDecorator.*, Symbols.*, Types.* +import Names.*, NameOps.*, StdNames.* import ast.*, tpd.* -import config.Printers.* +import config.Printers.exhaustivity import printing.{ Printer, * }, Texts.* import reporting.* import typer.*, Applications.*, Inferencing.*, ProtoTypes.* import util.* import scala.annotation.internal.sharable +import scala.annotation.tailrec import scala.collection.mutable import SpaceEngine.* @@ -113,6 +116,7 @@ object SpaceEngine { def isSubspace(a: Space, b: Space)(using Context): Boolean = a.isSubspace(b) def canDecompose(typ: Typ)(using Context): Boolean = typ.canDecompose def decompose(typ: Typ)(using Context): List[Typ] = typ.decompose + def nullSpace(using Context): Space = Typ(ConstantType(Constant(null)), decomposed = false) /** Simplify space such that a space equal to `Empty` becomes `Empty` */ def computeSimplify(space: Space)(using Context): Space = trace(i"simplify($space)")(space match { @@ -350,7 +354,7 @@ object SpaceEngine { val funRef = fun1.tpe.asInstanceOf[TermRef] if (fun.symbol.name == nme.unapplySeq) val (arity, elemTp, resultTp) = unapplySeqInfo(fun.tpe.widen.finalResultType, fun.srcPos) - if (fun.symbol.owner == defn.SeqFactoryClass && 
defn.ListType.appliedTo(elemTp) <:< pat.tpe) + if fun.symbol.owner == defn.SeqFactoryClass && pat.tpe.hasClassSymbol(defn.ListClass) then // The exhaustivity and reachability logic already handles decomposing sum types (into its subclasses) // and product types (into its components). To get better counter-examples for patterns that are of type // List (or a super-type of list, like LinearSeq) we project them into spaces that use `::` and Nil. @@ -522,14 +526,37 @@ object SpaceEngine { val mt: MethodType = unapp.widen match { case mt: MethodType => mt case pt: PolyType => + scrutineeTp match + case AppliedType(tycon, targs) + if unappSym.is(Synthetic) + && (pt.resultType.asInstanceOf[MethodType].paramInfos.head.typeConstructor eq tycon) => + // Special case synthetic unapply/unapplySeq's + // Provided the shapes of the types match: + // the scrutinee type being unapplied and + // the unapply parameter type + pt.instantiate(targs).asInstanceOf[MethodType] + case _ => + val locked = ctx.typerState.ownedVars val tvars = constrained(pt) val mt = pt.instantiate(tvars).asInstanceOf[MethodType] - scrutineeTp <:< mt.paramInfos(0) + val unapplyArgType = mt.paramInfos.head + scrutineeTp <:< unapplyArgType // force type inference to infer a narrower type: could be singleton // see tests/patmat/i4227.scala - mt.paramInfos(0) <:< scrutineeTp - instantiateSelected(mt, tvars) - isFullyDefined(mt, ForceDegree.all) + unapplyArgType <:< scrutineeTp + maximizeType(unapplyArgType, Spans.NoSpan) + if !(ctx.typerState.ownedVars -- locked).isEmpty then + // constraining can create type vars out of wildcard types + // (in legalBound, by using a LevelAvoidMap) + // maximise will only do one pass at maximising the type vars in the target type + // which means we can maximise to types that include other type vars + // this fails TreeChecker's "non-empty constraint at end of $fusedPhase" check + // e.g. run-macros/string-context-implicits + // I can't prove that a second call won't also create type vars, + // but I'd rather have an unassigned new-new type var, than an infinite loop. + // After all, there's nothing strictly "wrong" with unassigned type vars, + // it just fails TreeChecker's linting. + maximizeType(unapplyArgType, Spans.NoSpan) mt } @@ -543,7 +570,7 @@ object SpaceEngine { // Case unapplySeq: // 1. return the type `List[T]` where `T` is the element type of the unapplySeq return type `Seq[T]` - val resTp = ctx.typeAssigner.safeSubstMethodParams(mt, scrutineeTp :: Nil).finalResultType + val resTp = wildApprox(ctx.typeAssigner.safeSubstMethodParams(mt, scrutineeTp :: Nil).finalResultType) val sig = if (resTp.isRef(defn.BooleanClass)) @@ -564,20 +591,14 @@ object SpaceEngine { if (arity > 0) productSelectorTypes(resTp, unappSym.srcPos) else { - val getTp = resTp.select(nme.get).finalResultType match - case tp: TermRef if !tp.isOverloaded => - // Like widenTermRefExpr, except not recursively. - // For example, in i17184 widen Option[foo.type]#get - // to Option[foo.type] instead of Option[Int]. 
- tp.underlying.widenExpr - case tp => tp + val getTp = extractorMemberType(resTp, nme.get, unappSym.srcPos) if (argLen == 1) getTp :: Nil else productSelectorTypes(getTp, unappSym.srcPos) } } } - sig.map(_.annotatedToRepeated) + sig.map { case tp: WildcardType => tp.bounds.hi case tp => tp } } /** Whether the extractor covers the given type */ @@ -616,23 +637,62 @@ object SpaceEngine { case tp if tp.classSymbol.isAllOf(JavaEnum) => tp.classSymbol.children.map(_.termRef) // the class of a java enum value is the enum class, so this must follow SingletonType to not loop infinitely - case tp @ AppliedType(Parts(parts), targs) if tp.classSymbol.children.isEmpty => + case Childless(tp @ AppliedType(Parts(parts), targs)) => // It might not obvious that it's OK to apply the type arguments of a parent type to child types. // But this is guarded by `tp.classSymbol.children.isEmpty`, // meaning we'll decompose to the same class, just not the same type. // For instance, from i15029, `decompose((X | Y).Field[T]) = [X.Field[T], Y.Field[T]]`. parts.map(tp.derivedAppliedType(_, targs)) - case tp if tp.isDecomposableToChildren => - def getChildren(sym: Symbol): List[Symbol] = + case tpOriginal if tpOriginal.isDecomposableToChildren => + // isDecomposableToChildren uses .classSymbol.is(Sealed) + // But that classSymbol could be from an AppliedType + // where the type constructor is a non-class type + // E.g. t11620 where `?1.AA[X]` returns as "sealed" + // but using that we're not going to infer A1[X] and A2[X] + // but end up with A1[] and A2[]. + // So we widen (like AppliedType superType does) away + // non-class type constructors. + // + // Can't use `tpOriginal.baseType(cls)` because it causes + // i15893 to return exhaustivity warnings, because instead of: + // <== refineUsingParent(N, class Succ, []) = Succ[] + // <== isSub(Succ[] <:< Succ[Succ[]]) = true + // we get + // <== refineUsingParent(NatT, class Succ, []) = Succ[NatT] + // <== isSub(Succ[NatT] <:< Succ[Succ[]]) = false + def getAppliedClass(tp: Type): (Type, List[Type]) = tp match + case tp @ AppliedType(_: HKTypeLambda, _) => (tp, Nil) + case tp @ AppliedType(tycon: TypeRef, _) if tycon.symbol.isClass => (tp, tp.args) + case tp @ AppliedType(tycon: TypeProxy, _) => getAppliedClass(tycon.superType.applyIfParameterized(tp.args)) + case tp => (tp, Nil) + val (tp, typeArgs) = getAppliedClass(tpOriginal) + // This function is needed to get the arguments of the types that will be applied to the class. + // This is necessary because if the arguments of the types contain Nothing, + // then this can affect whether the class will be taken into account during the exhaustiveness check + def getTypeArgs(parent: Symbol, child: Symbol, typeArgs: List[Type]): List[Type] = + val superType = child.typeRef.superType + if typeArgs.exists(_.isBottomType) && superType.isInstanceOf[ClassInfo] then + val parentClass = superType.asInstanceOf[ClassInfo].declaredParents.find(_.classSymbol == parent).get + val paramTypeMap = Map.from(parentClass.argTypes.map(_.typeSymbol).zip(typeArgs)) + val substArgs = child.typeRef.typeParamSymbols.map(param => paramTypeMap.getOrElse(param, WildcardType)) + substArgs + else Nil + def getChildren(sym: Symbol, typeArgs: List[Type]): List[Symbol] = sym.children.flatMap { child => if child eq sym then List(sym) // i3145: sealed trait Baz, val x = new Baz {}, Baz.children returns Baz... 
else if tp.classSymbol == defn.TupleClass || tp.classSymbol == defn.NonEmptyTupleClass then List(child) // TupleN and TupleXXL classes are used for Tuple, but they aren't Tuple's children - else if (child.is(Private) || child.is(Sealed)) && child.isOneOf(AbstractOrTrait) then getChildren(child) - else List(child) + else if (child.is(Private) || child.is(Sealed)) && child.isOneOf(AbstractOrTrait) then + getChildren(child, getTypeArgs(sym, child, typeArgs)) + else + val childSubstTypes = child.typeRef.applyIfParameterized(getTypeArgs(sym, child, typeArgs)) + // if a class contains a field of type Nothing, + // then it can be ignored in pattern matching, because it is impossible to obtain an instance of it + val existFieldWithBottomType = childSubstTypes.fields.exists(_.info.isBottomType) + if existFieldWithBottomType then Nil else List(child) } - val children = trace(i"getChildren($tp)")(getChildren(tp.classSymbol)) + val children = trace(i"getChildren($tp)")(getChildren(tp.classSymbol, typeArgs)) val parts = children.map { sym => val sym1 = if (sym.is(ModuleClass)) sym.sourceModule else sym @@ -649,7 +709,6 @@ object SpaceEngine { else NoType }.filter(_.exists) parts - case _ => ListOfNoType end rec @@ -676,6 +735,12 @@ object SpaceEngine { final class PartsExtractor(val get: List[Type]) extends AnyVal: def isEmpty: Boolean = get == ListOfNoType + object Childless: + def unapply(tp: Type)(using Context): Result = + Result(if tp.classSymbol.children.isEmpty then tp else NoType) + class Result(val get: Type) extends AnyVal: + def isEmpty: Boolean = !get.exists + /** Show friendly type name with current scope in mind * * E.g. C.this.B --> B if current owner is C @@ -772,12 +837,15 @@ object SpaceEngine { doShow(s) } - private def exhaustivityCheckable(sel: Tree)(using Context): Boolean = { + extension (self: Type) private def stripUnsafeNulls()(using Context): Type = + if Nullables.unsafeNullsEnabled then self.stripNull() else self + + private def exhaustivityCheckable(sel: Tree)(using Context): Boolean = trace(i"exhaustivityCheckable($sel ${sel.className})") { val seen = collection.mutable.Set.empty[Symbol] // Possible to check everything, but be compatible with scalac by default - def isCheckable(tp: Type): Boolean = - val tpw = tp.widen.dealias + def isCheckable(tp: Type): Boolean = trace(i"isCheckable($tp ${tp.className})"): + val tpw = tp.widen.dealias.stripUnsafeNulls() val classSym = tpw.classSymbol classSym.is(Sealed) && !tpw.isLargeGenericTuple || // exclude large generic tuples from exhaustivity // requires an unknown number of changes to make work @@ -794,6 +862,7 @@ object SpaceEngine { } !sel.tpe.hasAnnotation(defn.UncheckedAnnot) + && !sel.tpe.hasAnnotation(defn.RuntimeCheckedAnnot) && { ctx.settings.YcheckAllPatmat.value || isCheckable(sel.tpe) @@ -812,18 +881,19 @@ object SpaceEngine { /** Return the underlying type of non-module, non-constant, non-enum case singleton types. * Also widen ExprType to its result type, and rewrap any annotation wrappers. * For example, with `val opt = None`, widen `opt.type` to `None.type`. 
*/ - def toUnderlying(tp: Type)(using Context): Type = trace(i"toUnderlying($tp)")(tp match { + def toUnderlying(tp: Type)(using Context): Type = trace(i"toUnderlying($tp ${tp.className})")(tp match { case _: ConstantType => tp case tp: TermRef if tp.symbol.is(Module) => tp case tp: TermRef if tp.symbol.isAllOf(EnumCase) => tp case tp: SingletonType => toUnderlying(tp.underlying) case tp: ExprType => toUnderlying(tp.resultType) case AnnotatedType(tp, annot) => AnnotatedType(toUnderlying(tp), annot) + case tp: FlexibleType => tp.derivedFlexibleType(toUnderlying(tp.underlying)) case _ => tp }) def checkExhaustivity(m: Match)(using Context): Unit = trace(i"checkExhaustivity($m)") { - val selTyp = toUnderlying(m.selector.tpe).dealias + val selTyp = toUnderlying(m.selector.tpe.stripUnsafeNulls()).dealias val targetSpace = trace(i"targetSpace($selTyp)")(project(selTyp)) val patternSpace = Or(m.cases.foldLeft(List.empty[Space]) { (acc, x) => @@ -840,7 +910,7 @@ object SpaceEngine { if uncovered.nonEmpty then val deduped = dedup(uncovered) - report.warning(PatternMatchExhaustivity(deduped.map(display), m), m.selector) + report.warning(PatternMatchExhaustivity(deduped, m), m.selector) } private def reachabilityCheckable(sel: Tree)(using Context): Boolean = @@ -853,52 +923,53 @@ object SpaceEngine { && !sel.tpe.widen.isRef(defn.QuotedExprClass) && !sel.tpe.widen.isRef(defn.QuotedTypeClass) - def checkReachability(m: Match)(using Context): Unit = trace(i"checkReachability($m)") { - val cases = m.cases.toIndexedSeq - + def checkReachability(m: Match)(using Context): Unit = trace(i"checkReachability($m)"): val selTyp = toUnderlying(m.selector.tpe).dealias - - val isNullable = selTyp.classSymbol.isNullableClass - val targetSpace = trace(i"targetSpace($selTyp)")(if isNullable + val isNullable = selTyp.isInstanceOf[FlexibleType] || selTyp.classSymbol.isNullableClass + val targetSpace = trace(i"targetSpace($selTyp)"): + if isNullable && !ctx.mode.is(Mode.SafeNulls) then project(OrType(selTyp, ConstantType(Constant(null)), soft = false)) else project(selTyp) - ) - - var i = 0 - val len = cases.length - var prevs = List.empty[Space] - var deferred = List.empty[Tree] - - while (i < len) { - val CaseDef(pat, guard, _) = cases(i) - - val curr = trace(i"project($pat)")(project(pat)) - - val covered = trace("covered")(simplify(intersect(curr, targetSpace))) - - val prev = trace("prev")(simplify(Or(prevs))) - - if prev == Empty && covered == Empty then // defer until a case is reachable - deferred ::= pat - else { - for (pat <- deferred.reverseIterator) - report.warning(MatchCaseUnreachable(), pat.srcPos) - if pat != EmptyTree // rethrow case of catch uses EmptyTree - && !pat.symbol.isAllOf(SyntheticCase, butNot=Method) // ExpandSAMs default cases use SyntheticCase - && isSubspace(covered, prev) - then { - val nullOnly = isNullable && i == len - 1 && isWildcardArg(pat) - val msg = if nullOnly then MatchCaseOnlyNullWarning() else MatchCaseUnreachable() - report.warning(msg, pat.srcPos) - } - deferred = Nil - } - - // in redundancy check, take guard as false in order to soundly approximate - prevs ::= (if guard.isEmpty then covered else Empty) - i += 1 - } - } + var hadNullOnly = false + def projectPat(pat: Tree): Space = + // Project toplevel wildcard pattern to nullable + if isNullable && isWildcardArg(pat) then Or(project(pat) :: nullSpace :: Nil) + else project(pat) + @tailrec def recur(cases: List[CaseDef], prevs: List[Space], deferred: List[Tree]): Unit = + cases match + case Nil => + case CaseDef(pat, guard, 
_) :: rest => + val curr = trace(i"project($pat)")(projectPat(pat)) + val covered = trace("covered")(simplify(intersect(curr, targetSpace))) + val prev = trace("prev")(simplify(Or(prevs))) + if prev == Empty && covered == Empty then // defer until a case is reachable + recur(rest, prevs, pat :: deferred) + else + for pat <- deferred.reverseIterator + do report.warning(MatchCaseUnreachable(), pat.srcPos) + + if pat != EmptyTree // rethrow case of catch uses EmptyTree + && !pat.symbol.isAllOf(SyntheticCase, butNot=Method) // ExpandSAMs default cases use SyntheticCase + then + if isSubspace(covered, prev) then + report.warning(MatchCaseUnreachable(), pat.srcPos) + else if isNullable && !hadNullOnly && isWildcardArg(pat) + && isSubspace(covered, Or(prev :: nullSpace :: Nil)) then + // Issue OnlyNull warning only if: + // 1. The target space is nullable; + // 2. OnlyNull warning has not been issued before; + // 3. The pattern is a wildcard pattern; + // 4. The pattern is not covered by the previous cases, + // but covered by the previous cases with null. + hadNullOnly = true + report.warning(MatchCaseOnlyNullWarning(), pat.srcPos) + + // in redundancy check, take guard as false in order to soundly approximate + val newPrev = if guard.isEmpty then covered :: prevs else prevs + recur(rest, newPrev, Nil) + + recur(m.cases, Nil, Nil) + end checkReachability def checkMatch(m: Match)(using Context): Unit = if exhaustivityCheckable(m.selector) then checkExhaustivity(m) diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala b/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala index 936b6958fb33..87ee2be91465 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala @@ -17,7 +17,7 @@ import Types.* import dotty.tools.backend.sjs.JSDefinitions.jsdefn -import org.scalajs.ir.{Trees => js} +import dotty.tools.sjs.ir.{Trees => js} /** Additional extensions for `Symbol`s that are only relevant for Scala.js. 
*/ object JSSymUtils { diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala index f66141bff8ad..5aa35a277cb5 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala @@ -21,8 +21,8 @@ import dotty.tools.backend.sjs.JSDefinitions.jsdefn import JSExportUtils.* import JSSymUtils.* -import org.scalajs.ir.Names.DefaultModuleID -import org.scalajs.ir.Trees.TopLevelExportDef.isValidTopLevelExportName +import dotty.tools.sjs.ir.Names.DefaultModuleID +import dotty.tools.sjs.ir.Trees.TopLevelExportDef.isValidTopLevelExportName object PrepJSExports { import tpd.* diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala index 1b8fdd268ece..c7316482c193 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala @@ -23,7 +23,7 @@ import Types.* import JSSymUtils.* -import org.scalajs.ir.Trees.JSGlobalRef +import dotty.tools.sjs.ir.Trees.JSGlobalRef import dotty.tools.backend.sjs.JSDefinitions.jsdefn diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 9a5db44b15ca..5fb91694b8a6 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -571,7 +571,7 @@ trait Applications extends Compatibility { fail(TypeMismatch(methType.resultType, resultType, None)) // match all arguments with corresponding formal parameters - matchArgs(orderedArgs, methType.paramInfos, 0) + if success then matchArgs(orderedArgs, methType.paramInfos, 0) case _ => if (methType.isError) ok = false else fail(em"$methString does not take parameters") @@ -666,7 +666,7 @@ trait Applications extends Compatibility { * @param n The position of the first parameter in formals in `methType`. 
*/ def matchArgs(args: List[Arg], formals: List[Type], n: Int): Unit = - if (success) formals match { + formals match { case formal :: formals1 => def checkNoVarArg(arg: Arg) = @@ -696,6 +696,10 @@ trait Applications extends Compatibility { fail(MissingArgument(methodType.paramNames(n), methString)) def tryDefault(n: Int, args1: List[Arg]): Unit = { + if !success then + missingArg(n) // fail fast before forcing the default arg tpe, to avoid cyclic errors + return + val sym = methRef.symbol val testOnly = this.isInstanceOf[TestApplication[?]] @@ -878,12 +882,14 @@ trait Applications extends Compatibility { init() def addArg(arg: Tree, formal: Type): Unit = - typedArgBuf += adapt(arg, formal.widenExpr) + val typedArg = adapt(arg, formal.widenExpr) + typedArgBuf += typedArg + ok = ok & !typedArg.tpe.isError def makeVarArg(n: Int, elemFormal: Type): Unit = { val args = typedArgBuf.takeRight(n).toList typedArgBuf.dropRightInPlace(n) - val elemtpt = TypeTree(elemFormal) + val elemtpt = TypeTree(elemFormal, inferred = true) typedArgBuf += seqToRepeated(SeqLiteral(args, elemtpt)) } @@ -943,14 +949,19 @@ trait Applications extends Compatibility { var typedArgs = typedArgBuf.toList def app0 = cpy.Apply(app)(normalizedFun, typedArgs) // needs to be a `def` because typedArgs can change later val app1 = - if (!success || typedArgs.exists(_.tpe.isError)) app0.withType(UnspecifiedErrorType) + if !success then app0.withType(UnspecifiedErrorType) else { - if !sameSeq(args, orderedArgs) - && !isJavaAnnotConstr(methRef.symbol) - && !typedArgs.forall(isSafeArg) - then + if isJavaAnnotConstr(methRef.symbol) then + // #19951 Make sure all arguments are NamedArgs for Java annotations + if typedArgs.exists(!_.isInstanceOf[NamedArg]) then + typedArgs = typedArgs.lazyZip(methType.asInstanceOf[MethodType].paramNames).map { + case (arg: NamedArg, _) => arg + case (arg, name) => NamedArg(name, arg) + } + else if !sameSeq(args, orderedArgs) && !typedArgs.forall(isSafeArg) then // need to lift arguments to maintain evaluation order in the // presence of argument reorderings. + // (never do this for Java annotation constructors, hence the 'else if') liftFun() @@ -1227,6 +1238,8 @@ trait Applications extends Compatibility { } else { val app = tree.fun match + case untpd.TypeApply(_: untpd.SplicePattern, _) if Feature.quotedPatternsWithPolymorphicFunctionsEnabled => + typedAppliedSpliceWithTypes(tree, pt) case _: untpd.SplicePattern => typedAppliedSplice(tree, pt) case _ => realApply app match { @@ -1278,9 +1291,16 @@ trait Applications extends Compatibility { if (ctx.mode.is(Mode.Pattern)) return errorTree(tree, em"invalid pattern") + tree.fun match { + case _: untpd.SplicePattern if Feature.quotedPatternsWithPolymorphicFunctionsEnabled => + return errorTree(tree, em"Implementation restriction: A higher-order pattern must carry value arguments") + case _ => + } + val isNamed = hasNamedArg(tree.args) val typedArgs = if (isNamed) typedNamedArgs(tree.args) else tree.args.mapconserve(typedType(_)) record("typedTypeApply") + typedExpr(tree.fun, PolyProto(typedArgs, pt)) match { case fun: TypeApply if !ctx.isAfterTyper => val function = fun.fun @@ -1328,7 +1348,7 @@ trait Applications extends Compatibility { tree } - /** Is `tp` a unary function type or an overloaded type with with only unary function + /** Is `tp` a unary function type or an overloaded type with only unary function * types as alternatives? 
*/ def isUnary(tp: Type)(using Context): Boolean = tp match { @@ -1942,7 +1962,12 @@ trait Applications extends Compatibility { def widenPrefix(alt: TermRef): Type = alt.prefix.widen match case pre: (TypeRef | ThisType) if pre.typeSymbol.is(Module) => - pre.parents.reduceLeft(TypeComparer.andType(_, _)) + val ps = pre.parents + if ps.isEmpty then + // The parents of a module class are non-empty, unless the module is a package. + assert(pre.typeSymbol.is(Package), pre) + pre + else ps.reduceLeft(TypeComparer.andType(_, _)) case wpre => wpre /** If two alternatives have the same symbol, we pick the one with the most @@ -2198,19 +2223,38 @@ trait Applications extends Compatibility { case untpd.Function(args: List[untpd.ValDef] @unchecked, body) => // If ref refers to a method whose parameter at index `idx` is a function type, - // the arity of that function, otherise -1. - def paramCount(ref: TermRef) = + // the parameters of that function, otherwise Nil. + // We return Nil for both nilary functions and non-functions, + // because we won't be making tupled functions for nilary functions anyways, + // seeing as there is no Tuple0. + def params(ref: TermRef) = val formals = ref.widen.firstParamTypes if formals.length > idx then formals(idx).dealias match - case defn.FunctionNOf(args, _, _) => args.length - case _ => -1 - else -1 + case defn.FunctionNOf(args, _, _) => args + case _ => Nil + else Nil + + def isCorrectUnaryFunction(alt: TermRef): Boolean = + val formals = params(alt) + formals.length == 1 && { + formals.head match + case formal: TypeParamRef => + // While `formal` isn't a tuple type of the correct arity, + // it's a type parameter (a method type parameter presumably) + // so check its bounds allow for a tuple type of the correct arity. + // See i21682 for an example. + val tup = defn.tupleType(args.map(v => if v.tpt.isEmpty then WildcardType else typedAheadType(v.tpt).tpe)) + val TypeBounds(lo, hi) = formal.paramInfo + lo <:< tup && tup <:< hi + case formal => + ptIsCorrectProduct(formal, args) + } val numArgs = args.length - if numArgs != 1 - && !alts.exists(paramCount(_) == numArgs) - && alts.exists(paramCount(_) == 1) + if numArgs > 1 + && !alts.exists(params(_).lengthIs == numArgs) + && alts.exists(isCorrectUnaryFunction) then desugar.makeTupledFunction(args, body, isGenericTuple = true) // `isGenericTuple = true` is the safe choice here. It means the i'th tuple @@ -2379,6 +2423,13 @@ trait Applications extends Compatibility { } end resolveOverloaded1 + /** Is `formal` a product type which is elementwise compatible with `params`? */ + def ptIsCorrectProduct(formal: Type, params: List[untpd.ValDef])(using Context): Boolean = + isFullyDefined(formal, ForceDegree.flipBottom) + && defn.isProductSubType(formal) + && tupleComponentTypes(formal).corresponds(params): (argType, param) => + param.tpt.isEmpty || argType.widenExpr <:< typedAheadType(param.tpt).tpe + /** The largest suffix of `paramss` that has the same first parameter name as `t`, * plus the number of term parameters in `paramss` that come before that suffix. 
*/ diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 1f82b9ddc084..1cd531046753 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -611,6 +611,8 @@ object Checking { val mods = mdef.mods def flagSourcePos(flag: FlagSet) = mods.mods.find(_.flags == flag).getOrElse(mdef).srcPos + if mods.is(Open) then + report.error(ModifierNotAllowedForDefinition(Open), flagSourcePos(Open)) if mods.is(Abstract) then report.error(ModifierNotAllowedForDefinition(Abstract), flagSourcePos(Abstract)) if mods.is(Sealed) then @@ -727,7 +729,7 @@ object Checking { report.error(ValueClassesMayNotDefineNonParameterField(clazz, stat.symbol), stat.srcPos) case _: DefDef if stat.symbol.isConstructor => report.error(ValueClassesMayNotDefineASecondaryConstructor(clazz, stat.symbol), stat.srcPos) - case _: MemberDef | _: Import | EmptyTree => + case _: MemberDef | _: Import | _: Export | EmptyTree => // ok case _ => report.error(ValueClassesMayNotContainInitalization(clazz), stat.srcPos) @@ -804,20 +806,20 @@ object Checking { * */ def checkAndAdaptExperimentalImports(trees: List[Tree])(using Context): Unit = - def nonExperimentalTopLevelDefs(pack: Symbol): Iterator[Symbol] = - def isNonExperimentalTopLevelDefinition(sym: Symbol) = - sym.isDefinedInCurrentRun - && sym.source == ctx.compilationUnit.source - && !sym.isConstructor // not constructor of package object - && !sym.is(Package) && !sym.name.isPackageObjectName - && !sym.isExperimental - - pack.info.decls.toList.iterator.flatMap: sym => - if sym.isClass && (sym.is(Package) || sym.isPackageObject) then - nonExperimentalTopLevelDefs(sym) - else if isNonExperimentalTopLevelDefinition(sym) then - sym :: Nil - else Nil + def nonExperimentalTopLevelDefs(): List[Symbol] = + new TreeAccumulator[List[Symbol]] { + override def apply(x: List[Symbol], tree: tpd.Tree)(using Context): List[Symbol] = + def addIfNotExperimental(sym: Symbol) = + if !sym.isExperimental then sym :: x + else x + tree match { + case tpd.PackageDef(_, contents) => apply(x, contents) + case typeDef @ tpd.TypeDef(_, temp: Template) if typeDef.symbol.isPackageObject => + apply(x, temp.body) + case mdef: tpd.MemberDef => addIfNotExperimental(mdef.symbol) + case _ => x + } + }.apply(Nil, ctx.compilationUnit.tpdTree) def unitExperimentalLanguageImports = def isAllowedImport(sel: untpd.ImportSelector) = @@ -835,7 +837,7 @@ object Checking { if ctx.owner.is(Package) || ctx.owner.name.startsWith(str.REPL_SESSION_LINE) then def markTopLevelDefsAsExperimental(why: String): Unit = - for sym <- nonExperimentalTopLevelDefs(ctx.owner) do + for sym <- nonExperimentalTopLevelDefs() do sym.addAnnotation(ExperimentalAnnotation(s"Added by $why", sym.span)) unitExperimentalLanguageImports match @@ -883,6 +885,38 @@ object Checking { templ.parents.find(_.tpe.derivesFrom(defn.PolyFunctionClass)) match case Some(parent) => report.error(s"`PolyFunction` marker trait is reserved for compiler generated refinements", parent.srcPos) case None => + + /** check that parameters of a Java-defined annotation are all named arguments if we have more than one parameter */ + def checkNamedArgumentForJavaAnnotation(annot: untpd.Tree, sym: ClassSymbol)(using Context): untpd.Tree = + assert(sym.is(JavaDefined)) + + def annotationHasValueField: Boolean = + sym.info.decls.exists(_.name == nme.value) + + lazy val annotationFieldNamesByIdx: Map[Int, TermName] = + sym.info.decls.filter: decl => 
decl.is(Method) && decl.name != nme.CONSTRUCTOR + .map(_.name.toTermName) + .zipWithIndex + .map(_.swap) + .toMap + + annot match + case untpd.Apply(fun, List(param)) if !param.isInstanceOf[untpd.NamedArg] && annotationHasValueField => + untpd.cpy.Apply(annot)(fun, List(untpd.cpy.NamedArg(param)(nme.value, param))) + case untpd.Apply(_, params) => + for + (param, paramIdx) <- params.zipWithIndex + if !param.isInstanceOf[untpd.NamedArg] + do + report.errorOrMigrationWarning(NonNamedArgumentInJavaAnnotation(), param, MigrationVersion.NonNamedArgumentInJavaAnnotation) + if MigrationVersion.NonNamedArgumentInJavaAnnotation.needsPatch then + annotationFieldNamesByIdx.get(paramIdx).foreach: paramName => + patch(param.span.startPos, s"$paramName = ") + annot + case _ => annot + end checkNamedArgumentForJavaAnnotation + } trait Checking { @@ -981,6 +1015,7 @@ trait Checking { def recur(pat: Tree, pt: Type): Boolean = !sourceVersion.isAtLeast(`3.2`) || pt.hasAnnotation(defn.UncheckedAnnot) + || pt.hasAnnotation(defn.RuntimeCheckedAnnot) || { patmatch.println(i"check irrefutable $pat: ${pat.tpe} against $pt") pat match diff --git a/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala index 5ce1b02733d0..6020431672b9 100644 --- a/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala @@ -78,7 +78,7 @@ class CrossVersionChecks extends MiniPhase: do val msg = annot.argumentConstantString(0).map(msg => s": $msg").getOrElse("") val since = annot.argumentConstantString(1).map(version => s" (since: $version)").getOrElse("") - report.deprecationWarning(em"inheritance from $psym is deprecated$since$msg", parent.srcPos) + report.deprecationWarning(em"inheritance from $psym is deprecated$since$msg", parent.srcPos, origin=psym.showFullName) } override def transformValDef(tree: ValDef)(using Context): ValDef = @@ -171,7 +171,7 @@ object CrossVersionChecks: def maybeWarn(annotee: Symbol, annot: Annotation) = if !skipWarning(sym) then val message = annot.argumentConstantString(0).filter(!_.isEmpty).map(": " + _).getOrElse("") val since = annot.argumentConstantString(1).filter(!_.isEmpty).map(" since " + _).getOrElse("") - report.deprecationWarning(em"${annotee.showLocated} is deprecated${since}${message}", pos) + report.deprecationWarning(em"${annotee.showLocated} is deprecated${since}${message}", pos, origin=annotee.showFullName) sym.getAnnotation(defn.DeprecatedAnnot) match case Some(annot) => maybeWarn(sym, annot) case _ => diff --git a/compiler/src/dotty/tools/dotc/typer/Deriving.scala b/compiler/src/dotty/tools/dotc/typer/Deriving.scala index 619dfcf4d7cb..60148319a61c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Deriving.scala +++ b/compiler/src/dotty/tools/dotc/typer/Deriving.scala @@ -292,10 +292,21 @@ trait Deriving { val companion = companionRef(resultType) val module = untpd.ref(companion).withSpan(sym.span) val rhs = untpd.Select(module, nme.derived) - if companion.termSymbol.exists then typed(rhs, resultType) - else errorTree(rhs, em"$resultType cannot be derived since ${resultType.typeSymbol} has no companion object") + val derivedMember = companion.member(nme.derived) + + if !companion.termSymbol.exists then + errorTree(rhs, em"$resultType cannot be derived since ${resultType.typeSymbol} has no companion object") + else if hasExplicitParams(derivedMember.symbol) then + errorTree(rhs, em"""derived instance $resultType failed to generate: + |method 
`derived` from object ${module} takes explicit term parameters""") + else + typed(rhs, resultType) end typeclassInstance + // checks whether any of the params of 'sym' is explicit + def hasExplicitParams(sym: Symbol) = + !sym.paramSymss.flatten.forall(sym => sym.isType || sym.is(Flags.Given) || sym.is(Flags.Implicit)) + def syntheticDef(sym: Symbol): Tree = inContext(ctx.fresh.setOwner(sym).setNewScope) { if sym.is(Method) then tpd.DefDef(sym.asTerm, typeclassInstance(sym)) else tpd.ValDef(sym.asTerm, typeclassInstance(sym)(Nil)) diff --git a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala index 68143dfd2ba0..13e75be75838 100644 --- a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala +++ b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala @@ -110,7 +110,11 @@ object ErrorReporting { case tp => i" and expected result type $tp" } i"(${tp.typedArgs().tpes}%, %)$result" - s"arguments ${argStr(tp)}" + def hasNames = tp.args.exists: + case tree: untpd.Tuple => tree.trees.exists(_.isInstanceOf[NamedArg]) + case _ => false + val addendum = if hasNames then " (a named tuple)" else "" + s"arguments ${argStr(tp)}$addendum" case _ => i"expected type $tp" } diff --git a/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala b/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala index b09580d51943..26d03db4b7dc 100644 --- a/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala +++ b/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala @@ -39,9 +39,6 @@ abstract class Lifter { /** The tree of a lifted definition */ protected def liftedDef(sym: TermSymbol, rhs: Tree)(using Context): MemberDef = ValDef(sym, rhs) - /** Is lifting performed on erased terms? */ - protected def isErased = false - private def lift(defs: mutable.ListBuffer[Tree], expr: Tree, prefix: TermName = EmptyTermName)(using Context): Tree = if (noLift(expr)) expr else { @@ -117,8 +114,7 @@ abstract class Lifter { case Apply(fn, args) => val fn1 = liftApp(defs, fn) val args1 = liftArgs(defs, fn.tpe, args) - if isErased then untpd.cpy.Apply(tree)(fn1, args1).withType(tree.tpe) // application may be partial - else cpy.Apply(tree)(fn1, args1) + cpy.Apply(tree)(fn1, args1) case TypeApply(fn, targs) => cpy.TypeApply(tree)(liftApp(defs, fn), targs) case Select(pre, name) if isPureRef(tree) => @@ -141,7 +137,7 @@ abstract class Lifter { * * unless `pre` is idempotent. */ - def liftNonIdempotentPrefix(defs: mutable.ListBuffer[Tree], tree: Tree)(using Context): Tree = + private def liftNonIdempotentPrefix(defs: mutable.ListBuffer[Tree], tree: Tree)(using Context): Tree = if (isIdempotentExpr(tree)) tree else lift(defs, tree) /** Lift prefix `pre` of an application `pre.f(...)` to @@ -154,7 +150,7 @@ abstract class Lifter { * Note that default arguments will refer to the prefix, we do not want * to re-evaluate a complex expression each time we access a getter. 
*/ - def liftPrefix(defs: mutable.ListBuffer[Tree], tree: Tree)(using Context): Tree = + private def liftPrefix(defs: mutable.ListBuffer[Tree], tree: Tree)(using Context): Tree = tree match case tree: Literal => tree case tree: This => tree @@ -218,9 +214,6 @@ object LiftCoverage extends LiftImpure { } } -object LiftErased extends LiftComplex: - override def isErased = true - /** Lift all impure or complex arguments to `def`s */ object LiftToDefs extends LiftComplex { override def liftedFlags: FlagSet = Method diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 5ca5ac5bb59d..9d273ebca866 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -549,10 +549,10 @@ object Implicits: /** An ambiguous implicits failure */ class AmbiguousImplicits(val alt1: SearchSuccess, val alt2: SearchSuccess, val expectedType: Type, val argument: Tree, val nested: Boolean = false) extends SearchFailureType: - private[Implicits] var priorityChangeWarnings: List[Message] = Nil + private[Implicits] var priorityChangeWarnings: List[GivenSearchPriorityWarning] = Nil def priorityChangeWarningNote(using Context): String = - priorityChangeWarnings.map(msg => s"\n\nNote: $msg").mkString + priorityChangeWarnings.map(_.ambiguousNote).mkString def msg(using Context): Message = var str1 = err.refStr(alt1.ref) @@ -636,7 +636,7 @@ trait ImplicitRunInfo: private def isAnchor(sym: Symbol) = sym.isClass && !isExcluded(sym) || sym.isOpaqueAlias - || sym.is(Deferred) + || sym.is(Deferred, butNot = Param) || sym.info.isMatchAlias private def computeIScope(rootTp: Type): OfTypeImplicits = @@ -821,6 +821,10 @@ trait ImplicitRunInfo: override def stopAt = StopAt.Static private val seen = util.HashSet[Type]() + override def derivedTypeBounds(tp: TypeBounds, lo: Type, hi: Type): Type = + if lo.exists && hi.exists then super.derivedTypeBounds(tp, lo, hi) + else NoType // Survive inaccessible types, for instance in i21543.scala. + def applyToUnderlying(t: TypeProxy) = if seen.contains(t) then WildcardType @@ -1078,7 +1082,7 @@ trait Implicits: * it should be applied, EmptyTree otherwise. * @param span The position where errors should be reported. */ - def inferImplicit(pt: Type, argument: Tree, span: Span)(using Context): SearchResult = + def inferImplicit(pt: Type, argument: Tree, span: Span)(using Context): SearchResult = ctx.profiler.onImplicitSearch(pt): trace(s"search implicit ${pt.show}, arg = ${argument.show}: ${argument.tpe.show}", implicits, show = true) { record("inferImplicit") assert(ctx.phase.allowsImplicitSearch, @@ -1172,7 +1176,7 @@ trait Implicits: case _ => info.derivesFrom(defn.ConversionClass) def tryConversion(using Context) = { val untpdConv = - if ref.symbol.is(Given) && producesConversion(ref.symbol.info) then + if ref.symbol.isOneOf(GivenOrImplicit) && producesConversion(ref.symbol.info) then untpd.Select( untpd.TypedSplice( adapt(generated, @@ -1308,7 +1312,7 @@ trait Implicits: // A map that associates a priority change warning (between -source 3.6 and 3.7) // with the candidate refs mentioned in the warning. We report the associated // message if one of the critical candidates is part of the result of the implicit search. 
- val priorityChangeWarnings = mutable.ListBuffer[(/*critical:*/ List[TermRef], Message)]() + val priorityChangeWarnings = mutable.ListBuffer[(/*critical:*/ List[TermRef], GivenSearchPriorityWarning)]() val sv = Feature.sourceVersion val isLastOldVersion = sv.stable == SourceVersion.`3.6` @@ -1349,21 +1353,7 @@ trait Implicits: cmp match case 1 => (alt2, alt1) case -1 => (alt1, alt2) - def choice(nth: String, c: Int) = - if c == 0 then "none - it's ambiguous" - else s"the $nth alternative" - val (change, whichChoice) = - if isLastOldVersion - then ("will change", "Current choice ") - else ("has changed", "Previous choice") - val msg = - em"""Given search preference for $pt between alternatives - | ${loser.ref} - |and - | ${winner.ref} - |$change. - |$whichChoice : ${choice("first", prev)} - |New choice from Scala 3.7: ${choice("second", cmp)}""" + val msg = GivenSearchPriorityWarning(pt, cmp, prev, winner.ref, loser.ref, isLastOldVersion) val critical = alt1.ref :: alt2.ref :: Nil priorityChangeWarnings += ((critical, msg)) if isLastOldVersion then prev else cmp @@ -1385,6 +1375,7 @@ trait Implicits: def disambiguate(alt1: SearchResult, alt2: SearchSuccess) = alt1 match case alt1: SearchSuccess => var diff = compareAlternatives(alt1, alt2, disambiguate = true) + assert(diff <= 0 || isWarnPriorityChangeVersion) // diff > 0 candidates should already have been eliminated in `rank` if diff == 0 && alt1.ref =:= alt2.ref then diff = 1 // See i12951 for a test where this happens @@ -1795,7 +1786,7 @@ trait Implicits: SearchSuccess(tpd.ref(ref).withSpan(span.startPos), ref, 0)(ctx.typerState, ctx.gadt) case _ => searchImplicit(ctx.implicits, - if sourceVersion.isAtLeast(SourceVersion.future) then SearchMode.New + if sourceVersion.isAtLeast(SourceVersion.`3.6`) then SearchMode.New else if sourceVersion.isAtLeast(SourceVersion.`3.5`) then SearchMode.CompareErr else if sourceVersion.isAtLeast(SourceVersion.`3.4`) then SearchMode.CompareWarn else SearchMode.Old) diff --git a/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala b/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala index 5ab6a4a5fae6..3ae533d58b2e 100644 --- a/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala +++ b/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala @@ -148,9 +148,9 @@ trait ImportSuggestions: * `name` that are applicable to `T`. */ private def importSuggestions(pt: Type)(using Context): (List[TermRef], List[TermRef]) = - val timer = new Timer() val allotted = ctx.run.nn.importSuggestionBudget if allotted <= 1 then return (Nil, Nil) + val timer = new Timer() implicits.println(i"looking for import suggestions, timeout = ${allotted}ms") val start = System.currentTimeMillis() val deadLine = start + allotted @@ -264,7 +264,7 @@ trait ImportSuggestions: end importSuggestions /** Reduce next timeout for import suggestions by the amount of time it took - * for current search, but but never less than to half of the previous budget. + * for current search, but never less than to half of the previous budget. 
*/ private def reduceTimeBudget(used: Int)(using Context) = val run = ctx.run.nn diff --git a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala index ed37a869d612..2ebcd96d5bde 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala @@ -180,7 +180,15 @@ object Inferencing { t match case t: TypeRef => if t.symbol == defn.NothingClass then - newTypeVar(TypeBounds.empty, nestingLevel = tvar.nestingLevel) + val notExactlyNothing = LazyRef(_ => defn.NothingType) + val bounds = TypeBounds(notExactlyNothing, defn.AnyType) + // The new type variable has a slightly disguised lower bound Nothing. + // This foils the `isExactlyNothing` test in `hasLowerBound` and + // therefore makes the new type variable have a lower bound. That way, + // we favor in `apply` below instantiating from below to `Nothing` instead + // of from above to `Any`. That avoids a spurious flip of the original `Nothing` + // instance to `Any`. See i21275 for a test case. + newTypeVar(bounds, nestingLevel = tvar.nestingLevel) else if t.symbol.is(ModuleClass) then tryWidened(t.parents.filter(!_.isTransparent()) .foldLeft(defn.AnyType: Type)(TypeComparer.andType(_, _))) @@ -232,25 +240,12 @@ object Inferencing { && { var fail = false var skip = false - val direction = instDirection(tvar.origin) - if minimizeSelected then - if direction <= 0 && tvar.hasLowerBound then - skip = instantiate(tvar, fromBelow = true) - else if direction >= 0 && tvar.hasUpperBound then - skip = instantiate(tvar, fromBelow = false) - // else hold off instantiating unbounded unconstrained variable - else if direction != 0 then - skip = instantiate(tvar, fromBelow = direction < 0) - else if variance >= 0 && tvar.hasLowerBound then - skip = instantiate(tvar, fromBelow = true) - else if (variance > 0 || variance == 0 && !tvar.hasUpperBound) - && force.ifBottom == IfBottom.ok - then // if variance == 0, prefer upper bound if one is given - skip = instantiate(tvar, fromBelow = true) - else if variance >= 0 && force.ifBottom == IfBottom.fail then - fail = true - else - toMaximize = tvar :: toMaximize + instDecision(tvar, variance, minimizeSelected, force.ifBottom) match + case Decision.Min => skip = instantiate(tvar, fromBelow = true) + case Decision.Max => skip = instantiate(tvar, fromBelow = false) + case Decision.Skip => // hold off instantiating unbounded unconstrained variable + case Decision.Fail => fail = true + case Decision.ToMax => toMaximize ::= tvar !fail && (skip || foldOver(x, tvar)) } case tp => foldOver(x, tp) @@ -444,9 +439,32 @@ object Inferencing { if (!cmp.isSubTypeWhenFrozen(constrained.lo, original.lo)) 1 else 0 val approxAbove = if (!cmp.isSubTypeWhenFrozen(original.hi, constrained.hi)) 1 else 0 + //println(i"instDirection($param) = $approxAbove - $approxBelow original=[$original] constrained=[$constrained]") approxAbove - approxBelow } + /** The instantiation decision for given poly param computed from the constraint. 
*/ + enum Decision { case Min; case Max; case ToMax; case Skip; case Fail } + private def instDecision(tvar: TypeVar, v: Int, minimizeSelected: Boolean, ifBottom: IfBottom)(using Context): Decision = + import Decision.* + val direction = instDirection(tvar.origin) + val dec = if minimizeSelected then + if direction <= 0 && tvar.hasLowerBound then Min + else if direction >= 0 && tvar.hasUpperBound then Max + else Skip + else if direction != 0 then if direction < 0 then Min else Max + else if tvar.hasLowerBound then if v >= 0 then Min else ToMax + else ifBottom match + // What's left are unconstrained tvars with at most a non-Any param upperbound: + // * IfBottom.flip will always maximise to the param upperbound, for all variances + // * IfBottom.fail will fail the IFD check, for covariant or invariant tvars, maximise contravariant tvars + // * IfBottom.ok will minimise to Nothing covariant and unbounded invariant tvars, and max to Any the others + case IfBottom.ok => if v > 0 || v == 0 && !tvar.hasUpperBound then Min else ToMax // prefer upper bound if one is given + case IfBottom.fail => if v >= 0 then Fail else ToMax + case ifBottom_flip => ToMax + //println(i"instDecision($tvar, v=v, minimizedSelected=$minimizeSelected, $ifBottom) dir=$direction = $dec") + dec + /** Following type aliases and stripping refinements and annotations, if one arrives at a * class type reference where the class has a companion module, a reference to * that companion module. Otherwise NoType @@ -643,7 +661,7 @@ trait Inferencing { this: Typer => val ownedVars = state.ownedVars if (ownedVars ne locked) && !ownedVars.isEmpty then - val qualifying = ownedVars -- locked + val qualifying = (ownedVars -- locked).toList if (!qualifying.isEmpty) { typr.println(i"interpolate $tree: ${tree.tpe.widen} in $state, pt = $pt, owned vars = ${state.ownedVars.toList}%, %, qualifying = ${qualifying.toList}%, %, previous = ${locked.toList}%, % / ${state.constraint}") val resultAlreadyConstrained = @@ -679,6 +697,10 @@ trait Inferencing { this: Typer => def constraint = state.constraint + trace(i"interpolateTypeVars($tree: ${tree.tpe}, $pt, $qualifying)", typr, (_: Any) => i"$qualifying\n$constraint\n${ctx.gadt}") { + //println(i"$constraint") + //println(i"${ctx.gadt}") + /** Values of this type report type variables to instantiate with variance indication: * +1 variable appears covariantly, can be instantiated from lower bound * -1 variable appears contravariantly, can be instantiated from upper bound @@ -706,7 +728,9 @@ trait Inferencing { this: Typer => else typr.println(i"no interpolation for nonvariant $tvar in $state") ) - buf.toList + // constrainIfDependentParamRef could also have instantiated tvars added to buf before the check + buf.filterNot(_._1.isInstantiated).toList + end toInstantiate def typeVarsIn(xs: ToInstantiate): TypeVars = xs.foldLeft(SimpleIdentitySet.empty: TypeVars)((tvs, tvi) => tvs + tvi._1) @@ -794,6 +818,7 @@ trait Inferencing { this: Typer => end doInstantiate doInstantiate(filterByDeps(toInstantiate)) + } } end if tree diff --git a/compiler/src/dotty/tools/dotc/typer/Linter.scala b/compiler/src/dotty/tools/dotc/typer/Linter.scala index c0ba581b3732..4c02bf80df63 100644 --- a/compiler/src/dotty/tools/dotc/typer/Linter.scala +++ b/compiler/src/dotty/tools/dotc/typer/Linter.scala @@ -55,7 +55,7 @@ object Linter: && !isJavaApplication(t) // Java methods are inherently side-effecting // && !treeInfo.hasExplicitUnit(t) // suppressed by explicit expr: Unit // TODO Should explicit `: Unit` be added as 
warning suppression? - if ctx.settings.WNonUnitStatement.value && !ctx.isAfterTyper && checkInterestingShapes(t) then + if ctx.settings.Whas.nonUnitStatement && !ctx.isAfterTyper && checkInterestingShapes(t) then val where = t match case Block(_, res) => res case If(_, thenpart, Literal(Constant(()))) => @@ -119,7 +119,7 @@ object Linter: // still compute `canEqual(A & B, B & A) = true`. canEqual(a, b.tp1) || canEqual(a, b.tp2) - if ctx.settings.WimplausiblePatterns.value && !canEqual(pat.tpe, selType) then + if ctx.settings.Whas.implausiblePatterns && !canEqual(pat.tpe, selType) then report.warning(ImplausiblePatternWarning(pat, selType), pat.srcPos) end warnOnImplausiblePattern diff --git a/compiler/src/dotty/tools/dotc/typer/Migrations.scala b/compiler/src/dotty/tools/dotc/typer/Migrations.scala index 8d468fd68bba..f0d1d235a19c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Migrations.scala +++ b/compiler/src/dotty/tools/dotc/typer/Migrations.scala @@ -57,7 +57,7 @@ trait Migrations: val nestedCtx = ctx.fresh.setNewTyperState() val res = typed(qual, pt1)(using nestedCtx) res match { - case closure(_, _, _) => + case blockEndingInClosure(_, _, _) => case _ => val recovered = typed(qual)(using ctx.fresh.setExploreTyperState()) val msg = OnlyFunctionsCanBeFollowedByUnderscore(recovered.tpe.widen, tree) @@ -71,10 +71,13 @@ trait Migrations: } nestedCtx.typerState.commit() + def functionPrefixSuffix(arity: Int) = if (arity > 0) ("", "") else ("(() => ", "())") + lazy val (prefix, suffix) = res match { - case Block(mdef @ DefDef(_, vparams :: Nil, _, _) :: Nil, _: Closure) => - val arity = vparams.length - if (arity > 0) ("", "") else ("(() => ", "())") + case Block(DefDef(_, vparams :: Nil, _, _) :: Nil, _: Closure) => + functionPrefixSuffix(vparams.length) + case Block(ValDef(_, _, _) :: Nil, Block(DefDef(_, vparams :: Nil, _, _) :: Nil, _: Closure)) => + functionPrefixSuffix(vparams.length) case _ => ("(() => ", ")") } @@ -113,6 +116,12 @@ trait Migrations: em"""Context bounds will map to context parameters. |A `using` clause is needed to pass explicit arguments to them.$rewriteMsg""", tree.srcPos, mversion) + tree match + case Apply(ta @ TypeApply(Select(New(_), _), _), Nil) => + // Remove empty arguments for calls to new that may precede the context bound. + // They are no longer necessary. 
+ patch(Span(ta.span.end, pt.args.head.span.start - 1), "") + case _ => () if mversion.needsPatch && pt.args.nonEmpty then patch(Span(pt.args.head.span.start), "using ") end contextBoundParams diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 83964417a6f1..87dec93f8040 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -395,7 +395,7 @@ class Namer { typer: Typer => def recur(stat: Tree): Context = stat match { case pcl: PackageDef => val pkg = createPackageSymbol(pcl.pid) - index(pcl.stats)(using ctx.fresh.setOwner(pkg.moduleClass)) + index(pcl.stats)(using ctx.packageContext(pcl, pkg)) invalidateCompanions(pkg, Trees.flatten(pcl.stats map expanded)) setDocstring(pkg, stat) ctx @@ -822,8 +822,11 @@ class Namer { typer: Typer => if (sym.is(Module)) moduleValSig(sym) else valOrDefDefSig(original, sym, Nil, identity)(using localContext(sym).setNewScope) case original: DefDef => - val typer1 = ctx.typer.newLikeThis(ctx.nestingLevel + 1) - nestedTyper(sym) = typer1 + // For the primary constructor DefDef, it is: + // * indexed as a part of completing the class, with indexConstructor; and + // * typed ahead when completing the constructor + // So we need to make sure to reuse the same local/nested typer. + val typer1 = nestedTyper.getOrElseUpdate(sym, ctx.typer.newLikeThis(ctx.nestingLevel + 1)) typer1.defDefSig(original, sym, this)(using localContext(sym).setTyper(typer1)) case imp: Import => try @@ -833,6 +836,12 @@ class Namer { typer: Typer => typr.println(s"error while completing ${imp.expr}") throw ex + /** Context setup for indexing the constructor. */ + def indexConstructor(constr: DefDef, sym: Symbol): Unit = + val typer1 = ctx.typer.newLikeThis(ctx.nestingLevel + 1) + nestedTyper(sym) = typer1 + typer1.indexConstructor(constr, sym)(using localContext(sym).setTyper(typer1)) + final override def complete(denot: SymDenotation)(using Context): Unit = { if (Config.showCompletions && ctx.typerState != creationContext.typerState) { def levels(c: Context): Int = @@ -848,7 +857,7 @@ class Namer { typer: Typer => else try completeInCreationContext(denot) - if (denot.isCompleted) registerIfChild(denot) + if (denot.isCompleted) registerIfChildInCreationContext(denot) catch case ex: CompilationUnit.SuspendException => val completer = SuspendCompleter() @@ -868,15 +877,20 @@ class Namer { typer: Typer => protected def addAnnotations(sym: Symbol): Unit = original match { case original: untpd.MemberDef => lazy val annotCtx = annotContext(original, sym) - for (annotTree <- original.mods.annotations) { - val cls = typedAheadAnnotationClass(annotTree)(using annotCtx) - if (cls eq sym) - report.error(em"An annotation class cannot be annotated with iself", annotTree.srcPos) - else { - val ann = Annotation.deferred(cls)(typedAheadExpr(annotTree)(using annotCtx)) - sym.addAnnotation(ann) - } - } + original.setMods: + original.mods.withAnnotations : + original.mods.annotations.mapConserve: annotTree => + val cls = typedAheadAnnotationClass(annotTree)(using annotCtx) + if (cls eq sym) + report.error(em"An annotation class cannot be annotated with iself", annotTree.srcPos) + annotTree + else + val ann = + if cls.is(JavaDefined) then Checking.checkNamedArgumentForJavaAnnotation(annotTree, cls.asClass) + else annotTree + val ann1 = Annotation.deferred(cls)(typedAheadExpr(ann)(using annotCtx)) + sym.addAnnotation(ann1) + ann case _ => } @@ -937,10 +951,12 @@ class Namer { typer: 
Typer => denot.markAbsent() end invalidateIfClashingSynthetic - /** If completed symbol is an enum value or a named class, register it as a child + /** Intentionally left without `using Context` parameter. + * This action should be performed in the context of where the completer was created. + * If completed symbol is an enum value or a named class, register it as a child * in all direct parent classes which are sealed. */ - def registerIfChild(denot: SymDenotation)(using Context): Unit = { + def registerIfChildInCreationContext(denot: SymDenotation): Unit = { val sym = denot.symbol def register(child: Symbol, parentCls: ClassSymbol) = { @@ -964,7 +980,7 @@ class Namer { typer: Typer => end if } - /** Intentionally left without `implicit ctx` parameter. We need + /** Intentionally left without `using Context` parameter. We need * to pick up the context at the point where the completer was created. */ def completeInCreationContext(denot: SymDenotation): Unit = { @@ -986,7 +1002,7 @@ class Namer { typer: Typer => /** If completion of the owner of the to be completed symbol has not yet started, * complete the owner first and check again. This prevents cyclic references - * where we need to copmplete a type parameter that has an owner that is not + * where we need to complete a type parameter that has an owner that is not * yet completed. Test case is pos/i10967.scala. */ override def needsCompletion(symd: SymDenotation)(using Context): Boolean = @@ -994,7 +1010,11 @@ class Namer { typer: Typer => !owner.exists || owner.is(Touched) || { - owner.ensureCompleted() + // Only complete the owner if it's a type (eg. the class that owns a type parameter) + // This avoids completing primary constructor methods while completing the type of one of its type parameters + // See i15177.scala. 
+ if owner.isType then + owner.ensureCompleted() !symd.isCompleted } @@ -1106,7 +1126,7 @@ class Namer { typer: Typer => class ClassCompleter(cls: ClassSymbol, original: TypeDef)(ictx: Context) extends Completer(original)(ictx) { withDecls(newScope(using ictx)) - protected implicit val completerCtx: Context = localContext(cls) + protected given completerCtx: Context = localContext(cls) private var localCtx: Context = uninitialized @@ -1432,10 +1452,11 @@ class Namer { typer: Typer => forwarders.derivedCons(forwarder2, avoidClashes(forwarders2)) case Nil => forwarders - addForwarders(selectors, Nil) - val forwarders = avoidClashes(buf.toList) - exp.pushAttachment(ExportForwarders, forwarders) - forwarders + exp.getAttachment(ExportForwarders).getOrElse: + addForwarders(selectors, Nil) + val forwarders = avoidClashes(buf.toList) + exp.pushAttachment(ExportForwarders, forwarders) + forwarders end exportForwarders /** Add forwarders as required by the export statements in this class */ @@ -1519,12 +1540,9 @@ class Namer { typer: Typer => index(constr) index(rest)(using localCtx) - symbolOfTree(constr).info.stripPoly match // Completes constr symbol as a side effect - case mt: MethodType if cls.is(Case) && mt.isParamDependent => - // See issue #8073 for background - report.error( - em"""Implementation restriction: case classes cannot have dependencies between parameters""", - cls.srcPos) + val constrSym = symbolOfTree(constr) + constrSym.infoOrCompleter match + case completer: Completer => completer.indexConstructor(constr, constrSym) case _ => tempInfo = denot.asClass.classInfo.integrateOpaqueMembers.asInstanceOf[TempClassInfo] @@ -1755,6 +1773,17 @@ class Namer { typer: Typer => val sym = tree.symbol if sym.isConstructor then sym.owner else sym + /** Index the primary constructor of a class, as a part of completing that class. + * This allows the rest of the constructor completion to be deferred, + * which avoids non-cyclic classes failing, e.g. pos/i15177. + */ + def indexConstructor(constr: DefDef, sym: Symbol)(using Context): Unit = + index(constr.leadingTypeParams) + sym.owner.typeParams.foreach(_.ensureCompleted()) + completeTrailingParamss(constr, sym, indexingCtor = true) + if Feature.enabled(modularity) then + constr.termParamss.foreach(_.foreach(setTracked)) + /** The signature of a module valdef. * This will compute the corresponding module class TypeRef immediately * without going through the defined type of the ValDef. This is necessary @@ -1853,31 +1882,6 @@ class Namer { typer: Typer => // Beware: ddef.name need not match sym.name if sym was freshened! val isConstructor = sym.name == nme.CONSTRUCTOR - // A map from context-bounded type parameters to associated evidence parameter names - val witnessNamesOfParam = mutable.Map[TypeDef, List[TermName]]() - if !ddef.name.is(DefaultGetterName) && !sym.is(Synthetic) then - for params <- ddef.paramss; case tdef: TypeDef <- params do - for case WitnessNamesAnnot(ws) <- tdef.mods.annotations do - witnessNamesOfParam(tdef) = ws - - /** Is each name in `wnames` defined somewhere in the longest prefix of all `params` - * that have been typed ahead (i.e. that carry the TypedAhead attachment)? - */ - def allParamsSeen(wnames: List[TermName], params: List[MemberDef]) = - (wnames.toSet[Name] -- params.takeWhile(_.hasAttachment(TypedAhead)).map(_.name)).isEmpty - - /** Enter and typecheck parameter list. - * Once all witness parameters for a context bound are seen, create a - * context bound companion for it. 
- */ - def completeParams(params: List[MemberDef])(using Context): Unit = - index(params) - for param <- params do - typedAheadExpr(param) - for (tdef, wnames) <- witnessNamesOfParam do - if wnames.contains(param.name) && allParamsSeen(wnames, params) then - addContextBoundCompanionFor(symbolOfTree(tdef), wnames, params.map(symbolOfTree)) - // The following 3 lines replace what was previously just completeParams(tparams). // But that can cause bad bounds being computed, as witnessed by // tests/pos/paramcycle.scala. The problematic sequence is this: @@ -1901,39 +1905,16 @@ class Namer { typer: Typer => // 3. Info of CP is computed (to be copied to DP). // 4. CP is completed. // 5. Info of CP is copied to DP and DP is completed. - index(ddef.leadingTypeParams) - if (isConstructor) sym.owner.typeParams.foreach(_.ensureCompleted()) + if !sym.isPrimaryConstructor then + index(ddef.leadingTypeParams) val completedTypeParams = for tparam <- ddef.leadingTypeParams yield typedAheadExpr(tparam).symbol if completedTypeParams.forall(_.isType) then completer.setCompletedTypeParams(completedTypeParams.asInstanceOf[List[TypeSymbol]]) - ddef.trailingParamss.foreach(completeParams) + completeTrailingParamss(ddef, sym, indexingCtor = false) val paramSymss = normalizeIfConstructor(ddef.paramss.nestedMap(symbolOfTree), isConstructor) sym.setParamss(paramSymss) - /** Under x.modularity, we add `tracked` to context bound witnesses - * that have abstract type members - */ - def needsTracked(sym: Symbol, param: ValDef)(using Context) = - !sym.is(Tracked) - && param.hasAttachment(ContextBoundParam) - && sym.info.memberNames(abstractTypeNameFilter).nonEmpty - - /** Under x.modularity, set every context bound evidence parameter of a class to be tracked, - * provided it has a type that has an abstract type member. Reset private and local flags - * so that the parameter becomes a `val`. - */ - def setTracked(param: ValDef): Unit = - val sym = symbolOfTree(param) - sym.maybeOwner.maybeOwner.infoOrCompleter match - case info: TempClassInfo if needsTracked(sym, param) => - typr.println(i"set tracked $param, $sym: ${sym.info} containing ${sym.info.memberNames(abstractTypeNameFilter).toList}") - for acc <- info.decls.lookupAll(sym.name) if acc.is(ParamAccessor) do - acc.resetFlag(PrivateLocal) - acc.setFlag(Tracked) - sym.setFlag(Tracked) - case _ => - def wrapMethType(restpe: Type): Type = instantiateDependent(restpe, paramSymss) methodType(paramSymss, restpe, ddef.mods.is(JavaDefined)) @@ -1942,11 +1923,11 @@ class Namer { typer: Typer => wrapMethType(addParamRefinements(restpe, paramSymss)) if isConstructor then - if sym.isPrimaryConstructor && Feature.enabled(modularity) then - ddef.termParamss.foreach(_.foreach(setTracked)) // set result type tree to unit, but take the current class as result type of the symbol typedAheadType(ddef.tpt, defn.UnitType) - wrapMethType(effectiveResultType(sym, paramSymss)) + val mt = wrapMethType(effectiveResultType(sym, paramSymss)) + if sym.isPrimaryConstructor then checkCaseClassParamDependencies(mt, sym.owner) + mt else if sym.isAllOf(Given | Method) && Feature.enabled(modularity) then // set every context bound evidence parameter of a given companion method // to be tracked, provided it has a type that has an abstract type member. 
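For orientation, the witness bookkeeping above operates on the evidence parameters that context bounds desugar to. A minimal sketch of that desugaring in user code (the explicit parameter name `ord` is illustrative only, not what the compiler generates):

  // A context bound [T: Ordering] stands for an extra using-parameter list.
  def maxOf[T: Ordering](xs: List[T]): T = xs.max
  // is treated roughly like
  def maxOfDesugared[T](xs: List[T])(using ord: Ordering[T]): T = xs.max(using ord)
  val m = maxOf(List(3, 1, 2))   // the Ordering[Int] witness is summoned automatically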
@@ -1959,6 +1940,75 @@ class Namer { typer: Typer => valOrDefDefSig(ddef, sym, paramSymss, wrapMethType) end defDefSig + /** Complete the trailing parameters of a DefDef, + * as a part of indexing the primary constructor or + * as a part of completing a DefDef, including the primary constructor. + */ + def completeTrailingParamss(ddef: DefDef, sym: Symbol, indexingCtor: Boolean)(using Context): Unit = + // A map from context-bounded type parameters to associated evidence parameter names + val witnessNamesOfParam = mutable.Map[TypeDef, List[TermName]]() + if !ddef.name.is(DefaultGetterName) && !sym.is(Synthetic) && (indexingCtor || !sym.isPrimaryConstructor) then + for params <- ddef.paramss; case tdef: TypeDef <- params do + for case WitnessNamesAnnot(ws) <- tdef.mods.annotations do + witnessNamesOfParam(tdef) = ws + + /** Is each name in `wnames` defined somewhere in the previous parameters? */ + def allParamsSeen(wnames: List[TermName], prevParams: Set[Name]) = + (wnames.toSet[Name] -- prevParams).isEmpty + + /** Enter and typecheck parameter list. + * Once all witness parameters for a context bound are seen, create a + * context bound companion for it. + */ + def completeParams(params: List[MemberDef])(using Context): Unit = + if indexingCtor || !sym.isPrimaryConstructor then + index(params) + var prevParams = Set.empty[Name] + for param <- params do + if !indexingCtor then + typedAheadExpr(param) + + prevParams += param.name + for (tdef, wnames) <- witnessNamesOfParam do + if wnames.contains(param.name) && allParamsSeen(wnames, prevParams) then + addContextBoundCompanionFor(symbolOfTree(tdef), wnames, params.map(symbolOfTree)) + + ddef.trailingParamss.foreach(completeParams) + end completeTrailingParamss + + /** Checks an implementation restriction on case classes. */ + def checkCaseClassParamDependencies(mt: Type, cls: Symbol)(using Context): Unit = + mt.stripPoly match + case mt: MethodType if cls.is(Case) && mt.isParamDependent => + // See issue #8073 for background + report.error( + em"""Implementation restriction: case classes cannot have dependencies between parameters""", + cls.srcPos) + case _ => + + /** Under x.modularity, we add `tracked` to context bound witnesses + * that have abstract type members + */ + def needsTracked(sym: Symbol, param: ValDef)(using Context) = + !sym.is(Tracked) + && param.hasAttachment(ContextBoundParam) + && sym.info.memberNames(abstractTypeNameFilter).nonEmpty + + /** Under x.modularity, set every context bound evidence parameter of a class to be tracked, + * provided it has a type that has an abstract type member. Reset private and local flags + * so that the parameter becomes a `val`. 
+ */ + def setTracked(param: ValDef)(using Context): Unit = + val sym = symbolOfTree(param) + sym.maybeOwner.maybeOwner.infoOrCompleter match + case info: ClassInfo if needsTracked(sym, param) => + typr.println(i"set tracked $param, $sym: ${sym.info} containing ${sym.info.memberNames(abstractTypeNameFilter).toList}") + for acc <- info.decls.lookupAll(sym.name) if acc.is(ParamAccessor) do + acc.resetFlag(PrivateLocal) + acc.setFlag(Tracked) + sym.setFlag(Tracked) + case _ => + def inferredResultType( mdef: ValOrDefDef, sym: Symbol, @@ -2080,6 +2130,11 @@ class Namer { typer: Typer => val pt = inherited.orElse(expectedDefaultArgType).orElse(fallbackProto).widenExpr val tp = typedAheadRhs(pt).tpe if (defaultTp eq pt) && (tp frozen_<:< defaultTp) then + // See i21558, the default argument new A(1.0) is of type A[?T] + // With an uninterpolated, invariant ?T type variable. + // So before we return the default getter parameter type (A[? <: Double]) + // we want to force ?T to instantiate, so it's poly is removed from the constraint + isFullyDefined(tp, ForceDegree.all) // When possible, widen to the default getter parameter type to permit a // larger choice of overrides (see `default-getter.scala`). // For justification on the use of `@uncheckedVariance`, see diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index ecf1da30cac1..85f44ead5f28 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -11,16 +11,18 @@ import Constants.* import util.{Stats, SimpleIdentityMap, SimpleIdentitySet} import Decorators.* import Uniques.* -import Flags.Method +import Flags.{Method, Transparent} import inlines.Inlines +import config.{Feature, SourceVersion} import config.Printers.typr import Inferencing.* import ErrorReporting.* import util.SourceFile +import util.Spans.{NoSpan, Span} import TypeComparer.necessarySubType +import reporting.* import scala.annotation.internal.sharable -import dotty.tools.dotc.util.Spans.{NoSpan, Span} object ProtoTypes { @@ -69,6 +71,13 @@ object ProtoTypes { |constraint was: ${ctx.typerState.constraint} |constraint now: ${newctx.typerState.constraint}""") if result && (ctx.typerState.constraint ne newctx.typerState.constraint) then + // Remove all type lambdas and tvars introduced by testCompat + for tvar <- newctx.typerState.ownedVars do + inContext(newctx): + if !tvar.isInstantiated then + tvar.instantiate(fromBelow = false) // any direction + + // commit any remaining changes in typer state newctx.typerState.commit() result case _ => testCompat @@ -82,6 +91,7 @@ object ProtoTypes { * fits the given expected result type. */ def constrainResult(mt: Type, pt: Type)(using Context): Boolean = + trace(i"constrainResult($mt, $pt)", typr): val savedConstraint = ctx.typerState.constraint val res = pt.widenExpr match { case pt: FunProto => @@ -108,7 +118,7 @@ object ProtoTypes { res /** Constrain result with two special cases: - * 1. If `meth` is an inlineable method in an inlineable context, + * 1. If `meth` is a transparent inlineable method in an inlineable context, * we should always succeed and not constrain type parameters in the expected type, * because the actual return type can be a subtype of the currently known return type. * However, we should constrain parameters of the declared return type. 
This distinction is @@ -128,11 +138,30 @@ case _ => false - if Inlines.isInlineable(meth) then - constrainResult(mt, wildApprox(pt)) - true - else - constFoldException(pt) || constrainResult(mt, pt) + constFoldException(pt) || { + if Inlines.isInlineable(meth) then + // Stricter behaviour in 3.4+: do not apply `wildApprox` to non-transparent inlines + // unless their return type is a MatchType. In this case there's no reason + // not to constrain type variables in the expected type. For transparent inlines + // we do not want to constrain type variables in the expected type since the + // actual return type might be smaller after instantiation. For inlines returning + // MatchTypes we do not want to constrain because the MatchType might be more + // specific after instantiation. TODO: Should we also use Wildcards for non-inline + // methods returning MatchTypes? + if Feature.sourceVersion.isAtLeast(SourceVersion.`3.4`) then + if meth.is(Transparent) || mt.resultType.isMatchAlias then + constrainResult(mt, wildApprox(pt)) + // do not constrain the result type of transparent inline methods + true + else + constrainResult(mt, pt) + else + // Best-effort to fix https://github.com/scala/scala3/issues/9685 in the 3.3.x series + // while preserving source compatibility as much as possible + constrainResult(mt, wildApprox(pt)) || meth.is(Transparent) + else constrainResult(mt, pt) + } + end constrainResult end Compatibility @@ -302,6 +331,8 @@ object ProtoTypes { case tp: UnapplyFunProto => new UnapplySelectionProto(name, nameSpan) case tp => SelectionProto(name, IgnoredProto(tp), typer, privateOK = true, nameSpan) + class WildcardSelectionProto extends SelectionProto(nme.WILDCARD, WildcardType, NoViewsAllowed, true, NoSpan) + /** A prototype for expressions [] that are in some unspecified selection operation * * [].?: ? @@ -310,9 +341,9 @@ object ProtoTypes { * operation is further selection. In this case, the expression need not be a value. * @see checkValue */ - @sharable object AnySelectionProto extends SelectionProto(nme.WILDCARD, WildcardType, NoViewsAllowed, true, NoSpan) + @sharable object AnySelectionProto extends WildcardSelectionProto - @sharable object SingletonTypeProto extends SelectionProto(nme.WILDCARD, WildcardType, NoViewsAllowed, true, NoSpan) + @sharable object SingletonTypeProto extends WildcardSelectionProto /** A prototype for selections in pattern constructors */ class UnapplySelectionProto(name: Name, nameSpan: Span) extends SelectionProto(name, WildcardType, NoViewsAllowed, true, nameSpan) diff --git a/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala b/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala index fb9176526e42..59993a69797d 100644 --- a/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala +++ b/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala @@ -119,14 +119,31 @@ trait QuotesAndSplices { EmptyTree } } + val typedTypeargs = tree.typeargs.map { + case typearg: untpd.Ident => + val typedTypearg = typedType(typearg) + val bounds = ctx.gadt.fullBounds(typedTypearg.symbol) + if bounds != null && bounds != TypeBounds.empty then + report.error("Implementation restriction: Type arguments to Open pattern are expected to have no bounds", typearg.srcPos) + typedTypearg + case arg => + report.error("Open pattern expected an identifier", arg.srcPos) + EmptyTree + } for arg <- typedArgs if arg.symbol.is(Mutable) do // TODO support these patterns.
Possibly using scala.quoted.util.Var report.error("References to `var`s cannot be used in higher-order pattern", arg.srcPos) val argTypes = typedArgs.map(_.tpe.widenTermRefExpr) - val patType = if tree.args.isEmpty then pt else defn.FunctionNOf(argTypes, pt) + val patType = (tree.typeargs.isEmpty, tree.args.isEmpty) match + case (true, true) => pt + case (true, false) => + defn.FunctionNOf(argTypes, pt) + case (false, _) => + PolyFunctionOf(typedTypeargs.tpes, argTypes, pt) + val pat = typedPattern(tree.body, defn.QuotedExprClass.typeRef.appliedTo(patType))(using quotePatternSpliceContext) val baseType = pat.tpe.baseType(defn.QuotedExprClass) val argType = if baseType.exists then baseType.argTypesHi.head else defn.NothingType - untpd.cpy.SplicePattern(tree)(pat, typedArgs).withType(pt) + untpd.cpy.SplicePattern(tree)(pat, typedTypeargs, typedArgs).withType(pt) else errorTree(tree, em"Type must be fully defined.\nConsider annotating the splice using a type ascription:\n ($tree: XYZ).", tree.body.srcPos) } @@ -153,7 +170,34 @@ trait QuotesAndSplices { else // $x(...) higher-order quasipattern if args.isEmpty then report.error("Missing arguments for open pattern", tree.srcPos) - typedSplicePattern(untpd.cpy.SplicePattern(tree)(splice.body, args), pt) + typedSplicePattern(untpd.cpy.SplicePattern(tree)(splice.body, Nil, args), pt) + } + + /** Types a splice applied to some type arguments and arguments + * `$f[targs1, ..., targsn](arg1, ..., argn)` in a quote pattern. + * + * Refer to: typedAppliedSplice + */ + def typedAppliedSpliceWithTypes(tree: untpd.Apply, pt: Type)(using Context): Tree = { + assert(ctx.mode.isQuotedPattern) + val untpd.Apply(typeApplyTree @ untpd.TypeApply(splice: untpd.SplicePattern, typeargs), args) = tree: @unchecked + def isInBraces: Boolean = splice.span.end != splice.body.span.end + if isInBraces then // ${x}[...](...) match an application + val typedTypeargs = typeargs.map(arg => typedType(arg)) + val typedArgs = args.map(arg => typedExpr(arg)) + val argTypes = typedArgs.map(_.tpe.widenTermRefExpr) + val splice1 = typedSplicePattern(splice, ProtoTypes.PolyProto(typedArgs, defn.FunctionOf(argTypes, pt))) + val typedTypeApply = untpd.cpy.TypeApply(typeApplyTree)(splice1.select(nme.apply), typedTypeargs) + untpd.cpy.Apply(tree)(typedTypeApply, typedArgs).withType(pt) + else // $x[...](...) higher-order quasipattern + // Empty args is allowed + if typeargs.isEmpty then + report.error("Missing type arguments for open pattern", tree.srcPos) + typedSplicePattern(untpd.cpy.SplicePattern(tree)(splice.body, typeargs, args), pt) + } + + def typedTypeAppliedSplice(tree: untpd.TypeApply, pt: Type)(using Context): Tree = { + typedAppliedSpliceWithTypes(untpd.Apply(tree, Nil), pt) } /** Type check a type binding reference in a quoted pattern. 
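The PolyFunctionOf helper added in the next hunk constructs the compiler-internal encoding of a polymorphic function type. In surface syntax, such a type and a value inhabiting it look as follows (an illustrative sketch, not taken from the patch):

  // A polymorphic function type [T] => (List[T], Int) => Option[T] and a literal of that type.
  val nth: [T] => (List[T], Int) => Option[T] =
    [T] => (xs: List[T], i: Int) => xs.lift(i)
  val second = nth(List("a", "b", "c"), 1)   // Some("b")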
@@ -322,4 +366,22 @@ object QuotesAndSplices { case _ => super.transform(tree) end TreeMapWithVariance + + object PolyFunctionOf { + /** + * Return a poly-type + method type [$typeargs] => ($args) => ($resultType) + * where typeargs occur in args and resulttype + */ + def apply(typeargs: List[Type], args: List[Type], resultType: Type)(using Context): Type = + val typeargs1 = PolyType.syntheticParamNames(typeargs.length) + + val bounds = typeargs map (_ => TypeBounds.empty) + val resultTypeExp = (pt: PolyType) => { + val fromSymbols = typeargs map (_.typeSymbol) + val args1 = args map (_.subst(fromSymbols, pt.paramRefs)) + val resultType1 = resultType.subst(fromSymbols, pt.paramRefs) + MethodType(args1, resultType1) + } + defn.PolyFunctionOf(PolyType(typeargs1)(_ => bounds, resultTypeExp)) + } } diff --git a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala index 7a5c838848ac..ed8919661860 100644 --- a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala +++ b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala @@ -130,14 +130,15 @@ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking override def typedSplicePattern(tree: untpd.SplicePattern, pt: Type)(using Context): Tree = assertTyped(tree) + val typeargs1 = tree.typeargs.mapconserve(typedType(_)) val args1 = tree.args.mapconserve(typedExpr(_)) val patternTpe = - if args1.isEmpty then tree.typeOpt + if !typeargs1.isEmpty then QuotesAndSplices.PolyFunctionOf(typeargs1.map(_.tpe), args1.map(_.tpe), tree.typeOpt) + else if args1.isEmpty then tree.typeOpt else defn.FunctionType(args1.size).appliedTo(args1.map(_.tpe) :+ tree.typeOpt) val bodyCtx = spliceContext.addMode(Mode.Pattern).retractMode(Mode.QuotedPatternBits) val body1 = typed(tree.body, defn.QuotedExprClass.typeRef.appliedTo(patternTpe))(using bodyCtx) - val args = tree.args.mapconserve(typedExpr(_)) - untpd.cpy.SplicePattern(tree)(body1, args1).withType(tree.typeOpt) + untpd.cpy.SplicePattern(tree)(body1, typeargs1, args1).withType(tree.typeOpt) override def typedHole(tree: untpd.Hole, pt: Type)(using Context): Tree = promote(tree) diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index cb1aea27c444..0ec9458cac5c 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -250,12 +250,15 @@ object RefChecks { */ def needsCheck(overriding: Symbol, overridden: Symbol)(using Context): Boolean = true + protected def additionalChecks(overriding: Symbol, overridden: Symbol)(using Context): Unit = () + private val subtypeChecker: (Type, Type) => Context ?=> Boolean = this.checkSubType def checkAll(checkOverride: ((Type, Type) => Context ?=> Boolean, Symbol, Symbol) => Unit) = while hasNext do if needsCheck(overriding, overridden) then checkOverride(subtypeChecker, overriding, overridden) + additionalChecks(overriding, overridden) next() // The OverridingPairs cursor does assume that concrete overrides abstract @@ -481,7 +484,9 @@ object RefChecks { def overrideDeprecation(what: String, member: Symbol, other: Symbol, fix: String): Unit = report.deprecationWarning( em"overriding $what${infoStringWithLocation(other)} is deprecated;\n ${infoString(member)} should be $fix.", - if member.owner == clazz then member.srcPos else clazz.srcPos) + if member.owner == clazz then member.srcPos else clazz.srcPos, + origin = other.showFullName + ) def autoOverride(sym: Symbol) = sym.is(Synthetic) && 
( @@ -520,7 +525,6 @@ object RefChecks { // todo: align accessibility implication checking with isAccessible in Contexts def isOverrideAccessOK = - val memberIsPublic = (member.flags & AccessFlags).isEmpty && !member.privateWithin.exists def protectedOK = !other.is(Protected) || member.is(Protected) // if o is protected, so is m def accessBoundaryOK = val ob = other.accessBoundary(member.owner) @@ -529,7 +533,7 @@ object RefChecks { def companionBoundaryOK = ob.isClass && !ob.isLocalToBlock && mb.is(Module) && (ob.companionModule eq mb.companionModule) ob.isContainedIn(mb) || companionBoundaryOK // m relaxes o's access boundary, def otherIsJavaProtected = other.isAllOf(JavaProtected) // or o is Java defined and protected (see #3946) - memberIsPublic || protectedOK && (accessBoundaryOK || otherIsJavaProtected) + member.isPublic || protectedOK && (accessBoundaryOK || otherIsJavaProtected) end isOverrideAccessOK if !member.hasTargetName(other.targetName) then @@ -695,6 +699,15 @@ object RefChecks { && withMode(Mode.IgnoreCaptures)(mbrDenot.matchesLoosely(impl, alwaysCompareTypes = true))) .exists + /** Filter out symbols from `syms` that are overridden by a symbol appearing later in the list. + * Symbols that are not overridden are kept. */ + def lastOverrides(syms: List[Symbol]): List[Symbol] = + val deduplicated = + syms.foldLeft(List.empty[Symbol]): + case (acc, sym) if acc.exists(s => isOverridingPair(s, sym, clazz.thisType)) => acc + case (acc, sym) => sym :: acc + deduplicated.reverse + /** The term symbols in this class and its baseclasses that are * abstract in this class. We can't use memberNames for that since * a concrete member might have the same signature as an abstract @@ -717,7 +730,8 @@ object RefChecks { val missingMethods = grouped.toList flatMap { case (name, syms) => - syms.filterConserve(!_.isSetter) + lastOverrides(syms) + .filterConserve(!_.isSetter) .distinctBy(_.signature) // Avoid duplication for similar definitions (#19731) } @@ -1154,16 +1168,18 @@ object RefChecks { target.nonPrivateMember(sym.name) .filterWithPredicate: member => - val memberIsImplicit = member.info.hasImplicitParams - val paramTps = - if memberIsImplicit then methTp.stripPoly.firstParamTypes - else methTp.firstExplicitParamTypes - - paramTps.isEmpty || memberIsImplicit && !methTp.hasImplicitParams || { - val memberParamTps = member.info.stripPoly.firstParamTypes - !memberParamTps.isEmpty - && memberParamTps.lengthCompare(paramTps) == 0 - && memberParamTps.lazyZip(paramTps).forall((m, x) => x frozen_<:< m) + member.symbol.isPublic && { + val memberIsImplicit = member.info.hasImplicitParams + val paramTps = + if memberIsImplicit then methTp.stripPoly.firstParamTypes + else methTp.firstExplicitParamTypes + + paramTps.isEmpty || memberIsImplicit && !methTp.hasImplicitParams || { + val memberParamTps = member.info.stripPoly.firstParamTypes + !memberParamTps.isEmpty + && memberParamTps.lengthCompare(paramTps) == 0 + && memberParamTps.lazyZip(paramTps).forall((m, x) => x frozen_<:< m) + } } .exists if !target.typeSymbol.denot.isAliasType && !target.typeSymbol.denot.isOpaqueAlias && hidden diff --git a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala index 6b18540b6551..c935e8d6b3cf 100644 --- a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala @@ -187,7 +187,8 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): // val x: String = null.asInstanceOf[String] // 
if (x == null) {} // error: x is non-nullable // if (x.asInstanceOf[String|Null] == null) {} // ok - cls1 == defn.NullClass && cls1 == cls2 + if cls1 == defn.NullClass || cls2 == defn.NullClass then cls1 == cls2 + else cls1 == defn.NothingClass || cls2 == defn.NothingClass else if cls1 == defn.NullClass then cls1 == cls2 || cls2.derivesFrom(defn.ObjectClass) else if cls2 == defn.NullClass then @@ -455,7 +456,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): MirrorSource.reduce(mirroredType) match case Right(msrc) => msrc match case MirrorSource.Singleton(_, tref) => - val singleton = tref.termSymbol // prefer alias name over the orignal name + val singleton = tref.termSymbol // prefer alias name over the original name val singletonPath = tpd.singleton(tref).withSpan(span) if tref.classSymbol.is(Scala2x) then // could be Scala 3 alias of Scala 2 case object. val mirrorType = formal.constrained_& { @@ -536,7 +537,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): else refineAtPrefix(childPre, childClass, childClass.primaryConstructor.info) match case info: PolyType => - // Compute the the full child type by solving the subtype constraint + // Compute the full child type by solving the subtype constraint // `C[X1, ..., Xn] <: P`, where // // - P is the current `mirroredType` diff --git a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala index 64722d51708c..8751bd7dc9bb 100644 --- a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -51,7 +51,7 @@ trait TypeAssigner { else sym.info private def toRepeated(tree: Tree, from: ClassSymbol)(using Context): Tree = - Typed(tree, TypeTree(tree.tpe.widen.translateToRepeated(from))) + Typed(tree, TypeTree(tree.tpe.widen.translateToRepeated(from), inferred = true)) def seqToRepeated(tree: Tree)(using Context): Tree = toRepeated(tree, defn.SeqClass) @@ -85,7 +85,7 @@ trait TypeAssigner { defn.FromJavaObjectType else tpe match case tpe: NamedType => - val tpe1 = TypeOps.makePackageObjPrefixExplicit(tpe) + val tpe1 = tpe.makePackageObjPrefixExplicit if tpe1 ne tpe then accessibleType(tpe1, superAccess) else @@ -261,7 +261,7 @@ trait TypeAssigner { else if (ctx.erasedTypes) cls.info.firstParent.typeConstructor else { val ps = cls.classInfo.parents - if (ps.isEmpty) defn.AnyType else ps.reduceLeft((x: Type, y: Type) => x & y) + if ps.isEmpty then defn.AnyType else ps.reduceLeft(AndType(_, _)) } SuperType(cls.thisType, owntype) @@ -360,20 +360,21 @@ trait TypeAssigner { resultType1) } } + else if !args.hasSameLengthAs(paramNames) then + wrongNumberOfTypeArgs(fn.tpe, pt.typeParams, args, tree.srcPos) else { // Make sure arguments don't contain the type `pt` itself. - // make a copy of the argument if that's the case. + // Make a copy of `pt` if that's the case. // This is done to compensate for the fact that normally every // reference to a polytype would have to be a fresh copy of that type, // but we want to avoid that because it would increase compilation cost. // See pos/i6682a.scala for a test case where the defensive copying matters. 
- val ensureFresh = new TypeMap with CaptureSet.IdempotentCaptRefMap: - def apply(tp: Type) = mapOver( - if tp eq pt then pt.newLikeThis(pt.paramNames, pt.paramInfos, pt.resType) - else tp) - val argTypes = args.tpes.mapConserve(ensureFresh) - if (argTypes.hasSameLengthAs(paramNames)) pt.instantiate(argTypes) - else wrongNumberOfTypeArgs(fn.tpe, pt.typeParams, args, tree.srcPos) + val needsFresh = new ExistsAccumulator(_ eq pt, StopAt.None, forceLazy = false) + val argTypes = args.tpes + val pt1 = if argTypes.exists(needsFresh(false, _)) then + pt.newLikeThis(pt.paramNames, pt.paramInfos, pt.resType) + else pt + pt1.instantiate(argTypes) } } case err: ErrorType => diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 2a877a45b550..5c5ca8af46c6 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -40,7 +40,7 @@ import annotation.tailrec import Implicits.* import util.Stats.record import config.Printers.{gadts, typr} -import config.Feature, Feature.{sourceVersion, migrateTo3, modularity} +import config.Feature, Feature.{migrateTo3, modularity, sourceVersion, warnOnMigration} import config.SourceVersion.* import rewrites.Rewrites, Rewrites.patch import staging.StagingLevel @@ -78,6 +78,9 @@ object Typer { /** An attachment for GADT constraints that were inferred for a pattern. */ val InferredGadtConstraints = new Property.StickyKey[core.GadtConstraint] + /** Indicates that a definition was copied over from the parent refinements */ + val RefinementFromParent = new Property.StickyKey[Unit] + /** An attachment on a Select node with an `apply` field indicating that the `apply` * was inserted by the Typer. */ @@ -341,7 +344,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // so we ignore that import. 
if reallyExists(denot) && !isScalaJsPseudoUnion then if unimported.isEmpty || !unimported.contains(pre.termSymbol) then - return pre.select(name, denot) + return pre.select(name, denot).makePackageObjPrefixExplicit case _ => if imp.importSym.isCompleting then report.warning(i"cyclic ${imp.importSym}, ignored", pos) @@ -501,7 +504,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer defDenot.symbol.owner else curOwner - effectiveOwner.thisType.select(name, defDenot) + effectiveOwner.thisType.select(name, defDenot).makePackageObjPrefixExplicit } if !curOwner.is(Package) || isDefinedInCurrentUnit(defDenot) then result = checkNewOrShadowed(found, Definition) // no need to go further out, we found highest prec entry @@ -724,136 +727,189 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer then report.error(StableIdentPattern(tree, pt), tree.srcPos) - def typedSelect(tree0: untpd.Select, pt: Type, qual: Tree)(using Context): Tree = + def typedSelectWithAdapt(tree0: untpd.Select, pt: Type, qual: Tree)(using Context): Tree = val selName = tree0.name val tree = cpy.Select(tree0)(qual, selName) val superAccess = qual.isInstanceOf[Super] val rawType = selectionType(tree, qual) - val checkedType = accessibleType(rawType, superAccess) - - def finish(tree: untpd.Select, qual: Tree, checkedType: Type): Tree = - val select = toNotNullTermRef(assignType(tree, checkedType), pt) - if selName.isTypeName then checkStable(qual.tpe, qual.srcPos, "type prefix") - checkLegalValue(select, pt) - ConstFold(select) - // If regular selection is typeable, we are done - if checkedType.exists then - return finish(tree, qual, checkedType) + def tryType(tree: untpd.Select, qual: Tree, rawType: Type) = + val checkedType = accessibleType(rawType, superAccess) + // If regular selection is typeable, we are done + if checkedType.exists then + val select = toNotNullTermRef(assignType(tree, checkedType), pt) + if selName.isTypeName then checkStable(qual.tpe, qual.srcPos, "type prefix") + checkLegalValue(select, pt) + ConstFold(select) + else EmptyTree // Otherwise, simplify `m.apply(...)` to `m(...)` - if selName == nme.apply && qual.tpe.widen.isInstanceOf[MethodType] then - return qual + def trySimplifyApply() = + if selName == nme.apply && qual.tpe.widen.isInstanceOf[MethodType] then + qual + else EmptyTree // Otherwise, if there's a simply visible type variable in the result, try again // with a more defined qualifier type. There's a second trial where we try to instantiate // all type variables in `qual.tpe.widen`, but that is done only after we search for // extension methods or conversions. - if couldInstantiateTypeVar(qual.tpe.widen) then - // there's a simply visible type variable in the result; try again with a more defined qualifier type - // There's a second trial where we try to instantiate all type variables in `qual.tpe.widen`, - // but that is done only after we search for extension methods or conversions. - return typedSelect(tree, pt, qual) + def tryInstantiateTypeVar() = + if couldInstantiateTypeVar(qual.tpe.widen) then + // there's a simply visible type variable in the result; try again with a more defined qualifier type + // There's a second trial where we try to instantiate all type variables in `qual.tpe.widen`, + // but that is done only after we search for extension methods or conversions. + typedSelectWithAdapt(tree, pt, qual) + else EmptyTree + + // Otherwise, heal member selection on an opaque reference, + // reusing the logic in TypeComparer. 
+ def tryLiftToThis() = + val wtp = qual.tpe.widen + val liftedTp = comparing(_.liftToThis(wtp)) + if liftedTp ne wtp then + val qual1 = qual.cast(liftedTp) + val tree1 = cpy.Select(tree0)(qual1, selName) + val rawType1 = selectionType(tree1, qual1) + val adapted = tryType(tree1, qual1, rawType1) + if !adapted.isEmpty && sourceVersion == `3.6-migration` then + val adaptedOld = tryExt(tree, qual) + if !adaptedOld.isEmpty then + val symOld = adaptedOld.symbol + val underlying = liftedTp match + case tp: TypeProxy => i" ${tp.translucentSuperType}" + case _ => "" + report.migrationWarning( + em"""Previously this selected the extension ${symOld}${symOld.showExtendedLocation} + |Now it selects $selName on the opaque type's underlying type$underlying + | + |You can change this back by selecting $adaptedOld + |Or by defining the extension method outside of the opaque type's scope. + |""", tree0) + adapted + else EmptyTree // Otherwise, try to expand a named tuple selection - val namedTupleElems = qual.tpe.widen.namedTupleElementTypes - val nameIdx = namedTupleElems.indexWhere(_._1 == selName) - if nameIdx >= 0 && Feature.enabled(Feature.namedTuples) then - return typed( - untpd.Apply( - untpd.Select(untpd.TypedSplice(qual), nme.apply), - untpd.Literal(Constant(nameIdx))), - pt) + def tryNamedTupleSelection() = + val namedTupleElems = qual.tpe.widenDealias.namedTupleElementTypes + val nameIdx = namedTupleElems.indexWhere(_._1 == selName) + if nameIdx >= 0 && Feature.enabled(Feature.namedTuples) then + typed( + untpd.Apply( + untpd.Select(untpd.TypedSplice(qual), nme.apply), + untpd.Literal(Constant(nameIdx))), + pt) + else EmptyTree // Otherwise, map combinations of A *: B *: .... EmptyTuple with nesting levels <= 22 // to the Tuple class of the right arity and select from that one - if qual.tpe.isSmallGenericTuple then - val elems = qual.tpe.widenTermRefExpr.tupleElementTypes.getOrElse(Nil) - return typedSelect(tree, pt, qual.cast(defn.tupleType(elems))) + def trySmallGenericTuple(qual: Tree, withCast: Boolean) = + if qual.tpe.isSmallGenericTuple then + if withCast then + val elems = qual.tpe.widenTermRefExpr.tupleElementTypes.getOrElse(Nil) + typedSelectWithAdapt(tree, pt, qual.cast(defn.tupleType(elems))) + else + typedSelectWithAdapt(tree, pt, qual) + else EmptyTree // Otherwise try an extension or conversion - if selName.isTermName then - val tree1 = tryExtensionOrConversion( - tree, pt, IgnoredProto(pt), qual, ctx.typerState.ownedVars, this, inSelect = true) - if !tree1.isEmpty then - return tree1 + def tryExt(tree: untpd.Select, qual: Tree) = + if selName.isTermName then + tryExtensionOrConversion( + tree, pt, IgnoredProto(pt), qual, ctx.typerState.ownedVars, this, inSelect = true) + else EmptyTree // Otherwise, try a GADT approximation if we're trying to select a member - // Member lookup cannot take GADTs into account b/c of cache, so we - // approximate types based on GADT constraints instead. For an example, - // see MemberHealing in gadt-approximation-interaction.scala. 
- if ctx.gadt.isNarrowing then - val wtp = qual.tpe.widen - gadts.println(i"Trying to heal member selection by GADT-approximating $wtp") - val gadtApprox = Inferencing.approximateGADT(wtp) - gadts.println(i"GADT-approximated $wtp ~~ $gadtApprox") - val qual1 = qual.cast(gadtApprox) - val tree1 = cpy.Select(tree0)(qual1, selName) - val checkedType1 = accessibleType(selectionType(tree1, qual1), superAccess = false) - if checkedType1.exists then - gadts.println(i"Member selection healed by GADT approximation") - return finish(tree1, qual1, checkedType1) - - if qual1.tpe.isSmallGenericTuple then - gadts.println(i"Tuple member selection healed by GADT approximation") - return typedSelect(tree, pt, qual1) - - val tree2 = tryExtensionOrConversion(tree1, pt, IgnoredProto(pt), qual1, ctx.typerState.ownedVars, this, inSelect = true) - if !tree2.isEmpty then - return tree2 + def tryGadt() = + if ctx.gadt.isNarrowing then + // Member lookup cannot take GADTs into account b/c of cache, so we + // approximate types based on GADT constraints instead. For an example, + // see MemberHealing in gadt-approximation-interaction.scala. + val wtp = qual.tpe.widen + gadts.println(i"Trying to heal member selection by GADT-approximating $wtp") + val gadtApprox = Inferencing.approximateGADT(wtp) + gadts.println(i"GADT-approximated $wtp ~~ $gadtApprox") + val qual1 = qual.cast(gadtApprox) + val tree1 = cpy.Select(tree0)(qual1, selName) + tryType(tree1, qual1, selectionType(tree1, qual1)) + .orElse(trySmallGenericTuple(qual1, withCast = false)) + .orElse(tryExt(tree1, qual1)) + else EmptyTree // Otherwise, if there are uninstantiated type variables in the qualifier type, // instantiate them and try again - if canDefineFurther(qual.tpe.widen) then - return typedSelect(tree, pt, qual) + def tryDefineFurther() = + if canDefineFurther(qual.tpe.widen) then + typedSelectWithAdapt(tree, pt, qual) + else EmptyTree def dynamicSelect(pt: Type) = - val tree2 = cpy.Select(tree0)(untpd.TypedSplice(qual), selName) - if pt.isInstanceOf[FunOrPolyProto] || pt == LhsProto then - assignType(tree2, TryDynamicCallType) - else - typedDynamicSelect(tree2, Nil, pt) + val tree2 = cpy.Select(tree0)(untpd.TypedSplice(qual), selName) + if pt.isInstanceOf[FunOrPolyProto] || pt == LhsProto then + assignType(tree2, TryDynamicCallType) + else + typedDynamicSelect(tree2, Nil, pt) // Otherwise, if the qualifier derives from class Dynamic, expand to a // dynamic dispatch using selectDynamic or applyDynamic - if qual.tpe.derivesFrom(defn.DynamicClass) && selName.isTermName && !isDynamicExpansion(tree) then - return dynamicSelect(pt) + def tryDynamic() = + if qual.tpe.derivesFrom(defn.DynamicClass) && selName.isTermName && !isDynamicExpansion(tree) then + dynamicSelect(pt) + else EmptyTree // Otherwise, if the qualifier derives from class Selectable, // and the selector name matches one of the element of the `Fields` type member, - // and the selector is neither applied nor assigned to, + // and the selector is not assigned to, // expand to a typed dynamic dispatch using selectDynamic wrapped in a cast - if qual.tpe.derivesFrom(defn.SelectableClass) && !isDynamicExpansion(tree) - && !pt.isInstanceOf[FunOrPolyProto] && pt != LhsProto - then - val fieldsType = qual.tpe.select(tpnme.Fields).dealias.simplified - val fields = fieldsType.namedTupleElementTypes - typr.println(i"try dyn select $qual, $selName, $fields") - fields.find(_._1 == selName) match - case Some((_, fieldType)) => - val dynSelected = dynamicSelect(fieldType) - dynSelected match - case 
Apply(sel: Select, _) if !sel.denot.symbol.exists => - // Reject corner case where selectDynamic needs annother selectDynamic to be called. E.g. as in neg/unselectable-fields.scala. - report.error(i"Cannot use selectDynamic here since it needs another selectDynamic to be invoked", tree.srcPos) - case _ => - return dynSelected.ensureConforms(fieldType) - case _ => + def trySelectable() = + if qual.tpe.derivesFrom(defn.SelectableClass) && !isDynamicExpansion(tree) + && pt != LhsProto + then + val pre = if !TypeOps.isLegalPrefix(qual.tpe) then SkolemType(qual.tpe) else qual.tpe + val fieldsType = pre.select(tpnme.Fields).dealias.simplified + val fields = fieldsType.namedTupleElementTypes + typr.println(i"try dyn select $qual, $selName, $fields") + fields.find(_._1 == selName) match + case Some((_, fieldType)) => + val dynSelected = dynamicSelect(fieldType) + dynSelected match + case Apply(sel: Select, _) if !sel.denot.symbol.exists => + // Reject corner case where selectDynamic needs another selectDynamic to be called. E.g. as in neg/unselectable-fields.scala. + report.error(i"Cannot use selectDynamic here since it needs another selectDynamic to be invoked", tree.srcPos) + case _ => + dynSelected.ensureConforms(fieldType) + case _ => EmptyTree + else EmptyTree // Otherwise, if the qualifier is a context bound companion, handle // by selecting a witness in typedCBSelect - if qual.tpe.typeSymbol == defn.CBCompanion then - val witnessSelection = typedCBSelect(tree0, pt, qual) - if !witnessSelection.isEmpty then return witnessSelection + def tryCBCompanion() = + if qual.tpe.typeSymbol == defn.CBCompanion then + typedCBSelect(tree0, pt, qual) + else EmptyTree // Otherwise, report an error - assignType(tree, - rawType match - case rawType: NamedType => - inaccessibleErrorType(rawType, superAccess, tree.srcPos) - case _ => - notAMemberErrorType(tree, qual, pt)) - end typedSelect + def reportAnError() = + assignType(tree, + rawType match + case rawType: NamedType => + inaccessibleErrorType(rawType, superAccess, tree.srcPos) + case _ => + notAMemberErrorType(tree, qual, pt)) + + tryType(tree, qual, rawType) + .orElse(trySimplifyApply()) + .orElse(tryInstantiateTypeVar()) + .orElse(tryLiftToThis()) + .orElse(tryNamedTupleSelection()) + .orElse(trySmallGenericTuple(qual, withCast = true)) + .orElse(tryExt(tree, qual)) + .orElse(tryGadt()) + .orElse(tryDefineFurther()) + .orElse(tryDynamic()) + .orElse(trySelectable()) + .orElse(tryCBCompanion()) + .orElse(reportAnError()) + end typedSelectWithAdapt /** Expand a selection A.m on a context bound companion A with type * `[ref_1 | ...
| ref_N]` as described by @@ -905,7 +961,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case witness: TermRef => val altQual = tpd.ref(witness).withSpan(qual.span) val altCtx = ctx.fresh.setNewTyperState() - val alt = typedSelect(tree, pt, altQual)(using altCtx) + val alt = typedSelectWithAdapt(tree, pt, altQual)(using altCtx) def current = (alt, altCtx.typerState, witness) if altCtx.reporter.hasErrors then prevs else @@ -937,7 +993,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if ctx.isJava then javaSelection(qual) else - typedSelect(tree, pt, qual).withSpan(tree.span).computeNullable() + typedSelectWithAdapt(tree, pt, qual).withSpan(tree.span).computeNullable() def javaSelection(qual: Tree)(using Context) = val tree1 = assignType(cpy.Select(tree)(qual, tree.name), qual) @@ -1244,7 +1300,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * For example, both `@Annot(5)` and `@Annot({5, 6}) are viable calls of the constructor * of annotation defined as `@interface Annot { int[] value() }` * We assume that calling `typedNamedArg` in context of Java implies that we are dealing - * with annotation contructor, as named arguments are not allowed anywhere else in Java. + * with annotation constructor, as named arguments are not allowed anywhere else in Java. * Under explicit nulls, the pt could be nullable. We need to strip `Null` type first. */ val arg1 = pt.stripNull() match { @@ -1334,12 +1390,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val setter = toSetter(lhsCore) if setter.isEmpty then reassignmentToVal - else tryEither { + else val assign = untpd.Apply(setter, tree.rhs :: Nil) typed(assign, IgnoredProto(pt)) - } { - (_, _) => reassignmentToVal - } case _ => lhsCore.tpe match { case ref: TermRef => val lhsVal = lhsCore.denot.suchThat(!_.is(Method)) @@ -1422,7 +1475,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer cpy.Block(block)(stats, expr1) withType expr1.tpe // no assignType here because avoid is redundant case _ => val target = pt.simplified - val targetTpt = InferredTypeTree().withType(target) + val targetTpt = TypeTree(target, inferred = true) if tree.tpe <:< target then Typed(tree, targetTpt) else // This case should not normally arise. It currently does arise in test cases @@ -1648,10 +1701,14 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else val resTpt = TypeTree(mt.nonDependentResultApprox).withSpan(body.span) val paramTpts = appDef.termParamss.head.map(p => TypeTree(p.tpt.tpe).withSpan(p.tpt.span)) - val funSym = defn.FunctionSymbol(numArgs, isContextual, isImpure) + val funSym = defn.FunctionSymbol(numArgs, isContextual) val tycon = TypeTree(funSym.typeRef) AppliedTypeTree(tycon, paramTpts :+ resTpt) - RefinedTypeTree(core, List(appDef), ctx.owner.asClass) + val res = RefinedTypeTree(core, List(appDef), ctx.owner.asClass) + if isImpure then + typed(untpd.makeRetaining(untpd.TypedSplice(res), Nil, tpnme.retainsCap), pt) + else + res end typedDependent args match { @@ -1783,19 +1840,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if (protoFormals.length == params.length) (protoFormals(i), isDefinedErased(i)) else (errorType(WrongNumberOfParameters(tree, params.length, pt, protoFormals.length), tree.srcPos), false) - /** Is `formal` a product type which is elementwise compatible with `params`? 
*/ - def ptIsCorrectProduct(formal: Type) = - isFullyDefined(formal, ForceDegree.flipBottom) && - defn.isProductSubType(formal) && - tupleComponentTypes(formal).corresponds(params) { - (argType, param) => - param.tpt.isEmpty || argType.widenExpr <:< typedAheadType(param.tpt).tpe - } - var desugared: untpd.Tree = EmptyTree if protoFormals.length == 1 && params.length != 1 then val firstFormal = protoFormals.head.loBound - if ptIsCorrectProduct(firstFormal) then + if ptIsCorrectProduct(firstFormal, params) then val isGenericTuple = firstFormal.derivesFrom(defn.TupleClass) && !defn.isTupleClass(firstFormal.typeSymbol) @@ -1849,9 +1897,16 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if knownFormal then formal0 else errorType(AnonymousFunctionMissingParamType(param, tree, inferredType = formal, expectedType = pt), param.srcPos) ) + val untpdTpt = formal match + case _: WildcardType => + // In this case we have a situation like f(_), where we expand in the end to + // (x: T) => f(x) and `T` is taken from `f`'s declared parameters. In this case + // we treat the type as declared instead of inferred. InferredType is used for + // types that are inferred from the context. + untpd.TypeTree() + case _ => InferredTypeTree() val paramTpt = untpd.TypedSplice( - (if knownFormal then InferredTypeTree() else untpd.TypeTree()) - .withType(paramType.translateFromRepeated(toArray = false)) + untpdTpt.withType(paramType.translateFromRepeated(toArray = false)) .withSpan(param.span.endPos) ) val param0 = cpy.ValDef(param)(tpt = paramTpt) @@ -1862,43 +1917,44 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer .showing(i"desugared fun $tree --> $desugared with pt = $pt", typr) } - def typedPolyFunction(tree: untpd.PolyFunction, pt: Type)(using Context): Tree = val tree1 = desugar.normalizePolyFunction(tree) if (ctx.mode is Mode.Type) typed(desugar.makePolyFunctionType(tree1), pt) - else typedPolyFunctionValue(tree1, pt) + else typedPolyFunctionValue(desugar.elimContextBounds(tree1).asInstanceOf[untpd.PolyFunction], pt) def typedPolyFunctionValue(tree: untpd.PolyFunction, pt: Type)(using Context): Tree = val untpd.PolyFunction(tparams: List[untpd.TypeDef] @unchecked, fun) = tree: @unchecked val untpd.Function(vparams: List[untpd.ValDef] @unchecked, body) = fun: @unchecked val dpt = pt.dealias - // If the expected type is a polymorphic function with the same number of - // type and value parameters, then infer the types of value parameters from the expected type. - val inferredVParams = dpt match - case defn.PolyFunctionOf(poly @ PolyType(_, mt: MethodType)) - if tparams.lengthCompare(poly.paramNames) == 0 && vparams.lengthCompare(mt.paramNames) == 0 => - vparams.zipWithConserve(mt.paramInfos): (vparam, formal) => - // Unlike in typedFunctionValue, `formal` cannot be a TypeBounds since - // it must be a valid method parameter type. - if vparam.tpt.isEmpty && isFullyDefined(formal, ForceDegree.failBottom) then - cpy.ValDef(vparam)(tpt = new untpd.InLambdaTypeTree(isResult = false, (tsyms, vsyms) => - // We don't need to substitute `mt` by `vsyms` because we currently disallow - // dependencies between value parameters of a closure. 
- formal.substParams(poly, tsyms.map(_.typeRef))) - ) - else vparam - case _ => - vparams - - val resultTpt = dpt match + dpt match case defn.PolyFunctionOf(poly @ PolyType(_, mt: MethodType)) => - untpd.InLambdaTypeTree(isResult = true, (tsyms, vsyms) => - mt.resultType.substParams(mt, vsyms.map(_.termRef)).substParams(poly, tsyms.map(_.typeRef))) - case _ => untpd.TypeTree() - - val desugared = desugar.makeClosure(tparams, inferredVParams, body, resultTpt, tree.span) - typed(desugared, pt) + if tparams.lengthCompare(poly.paramNames) == 0 && vparams.lengthCompare(mt.paramNames) == 0 then + // If the expected type is a polymorphic function with the same number of + // type and value parameters, then infer the types of value parameters from the expected type. + val inferredVParams = vparams.zipWithConserve(mt.paramInfos): (vparam, formal) => + // Unlike in typedFunctionValue, `formal` cannot be a TypeBounds since + // it must be a valid method parameter type. + if vparam.tpt.isEmpty && isFullyDefined(formal, ForceDegree.failBottom) then + cpy.ValDef(vparam)(tpt = new untpd.InLambdaTypeTree(isResult = false, (tsyms, vsyms) => + // We don't need to substitute `mt` by `vsyms` because we currently disallow + // dependencies between value parameters of a closure. + formal.substParams(poly, tsyms.map(_.typeRef))) + ) + else vparam + val resultTpt = + untpd.InLambdaTypeTree(isResult = true, (tsyms, vsyms) => + mt.resultType.substParams(mt, vsyms.map(_.termRef)).substParams(poly, tsyms.map(_.typeRef))) + val desugared = desugar.makeClosure(tparams, inferredVParams, body, resultTpt, tree.span) + typed(desugared, pt) + else + val msg = + em"""|Provided polymorphic function value doesn't match the expected type $dpt. + |Expected type should be a polymorphic function with the same number of type and value parameters.""" + errorTree(EmptyTree, msg, tree.srcPos) + case _ => + val desugared = desugar.makeClosure(tparams, vparams, body, untpd.TypeTree(), tree.span) + typed(desugared, pt) end typedPolyFunctionValue def typedClosure(tree: untpd.Closure, pt: Type)(using Context): Tree = { @@ -1942,8 +1998,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // Polymorphic SAMs are not currently supported (#6904). 
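The limitation referenced in the comment above (#6904), shown as a sketch; `PolySam` and `MonoSam` are illustrative names.

  trait PolySam:
    def apply[T](x: T): T
  // val f: PolySam = [T] => (x: T) => x   // polymorphic SAMs are not supported (#6904)
  trait MonoSam:
    def apply(x: Int): Int
  val g: MonoSam = x => x + 1              // ordinary, monomorphic SAM conversion works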
EmptyTree case tp => - if !tp.isErroneous then - throw new java.lang.Error(i"internal error: closing over non-method $tp, pos = ${tree.span}") TypeTree(defn.AnyType) } else typed(tree.tpt) @@ -2009,7 +2063,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _ => false } - val result = pt.underlyingMatchType match { + val result = pt.underlyingNormalizable match { case mt: MatchType if isMatchTypeShaped(mt) => typedDependentMatchFinish(tree, sel1, selType, tree.cases, mt) case _ => @@ -2032,7 +2086,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer result match { case result @ Match(sel, CaseDef(pat, _, _) :: _) => tree.selector.removeAttachment(desugar.CheckIrrefutable) match { - case Some(checkMode) if !sel.tpe.hasAnnotation(defn.UncheckedAnnot) => + case Some(checkMode) if !(sel.tpe.hasAnnotation(defn.UncheckedAnnot) || sel.tpe.hasAnnotation(defn.RuntimeCheckedAnnot)) => val isPatDef = checkMode == desugar.MatchCheck.IrrefutablePatDef if !checkIrrefutable(sel, pat, isPatDef) && sourceVersion.isAtLeast(`3.2`) @@ -2051,7 +2105,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // TODO: move the check above to patternMatcher phase val uncheckedTpe = AnnotatedType(sel.tpe.widen, Annotation(defn.UncheckedAnnot, tree.selector.span)) tpd.cpy.Match(result)( - selector = tpd.Typed(sel, new tpd.InferredTypeTree().withType(uncheckedTpe)), + selector = tpd.Typed(sel, tpd.TypeTree(uncheckedTpe, inferred = true)), cases = result.cases ) case _ => @@ -2079,14 +2133,18 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case1 } .asInstanceOf[List[CaseDef]] - assignType(cpy.Match(tree)(sel, cases1), sel, cases1).cast(pt) + var nni = sel.notNullInfo + if cases1.nonEmpty then nni = nni.seq(cases1.map(_.notNullInfo).reduce(_.alt(_))) + assignType(cpy.Match(tree)(sel, cases1), sel, cases1).cast(pt).withNotNullInfo(nni) } // Overridden in InlineTyper for inline matches def typedMatchFinish(tree: untpd.Match, sel: Tree, wideSelType: Type, cases: List[untpd.CaseDef], pt: Type)(using Context): Tree = { val cases1 = harmonic(harmonize, pt)(typedCases(cases, sel, wideSelType, pt.dropIfProto)) .asInstanceOf[List[CaseDef]] - assignType(cpy.Match(tree)(sel, cases1), sel, cases1) + var nni = sel.notNullInfo + if cases1.nonEmpty then nni = nni.seq(cases1.map(_.notNullInfo).reduce(_.alt(_))) + assignType(cpy.Match(tree)(sel, cases1), sel, cases1).withNotNullInfo(nni) } def typedCases(cases: List[untpd.CaseDef], sel: Tree, wideSelType0: Type, pt: Type)(using Context): List[CaseDef] = @@ -2141,7 +2199,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer /** Type a case. */ def typedCase(tree: untpd.CaseDef, sel: Tree, wideSelType: Type, pt: Type)(using Context): CaseDef = { val originalCtx = ctx - val gadtCtx: Context = ctx.fresh.setFreshGADTBounds.setNewScope + val gadtCtx: Context = ctx.fresh.setFreshGADTBounds def caseRest(pat: Tree)(using Context) = { val pt1 = instantiateMatchTypeProto(pat, pt) match { @@ -2150,7 +2208,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } val pat1 = indexPattern(tree).transform(pat) val guard1 = typedExpr(tree.guard, defn.BooleanType) - var body1 = ensureNoLocalRefs(typedExpr(tree.body, pt1), pt1, ctx.scope.toList) + var body1 = ensureNoLocalRefs( + typedExpr(tree.body, pt1)(using ctx.addNotNullInfo(guard1.notNullInfoIf(true))), + pt1, ctx.scope.toList) if ctx.gadt.isNarrowing then // Store GADT constraint to later retrieve it (in PostTyper, for now). 
// GADT constraints are necessary to correctly check bounds of type app, @@ -2158,15 +2218,18 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // will end up taking too much memory. If it does, we should just limit // how much GADT constraints we infer - it's always sound to infer less. pat1.putAttachment(InferredGadtConstraints, ctx.gadt) - if (pt1.isValueType) // insert a cast if body does not conform to expected type if we disregard gadt bounds + if pt1.isValueType then // insert a cast if body does not conform to expected type if we disregard gadt bounds body1 = body1.ensureConforms(pt1)(using originalCtx) - assignType(cpy.CaseDef(tree)(pat1, guard1, body1), pat1, body1) + val nni = pat1.notNullInfo + .seq(guard1.notNullInfoIf(true)) + .seq(body1.notNullInfo) + assignType(cpy.CaseDef(tree)(pat1, guard1, body1), pat1, body1).withNotNullInfo(nni) } val pat1 = typedPattern(tree.pat, wideSelType)(using gadtCtx) caseRest(pat1)( using Nullables.caseContext(sel, pat1)( - using gadtCtx)) + using gadtCtx.fresh.setNewScope)) } def typedLabeled(tree: untpd.Labeled)(using Context): Labeled = { @@ -2221,7 +2284,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // because we do not know the internal type params and method params. // Hence no adaptation is possible, and we assume WildcardType as prototype. (from, proto) - val expr1 = typedExpr(tree.expr orElse untpd.unitLiteral.withSpan(tree.span), proto) + val expr1 = typedExpr(tree.expr orElse untpd.syntheticUnitLiteral.withSpan(tree.span), proto) assignType(cpy.Return(tree)(expr1, from)) end typedReturn @@ -2265,13 +2328,27 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def typedTry(tree: untpd.Try, pt: Type)(using Context): Try = { val expr2 :: cases2x = harmonic(harmonize, pt) { - val cases1 = typedCases(tree.cases, EmptyTree, defn.ThrowableType, pt.dropIfProto) - val expr1 = typed(addCanThrowCapabilities(tree.expr, cases1), pt.dropIfProto) + // We want to type check tree.expr first to comput NotNullInfo, but `addCanThrowCapabilities` + // uses the types of patterns in `tree.cases` to determine the capabilities. + // Hence, we create a copy of cases with empty body and type check that first, then type check + // the rest of the tree in order. + // It may seem that invalid references can be created if the type of the pattern contains + // type binds, but this is not a valid `CanThrow` capability (checked by `addCanThrowCapabilities`), + // so it is not a problem. 
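A sketch of why the body's not-null facts are retracted before the cases and the finalizer are typed (hypothetical code, assuming explicit-nulls flow typing; `read` is a stand-in helper): the body may throw at any point, so facts it established need not hold in a handler.

  def read(): String = "data"                  // hypothetical helper standing in for real I/O
  def demo(): Unit =
    var s: String | Null = null
    try
      s = read()
      println(s.length)                        // ok: s is known non-null after the assignment
    catch case _: Exception =>
      // the exception may have been thrown before the assignment completed,
      // so the not-null fact about s does not hold here
      if s != null then println(s.length)
    finally
      if s != null then println("done")        // the same retraction applies in the finalizer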
+ val casesEmptyBody1 = tree.cases.mapconserve(cpy.CaseDef(_)(body = EmptyTree)) + val casesEmptyBody2 = typedCases(casesEmptyBody1, EmptyTree, defn.ThrowableType, WildcardType) + val expr1 = typed(addCanThrowCapabilities(tree.expr, casesEmptyBody2), pt.dropIfProto) + val casesCtx = ctx.addNotNullInfo(expr1.notNullInfo.retractedInfo) + val cases1 = typedCases(tree.cases, EmptyTree, defn.ThrowableType, pt.dropIfProto)(using casesCtx) expr1 :: cases1 }: @unchecked - val finalizer1 = typed(tree.finalizer, defn.UnitType) val cases2 = cases2x.asInstanceOf[List[CaseDef]] - assignType(cpy.Try(tree)(expr2, cases2, finalizer1), expr2, cases2) + + var nni = expr2.notNullInfo.retractedInfo + if cases2.nonEmpty then nni = nni.seq(cases2.map(_.notNullInfo.retractedInfo).reduce(_.alt(_))) + val finalizer1 = typed(tree.finalizer, defn.UnitType)(using ctx.addNotNullInfo(nni)) + nni = nni.seq(finalizer1.notNullInfo) + assignType(cpy.Try(tree)(expr2, cases2, finalizer1), expr2, cases2).withNotNullInfo(nni) } def typedTry(tree: untpd.ParsedTry, pt: Type)(using Context): Try = @@ -2289,7 +2366,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val res = Throw(expr1).withSpan(tree.span) if Feature.ccEnabled && !cap.isEmpty && !ctx.isAfterTyper then // Record access to the CanThrow capabulity recovered in `cap` by wrapping - // the type of the `throw` (i.e. Nothing) in a `@requiresCapability` annotatoon. + // the type of the `throw` (i.e. Nothing) in a `@requiresCapability` annotation. Typed(res, TypeTree( AnnotatedType(res.tpe, @@ -2395,7 +2472,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val TypeDef(_, impl: Template) = typed(refineClsDef): @unchecked val refinements1 = impl.body val seen = mutable.Set[Symbol]() - for (refinement <- refinements1) { // TODO: get clarity whether we want to enforce these conditions + for refinement <- refinements1 do // TODO: get clarity whether we want to enforce these conditions typr.println(s"adding refinement $refinement") checkRefinementNonCyclic(refinement, refineCls, seen) val rsym = refinement.symbol @@ -2409,7 +2486,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val member = refineCls.info.member(rsym.name) if (member.isOverloaded) report.error(OverloadInRefinement(rsym), refinement.srcPos) - } assignType(cpy.RefinedTypeTree(tree)(tpt1, refinements1), tpt1, refinements1, refineCls) } @@ -2475,12 +2551,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer (arg, tparamBounds) else (arg, WildcardType) - if (tpt1.symbol.isClass) - tparam match { - case tparam: Symbol => - tparam.ensureCompleted() // This is needed to get the test `compileParSetSubset` to work - case _ => - } if (desugaredArg.isType) arg match { case untpd.WildcardTypeBoundsTree() @@ -2569,7 +2639,19 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer report.error(MatchTypeScrutineeCannotBeHigherKinded(sel1Tpe), sel1.srcPos) val pt1 = if (bound1.isEmpty) pt else bound1.tpe val cases1 = tree.cases.mapconserve(typedTypeCase(_, sel1Tpe, pt1)) - assignType(cpy.MatchTypeTree(tree)(bound1, sel1, cases1), bound1, sel1, cases1) + val bound2 = if tree.bound.isEmpty then + val lub = cases1.foldLeft(defn.NothingType: Type): (acc, case1) => + if !acc.exists then NoType + else if case1.body.tpe.isProvisional then NoType + else acc | case1.body.tpe + if lub.exists then + if !lub.isAny then + val msg = em"Match type upper bound inferred as $lub, where previously it was defaulted to Any" + warnOnMigration(msg, tree, 
`3.6`) + TypeTree(lub, inferred = true) + else bound1 + else bound1 + assignType(cpy.MatchTypeTree(tree)(bound2, sel1, cases1), bound2, sel1, cases1) } def typedByNameTypeTree(tree: untpd.ByNameTypeTree)(using Context): ByNameTypeTree = tree.result match @@ -2738,7 +2820,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _ => typed(rhs) - def typedValDef(vdef: untpd.ValDef, sym: Symbol)(using Context): Tree = { + def typedValDef(vdef: untpd.ValDef, sym: Symbol)(using Context): Tree = ctx.profiler.onTypedDef(sym) { val ValDef(name, tpt, _) = vdef checkNonRootName(vdef.name, vdef.nameSpan) completeAnnotations(vdef, sym) @@ -2766,7 +2848,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer sym.owner.info.decls.openForMutations.unlink(sym) EmptyTree - def typedDefDef(ddef: untpd.DefDef, sym: Symbol)(using Context): Tree = if !sym.info.exists then retractDefDef(sym) else { + def typedDefDef(ddef: untpd.DefDef, sym: Symbol)(using Context): Tree = if !sym.info.exists then retractDefDef(sym) else ctx.profiler.onTypedDef(sym) { // TODO: - Remove this when `scala.language.experimental.erasedDefinitions` is no longer experimental. // - Modify signature to `erased def erasedValue[T]: T` @@ -2864,21 +2946,24 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val ddef2 = assignType(cpy.DefDef(ddef)(name, paramss1, tpt1, rhs1), sym) postProcessInfo(ddef2, sym) - ddef2.setDefTree - //todo: make sure dependent method types do not depend on implicits or by-name params + //todo: make sure dependent method types do not depend on implicits or by-name params } /** (1) Check that the signature of the class member does not return a repeated parameter type * (2) If info is an erased class, set erased flag of member * (3) Check that erased classes are not parameters of polymorphic functions. + * (4) Make sure the definition's symbol is `sym`. + * (5) Set the `defTree` of `sym` to be `mdef`. 
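A sketch of the bound inference added to typedMatchTypeTree above (names are illustrative): when no explicit upper bound is written, the bound is now derived from the case bodies instead of defaulting to Any.

  type Widen[X] = X match
    case Int    => Long
    case Double => Double
  // The inferred upper bound is roughly Long | Double rather than Any;
  // spelling the bound out keeps the previous behaviour:
  type WidenOld[X] <: Any = X match
    case Int    => Long
    case Double => Double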
*/ - private def postProcessInfo(mdef: MemberDef, sym: Symbol)(using Context): Unit = + private def postProcessInfo(mdef: MemberDef, sym: Symbol)(using Context): MemberDef = if (!sym.isOneOf(Synthetic | InlineProxy | Param) && sym.info.finalResultType.isRepeatedParam) report.error(em"Cannot return repeated parameter type ${sym.info.finalResultType}", sym.srcPos) if !sym.is(Module) && !sym.isConstructor && sym.info.finalResultType.isErasedClass then sym.setFlag(Erased) + mdef.ensureHasSym(sym) + mdef.setDefTree - def typedTypeDef(tdef: untpd.TypeDef, sym: Symbol)(using Context): Tree = { + def typedTypeDef(tdef: untpd.TypeDef, sym: Symbol)(using Context): Tree = ctx.profiler.onTypedDef(sym) { val TypeDef(name, rhs) = tdef completeAnnotations(tdef, sym) val rhs1 = tdef.rhs match @@ -2892,7 +2977,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer assignType(cpy.TypeDef(tdef)(name, rhs1), sym) } - def typedClassDef(cdef: untpd.TypeDef, cls: ClassSymbol)(using Context): Tree = { + def typedClassDef(cdef: untpd.TypeDef, cls: ClassSymbol)(using Context): Tree = ctx.profiler.onTypedDef(cls) { if (!cls.info.isInstanceOf[ClassInfo]) return EmptyTree.assertingErrorsReported val TypeDef(name, impl @ Template(constr, _, self, _)) = cdef: @unchecked @@ -2989,13 +3074,13 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer ( if sym.isType then TypeDef(sym.asType) else if sym.is(Method) then DefDef(sym.asTerm) else ValDef(sym.asTerm) - ).withSpan(impl.span.startPos) + ).withSpan(impl.span.startPos).withAttachment(RefinementFromParent, ()) body ++ refinements case None => body /** Implement givens that were declared with a `deferred` rhs. - * The a given value matching the declared type is searched in a + * The given value matching the declared type is searched in a * context directly enclosing the current class, in which all given * parameters of the current class are also defined. */ @@ -3012,6 +3097,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer false else true + def willBeimplementedInParentClass(m: TermRef) = + val superCls = cls.superClass + superCls.exists && superCls.asClass.baseClasses.contains(m.symbol.owner) + def givenImpl(mbr: TermRef): ValDef = val dcl = mbr.symbol val target = dcl.info.asSeenFrom(cls.thisType, dcl.owner) @@ -3041,6 +3130,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer cls.thisType.implicitMembers //.showing(i"impl def givens for $cls/$result") .filter(_.symbol.isAllOf(DeferredGivenFlags, butNot = Param)) + .filter(!willBeimplementedInParentClass(_)) // only implement the given in the topmost class //.showing(i"impl def filtered givens for $cls/$result") .filter(isGivenValue) .map(givenImpl) @@ -3308,6 +3398,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer /** Translate tuples of all arities */ def typedTuple(tree: untpd.Tuple, pt: Type)(using Context): Tree = val tree1 = desugar.tuple(tree, pt) + checkDeprecatedAssignmentSyntax(tree) if tree1 ne tree then typed(tree1, pt) else val arity = tree.trees.length @@ -3333,6 +3424,23 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val resTpe = TypeOps.nestedPairs(elemTpes) app1.cast(resTpe) + /** Checks if `tree` is a named tuple with one element that could be + * interpreted as an assignment, such as `(x = 1)`. If so, issues a warning. 
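The ambiguity this check warns about, in a small sketch (illustrative; the exact behaviour depends on the named-tuples feature and the migration settings):

  def demo(): Unit =
    var count = 0
    (count = 1)   // ambiguous: an assignment in parentheses also reads as a named 1-tuple; a migration warning is issued
    { count = 1 } // the automatic patch rewrites the parentheses to braces to keep the assignment reading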
+ */ + def checkDeprecatedAssignmentSyntax(tree: untpd.Tuple)(using Context): Unit = + tree.trees match + case List(NamedArg(name, value)) => + val tmpCtx = ctx.fresh.setNewTyperState() + typedAssign(untpd.Assign(untpd.Ident(name), value), WildcardType)(using tmpCtx) + if !tmpCtx.reporter.hasErrors then + // If there are no errors typing the above, then the named tuple is + // ambiguous and we issue a warning. + report.migrationWarning(DeprecatedAssignmentSyntax(name, value), tree.srcPos) + if MigrationVersion.AmbiguousNamedTupleSyntax.needsPatch then + patch(tree.source, Span(tree.span.start, tree.span.start + 1), "{") + patch(tree.source, Span(tree.span.end - 1, tree.span.end), "}") + case _ => () + /** Retrieve symbol attached to given tree */ protected def retrieveSym(tree: untpd.Tree)(using Context): Symbol = tree.removeAttachment(SymOfTree) match { case Some(sym) => @@ -3874,7 +3982,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if isExtension then return found else checkImplicitConversionUseOK(found, selProto) - return withoutMode(Mode.ImplicitsEnabled)(typedSelect(tree, pt, found)) + return withoutMode(Mode.ImplicitsEnabled)(typedSelectWithAdapt(tree, pt, found)) case failure: SearchFailure => if failure.isAmbiguous then return @@ -4473,7 +4581,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // so will take the code path that decides on inlining val tree1 = adapt(tree, WildcardType, locked) checkStatementPurity(tree1)(tree, ctx.owner, isUnitExpr = true) - if (!ctx.isAfterTyper && !tree.isInstanceOf[Inlined] && ctx.settings.WvalueDiscard.value && !isThisTypeResult(tree)) { + if (!ctx.isAfterTyper && !tree.isInstanceOf[Inlined] && ctx.settings.Whas.valueDiscard && !isThisTypeResult(tree)) { report.warning(ValueDiscarding(tree.tpe), tree.srcPos) } return tpd.Block(tree1 :: Nil, unitLiteral) @@ -4481,7 +4589,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // convert function literal to SAM closure tree match { - case closure(Nil, id @ Ident(nme.ANON_FUN), _) + case blockEndingInClosure(Nil, id @ Ident(nme.ANON_FUN), _) if defn.isFunctionNType(wtp) && !defn.isFunctionNType(pt) => pt match { case SAMType(samMeth, samParent) @@ -4490,6 +4598,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // but this prevents case blocks from implementing polymorphic partial functions, // since we do not know the result parameter a priori. Have to wait until the // body is typechecked. + // Note: Need to come back to this when we clean up SAMs/PartialFunctions + // These conditions would most likely be affected by a precise spec. 
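A sketch of the adaptation performed via toSAM above (illustrative; `Listener` and `register` are made-up names): a function literal first typed as a FunctionN closure is converted to the expected SAM type.

  trait Listener:
    def onEvent(id: Int): Unit
  def register(l: Listener): Unit = ()       // hypothetical API used only for illustration
  def demo(): Unit =
    register((id: Int) => println(id))       // the closure is adapted to the SAM type Listener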
return toSAM(tree, samParent) case _ => } @@ -4514,7 +4624,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def recover(failure: SearchFailureType) = if canDefineFurther(wtp) || canDefineFurther(pt) then readapt(tree) - else err.typeMismatch(tree, pt, failure) + else + val tree1 = healAdapt(tree, pt) + if tree1 ne tree then readapt(tree1) + else err.typeMismatch(tree, pt, failure) pt match case _: SelectionProto => @@ -4644,7 +4757,29 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer var typeArgs = tree match case Select(qual, nme.CONSTRUCTOR) => qual.tpe.widenDealias.argTypesLo.map(TypeTree(_)) case _ => Nil - if typeArgs.isEmpty then typeArgs = constrained(poly, tree)._2.map(_.wrapInTypeTree(tree)) + if typeArgs.isEmpty then + val poly1 = tree match + case Select(qual, nme.apply) => qual.tpe.widen match + case defn.PolyFunctionOf(_) => + // Given a poly function, like the one in i6682a: + // val v = [T] => (y:T) => (x:y.type) => 3 + // It's possible to apply `v(v)` which extends to: + // v.apply[?T](v) + // Requiring the circular constraint `v <: ?T`, + // (because type parameter T occurs in v's type). + // So we create a fresh copy of the outer + // poly method type, so we now extend to: + // v.apply[?T'](v) + // Where `?T'` is a type var for a T' type parameter, + // leading to the non-circular `v <: ?T'` constraint. + // + // This also happens in `assignType(tree: untpd.TypeApply, ..)` + // to avoid any type arguments, containing the type lambda, + // being applied to the very same type lambda. + poly.newLikeThis(poly.paramNames, poly.paramInfos, poly.resType) + case _ => poly + case _ => poly + typeArgs = constrained(poly1, tree)._2.map(_.wrapInTypeTree(tree)) convertNewGenericArray(readapt(tree.appliedToTypeTrees(typeArgs))) case wtp => val isStructuralCall = wtp.isValueType && isStructuralTermSelectOrApply(tree) @@ -4663,6 +4798,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } } + /** Hook for inheriting Typers to do a last-effort adaptation. If a different + * tree is returned, we will re-adapt that one, otherwise we issue a type error afterwards. 
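The self-application scenario described in the comment above, based on the referenced test i6682a (sketch):

  val v = [T] => (y: T) => (x: y.type) => 3
  val w = v(v)   // elaborates to v.apply[?T'](v) with a fresh copy of the poly type,
                 // so the constraint is v <: ?T' rather than the circular v <: ?T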
+`` + */ + protected def healAdapt(tree: Tree, pt: Type)(using Context): Tree = tree + /** True if this inline typer has already issued errors */ def hasInliningErrors(using Context): Boolean = false diff --git a/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala b/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala index 0c63f5b4ecb1..264d0f170769 100644 --- a/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala +++ b/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala @@ -43,7 +43,7 @@ class TyperPhase(addRootImports: Boolean = true) extends Phase { def typeCheck(using Context)(using subphase: SubPhase): Boolean = monitor(subphase.name) { val unit = ctx.compilationUnit try - if !unit.suspended then + if !unit.suspended then ctx.profiler.onUnit(ctx.phase, unit): unit.tpdTree = ctx.typer.typedExpr(unit.untpdTree) typr.println("typed: " + unit.source) record("retained untyped trees", unit.untpdTree.treeSize) diff --git a/compiler/src/dotty/tools/dotc/util/Signatures.scala b/compiler/src/dotty/tools/dotc/util/Signatures.scala index 3f7d7dd39531..3b45d8f2fa51 100644 --- a/compiler/src/dotty/tools/dotc/util/Signatures.scala +++ b/compiler/src/dotty/tools/dotc/util/Signatures.scala @@ -499,7 +499,7 @@ object Signatures { def isSyntheticEvidence(name: String) = name.startsWith(NameKinds.ContextBoundParamName.separator) - && symbol.paramSymss.flatten.find(_.name.show == name).exists(_.flags.is(Flags.Implicit)) + && symbol.paramSymss.flatten.find(_.name.show == name).exists(_.flags.isOneOf(Flags.GivenOrImplicit)) def toTypeParam(tpe: PolyType): List[Param] = val evidenceParams = (tpe.paramNamess.flatten zip tpe.paramInfoss.flatten).flatMap: @@ -651,7 +651,7 @@ object Signatures { * * @param err The error message to inspect. * @param params The parameters that were given at the call site. - * @param alreadyCurried Index of paramss we are currently in. + * @param paramssIndex Index of paramss we are currently in. * * @return A pair composed of the index of the best alternative (0 if no alternatives * were found), and the list of alternatives. diff --git a/compiler/src/dotty/tools/dotc/util/SourceFile.scala b/compiler/src/dotty/tools/dotc/util/SourceFile.scala index 9da4f58f2deb..3ea43d16a7c8 100644 --- a/compiler/src/dotty/tools/dotc/util/SourceFile.scala +++ b/compiler/src/dotty/tools/dotc/util/SourceFile.scala @@ -119,7 +119,8 @@ class SourceFile(val file: AbstractFile, computeContent: => Array[Char]) extends * For regular source files, simply return the argument. 
*/ def positionInUltimateSource(position: SourcePosition): SourcePosition = - SourcePosition(underlying, position.span shift start) + if isSelfContained then position // return the argument + else SourcePosition(underlying, position.span shift start) private def calculateLineIndicesFromContents() = { val cs = content() diff --git a/compiler/src/dotty/tools/dotc/util/SourcePosition.scala b/compiler/src/dotty/tools/dotc/util/SourcePosition.scala index 904704b2349c..a7358755043c 100644 --- a/compiler/src/dotty/tools/dotc/util/SourcePosition.scala +++ b/compiler/src/dotty/tools/dotc/util/SourcePosition.scala @@ -79,7 +79,6 @@ extends SrcPos, interfaces.SourcePosition, Showable { rec(this) } - override def toString: String = s"${if (source.exists) source.file.toString else "(no source)"}:$span" diff --git a/compiler/src/dotty/tools/io/AbstractFile.scala b/compiler/src/dotty/tools/io/AbstractFile.scala index 233b1ca8fb62..ee72297c2a4f 100644 --- a/compiler/src/dotty/tools/io/AbstractFile.scala +++ b/compiler/src/dotty/tools/io/AbstractFile.scala @@ -136,12 +136,6 @@ abstract class AbstractFile extends Iterable[AbstractFile] { /** Does this abstract file represent something which can contain classfiles? */ def isClassContainer: Boolean = isDirectory || (jpath != null && ext.isJarOrZip) - /** Create a file on disk, if one does not exist already. */ - def create(): Unit - - /** Delete the underlying file or directory (recursively). */ - def delete(): Unit - /** Is this abstract file a directory? */ def isDirectory: Boolean diff --git a/compiler/src/dotty/tools/io/FileWriters.scala b/compiler/src/dotty/tools/io/FileWriters.scala index b6338082c696..5fdf43cfe8e3 100644 --- a/compiler/src/dotty/tools/io/FileWriters.scala +++ b/compiler/src/dotty/tools/io/FileWriters.scala @@ -226,7 +226,7 @@ object FileWriters { if (file.isInstanceOf[JarArchive]) { val jarCompressionLevel = ctx.settings.jarCompressionLevel // Writing to non-empty JAR might be an undefined behaviour, e.g. in case if other files where - // created using `AbstractFile.bufferedOutputStream`instead of JarWritter + // created using `AbstractFile.bufferedOutputStream`instead of JarWriter val jarFile = file.underlyingSource.getOrElse{ throw new IllegalStateException("No underlying source for jar") } diff --git a/compiler/src/dotty/tools/io/JarArchive.scala b/compiler/src/dotty/tools/io/JarArchive.scala index 728f89966af0..c396699f93b3 100644 --- a/compiler/src/dotty/tools/io/JarArchive.scala +++ b/compiler/src/dotty/tools/io/JarArchive.scala @@ -10,11 +10,13 @@ import scala.jdk.CollectionConverters.* * This class implements an [[AbstractFile]] backed by a jar * that be can used as the compiler's output directory. 
*/ -class JarArchive private (root: Directory) extends PlainDirectory(root) { +class JarArchive private (val jarPath: Path, root: Directory) extends PlainDirectory(root) { def close(): Unit = this.synchronized(jpath.getFileSystem().close()) override def exists: Boolean = jpath.getFileSystem().isOpen() && super.exists def allFileNames(): Iterator[String] = java.nio.file.Files.walk(jpath).iterator().asScala.map(_.toString) + + override def toString: String = jarPath.toString } object JarArchive { @@ -40,6 +42,6 @@ object JarArchive { } } val root = fs.getRootDirectories().iterator.next() - new JarArchive(Directory(root)) + new JarArchive(path, Directory(root)) } } diff --git a/compiler/src/dotty/tools/io/NoAbstractFile.scala b/compiler/src/dotty/tools/io/NoAbstractFile.scala index 13c2c6851d2b..bef045e290a5 100644 --- a/compiler/src/dotty/tools/io/NoAbstractFile.scala +++ b/compiler/src/dotty/tools/io/NoAbstractFile.scala @@ -17,8 +17,6 @@ import java.io.InputStream object NoAbstractFile extends AbstractFile { def absolute: AbstractFile = this def container: AbstractFile = this - def create(): Unit = ??? - def delete(): Unit = ??? def jpath: JPath = null def input: InputStream = null def isDirectory: Boolean = false diff --git a/compiler/src/dotty/tools/io/PlainFile.scala b/compiler/src/dotty/tools/io/PlainFile.scala index acef191d3072..a6a39d9ff3eb 100644 --- a/compiler/src/dotty/tools/io/PlainFile.scala +++ b/compiler/src/dotty/tools/io/PlainFile.scala @@ -13,9 +13,8 @@ import java.nio.file.{InvalidPathException, Paths} /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath) { - override def isDirectory: Boolean = true + override val isDirectory: Boolean = true override def iterator(): Iterator[PlainFile] = givenPath.list.filter(_.exists).map(new PlainFile(_)) - override def delete(): Unit = givenPath.deleteRecursively() } /** This class implements an abstract file backed by a File. @@ -78,7 +77,7 @@ class PlainFile(val givenPath: Path) extends AbstractFile { } /** Is this abstract file a directory? */ - def isDirectory: Boolean = givenPath.isDirectory + val isDirectory: Boolean = givenPath.isDirectory // cached for performance on Windows /** Returns the time that this abstract file was last modified. */ def lastModified: Long = givenPath.lastModified.toMillis @@ -113,14 +112,6 @@ class PlainFile(val givenPath: Path) extends AbstractFile { null } - /** Does this abstract file denote an existing file? */ - def create(): Unit = if (!exists) givenPath.createFile() - - /** Delete the underlying file or directory (recursively). */ - def delete(): Unit = - if (givenPath.isFile) givenPath.delete() - else if (givenPath.isDirectory) givenPath.toDirectory.deleteRecursively() - /** Returns a plain file with the given name. It does not * check that it exists. */ diff --git a/compiler/src/dotty/tools/io/VirtualDirectory.scala b/compiler/src/dotty/tools/io/VirtualDirectory.scala index 157f63a2ac1a..949f2d0e61dd 100644 --- a/compiler/src/dotty/tools/io/VirtualDirectory.scala +++ b/compiler/src/dotty/tools/io/VirtualDirectory.scala @@ -34,12 +34,6 @@ extends AbstractFile { override def input: InputStream = sys.error("directories cannot be read") override def output: OutputStream = sys.error("directories cannot be written") - /** Does this abstract file denote an existing file? 
*/ - def create(): Unit = { unsupported() } - - /** Delete the underlying file or directory (recursively). */ - def delete(): Unit = { unsupported() } - /** Returns an abstract file with the given name. It does not * check that it exists. */ diff --git a/compiler/src/dotty/tools/io/VirtualFile.scala b/compiler/src/dotty/tools/io/VirtualFile.scala index 9d290a9b0e6a..6fb9859503f2 100644 --- a/compiler/src/dotty/tools/io/VirtualFile.scala +++ b/compiler/src/dotty/tools/io/VirtualFile.scala @@ -82,12 +82,6 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF Iterator.empty } - /** Does this abstract file denote an existing file? */ - def create(): Unit = unsupported() - - /** Delete the underlying file or directory (recursively). */ - def delete(): Unit = unsupported() - /** * Returns the abstract file in this abstract directory with the * specified name. If there is no such file, returns null. The diff --git a/compiler/src/dotty/tools/io/ZipArchive.scala b/compiler/src/dotty/tools/io/ZipArchive.scala index 9af935690ffc..a23bde8faaed 100644 --- a/compiler/src/dotty/tools/io/ZipArchive.scala +++ b/compiler/src/dotty/tools/io/ZipArchive.scala @@ -61,8 +61,6 @@ abstract class ZipArchive(override val jpath: JPath, release: Option[String]) ex def isDirectory: Boolean = true def lookupName(name: String, directory: Boolean): AbstractFile = unsupported() def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile = unsupported() - def create(): Unit = unsupported() - def delete(): Unit = unsupported() def output: OutputStream = unsupported() def container: AbstractFile = unsupported() def absolute: AbstractFile = unsupported() diff --git a/compiler/src/dotty/tools/repl/JLineTerminal.scala b/compiler/src/dotty/tools/repl/JLineTerminal.scala index 294f0a331ec2..e4ac1626525e 100644 --- a/compiler/src/dotty/tools/repl/JLineTerminal.scala +++ b/compiler/src/dotty/tools/repl/JLineTerminal.scala @@ -21,11 +21,16 @@ class JLineTerminal extends java.io.Closeable { // Logger.getLogger("org.jline").setLevel(Level.FINEST) private val terminal = - TerminalBuilder.builder() - .dumb(dumbTerminal) // fail early if not able to create a terminal - .build() + var builder = TerminalBuilder.builder() + if System.getenv("TERM") == "dumb" then + // Force dumb terminal if `TERM` is `"dumb"`. + // Note: the default value for the `dumb` option is `null`, which allows + // JLine to fall back to a dumb terminal. This is different than `true` or + // `false` and can't be set using the `dumb` setter. + // This option is used at https://github.com/jline/jline3/blob/894b5e72cde28a551079402add4caea7f5527806/terminal/src/main/java/org/jline/terminal/TerminalBuilder.java#L528. 
+ builder.dumb(true) + builder.build() private val history = new DefaultHistory - def dumbTerminal = Option(System.getenv("TERM")) == Some("dumb") private def blue(str: String)(using Context) = if (ctx.settings.color.value != "never") Console.BLUE + str + Console.RESET diff --git a/compiler/src/dotty/tools/repl/ParseResult.scala b/compiler/src/dotty/tools/repl/ParseResult.scala index b9139343bca1..24a624173050 100644 --- a/compiler/src/dotty/tools/repl/ParseResult.scala +++ b/compiler/src/dotty/tools/repl/ParseResult.scala @@ -122,7 +122,7 @@ object ParseResult { private def parseStats(using Context): List[untpd.Tree] = { val parser = new Parser(ctx.source) - val stats = parser.blockStatSeq() + val stats = parser.blockStatSeq(outermost = true) parser.accept(Tokens.EOF) stats } diff --git a/compiler/src/dotty/tools/repl/Rendering.scala b/compiler/src/dotty/tools/repl/Rendering.scala index d5688d1038b4..c127cc959e25 100644 --- a/compiler/src/dotty/tools/repl/Rendering.scala +++ b/compiler/src/dotty/tools/repl/Rendering.scala @@ -115,7 +115,8 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None): val objectName = sym.owner.fullName.encode.toString.stripSuffix("$") val resObj: Class[?] = Class.forName(objectName, true, classLoader()) val symValue = resObj - .getDeclaredMethods.find(_.getName == sym.name.encode.toString) + .getDeclaredMethods + .find(method => method.getName == sym.name.encode.toString && method.getParameterCount == 0) .flatMap(result => rewrapValueClass(sym.info.classSymbol, result.invoke(null))) symValue .filter(_ => sym.is(Flags.Method) || sym.info != defn.UnitType) diff --git a/compiler/src/dotty/tools/repl/ReplCompiler.scala b/compiler/src/dotty/tools/repl/ReplCompiler.scala index d69173cb6d88..f909abfc129a 100644 --- a/compiler/src/dotty/tools/repl/ReplCompiler.scala +++ b/compiler/src/dotty/tools/repl/ReplCompiler.scala @@ -159,7 +159,7 @@ class ReplCompiler extends Compiler: def wrap(trees: List[untpd.Tree]): untpd.PackageDef = { import untpd.* - val valdef = ValDef("expr".toTermName, TypeTree(), Block(trees, unitLiteral).withSpan(Span(0, expr.length))) + val valdef = ValDef("expr".toTermName, TypeTree(), Block(trees, syntheticUnitLiteral).withSpan(Span(0, expr.length))) val tmpl = Template(emptyConstructor, Nil, Nil, EmptyValDef, List(valdef)) val wrapper = TypeDef("$wrapper".toTypeName, tmpl) .withMods(Modifiers(Final)) diff --git a/compiler/src/dotty/tools/repl/ReplDriver.scala b/compiler/src/dotty/tools/repl/ReplDriver.scala index 5bdf175c522c..589ea6c3c677 100644 --- a/compiler/src/dotty/tools/repl/ReplDriver.scala +++ b/compiler/src/dotty/tools/repl/ReplDriver.scala @@ -87,8 +87,21 @@ class ReplDriver(settings: Array[String], setupRootCtx(this.settings ++ settings, rootCtx) } + private val incompatibleOptions: Seq[String] = Seq( + initCtx.settings.YbestEffort.name, + initCtx.settings.YwithBestEffortTasty.name + ) + private def setupRootCtx(settings: Array[String], rootCtx: Context) = { - setup(settings, rootCtx) match + val incompatible = settings.intersect(incompatibleOptions) + val filteredSettings = + if !incompatible.isEmpty then + inContext(rootCtx) { + out.println(i"Options incompatible with repl will be ignored: ${incompatible.mkString(", ")}") + } + settings.filter(!incompatible.contains(_)) + else settings + setup(filteredSettings, rootCtx) match case Some((files, ictx)) => inContext(ictx) { shouldStart = true if files.nonEmpty then out.println(i"Ignoring spurious arguments: $files%, %") diff --git 
a/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala b/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala index 44886d59ac12..3790174526b3 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala @@ -11,6 +11,7 @@ import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.core.StdNames.nme import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.util.optional +import dotty.tools.dotc.ast.TreeTypeMap /** Matches a quoted tree against a quoted pattern tree. * A quoted pattern tree may have type and term holes in addition to normal terms. @@ -26,7 +27,7 @@ import dotty.tools.dotc.util.optional * - `isClosedUnder(x1, .., xn)('{e})` returns true if and only if all the references in `e` to names defined in the pattern are contained in the set `{x1, ... xn}`. * - `lift(x1, .., xn)('{e})` returns `(y1, ..., yn) => [xi = $yi]'{e}` where `yi` is an `Expr` of the type of `xi`. * - `withEnv(x1 -> y1, ..., xn -> yn)(matching)` evaluates matching recording that `xi` is equivalent to `yi`. - * - `matched` denotes that the the match succeeded and `matched('{e})` denotes that a match succeeded and extracts `'{e}` + * - `matched` denotes that the match succeeded and `matched('{e})` denotes that a match succeeded and extracts `'{e}` * - `&&&` matches if both sides match. Concatenates the extracted expressions of both sides. * * Note: that not all quoted terms bellow are valid expressions @@ -112,16 +113,17 @@ class QuoteMatcher(debug: Boolean) { /** Sequence of matched expressions. * These expressions are part of the scrutinee and will be bound to the quote pattern term splices. */ - type MatchingExprs = Seq[MatchResult] + private type MatchingExprs = Seq[MatchResult] - /** A map relating equivalent symbols from the scrutinee and the pattern + /** TODO-18271: update + * A map relating equivalent symbols from the scrutinee and the pattern * For example in * ``` * '{val a = 4; a * a} match case '{ val x = 4; x * x } * ``` * when matching `a * a` with `x * x` the environment will contain `Map(a -> x)`. */ - private type Env = Map[Symbol, Symbol] + private case class Env(val termEnv: Map[Symbol, Symbol], val typeEnv: Map[Symbol, Symbol]) private def withEnv[T](env: Env)(body: Env ?=> T): T = body(using env) @@ -132,7 +134,7 @@ class QuoteMatcher(debug: Boolean) { val (pat1, typeHoles, ctx1) = instrumentTypeHoles(pattern) inContext(ctx1) { optional { - given Env = Map.empty + given Env = new Env(Map.empty, Map.empty) scrutinee =?= pat1 }.map { matchings => lazy val spliceScope = SpliceScope.getCurrent @@ -236,6 +238,26 @@ class QuoteMatcher(debug: Boolean) { case _ => None end TypeTreeTypeTest + /* Some of method symbols in arguments of higher-order term hole are eta-expanded. + * e.g. + * g: (Int) => Int + * => { + * def $anonfun(y: Int): Int = g(y) + * closure($anonfun) + * } + * + * f: (using Int) => Int + * => f(using x) + * This function restores the symbol of the original method from + * the eta-expanded function. 
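A sketch of the higher-order hole this helper supports (hypothetical macro code, not from the patch): the identifier captured by the hole may come back eta-expanded, and getCapturedIdent recovers the original reference.

  import scala.quoted.*
  def simplify(x: Expr[Int])(using Quotes): Expr[Int] =
    x match
      case '{ val y: Int = $a; $f(y): Int } =>   // $f(y) is a higher-order hole; y is abstracted away
        '{ $f($a) }                              // f: Expr[Int => Int], applied to the extracted binding
      case _ => x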
+ */ + def getCapturedIdent(arg: Tree)(using Context): Ident = + arg match + case id: Ident => id + case Apply(fun, _) => getCapturedIdent(fun) + case Block((ddef: DefDef) :: _, _: Closure) => getCapturedIdent(ddef.rhs) + case Typed(expr, _) => getCapturedIdent(expr) + def runMatch(): optional[MatchingExprs] = pattern match /* Term hole */ @@ -244,14 +266,14 @@ class QuoteMatcher(debug: Boolean) { if patternHole.symbol.eq(defn.QuotedRuntimePatterns_patternHole) && tpt2.tpe.derivesFrom(defn.RepeatedParamClass) => scrutinee match - case Typed(s, tpt1) if s.tpe <:< tpt.tpe => matched(scrutinee) + case Typed(s, tpt1) if isSubTypeUnderEnv(s, tpt) => matched(scrutinee) case _ => notMatched /* Term hole */ // Match a scala.internal.Quoted.patternHole and return the scrutinee tree case TypeApply(patternHole, tpt :: Nil) if patternHole.symbol.eq(defn.QuotedRuntimePatterns_patternHole) && - scrutinee.tpe <:< tpt.tpe => + isSubTypeUnderEnv(scrutinee, tpt) => scrutinee match case ClosedPatternTerm(scrutinee) => matched(scrutinee) case _ => notMatched @@ -262,33 +284,32 @@ class QuoteMatcher(debug: Boolean) { case Apply(TypeApply(Ident(_), List(TypeTree())), SeqLiteral(args, _) :: Nil) if pattern.symbol.eq(defn.QuotedRuntimePatterns_higherOrderHole) => - /* Some of method symbols in arguments of higher-order term hole are eta-expanded. - * e.g. - * g: (Int) => Int - * => { - * def $anonfun(y: Int): Int = g(y) - * closure($anonfun) - * } - * - * f: (using Int) => Int - * => f(using x) - * This function restores the symbol of the original method from - * the eta-expanded function. - */ - def getCapturedIdent(arg: Tree)(using Context): Ident = - arg match - case id: Ident => id - case Apply(fun, _) => getCapturedIdent(fun) - case Block((ddef: DefDef) :: _, _: Closure) => getCapturedIdent(ddef.rhs) - case Typed(expr, _) => getCapturedIdent(expr) - val env = summon[Env] val capturedIds = args.map(getCapturedIdent) val capturedSymbols = capturedIds.map(_.symbol) - val captureEnv = env.filter((k, v) => !capturedSymbols.contains(v)) + val captureEnv = Env( + termEnv = env.termEnv.filter((k, v) => !capturedIds.map(_.symbol).contains(v)), + typeEnv = env.typeEnv) withEnv(captureEnv) { scrutinee match - case ClosedPatternTerm(scrutinee) => matchedOpen(scrutinee, pattern.tpe, capturedIds, args.map(_.tpe), env) + case ClosedPatternTerm(scrutinee) => matchedOpen(scrutinee, pattern.tpe, capturedIds, args.map(_.tpe), Nil, env) + case _ => notMatched + } + + /* Higher order term hole */ + // Matches an open term and wraps it into a lambda that provides the free variables + case Apply(TypeApply(Ident(_), List(TypeTree(), targs)), SeqLiteral(args, _) :: Nil) + if pattern.symbol.eq(defn.QuotedRuntimePatterns_higherOrderHoleWithTypes) => + + val env = summon[Env] + val capturedIds = args.map(getCapturedIdent) + val capturedTargs = unrollHkNestedPairsTypeTree(targs) + val captureEnv = Env( + termEnv = env.termEnv.filter((k, v) => !capturedIds.map(_.symbol).contains(v)), + typeEnv = env.typeEnv.filter((k, v) => !capturedTargs.map(_.symbol).contains(v))) + withEnv(captureEnv) { + scrutinee match + case ClosedPatternTerm(scrutinee) => matchedOpen(scrutinee, pattern.tpe, capturedIds, args.map(_.tpe), capturedTargs.map(_.tpe), env) case _ => notMatched } @@ -324,7 +345,7 @@ class QuoteMatcher(debug: Boolean) { /* Match reference */ case _: Ident if symbolMatch(scrutinee, pattern) => matched /* Match type */ - case TypeTreeTypeTest(pattern) if scrutinee.tpe <:< pattern.tpe => matched + case TypeTreeTypeTest(pattern) if 
isSubTypeUnderEnv(scrutinee, pattern) => matched case _ => notMatched /* Match application */ @@ -346,8 +367,12 @@ class QuoteMatcher(debug: Boolean) { pattern match case Block(stat2 :: stats2, expr2) => val newEnv = (stat1, stat2) match { - case (stat1: MemberDef, stat2: MemberDef) => - summon[Env] + (stat1.symbol -> stat2.symbol) + case (stat1: ValOrDefDef, stat2: ValOrDefDef) => + val Env(termEnv, typeEnv) = summon[Env] + new Env(termEnv + (stat1.symbol -> stat2.symbol), typeEnv) + case (stat1: TypeDef, stat2: TypeDef) => + val Env(termEnv, typeEnv) = summon[Env] + new Env(termEnv, typeEnv + (stat1.symbol -> stat2.symbol)) case _ => summon[Env] } @@ -403,14 +428,16 @@ class QuoteMatcher(debug: Boolean) { // TODO remove this? case TypeTreeTypeTest(scrutinee) => pattern match - case TypeTreeTypeTest(pattern) if scrutinee.tpe <:< pattern.tpe => matched + case TypeTreeTypeTest(pattern) if isSubTypeUnderEnv(scrutinee, pattern) => matched case _ => notMatched /* Match val */ case scrutinee @ ValDef(_, tpt1, _) => pattern match case pattern @ ValDef(_, tpt2, _) if checkValFlags() => - def rhsEnv = summon[Env] + (scrutinee.symbol -> pattern.symbol) + def rhsEnv = + val Env(termEnv, typeEnv) = summon[Env] + new Env(termEnv + (scrutinee.symbol -> pattern.symbol), typeEnv) tpt1 =?= tpt2 &&& withEnv(rhsEnv)(scrutinee.rhs =?= pattern.rhs) case _ => notMatched @@ -427,11 +454,38 @@ class QuoteMatcher(debug: Boolean) { notMatched case _ => matched + /** + * Implementation restriction: The current implementation matches type parameters + * only when they have empty bounds (>: Nothing <: Any) + */ + def matchTypeDef(sctypedef: TypeDef, pttypedef: TypeDef): MatchingExprs = sctypedef match + case TypeDef(_, TypeBoundsTree(sclo, schi, EmptyTree)) + if sclo.tpe == defn.NothingType && schi.tpe == defn.AnyType => + pttypedef match + case TypeDef(_, TypeBoundsTree(ptlo, pthi, EmptyTree)) + if sclo.tpe == defn.NothingType && schi.tpe == defn.AnyType => + matched + case _ => notMatched + case _ => notMatched + def matchParamss(scparamss: List[ParamClause], ptparamss: List[ParamClause])(using Env): optional[(Env, MatchingExprs)] = (scparamss, ptparamss) match { - case (scparams :: screst, ptparams :: ptrest) => + case (ValDefs(scparams) :: screst, ValDefs(ptparams) :: ptrest) => val mr1 = matchLists(scparams, ptparams)(_ =?= _) - val newEnv = summon[Env] ++ scparams.map(_.symbol).zip(ptparams.map(_.symbol)) + val Env(termEnv, typeEnv) = summon[Env] + val newEnv = new Env( + termEnv = termEnv ++ scparams.map(_.symbol).zip(ptparams.map(_.symbol)), + typeEnv = typeEnv + ) + val (resEnv, mrrest) = withEnv(newEnv)(matchParamss(screst, ptrest)) + (resEnv, mr1 &&& mrrest) + case (TypeDefs(scparams) :: screst, TypeDefs(ptparams) :: ptrest) => + val mr1 = matchLists(scparams, ptparams)(matchTypeDef) + val Env(termEnv, typeEnv) = summon[Env] + val newEnv = new Env( + termEnv = termEnv, + typeEnv = typeEnv ++ scparams.map(_.symbol).zip(ptparams.map(_.symbol)), + ) val (resEnv, mrrest) = withEnv(newEnv)(matchParamss(screst, ptrest)) (resEnv, mr1 &&& mrrest) case (Nil, Nil) => (summon[Env], matched) @@ -439,8 +493,8 @@ class QuoteMatcher(debug: Boolean) { } val ematch = matchErasedParams(scrutinee.tpe.widenTermRefExpr, pattern.tpe.widenTermRefExpr) - val (pEnv, pmatch) = matchParamss(paramss1, paramss2) - val defEnv = pEnv + (scrutinee.symbol -> pattern.symbol) + val (Env(termEnv, typeEnv), pmatch) = matchParamss(paramss1, paramss2) + val defEnv = Env(termEnv + (scrutinee.symbol -> pattern.symbol), typeEnv) ematch &&& pmatch 
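A sketch of what the separate type environment enables (hypothetical macro code; per the implementation restriction noted above, matched type parameters must have empty bounds):

  import scala.quoted.*
  def isIdentity(x: Expr[Any])(using Quotes): Boolean =
    x match
      case '{ def f[T](t: T): T = t; () } => true   // the scrutinee's and the pattern's T are related through the type env
      case _ => false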
@@ -514,11 +568,19 @@ class QuoteMatcher(debug: Boolean) { else scrutinee case _ => scrutinee val pattern = patternTree.symbol + val Env(termEnv, typeEnv) = summon[Env] devirtualizedScrutinee == pattern - || summon[Env].get(devirtualizedScrutinee).contains(pattern) + || termEnv.get(devirtualizedScrutinee).contains(pattern) + || typeEnv.get(devirtualizedScrutinee).contains(pattern) || devirtualizedScrutinee.allOverriddenSymbols.contains(pattern) + private def isSubTypeUnderEnv(scrutinee: Tree, pattern: Tree)(using Env, Context): Boolean = + val env = summon[Env].typeEnv + val scType = if env.isEmpty then scrutinee.tpe + else scrutinee.subst(env.keys.toList, env.values.toList).tpe + scType <:< pattern.tpe + private object ClosedPatternTerm { /** Matches a term that does not contain free variables defined in the pattern (i.e. not defined in `Env`) */ def unapply(term: Tree)(using Env, Context): Option[term.type] = @@ -526,16 +588,24 @@ class QuoteMatcher(debug: Boolean) { /** Return all free variables of the term defined in the pattern (i.e. defined in `Env`) */ def freePatternVars(term: Tree)(using Env, Context): Set[Symbol] = - val accumulator = new TreeAccumulator[Set[Symbol]] { + val Env(termEnv, typeEnv) = summon[Env] + val typeAccumulator = new TypeAccumulator[Set[Symbol]] { + def apply(x: Set[Symbol], tp: Type): Set[Symbol] = tp match + case tp: TypeRef if typeEnv.contains(tp.typeSymbol) => foldOver(x + tp.typeSymbol, tp) + case tp: TermRef if termEnv.contains(tp.termSymbol) => foldOver(x + tp.termSymbol, tp) + case _ => foldOver(x, tp) + } + val treeAccumulator = new TreeAccumulator[Set[Symbol]] { def apply(x: Set[Symbol], tree: Tree)(using Context): Set[Symbol] = tree match - case tree: Ident if summon[Env].contains(tree.symbol) => foldOver(x + tree.symbol, tree) + case tree: Ident if termEnv.contains(tree.symbol) => foldOver(typeAccumulator(x, tree.tpe) + tree.symbol, tree) + case tree: TypeTree => typeAccumulator(x, tree.tpe) case _ => foldOver(x, tree) } - accumulator.apply(Set.empty, term) + treeAccumulator(Set.empty, term) } - enum MatchResult: + private enum MatchResult: /** Closed pattern extracted value * @param tree Scrutinee sub-tree that matched */ @@ -546,9 +616,10 @@ class QuoteMatcher(debug: Boolean) { * @param patternTpe Type of the pattern hole (from the pattern) * @param argIds Identifiers of HOAS arguments (from the pattern) * @param argTypes Eta-expanded types of HOAS arguments (from the pattern) + * @param typeArgs type arguments from the pattern * @param env Mapping between scrutinee and pattern variables */ - case OpenTree(tree: Tree, patternTpe: Type, argIds: List[Tree], argTypes: List[Type], env: Env) + case OpenTree(tree: Tree, patternTpe: Type, argIds: List[Tree], argTypes: List[Type], typeArgs: List[Type], env: Env) /** Return the expression that was extracted from a hole. 
* @@ -561,28 +632,61 @@ class QuoteMatcher(debug: Boolean) { def toExpr(mapTypeHoles: Type => Type, spliceScope: Scope)(using Context): Expr[Any] = this match case MatchResult.ClosedTree(tree) => new ExprImpl(tree, spliceScope) - case MatchResult.OpenTree(tree, patternTpe, argIds, argTypes, env) => + case MatchResult.OpenTree(tree, patternTpe, argIds, argTypes, typeArgs, Env(termEnv, typeEnv)) => val names: List[TermName] = argIds.map(_.symbol.name.asTermName) val paramTypes = argTypes.map(tpe => mapTypeHoles(tpe.widenTermRefExpr)) - val methTpe = MethodType(names)(_ => paramTypes, _ => mapTypeHoles(patternTpe)) + val ptTypeVarSymbols = typeArgs.map(_.typeSymbol) + val isNotPoly = typeArgs.isEmpty + + val methTpe = if isNotPoly then + MethodType(names)(_ => paramTypes, _ => mapTypeHoles(patternTpe)) + else + val typeArgs1 = PolyType.syntheticParamNames(typeArgs.length) + val bounds = typeArgs map (_ => TypeBounds.empty) + val resultTypeExp = (pt: PolyType) => { + val argTypes1 = paramTypes.map(_.subst(ptTypeVarSymbols, pt.paramRefs)) + val resultType1 = mapTypeHoles(patternTpe).subst(ptTypeVarSymbols, pt.paramRefs) + MethodType(argTypes1, resultType1) + } + PolyType(typeArgs1)(_ => bounds, resultTypeExp) + val meth = newAnonFun(ctx.owner, methTpe) + def bodyFn(lambdaArgss: List[List[Tree]]): Tree = { - val argsMap = argIds.view.map(_.symbol).zip(lambdaArgss.head).toMap - val body = new TreeMap { - override def transform(tree: Tree)(using Context): Tree = - tree match - /* - * When matching a method call `f(0)` against a HOAS pattern `p(g)` where - * f has a method type `(x: Int): Int` and `f` maps to `g`, `p` should hold - * `g.apply(0)` because the type of `g` is `Int => Int` due to eta expansion. - */ - case Apply(fun, args) if env.contains(tree.symbol) => transform(fun).select(nme.apply).appliedToArgs(args.map(transform)) - case tree: Ident => env.get(tree.symbol).flatMap(argsMap.get).getOrElse(tree) - case tree => super.transform(tree) - }.transform(tree) + val (typeParams, params) = if isNotPoly then + (List.empty, lambdaArgss.head) + else + (lambdaArgss.head.map(_.tpe), lambdaArgss.tail.head) + + val typeArgsMap = ptTypeVarSymbols.zip(typeParams).toMap + val argsMap = argIds.view.map(_.symbol).zip(params).toMap + + val body = new TreeTypeMap( + typeMap = if isNotPoly then IdentityTypeMap + else new TypeMap() { + override def apply(tp: Type): Type = tp match { + case tr: TypeRef if tr.prefix.eq(NoPrefix) => + typeEnv.get(tr.symbol).flatMap(typeArgsMap.get).getOrElse(tr) + case tp => mapOver(tp) + } + }, + treeMap = new TreeMap { + override def transform(tree: Tree)(using Context): Tree = + tree match + /* + * When matching a method call `f(0)` against a HOAS pattern `p(g)` where + * f has a method type `(x: Int): Int` and `f` maps to `g`, `p` should hold + * `g.apply(0)` because the type of `g` is `Int => Int` due to eta expansion. 
+ */ + case Apply(fun, args) if termEnv.contains(tree.symbol) => transform(fun).select(nme.apply).appliedToArgs(args.map(transform)) + case tree: Ident => termEnv.get(tree.symbol).flatMap(argsMap.get).getOrElse(tree) + case tree => super.transform(tree) + }.transform + ).transform(tree) + TreeOps(body).changeNonLocalOwners(meth) } - val hoasClosure = Closure(meth, bodyFn) + val hoasClosure = Closure(meth, bodyFn).withSpan(tree.span) new ExprImpl(hoasClosure, spliceScope) private inline def notMatched[T]: optional[T] = @@ -594,12 +698,17 @@ class QuoteMatcher(debug: Boolean) { private inline def matched(tree: Tree)(using Context): MatchingExprs = Seq(MatchResult.ClosedTree(tree)) - private def matchedOpen(tree: Tree, patternTpe: Type, argIds: List[Tree], argTypes: List[Type], env: Env)(using Context): MatchingExprs = - Seq(MatchResult.OpenTree(tree, patternTpe, argIds, argTypes, env)) + private def matchedOpen(tree: Tree, patternTpe: Type, argIds: List[Tree], argTypes: List[Type], typeArgs: List[Type], env: Env)(using Context): MatchingExprs = + Seq(MatchResult.OpenTree(tree, patternTpe, argIds, argTypes, typeArgs, env)) extension (self: MatchingExprs) /** Concatenates the contents of two successful matchings */ - def &&& (that: MatchingExprs): MatchingExprs = self ++ that + private def &&& (that: MatchingExprs): MatchingExprs = self ++ that end extension + // TODO-18271: Duplicate with QuotePatterns.unrollHkNestedPairsTypeTree + private def unrollHkNestedPairsTypeTree(tree: Tree)(using Context): List[Tree] = tree match + case AppliedTypeTree(tupleN, bindings) if defn.isTupleClass(tupleN.symbol) => bindings // TupleN, 1 <= N <= 22 + case AppliedTypeTree(_, head :: tail :: Nil) => head :: unrollHkNestedPairsTypeTree(tail) // KCons or *: + case _ => Nil // KNil or EmptyTuple } diff --git a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala index 517adff17991..22be293c3562 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala @@ -301,7 +301,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler object DefDef extends DefDefModule: def apply(symbol: Symbol, rhsFn: List[List[Tree]] => Option[Term]): DefDef = - xCheckMacroAssert(symbol.isTerm, s"expected a term symbol but received $symbol") + xCheckMacroAssert(symbol.isTerm, s"expected a term symbol, but received $symbol") xCheckMacroAssert(symbol.flags.is(Flags.Method), "expected a symbol with `Method` flag set") withDefaultPos(tpd.DefDef(symbol.asTerm, prefss => xCheckedMacroOwners(xCheckMacroValidExpr(rhsFn(prefss)), symbol).getOrElse(tpd.EmptyTree) @@ -472,7 +472,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def term(tp: TermRef): Ref = withDefaultPos(tpd.ref(tp).asInstanceOf[tpd.RefTree]) def apply(sym: Symbol): Ref = - assert(sym.isTerm) + assert(sym.isTerm, s"expected a term symbol, but received $sym") val refTree = tpd.ref(sym) match case t @ tpd.This(ident) => // not a RefTree, so we need to work around this - issue #19732 // ident in `This` can be a TypeIdent of sym, so we manually prepare the ref here, @@ -1128,7 +1128,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def of[T <: AnyKind](using tp: scala.quoted.Type[T]): TypeTree = tp.asInstanceOf[TypeImpl].typeTree def ref(sym: Symbol): TypeTree = - assert(sym.isType, "Expected a type symbol, but got " + sym) + assert(sym.isType, s"Expected a 
type symbol, but got $sym") tpd.ref(sym) end TypeTree @@ -1162,7 +1162,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler object TypeIdent extends TypeIdentModule: def apply(sym: Symbol): TypeTree = - assert(sym.isType) + assert(sym.isType, s"Expected a type symbol, but got $sym") withDefaultPos(tpd.ref(sym).asInstanceOf[tpd.TypeTree]) def copy(original: Tree)(name: String): TypeIdent = tpd.cpy.Ident(original)(name.toTypeName) @@ -1811,7 +1811,10 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def =:=(that: TypeRepr): Boolean = self =:= that def <:<(that: TypeRepr): Boolean = self <:< that def widen: TypeRepr = self.widen - def widenTermRefByName: TypeRepr = self.widenTermRefExpr + def widenTermRefByName: TypeRepr = + self.widenTermRefExpr match + case dotc.core.Types.ClassInfo(prefix, sym, _, _, _) => prefix.select(sym) + case other => other def widenByName: TypeRepr = self.widenExpr def dealias: TypeRepr = self.dealias def dealiasKeepOpaques: TypeRepr = self.dealiasKeepOpaques @@ -2646,6 +2649,16 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def newBind(owner: Symbol, name: String, flags: Flags, tpe: TypeRepr): Symbol = checkValidFlags(flags.toTermFlags, Flags.validBindFlags) dotc.core.Symbols.newSymbol(owner, name.toTermName, flags | dotc.core.Flags.Case, tpe) + + def newTypeAlias(owner: Symbol, name: String, flags: Flags, tpe: TypeRepr, privateWithin: Symbol): Symbol = + checkValidFlags(flags.toTypeFlags, Flags.validTypeAliasFlags) + assert(!tpe.isInstanceOf[Types.TypeBounds], "Passed `tpe` into newTypeAlias should not represent TypeBounds") + dotc.core.Symbols.newSymbol(owner, name.toTypeName, flags, dotc.core.Types.TypeAlias(tpe), privateWithin) + + def newBoundedType(owner: Symbol, name: String, flags: Flags, tpe: TypeBounds, privateWithin: Symbol): Symbol = + checkValidFlags(flags.toTypeFlags, Flags.validBoundedTypeFlags) + dotc.core.Symbols.newSymbol(owner, name.toTypeName, flags | dotc.core.Flags.Deferred, tpe, privateWithin) + def noSymbol: Symbol = dotc.core.Symbols.NoSymbol private inline def checkValidFlags(inline flags: Flags, inline valid: Flags): Unit = @@ -2685,9 +2698,10 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler if self.exists then val symPos = self.sourcePos if symPos.exists then Some(symPos) - else + else if self.source.exists then if xCheckMacro then report.warning(s"Missing symbol position (defaulting to position 0): $self\nThis is a compiler bug. 
Please report it.") Some(self.source.atSpan(dotc.util.Spans.Span(0))) + else None else None def docstring: Option[String] = @@ -2986,6 +3000,13 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler // Keep: aligned with Quotes's `newBind` doc private[QuotesImpl] def validBindFlags: Flags = Case // Flags that could be allowed: Implicit | Given | Erased + + // Keep: aligned with Quotes's 'newBoundedType' doc + private[QuotesImpl] def validBoundedTypeFlags: Flags = Private | Protected | Override | Deferred | Final | Infix | Local + + // Keep: aligned with Quotes's `newTypeAlias` doc + private[QuotesImpl] def validTypeAliasFlags: Flags = Private | Protected | Override | Final | Infix | Local + end Flags given FlagsMethods: FlagsMethods with diff --git a/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala b/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala index acf66fcf2009..82be54a9d793 100644 --- a/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala +++ b/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala @@ -177,6 +177,8 @@ object Extractors { this += "Alternatives(" ++= patterns += ")" case TypedOrTest(tree, tpt) => this += "TypedOrTest(" += tree += ", " += tpt += ")" + case tree => + this += s"" } def visitConstant(x: Constant): this.type = x match { @@ -241,6 +243,8 @@ object Extractors { this += "MatchCase(" += pat += ", " += rhs += ")" case FlexibleType(tp) => this += "FlexibleType(" += tp += ")" + case tp => + this += s"" } def visitSignature(sig: Signature): this.type = { diff --git a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala index 9503177ff738..64a0ff9db9ec 100644 --- a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala +++ b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala @@ -1150,8 +1150,19 @@ object SourceCode { case tp: TypeRef if tp.typeSymbol == Symbol.requiredClass("scala.<repeated>") => this += "_*" case _ => - printType(tp) - inSquare(printTypesOrBounds(args, ", ")) + if !fullNames && args.lengthCompare(2) == 0 && tp.typeSymbol.flags.is(Flags.Infix) then + val lhs = args(0) + val rhs = args(1) + this += "(" + printType(lhs) + this += " " + printType(tp) + this += " " + printType(rhs) + this += ")" + else + printType(tp) + inSquare(printTypesOrBounds(args, ", ")) } case AnnotatedType(tp, annot) => @@ -1292,7 +1303,9 @@ object SourceCode { val sym = annot.tpe.typeSymbol sym != Symbol.requiredClass("scala.forceInline") && sym.maybeOwner != Symbol.requiredPackage("scala.annotation.internal") - case x => cannotBeShownAsSource(x.show(using Printer.TreeStructure)) + case x => + cannotBeShownAsSource(x.show(using Printer.TreeStructure)) + false } printAnnotations(annots) if (annots.nonEmpty) this += " " @@ -1463,8 +1476,8 @@ object SourceCode { } } - private def cannotBeShownAsSource(x: String): Nothing = - throw new Exception(s"$x does not have a source representation") + private def cannotBeShownAsSource(x: String): this.type = + this += s"<$x does not have a source representation>" private object SpecialOp { def unapply(arg: Tree): Option[(String, List[Term])] = arg match { diff --git a/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala b/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala index 115803d79dc1..86b22009d15a 100755 --- a/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala +++ 
b/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala @@ -75,8 +75,8 @@ class CoursierScalaTests: version() def emptyArgsEqualsRepl() = - val output = CoursierScalaTests.csScalaCmd() - assertTrue(output.mkString("\n").contains("Unable to create a terminal")) // Scala attempted to create REPL so we can assume it is working + val output = CoursierScalaTests.csScalaCmdWithStdin(Seq.empty, Some("println(\"Hello World\")\n:quit")) + assertTrue(output.mkString("\n").contains("Hello World")) emptyArgsEqualsRepl() def run() = @@ -132,8 +132,8 @@ class CoursierScalaTests: compileFilesToJarAndRun() def replWithArgs() = - val output = CoursierScalaTests.csScalaCmd("-source", "3.0-migration") - assertTrue(output.mkString("\n").contains("Unable to create a terminal")) // Scala attempted to create REPL so we can assume it is working + val output = CoursierScalaTests.csScalaCmdWithStdin(Seq("-source", "3.0-migration"), Some("println(\"Hello World\")\n:quit")) + assertTrue(output.mkString("\n").contains("Hello World")) replWithArgs() def argumentFile() = @@ -148,25 +148,31 @@ class CoursierScalaTests: object CoursierScalaTests: - def execCmd(command: String, options: String*): (Int, List[String]) = + private def execCmd(command: String, options: Seq[String] = Seq.empty, stdin: Option[String] = None): (Int, List[String]) = val cmd = (command :: options.toList).toSeq.mkString(" ") val out = new ListBuffer[String] - val code = cmd.!(ProcessLogger(out += _, out += _)) + val process = stdin match + case Some(input) => Process(cmd) #< new java.io.ByteArrayInputStream(input.getBytes) + case None => Process(cmd) + val code = process.!(ProcessLogger(out += _, out += _)) (code, out.toList) def csScalaCmd(options: String*): List[String] = - csCmd("dotty.tools.MainGenericRunner", options*) + csScalaCmdWithStdin(options, None) + + def csScalaCmdWithStdin(options: Seq[String], stdin: Option[String]): List[String] = + csCmd("dotty.tools.MainGenericRunner", options, stdin) def csScalaCompilerCmd(options: String*): List[String] = - csCmd("dotty.tools.dotc.Main", options*) + csCmd("dotty.tools.dotc.Main", options) - private def csCmd(entry: String, options: String*): List[String] = + private def csCmd(entry: String, options: Seq[String], stdin: Option[String] = None): List[String] = val (jOpts, args) = options.partition(_.startsWith("-J")) val newOptions = args match case Nil => args case _ => "--" +: args val newJOpts = jOpts.map(s => s"--java-opt ${s.stripPrefix("-J")}").mkString(" ") - execCmd("./cs", (s"""launch "org.scala-lang:scala3-compiler_3:${sys.env("DOTTY_BOOTSTRAPPED_VERSION")}" $newJOpts --main-class "$entry" --property "scala.usejavacp=true" --property "scala.use_legacy_launcher=true"""" +: newOptions)*)._2 + execCmd("./cs", (s"""launch "org.scala-lang:scala3-compiler_3:${sys.env("DOTTY_BOOTSTRAPPED_VERSION")}" $newJOpts --main-class "$entry" --property "scala.usejavacp=true" --property "scala.use_legacy_launcher=true"""" +: newOptions), stdin)._2 /** Get coursier script */ @BeforeClass def setup(): Unit = @@ -177,7 +183,7 @@ object CoursierScalaTests: case other => fail(s"Unsupported OS for coursier launcher: $other") def runAndCheckCmd(cmd: String, options: String*): Unit = - val (code, out) = execCmd(cmd, options*) + val (code, out) = execCmd(cmd, options) if code != 0 then fail(s"Failed to run $cmd ${options.mkString(" ")}, exit code: $code, output: ${out.mkString("\n")}") diff --git a/compiler/test-resources/repl/19184 b/compiler/test-resources/repl/19184 new file mode 100644 index 
000000000000..cf4ce6f1d22f --- /dev/null +++ b/compiler/test-resources/repl/19184 @@ -0,0 +1,5 @@ +scala> def o(s: String) = "o"; def oo(s: String) = "oo"; val o = "o"; val oo = "oo" +def o(s: String): String +def oo(s: String): String +val o: String = o +val oo: String = oo diff --git a/compiler/test-resources/repl/i18383 b/compiler/test-resources/repl/i18383 index 81d3c9d5a7fd..563495e2e999 100644 --- a/compiler/test-resources/repl/i18383 +++ b/compiler/test-resources/repl/i18383 @@ -4,7 +4,7 @@ scala> import scala.collection.* scala> class Foo { import scala.util.*; println("foo") } 1 warning found --- Warning: -------------------------------------------------------------------- +-- [E198] Unused Symbol Warning: ----------------------------------------------- 1 | class Foo { import scala.util.*; println("foo") } | ^ | unused import diff --git a/compiler/test/dotc/neg-best-effort-pickling.blacklist b/compiler/test/dotc/neg-best-effort-pickling.blacklist index ff02be107a8a..99a83a467f08 100644 --- a/compiler/test/dotc/neg-best-effort-pickling.blacklist +++ b/compiler/test/dotc/neg-best-effort-pickling.blacklist @@ -13,6 +13,10 @@ curried-dependent-ift.scala i17121.scala illegal-match-types.scala i13780-1.scala +i20317a.scala +i11226.scala +i974.scala +i13864.scala # semantic db generation fails in the first compilation i1642.scala diff --git a/compiler/test/dotc/neg-init-global-scala2-library-tasty.blacklist b/compiler/test/dotc/neg-init-global-scala2-library-tasty.blacklist index f435867fcaab..03b020db64d9 100644 --- a/compiler/test/dotc/neg-init-global-scala2-library-tasty.blacklist +++ b/compiler/test/dotc/neg-init-global-scala2-library-tasty.blacklist @@ -4,3 +4,18 @@ t9312.scala unapplySeq-implicit-arg.scala unapplySeq-implicit-arg2.scala unapplySeq-implicit-arg3.scala +ScalaCheck.scala +mutable-read8.scala +TypeCast.scala +global-cycle8.scala +global-cycle6.scala +i12544b.scala +t9360.scala +mutable-array.scala +patmat-unapplySeq2.scala +line-spacing.scala +global-list.scala +t5366.scala +mutable-read7.scala +t9115.scala +Color.scala \ No newline at end of file diff --git a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index d6f962176ecc..032b53150e49 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -67,6 +67,7 @@ mt-redux-norm.perspective.scala i18211.scala 10867.scala named-tuples1.scala +i20897.scala # Opaque type i5720.scala @@ -135,3 +136,5 @@ hylolib-deferred-given hylolib-cb hylolib +# typecheckErrors method unpickling +i21415.scala diff --git a/compiler/test/dotc/run-macros-scala2-library-tasty.blacklist b/compiler/test/dotc/run-macros-scala2-library-tasty.blacklist index 63a6e2cee345..6fdfccf7646c 100644 --- a/compiler/test/dotc/run-macros-scala2-library-tasty.blacklist +++ b/compiler/test/dotc/run-macros-scala2-library-tasty.blacklist @@ -2,3 +2,4 @@ tasty-extractors-1 tasty-extractors-2 tasty-extractors-types +type-print diff --git a/compiler/test/dotc/run-test-pickling.blacklist b/compiler/test/dotc/run-test-pickling.blacklist index dacbc63bb520..31304e061bc7 100644 --- a/compiler/test/dotc/run-test-pickling.blacklist +++ b/compiler/test/dotc/run-test-pickling.blacklist @@ -27,7 +27,6 @@ tuple-zip.scala tuples1.scala tuples1a.scala tuples1b.scala -typeCheckErrors.scala typeclass-derivation-doc-example.scala typeclass-derivation1.scala typeclass-derivation2.scala @@ -47,3 +46,6 @@ trait-static-forwarder i17255 named-tuples-strawman-2.scala +# 
typecheckErrors method unpickling +typeCheckErrors.scala + diff --git a/compiler/test/dotty/tools/DottyTest.scala b/compiler/test/dotty/tools/DottyTest.scala index 7ccbc09a4c92..76d2fdcb6d26 100644 --- a/compiler/test/dotty/tools/DottyTest.scala +++ b/compiler/test/dotty/tools/DottyTest.scala @@ -40,13 +40,13 @@ trait DottyTest extends ContextEscapeDetection { protected def initializeCtx(fc: FreshContext): Unit = { fc.setSetting(fc.settings.encoding, "UTF8") fc.setSetting(fc.settings.classpath, TestConfiguration.basicClasspath) - fc.setSetting(fc.settings.language, List("experimental.erasedDefinitions")) + fc.setSetting(fc.settings.language, List("experimental.erasedDefinitions").asInstanceOf) fc.setProperty(ContextDoc, new ContextDocstrings) } protected def defaultCompiler: Compiler = new Compiler() - private def compilerWithChecker(phase: String)(assertion: (tpd.Tree, Context) => Unit) = new Compiler { + protected def compilerWithChecker(phase: String)(assertion: (tpd.Tree, Context) => Unit) = new Compiler { private val baseCompiler = defaultCompiler diff --git a/compiler/test/dotty/tools/backend/jvm/ArrayApplyOptTest.scala b/compiler/test/dotty/tools/backend/jvm/ArrayApplyOptTest.scala index c99de8fcf956..a1fe40e58b56 100644 --- a/compiler/test/dotty/tools/backend/jvm/ArrayApplyOptTest.scala +++ b/compiler/test/dotty/tools/backend/jvm/ArrayApplyOptTest.scala @@ -161,6 +161,42 @@ class ArrayApplyOptTest extends DottyBytecodeTest { } } + @Test def emptyListApplyAvoidsIntermediateArray = + checkApplyAvoidsIntermediateArray("EmptyList"): + """import scala.collection.immutable.Nil + |class Foo { + | def meth1: List[String] = List() + | def meth2: List[String] = Nil + |} + """.stripMargin + + @Test def emptyRefListApplyAvoidsIntermediateArray = + checkApplyAvoidsIntermediateArray("EmptyListOfRef"): + """import scala.collection.immutable.Nil + |class Foo { + | def meth1: List[String] = List[String]() + | def meth2: List[String] = Nil + |} + """.stripMargin + + @Test def emptyPrimitiveListApplyAvoidsIntermediateArray = + checkApplyAvoidsIntermediateArray("EmptyListOfInt"): + """import scala.collection.immutable.Nil + |class Foo { + | def meth1: List[Int] = List() + | def meth2: List[Int] = Nil + |} + """.stripMargin + + @Test def primitiveListApplyAvoidsIntermediateArray = + checkApplyAvoidsIntermediateArray("ListOfInt"): + """import scala.collection.immutable.{ ::, Nil } + |class Foo { + | def meth1: List[Int] = List(1, 2, 3) + | def meth2: List[Int] = new ::(1, new ::(2, new ::(3, Nil))) + |} + """.stripMargin + @Test def testListApplyAvoidsIntermediateArray = { checkApplyAvoidsIntermediateArray("List"): """import scala.collection.immutable.{ ::, Nil } diff --git a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala index f446913d7964..e92c4c26adb8 100644 --- a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala +++ b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala @@ -158,6 +158,126 @@ class DottyBytecodeTests extends DottyBytecodeTest { } } + @Test def switchOnUnionOfInts = { + val source = + """ + |object Foo { + | def foo(x: 1 | 2 | 3 | 4 | 5) = x match { + | case 1 => println(3) + | case 2 | 3 => println(2) + | case 4 => println(1) + | case 5 => println(0) + | } + |} + """.stripMargin + + checkBCode(source) { dir => + val moduleIn = dir.lookupName("Foo$.class", directory = false) + val moduleNode = loadClassNode(moduleIn.input) + val methodNode = getMethod(moduleNode, "foo") + 
assert(verifySwitch(methodNode)) + } + } + + @Test def switchOnUnionOfStrings = { + val source = + """ + |object Foo { + | def foo(s: "one" | "two" | "three" | "four" | "five") = s match { + | case "one" => println(3) + | case "two" | "three" => println(2) + | case "four" | "five" => println(1) + | case _ => println(0) + | } + |} + """.stripMargin + + checkBCode(source) { dir => + val moduleIn = dir.lookupName("Foo$.class", directory = false) + val moduleNode = loadClassNode(moduleIn.input) + val methodNode = getMethod(moduleNode, "foo") + assert(verifySwitch(methodNode)) + } + } + + @Test def switchOnUnionOfChars = { + val source = + """ + |object Foo { + | def foo(ch: 'a' | 'b' | 'c' | 'd' | 'e'): Int = ch match { + | case 'a' => 1 + | case 'b' => 2 + | case 'c' => 3 + | case 'd' => 4 + | case 'e' => 5 + | } + |} + """.stripMargin + + checkBCode(source) { dir => + val moduleIn = dir.lookupName("Foo$.class", directory = false) + val moduleNode = loadClassNode(moduleIn.input) + val methodNode = getMethod(moduleNode, "foo") + assert(verifySwitch(methodNode)) + } + } + + @Test def switchOnUnionOfIntSingletons = { + val source = + """ + |object Foo { + | final val One = 1 + | final val Two = 2 + | final val Three = 3 + | final val Four = 4 + | final val Five = 5 + | type Values = One.type | Two.type | Three.type | Four.type | Five.type + | + | def foo(s: Values) = s match { + | case One => println(3) + | case Two | Three => println(2) + | case Four => println(1) + | case Five => println(0) + | } + |} + """.stripMargin + + checkBCode(source) { dir => + val moduleIn = dir.lookupName("Foo$.class", directory = false) + val moduleNode = loadClassNode(moduleIn.input) + val methodNode = getMethod(moduleNode, "foo") + assert(verifySwitch(methodNode)) + } + } + + @Test def switchOnUnionOfStringSingletons = { + val source = + """ + |object Foo { + | final val One = "one" + | final val Two = "two" + | final val Three = "three" + | final val Four = "four" + | final val Five = "five" + | type Values = One.type | Two.type | Three.type | Four.type | Five.type + | + | def foo(s: Values) = s match { + | case One => println(3) + | case Two | Three => println(2) + | case Four => println(1) + | case Five => println(0) + | } + |} + """.stripMargin + + checkBCode(source) { dir => + val moduleIn = dir.lookupName("Foo$.class", directory = false) + val moduleNode = loadClassNode(moduleIn.input) + val methodNode = getMethod(moduleNode, "foo") + assert(verifySwitch(methodNode)) + } + } + @Test def matchWithDefaultNoThrowMatchError = { val source = """class Test { @@ -1843,6 +1963,30 @@ class DottyBytecodeTests extends DottyBytecodeTest { assertSameCode(instructions, expected) } } + + /** + * Test 'additional' imports are generated in deterministic order + * https://github.com/scala/scala3/issues/20496 + */ + @Test def deterministicAdditionalImports = { + val source = + """trait Actor: + | def receive() = () + |trait Timers: + | def timers() = () + |abstract class ShardCoordinator extends Actor with Timers + |class PersistentShardCoordinator extends ShardCoordinator: + | def foo = + | super.receive() + | super.timers()""".stripMargin + checkBCode(source) { dir => + val clsIn = dir.lookupName("PersistentShardCoordinator.class", directory = false).input + val clsNode = loadClassNode(clsIn) + + val expected = List("Actor", "Timers") + assertEquals(expected, clsNode.interfaces.asScala) + } + } } object invocationReceiversTestCode { diff --git a/compiler/test/dotty/tools/backend/jvm/SourcePositionsTest.scala 
b/compiler/test/dotty/tools/backend/jvm/SourcePositionsTest.scala new file mode 100644 index 000000000000..7bb52260c366 --- /dev/null +++ b/compiler/test/dotty/tools/backend/jvm/SourcePositionsTest.scala @@ -0,0 +1,116 @@ +package dotty.tools.backend.jvm + +import scala.language.unsafeNulls + +import org.junit.Assert._ +import org.junit.Test + +class SourcePositionsTest extends DottyBytecodeTest: + import ASMConverters._ + + @Test def issue18238_a(): Unit = { + val code = + """ + |class Test { + | def test(): Unit = { + | var x = 3 + | var y = 2 + | while(true) { + | if (x < y) + | if (x >= y) + | x += 1 + | else + | y -= 1 + | } + | } + |}""".stripMargin + + checkBCode(code) { dir => + val testClass = loadClassNode(dir.lookupName("Test.class", directory = false).input, skipDebugInfo = false) + val testMethod = getMethod(testClass, "test") + val lineNumbers = instructionsFromMethod(testMethod).collect{case ln: LineNumber => ln} + val expected = List( + LineNumber(4, Label(0)), // var x + LineNumber(5, Label(4)), // var y + LineNumber(6, Label(8)), // while(true) + LineNumber(7, Label(13)), // if (x < y) + LineNumber(8, Label(18)), // if (x >= y) + LineNumber(9, Label(23)), // x += 1 + LineNumber(11, Label(27)), // y -= 1 + LineNumber(7, Label(32)) // point back to `if (x < y) + ) + assertEquals(expected, lineNumbers) + } + } + + @Test def issue18238_b(): Unit = { + val code = + """ + |class Test { + | def test(): Unit = { + | var x = 3 + | var y = 2 + | while(true) { + | if (x < y) + | if (x >= y) + | x += 1 + | else + | y -= 1 + | else () + | } + | } + |}""".stripMargin + + checkBCode(code) { dir => + val testClass = loadClassNode(dir.lookupName("Test.class", directory = false).input, skipDebugInfo = false) + val testMethod = getMethod(testClass, "test") + val lineNumbers = instructionsFromMethod(testMethod).collect{case ln: LineNumber => ln} + val expected = List( + LineNumber(4, Label(0)), // var x + LineNumber(5, Label(4)), // var y + LineNumber(6, Label(8)), // while(true) + LineNumber(7, Label(13)), // if (x < y) + LineNumber(8, Label(18)), // if (x >= y) + LineNumber(9, Label(23)), // x += 1 + LineNumber(11, Label(27)), // y -= 1 + LineNumber(12, Label(32)) // else () + ) + assertEquals(expected, lineNumbers) + } + } + + @Test def issue18238_c(): Unit = { + val code = + """ + |class Test { + | def test(): Unit = { + | var x = 3 + | var y = 2 + | while(true) { + | if (x < y) + | if (x >= y) + | x += 1 + | else + | y -= 1 + | println() + | } + | } + |}""".stripMargin + + checkBCode(code) { dir => + val testClass = loadClassNode(dir.lookupName("Test.class", directory = false).input, skipDebugInfo = false) + val testMethod = getMethod(testClass, "test") + val lineNumbers = instructionsFromMethod(testMethod).collect{case ln: LineNumber => ln} + val expected = List( + LineNumber(4, Label(0)), // var x + LineNumber(5, Label(4)), // var y + LineNumber(6, Label(8)), // while(true) + LineNumber(7, Label(13)), // if (x < y) + LineNumber(8, Label(18)), // if (x >= y) + LineNumber(9, Label(23)), // x += 1 + LineNumber(11, Label(27)), // y -= 1 + LineNumber(12, Label(31)) // println() + ) + assertEquals(expected, lineNumbers) + } + } diff --git a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala index a40c1ec1e5b2..3b19f1d3d4bb 100644 --- a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala @@ -32,13 +32,6 @@ 
class BootstrappedOnlyCompilationTests { ).checkCompile() } - // @Test - def posWithCompilerCC: Unit = - implicit val testGroup: TestGroup = TestGroup("compilePosWithCompilerCC") - aggregateTests( - compileDir("tests/pos-with-compiler-cc/dotc", withCompilerOptions.and("-language:experimental.captureChecking")) - ).checkCompile() - @Test def posWithCompiler: Unit = { implicit val testGroup: TestGroup = TestGroup("compilePosWithCompiler") aggregateTests( @@ -193,7 +186,7 @@ class BootstrappedOnlyCompilationTests { // 1. hack with absolute path for -Xplugin // 2. copy `pluginFile` to destination - def compileFilesInDir(dir: String): CompilationTest = { + def compileFilesInDir(dir: String, run: Boolean = false): CompilationTest = { val outDir = defaultOutputDir + "testPlugins/" val sourceDir = new java.io.File(dir) @@ -201,7 +194,10 @@ class BootstrappedOnlyCompilationTests { val targets = dirs.map { dir => val compileDir = createOutputDirsForDir(dir, sourceDir, outDir) Files.copy(dir.toPath.resolve(pluginFile), compileDir.toPath.resolve(pluginFile), StandardCopyOption.REPLACE_EXISTING) - val flags = TestFlags(withCompilerClasspath, noCheckOptions).and("-Xplugin:" + compileDir.getAbsolutePath) + val flags = { + val base = TestFlags(withCompilerClasspath, noCheckOptions).and("-Xplugin:" + compileDir.getAbsolutePath) + if run then base.withRunClasspath(withCompilerClasspath) else base + } SeparateCompilationSource("testPlugins", dir, flags, compileDir) } @@ -210,6 +206,7 @@ class BootstrappedOnlyCompilationTests { compileFilesInDir("tests/plugins/neg").checkExpectedErrors() compileDir("tests/plugins/custom/analyzer", withCompilerOptions.and("-Yretain-trees")).checkCompile() + compileFilesInDir("tests/plugins/run", run = true).checkRuns() } } diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala index de3bd02bba6e..9f72db6fc390 100644 --- a/compiler/test/dotty/tools/dotc/CompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala @@ -63,6 +63,7 @@ class CompilationTests { compileFile("tests/rewrites/rewrites.scala", defaultOptions.and("-source", "3.0-migration").and("-rewrite", "-indent")), compileFile("tests/rewrites/rewrites3x.scala", defaultOptions.and("-rewrite", "-source", "future-migration")), compileFile("tests/rewrites/rewrites3x-fatal-warnings.scala", defaultOptions.and("-rewrite", "-source", "future-migration", "-Xfatal-warnings")), + compileFile("tests/rewrites/i21394.scala", defaultOptions.and("-rewrite", "-source", "future-migration")), compileFile("tests/rewrites/uninitialized-var.scala", defaultOptions.and("-rewrite", "-source", "future-migration")), compileFile("tests/rewrites/with-type-operator.scala", defaultOptions.and("-rewrite", "-source", "future-migration")), compileFile("tests/rewrites/private-this.scala", defaultOptions.and("-rewrite", "-source", "future-migration")), @@ -76,6 +77,10 @@ class CompilationTests { compileFile("tests/rewrites/i17187.scala", unindentOptions.and("-rewrite")), compileFile("tests/rewrites/i17399.scala", unindentOptions.and("-rewrite")), compileFile("tests/rewrites/i20002.scala", defaultOptions.and("-indent", "-rewrite")), + compileDir("tests/rewrites/annotation-named-pararamters", defaultOptions.and("-rewrite", "-source:3.6-migration")), + compileFile("tests/rewrites/i21418.scala", unindentOptions.and("-rewrite", "-source:3.5-migration")), + compileFile("tests/rewrites/infix-named-args.scala", defaultOptions.and("-rewrite", "-source:3.6-migration")), + 
compileFile("tests/rewrites/ambigious-named-tuple-assignment.scala", defaultOptions.and("-rewrite", "-source:3.6-migration")), ).checkRewrites() } @@ -143,7 +148,7 @@ class CompilationTests { "tests/neg-custom-args/toplevel-samesource/S.scala", "tests/neg-custom-args/toplevel-samesource/nested/S.scala"), defaultOptions), - compileFile("tests/neg/i7575.scala", defaultOptions.withoutLanguageFeatures.and("-language:_")), + compileFile("tests/neg/i7575.scala", defaultOptions.withoutLanguageFeatures), ).checkExpectedErrors() } @@ -210,6 +215,11 @@ class CompilationTests { ) }.checkCompile() + @Test def explicitNullsWarn: Unit = { + implicit val testGroup: TestGroup = TestGroup("explicitNullsWarn") + compileFilesInDir("tests/explicit-nulls/warn", explicitNullsOptions) + }.checkWarnings() + @Test def explicitNullsRun: Unit = { implicit val testGroup: TestGroup = TestGroup("explicitNullsRun") compileFilesInDir("tests/explicit-nulls/run", explicitNullsOptions) diff --git a/compiler/test/dotty/tools/dotc/SettingsTests.scala b/compiler/test/dotty/tools/dotc/SettingsTests.scala index 301dc10ab54e..996ab22f67b1 100644 --- a/compiler/test/dotty/tools/dotc/SettingsTests.scala +++ b/compiler/test/dotty/tools/dotc/SettingsTests.scala @@ -272,8 +272,8 @@ class SettingsTests { val booleanSetting = BooleanSetting(RootSetting, "booleanSetting", "booleanSetting", false) val stringSetting = StringSetting(RootSetting, "stringSetting", "stringSetting", "", "test") val choiceSetting = ChoiceSetting(RootSetting, "choiceSetting", "choiceSetting", "", List("a", "b"), "a") - val multiChoiceSetting= MultiChoiceSetting(RootSetting, "multiChoiceSetting", "multiChoiceSetting", "", List("a", "b"), List()) - val multiChoiceHelpSetting= MultiChoiceHelpSetting(RootSetting, "multiChoiceHelpSetting", "multiChoiceHelpSetting", "", List(ChoiceWithHelp("a", "a"), ChoiceWithHelp("b", "b")), List()) + val multiChoiceSetting= MultiChoiceSetting(RootSetting, "multiChoiceSetting", "multiChoiceSetting", "", List("a", "b"), List(), legacyChoices = List("c")) + val multiChoiceHelpSetting= MultiChoiceHelpSetting(RootSetting, "multiChoiceHelpSetting", "multiChoiceHelpSetting", "", List(ChoiceWithHelp("a", "a"), ChoiceWithHelp("b", "b")), List(), legacyChoices = List("c")) val intSetting = IntSetting(RootSetting, "intSetting", "intSetting", 0) val intChoiceSetting = IntChoiceSetting(RootSetting, "intChoiceSetting", "intChoiceSetting", List(1,2,3), 1) val multiStringSetting = MultiStringSetting(RootSetting, "multiStringSetting", "multiStringSetting", "", List("a", "b"), List()) @@ -289,8 +289,8 @@ class SettingsTests { List("-booleanSetting", "true"), List("-stringSetting", "newTest"), List("-choiceSetting", "b"), - List("-multiChoiceSetting", "a,b"), - List("-multiChoiceHelpSetting", "a,b"), + List("-multiChoiceSetting", "a,b,c"), + List("-multiChoiceHelpSetting", "a,b,c"), List("-intSetting", "42"), List("-intChoiceSetting", "2"), List("-multiStringSetting", "a,b"), diff --git a/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala b/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala index 3dc4f4e4ec5e..a412848eaa98 100644 --- a/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala +++ b/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala @@ -81,7 +81,7 @@ class ScalaSettingsTests: val conf = sets.Wconf.valueIn(proc.sstate) val sut = reporting.WConf.fromSettings(conf).getOrElse(???) 
val msg = "There was a problem!".toMessage - val depr = new Diagnostic.DeprecationWarning(msg, util.NoSourcePosition) + val depr = new Diagnostic.DeprecationWarning(msg, util.NoSourcePosition, origin="") assertEquals(Action.Silent, sut.action(depr)) val feat = new Diagnostic.FeatureWarning(msg, util.NoSourcePosition) assertEquals(Action.Error, sut.action(feat)) @@ -197,7 +197,7 @@ class ScalaSettingsTests: val proc = sets.processArguments(sumy, processAll = true, skipped = Nil) val conf = sets.Wconf.valueIn(proc.sstate) val msg = "Don't use that!".toMessage - val depr = new Diagnostic.DeprecationWarning(msg, util.NoSourcePosition) + val depr = new Diagnostic.DeprecationWarning(msg, util.NoSourcePosition, origin="") val sut = reporting.WConf.fromSettings(conf).getOrElse(???) assertEquals(Action.Silent, sut.action(depr)) @@ -293,7 +293,8 @@ class ScalaSettingsTests: util.SourcePosition( source = util.SourceFile.virtual(new URI("file:///some/path/file.scala"), ""), span = util.Spans.Span(1L) - ) + ), + origin="", ) ) assertEquals(result, Right(reporting.Action.Error)) diff --git a/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala b/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala index 382c029c86e0..8a80a6978bdb 100644 --- a/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala +++ b/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala @@ -6,7 +6,7 @@ import scala.language.unsafeNulls import vulpix.FileDiff import vulpix.TestConfiguration -import vulpix.TestConfiguration +import vulpix.ParallelTesting import reporting.TestReporter import java.io._ @@ -25,7 +25,9 @@ import java.io.File class PrintingTest { def options(phase: String, flags: List[String]) = - List(s"-Xprint:$phase", "-color:never", "-nowarn", "-classpath", TestConfiguration.basicClasspath) ::: flags + val outDir = ParallelTesting.defaultOutputDir + "printing" + File.pathSeparator + File(outDir).mkdirs() + List(s"-Xprint:$phase", "-color:never", "-nowarn", "-d", outDir, "-classpath", TestConfiguration.basicClasspath) ::: flags private def compileFile(path: JPath, phase: String): Boolean = { val baseFilePath = path.toString.stripSuffix(".scala") diff --git a/compiler/test/dotty/tools/dotc/profile/ChromeTraceTest.scala b/compiler/test/dotty/tools/dotc/profile/ChromeTraceTest.scala new file mode 100644 index 000000000000..a5abf86b84c5 --- /dev/null +++ b/compiler/test/dotty/tools/dotc/profile/ChromeTraceTest.scala @@ -0,0 +1,106 @@ +package dotty.tools.dotc.profile + +import java.io.* + +import org.junit.Assert.* +import org.junit.* +import java.nio.file.Files +import java.nio.charset.StandardCharsets +import java.util.concurrent.locks.LockSupport +import scala.concurrent.duration.* + +class ChromeTraceTest: + private def testTraceOutputs(generator: ChromeTrace => Unit)(checkContent: PartialFunction[List[String], Unit]): Unit = { + val outfile = Files.createTempFile("trace-", ".json").nn + val tracer = new ChromeTrace(outfile) + try generator(tracer) + finally tracer.close() + val contentLines = scala.io.Source.fromFile(outfile.toFile().nn).getLines().toList + checkContent.applyOrElse( + contentLines, + content => fail(s"Invalid output lines: ${content.mkString(System.lineSeparator().nn)}") + ) + } + + @Test def traceCounterEvent(): Unit = testTraceOutputs{ tracer => + tracer.traceCounterEvent("foo", "counter1", 42, processWide = true) + tracer.traceCounterEvent("bar", "counter2", 21, processWide = false) + }{ + case """{"traceEvents":[""" :: + 
s"""{"cat":"scalac","name":"foo","ph":"C","tid":"${tid1}","pid":"${pid1}","ts":${ts1},"args":{"counter1":42}}""" :: + s""",{"cat":"scalac","name":"bar","ph":"C","tid":"${tid2}","pid":"${pid2}","ts":${ts2},"args":{"counter2":21}}""" :: + "]}" :: Nil => + assertEquals(tid1, tid2) + assertTrue(tid1.toIntOption.isDefined) + assertNotEquals(pid1, pid2) + assertTrue(pid1.toIntOption.isDefined) + assertEquals(s"$pid1-$tid1", pid2) + assertTrue(ts1.toLong < ts2.toLong) + } + + @Test def traceDurationEvent(): Unit = testTraceOutputs{ tracer => + tracer.traceDurationEvent(name = "name1", startNanos = 1000L, durationNanos = 2500L, tid = "this-thread") + tracer.traceDurationEvent(name = "name2", startNanos = 1000L, durationNanos = 5000L, tid = "this-thread", pidSuffix = "pidSuffix") + }{ + case """{"traceEvents":[""" :: + s"""{"cat":"scalac","name":"name1","ph":"X","tid":"this-thread","pid":"${pid1}","ts":1,"dur":2}""" :: + s""",{"cat":"scalac","name":"name2","ph":"X","tid":"this-thread","pid":"${pid2}","ts":1,"dur":5}""" :: + "]}" :: Nil => + assertTrue(pid1.toIntOption.isDefined) + assertEquals(s"$pid1-pidSuffix", pid2) + } + + @Test def traceDurationEvents(): Unit = { + val testStart = System.nanoTime() + testTraceOutputs{ tracer => + tracer.traceDurationEventStart(cat = "test1", name = "event1") + sleep(2.millis) + tracer.traceDurationEventStart(cat = "test2", name = "event2", colour = "RED", pidSuffix = "pid-suffix") + sleep(4.millis) + tracer.traceDurationEventEnd(cat = "test2", name = "event2") + sleep(8.millis) + tracer.traceDurationEventEnd(cat = "test1", name = "event1", colour = "RED", pidSuffix = "pid-suffix") + }{ + case """{"traceEvents":[""" :: + s"""{"cat":"test1","name":"event1","ph":"B","pid":"${pid1}","tid":"${tid1}","ts":${ts1}}""" :: + s""",{"cat":"test2","name":"event2","ph":"B","pid":"${pid2}","tid":"${tid2}","ts":${ts2},"cname":"RED"}""" :: + s""",{"cat":"test2","name":"event2","ph":"E","pid":"${pid3}","tid":"${tid3}","ts":${ts3}}""" :: + s""",{"cat":"test1","name":"event1","ph":"E","pid":"${pid4}","tid":"${tid4}","ts":${ts4},"cname":"RED"}""" :: + "]}" :: Nil => + val traceEnd = System.nanoTime() + assertTrue(tid1.toIntOption.isDefined) + assertEquals(pid1, pid3) + assertTrue(pid1.endsWith(s"-$tid1")) + assertEquals(pid2, pid4) + assertTrue(pid2.endsWith("-pid-suffix")) + List(tid1, tid2, tid3).foreach: tid => + assertEquals(tid4, tid) + List(pid1, pid2, pid3, pid4).foreach: pid => + assertTrue(pid.takeWhile(_ != '-').toIntOption.isDefined) + + List(ts1, ts2, ts3, ts4).map(_.toLong) match { + case all @ List(ts1, ts2, ts3, ts4) => + all.foreach: ts => + // Timestamps are presented using Epoch microsecondos + assertTrue(ts >= testStart / 1000) + assertTrue(ts <= traceEnd / 1000) + assertTrue(ts2 >= ts1 + 2.millis.toMicros) + assertTrue(ts3 >= ts2 + 4.millis.toMicros) + assertTrue(ts4 >= ts3 + 8.millis.toMicros) + case _ => fail("unreachable") + } + } + } + + private def sleep(duration: FiniteDuration): Unit = { + // A bit of additional precautions to ensure we don't continue execution to early + // Both LockSuppport and Thread.sleep can return earlier then expected (depending on OS) + var remainingNanos = duration.toNanos + val deadline = System.nanoTime() + remainingNanos + while + remainingNanos = deadline - System.nanoTime() + remainingNanos > 0 + do + val millis = NANOSECONDS.toMillis(remainingNanos) + Thread.sleep(millis, (remainingNanos % 1.millis.toNanos).toInt) + } diff --git a/compiler/test/dotty/tools/dotc/profile/FileUtilsTest.scala 
b/compiler/test/dotty/tools/dotc/profile/FileUtilsTest.scala new file mode 100644 index 000000000000..3253cff52057 --- /dev/null +++ b/compiler/test/dotty/tools/dotc/profile/FileUtilsTest.scala @@ -0,0 +1,91 @@ +package dotty.tools.dotc.profile + +import java.io.* + +import org.junit.Assert.* +import org.junit.* + +class FileUtilsTest { + + @Test def writeIsSame(): Unit = { + val fileTest = File.createTempFile("FileUtilsTest", "t1").nn + val fileExpected = File.createTempFile("FileUtilsTest", "t2").nn + + val sTest = FileUtils.newAsyncBufferedWriter(new FileWriter(fileTest), threadsafe = false) + val sExpected = new BufferedWriter(new FileWriter(fileExpected)) + + def writeBoth(s:String, asChars: Boolean) = { + if (asChars) { + sTest.write(s.toCharArray) + sExpected.write(s.toCharArray) + } else { + sTest.write(s) + sExpected.write(s) + } + } + + for (i <- 1 to 2000) { + writeBoth(s"line $i text;", asChars = true) + writeBoth(s"line $i chars", asChars = false) + sTest.newLine() + sExpected.newLine() + } + sTest.close() + sExpected.close() + + assertEquals(fileExpected.length(),fileTest.length()) + + val expIn = new BufferedReader(new FileReader(fileExpected)) + val testIn = new BufferedReader(new FileReader(fileTest)) + + var exp = expIn.readLine() + while (exp ne null) { + val actual = testIn.readLine() + assertEquals(exp, actual) + exp = expIn.readLine() + } + expIn.close() + testIn.close() + fileTest.delete() + fileExpected.delete() + } + + @Ignore + @Test def showPerformance(): Unit = { + //warmup + for (i <- 1 to 1000) { + writeIsSame() + } + + val fileTest = File.createTempFile("FileUtilsTest", "t1").nn + val fileExpected = File.createTempFile("FileUtilsTest", "t2").nn + + for (i <- 1 to 10) { + val sTest = FileUtils.newAsyncBufferedWriter(fileTest.toPath.nn) + val sExpected = new BufferedWriter(new FileWriter(fileExpected)) + + val t1 = System.nanoTime() + List.tabulate(10000) {i => + sTest.write(s"line $i text;") + sTest.newLine() + } + val t2 = System.nanoTime() + sTest.close() + val t3 = System.nanoTime() + List.tabulate(10000) {i => + sExpected.write(s"line $i text;") + sExpected.newLine() + } + val t4 = System.nanoTime() + sExpected.close() + + println(s"async took ${t2 - t1} ns") + println(s"buffered took ${t4 - t3} ns") + + fileTest.delete() + fileExpected.delete() + } + } + +} + diff --git a/compiler/test/dotty/tools/dotc/profile/TraceNameManglingTest.scala b/compiler/test/dotty/tools/dotc/profile/TraceNameManglingTest.scala new file mode 100644 index 000000000000..f1f570cc85d4 --- /dev/null +++ b/compiler/test/dotty/tools/dotc/profile/TraceNameManglingTest.scala @@ -0,0 +1,135 @@ +package dotty.tools.dotc.profile + +import org.junit.Assert.* +import org.junit.* + +import scala.annotation.tailrec +import dotty.tools.DottyTest +import dotty.tools.dotc.util.SourceFile +import dotty.tools.dotc.core.Contexts.FreshContext +import java.nio.file.Files +import java.util.Locale + +class TraceNameManglingTest extends DottyTest { + + override protected def initializeCtx(fc: FreshContext): Unit = { + super.initializeCtx(fc) + val tmpDir = Files.createTempDirectory("trace_name_mangling_test").nn + fc.setSetting(fc.settings.YprofileEnabled, true) + fc.setSetting( + fc.settings.YprofileTrace, + tmpDir.resolve("trace.json").nn.toAbsolutePath().toString() + ) + fc.setSetting( + fc.settings.YprofileDestination, + tmpDir.resolve("profiler.out").nn.toAbsolutePath().toString() + ) + } + + @Test def escapeBackslashes(): Unit = { + val isWindows = 
sys.props("os.name").toLowerCase(Locale.ROOT).nn.contains("windows") + // It is not possible to create a file with backslash in name on Windows + val filename = if isWindows then "test.scala" else "\\.scala" + checkTraceEvents( + """ + |class /\ : + | var /\ = ??? + |object /\{ + | def /\ = ??? + |}""".stripMargin, + filename = filename + )( + Set( + raw"class /\\", + raw"object /\\", + raw"method /\\", + raw"variable /\\", + raw"setter /\\_=" + ).map(TraceEvent("typecheck", _)) + ++ Set( + // See comment aboce for Windows limitations + TraceEvent("file", if isWindows then filename else "\\\\.scala") + ) + ) + } + + @Test def escapeDoubleQuotes(): Unit = { + val filename = "\"quoted\".scala" + checkTraceEvents( + """ + |class `"QuotedClass"`: + | var `"quotedVar"` = ??? + |object `"QuotedObject"` { + | def `"quotedMethod"` = ??? + |}""".stripMargin, + filename = filename + ): + Set( + raw"class \"QuotedClass\"", + raw"object \"QuotedObject\"", + raw"method \"quotedMethod\"", + raw"variable \"quotedVar\"" + ).map(TraceEvent("typecheck", _)) + ++ Set(TraceEvent("file", "\\\"quoted\\\".scala")) + } + @Test def escapeNonAscii(): Unit = { + val filename = "unic😀de.scala" + checkTraceEvents( + """ + |class ΩUnicodeClass: + | var `中文Var` = ??? + |object ΩUnicodeObject { + | def 中文Method = ??? + |}""".stripMargin, + filename = filename + ): + Set( + "class \\u03A9UnicodeClass", + "object \\u03A9UnicodeObject", + "method \\u4E2D\\u6587Method", + "variable \\u4E2D\\u6587Var" + ).map(TraceEvent("typecheck", _)) + ++ Set(TraceEvent("file", "unic\\uD83D\\uDE00de.scala")) + } + + case class TraceEvent(category: String, name: String) + private def compileWithTracer( + code: String, + filename: String, + afterPhase: String = "typer" + )(checkEvents: Seq[TraceEvent] => Unit) = { + val runCtx = locally: + val source = SourceFile.virtual(filename, code) + val c = compilerWithChecker(afterPhase) { (_, _) => () } + val run = c.newRun + run.compileSources(List(source)) + run.runContext + assert(!runCtx.reporter.hasErrors, "compilation failed") + val outfile = ctx.settings.YprofileTrace.value + checkEvents: + scala.io.Source + .fromFile(outfile) + .getLines() + .collect: + case s"""${_}"cat":"${category}","name":${name},"ph":${_}""" => + TraceEvent(category, name.stripPrefix("\"").stripSuffix("\"")) + .distinct.toSeq + } + + private def checkTraceEvents(code: String, filename: String = "test")(expected: Set[TraceEvent]): Unit = { + compileWithTracer(code, filename = filename, afterPhase = "typer"){ events => + val missing = expected.diff(events.toSet) + def showFound = events + .groupBy(_.category) + .collect: + case (category, events) + if expected.exists(_.category == category) => + s"- $category: [${events.map(_.name).mkString(", ")}]" + .mkString("\n") + assert( + missing.isEmpty, + s"""Missing ${missing.size} names [${missing.mkString(", ")}] in events, got:\n${showFound}""" + ) + } + } +} diff --git a/compiler/test/dotty/tools/dotc/typer/InstantiateModel.scala b/compiler/test/dotty/tools/dotc/typer/InstantiateModel.scala index b08062913dac..9841fcbafb5b 100644 --- a/compiler/test/dotty/tools/dotc/typer/InstantiateModel.scala +++ b/compiler/test/dotty/tools/dotc/typer/InstantiateModel.scala @@ -4,22 +4,16 @@ package typer // Modelling the decision in IsFullyDefined object InstantiateModel: - enum LB { case NN; case LL; case L1 }; import LB.* - enum UB { case AA; case UU; case U1 }; import UB.* - enum Var { case V; case NotV }; import Var.* - enum MSe { case M; case NotM }; import MSe.* - enum Bot { case 
Fail; case Ok; case Flip }; import Bot.* - enum Act { case Min; case Max; case ToMax; case Skip; case False }; import Act.* + enum LB { case NN; case LL; case L1 }; import LB.* + enum UB { case AA; case UU; case U1 }; import UB.* + enum Decision { case Min; case Max; case ToMax; case Skip; case Fail }; import Decision.* // NN/AA = Nothing/Any // LL/UU = the original bounds, on the type parameter // L1/U1 = the constrained bounds, on the type variable - // V = variance >= 0 ("non-contravariant") - // MSe = minimisedSelected - // Bot = IfBottom // ToMax = delayed maximisation, via addition to toMaximize // Skip = minimisedSelected "hold off instantiating" - // False = return false + // Fail = IfBottom.fail's bail option // there are 9 combinations: // # | LB | UB | d | // d = direction @@ -34,24 +28,27 @@ object InstantiateModel: // 8 | NN | UU | 0 | T <: UU // 9 | NN | AA | 0 | T - def decide(lb: LB, ub: UB, v: Var, bot: Bot, m: MSe): Act = (lb, ub) match + def instDecision(lb: LB, ub: UB, v: Int, ifBottom: IfBottom, min: Boolean) = (lb, ub) match case (L1, AA) => Min case (L1, UU) => Min case (LL, U1) => Max case (NN, U1) => Max - case (L1, U1) => if m==M || v==V then Min else ToMax - case (LL, UU) => if m==M || v==V then Min else ToMax - case (LL, AA) => if m==M || v==V then Min else ToMax - - case (NN, UU) => bot match - case _ if m==M => Max - //case Ok if v==V => Min // removed, i14218 fix - case Fail if v==V => False - case _ => ToMax - - case (NN, AA) => bot match - case _ if m==M => Skip - case Ok if v==V => Min - case Fail if v==V => False - case _ => ToMax + case (L1, U1) => if min then Min else pickVar(v, Min, Min, ToMax) + case (LL, UU) => if min then Min else pickVar(v, Min, Min, ToMax) + case (LL, AA) => if min then Min else pickVar(v, Min, Min, ToMax) + + case (NN, UU) => ifBottom match + case _ if min => Max + case IfBottom.ok => pickVar(v, Min, ToMax, ToMax) + case IfBottom.fail => pickVar(v, Fail, Fail, ToMax) + case IfBottom.flip => ToMax + + case (NN, AA) => ifBottom match + case _ if min => Skip + case IfBottom.ok => pickVar(v, Min, Min, ToMax) + case IfBottom.fail => pickVar(v, Fail, Fail, ToMax) + case IfBottom.flip => ToMax + + def pickVar[A](v: Int, cov: A, inv: A, con: A) = + if v > 0 then cov else if v == 0 then inv else con diff --git a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala index 67e63d0156a5..221eb8acb9de 100644 --- a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala +++ b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala @@ -456,6 +456,43 @@ class ReplCompilerTests extends ReplTest: assertTrue(last, last.startsWith("val res0: tpolecat.type = null")) assertTrue(last, last.endsWith("""// result of "res0.toString" is null""")) + @Test def `i21431 filter out best effort options`: Unit = + initially: + run(":settings -Ybest-effort -Ywith-best-effort-tasty") + .andThen: + run("0") // check for crash + val last = lines() + assertTrue(last(0), last(0) == ("Options incompatible with repl will be ignored: -Ybest-effort, -Ywith-best-effort-tasty")) + assertTrue(last(1), last(1) == ("val res0: Int = 0")) + + @Test def `i9879`: Unit = initially: + run { + """|opaque type A = Int; def getA: A = 0 + |object Wrapper { opaque type A = Int; def getA: A = 1 } + |val x = getA + |val y = Wrapper.getA""".stripMargin + } + val expected = List( + "def getA: A", + "// defined object Wrapper", + "val x: A = 0", + "val y: Wrapper.A = 1" + ) + assertEquals(expected, lines()) + + @Test def `i9879b`: Unit = 
initially: + run { + """|def test = + | type A = Int + | opaque type B = String + | object Wrapper { opaque type C = Int } + | ()""".stripMargin + } + val all = lines() + assertEquals(6, all.length) + assertTrue(all.head.startsWith("-- [E103] Syntax Error")) + assertTrue(all.exists(_.trim().startsWith("| Illegal start of statement: this modifier is not allowed here"))) + object ReplCompilerTests: private val pattern = Pattern.compile("\\r[\\n]?|\\n"); diff --git a/compiler/test/dotty/tools/repl/TabcompleteTests.scala b/compiler/test/dotty/tools/repl/TabcompleteTests.scala index f719752be353..95419824d9d1 100644 --- a/compiler/test/dotty/tools/repl/TabcompleteTests.scala +++ b/compiler/test/dotty/tools/repl/TabcompleteTests.scala @@ -9,7 +9,7 @@ import org.junit.Test class TabcompleteTests extends ReplTest { @Test def tabCompleteList = initially { - val comp = tabComplete("List.r") + val comp = tabComplete("List.ra") assertEquals(List("range"), comp.distinct) } @@ -112,7 +112,7 @@ class TabcompleteTests extends ReplTest { val comp = tabComplete("(null: AnyRef).") assertEquals( List("!=", "##", "->", "==", "asInstanceOf", "ensuring", "eq", "equals", "formatted", - "getClass", "hashCode", "isInstanceOf", "ne", "nn", "notify", "notifyAll", "synchronized", "toString", "wait", "→"), + "getClass", "hashCode", "isInstanceOf", "ne", "nn", "notify", "notifyAll", "runtimeChecked", "synchronized", "toString", "wait", "→"), comp.distinct.sorted) } @@ -163,6 +163,7 @@ class TabcompleteTests extends ReplTest { "nn", "notify", "notifyAll", + "runtimeChecked", "synchronized", "toString", "valueOf", diff --git a/compiler/test/dotty/tools/scripting/ClasspathTests.scala b/compiler/test/dotty/tools/scripting/ClasspathTests.scala index a946e509aeb3..0244e208af3c 100755 --- a/compiler/test/dotty/tools/scripting/ClasspathTests.scala +++ b/compiler/test/dotty/tools/scripting/ClasspathTests.scala @@ -67,7 +67,7 @@ class ClasspathTests: (hashbangJars.toSet -- packlibJars.toSet , "only in hashbang classpath") } // verify that the script hasbang classpath setting was effective at supplementing the classpath - // (a minimal subset of jars below dist*/target/pack/lib are always be in the classpath) + // (a minimal subset of jars below dist*/target/universal/stage/lib are always be in the classpath) val missingClasspathEntries = if hashbangClasspathJars.size != packlibJars.size then printf("packlib dir [%s]\n", packlibDir) printf("hashbangClasspathJars: %s\n", hashbangJars.map { _.relpath.norm }.mkString("\n ", "\n ", "")) diff --git a/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala b/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala index dd1cc04bb58a..771c3ba14af0 100644 --- a/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala +++ b/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala @@ -16,7 +16,7 @@ import scala.jdk.CollectionConverters.* /** * Common Code for supporting scripting tests. * To override the path to the bash executable, set TEST_BASH= - * To specify where `dist[*]/target/pack/bin` resides, set TEST_CWD= + * To specify where `dist[*]/target/universal/stage/bin` resides, set TEST_CWD= * Test scripts run in a bash env, so paths are converted to forward slash via .norm. 
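 * For example (hypothetical values; both variables are optional):
 *   TEST_BASH=/usr/local/bin/bash TEST_CWD=/path/to/scala3-checkout
 * and a Windows-style working directory such as C:\work\scala3 is passed on as C:/work/scala3 after .norm.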
*/ object ScriptTestEnv { @@ -48,7 +48,7 @@ object ScriptTestEnv { } lazy val nativePackDir: Option[String] = { - def nativeDir(os: String, arch: String) = Some(s"dist/$os-$arch/target/pack") + def nativeDir(os: String, arch: String) = Some(s"dist/$os-$arch/target/universal/stage") def nativeOs(os: String) = archNorm match case arch @ ("aarch64" | "x86_64") => nativeDir(os, arch) case _ => None @@ -61,7 +61,7 @@ object ScriptTestEnv { def jvmPackDir() = println("warning: unknown OS architecture combination, defaulting to JVM launcher.") - "dist/target/pack" + "dist/target/universal/stage" def packDir: String = nativePackDir.getOrElse(jvmPackDir()) @@ -302,7 +302,7 @@ object ScriptTestEnv { // use optional TEST_BASH if defined, otherwise, bash must be in PATH // envScalaHome is: - // dist[*]/target/pack, if present + // dist[*]/target/universal/stage, if present // else, SCALA_HOME if defined // else, not defined lazy val envScalaHome = diff --git a/dist/LICENSE.rtf b/dist/LICENSE.rtf new file mode 100644 index 000000000000..c2c7feee2921 --- /dev/null +++ b/dist/LICENSE.rtf @@ -0,0 +1,41 @@ +{\rtf1\ansi\ansicpg1252\deff0\nouicompat\deflang1033 +{\fonttbl{\f0\fswiss\fcharset0 Arial;}} +{\*\generator Riched20 10.0.18362}\viewkind4\uc1 +\pard\sa200\sl276\slmult1\b\f0\fs32 Apache License\par +\b0\fs28 Version 2.0, January 2004\par +\ul http://www.apache.org/licenses/\ulnone\par +\pard\sa200\sl276\slmult1\b\fs24 TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\par +\pard\sa200\sl276\slmult1\b0\fs20 1. Definitions.\par +\pard\sa200\sl276\slmult1 "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.\par + "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.\par + "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50\%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.\par + "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.\par + "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.\par + "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.\par + "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).\par + "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. 
For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.\par + "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."\par + "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.\par +\pard\sa200\sl276\slmult1\b 2. Grant of Copyright License. \b0 Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.\par +\pard\sa200\sl276\slmult1\b 3. Grant of Patent License. \b0 Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.\par +\pard\sa200\sl276\slmult1\b 4. Redistribution. 
\b0 You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:\par + (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and\par + (b) You must cause any modified files to carry prominent notices stating that You changed the files; and\par + (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and\par + (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.\par + You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.\par +\pard\sa200\sl276\slmult1\b 5. Submission of Contributions. \b0 Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.\par +\pard\sa200\sl276\slmult1\b 6. Trademarks. \b0 This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.\par +\pard\sa200\sl276\slmult1\b 7. Disclaimer of Warranty. \b0 Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.\par +\pard\sa200\sl276\slmult1\b 8. Limitation of Liability. 
\b0 In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.\par +\pard\sa200\sl276\slmult1\b 9. Accepting Warranty or Additional Liability. \b0 While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.\par +\pard\sa200\sl276\slmult1\qc\b END OF TERMS AND CONDITIONS\par +\pard\sa200\sl276\slmult1\b0\fs20 APPENDIX: How to apply the Apache License to your work.\par +\pard\sa200\sl276\slmult1 To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives.\par + Copyright [yyyy] [name of copyright owner]\par + Licensed under the Apache License, Version 2.0 (the "License");\par + you may not use this file except in compliance with the License.\par + You may obtain a copy of the License at\par +\pard\sa200\sl276\slmult1\li720 \ul http://www.apache.org/licenses/LICENSE-2.0\ulnone\par +\pard\sa200\sl276\slmult1 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.\par +} diff --git a/dist/bin/scala b/dist/bin/scala index 35efdfc38d96..81e11a4dffc2 100755 --- a/dist/bin/scala +++ b/dist/bin/scala @@ -26,8 +26,8 @@ if [ -z "${PROG_HOME-}" ] ; then cd "$saveddir" fi -source "$PROG_HOME/bin/common-shared" -source "$PROG_HOME/bin/cli-common-platform" +source "$PROG_HOME/libexec/common-shared" +source "$PROG_HOME/libexec/cli-common-platform" SCALA_VERSION="" # iterate through lines in VERSION_SRC @@ -63,6 +63,7 @@ eval "${SCALA_CLI_CMD_BASH[@]}" \ "--cli-default-scala-version \"$SCALA_VERSION\"" \ "-r \"$MVN_REPOSITORY\"" \ "${scala_args[@]}" + scala_exit_status=$? 
onExit diff --git a/dist/bin/scala.bat b/dist/bin/scala.bat index 7418909da263..bd5bf0b8dfbe 100644 --- a/dist/bin/scala.bat +++ b/dist/bin/scala.bat @@ -11,7 +11,7 @@ for %%f in ("%~dp0.") do ( @rem get rid of the trailing slash set "_PROG_HOME=!_PROG_HOME:~0,-1!" ) -call "%_PROG_HOME%\bin\common.bat" +call "%_PROG_HOME%\libexec\common.bat" if not %_EXITCODE%==0 goto end @rem ######################################################################### @@ -19,7 +19,7 @@ if not %_EXITCODE%==0 goto end call :setScalaOpts -call "%_PROG_HOME%\bin\cli-common-platform.bat" +call "%_PROG_HOME%\libexec\cli-common-platform.bat" @rem SCALA_CLI_CMD_WIN is an array, set in cli-common-platform.bat. @rem WE NEED TO PASS '--skip-cli-updates' for JVM launchers but we actually don't need it for native launchers diff --git a/dist/bin/scala_legacy b/dist/bin/scala_legacy index 18fc6d874e34..62755801819b 100755 --- a/dist/bin/scala_legacy +++ b/dist/bin/scala_legacy @@ -26,7 +26,7 @@ if [ -z "${PROG_HOME-}" ] ; then cd "$saveddir" fi -source "$PROG_HOME/bin/common" +source "$PROG_HOME/libexec/common" while [[ $# -gt 0 ]]; do case "$1" in diff --git a/dist/bin/scalac b/dist/bin/scalac index a527d9767749..ec91629a87ac 100755 --- a/dist/bin/scalac +++ b/dist/bin/scalac @@ -26,7 +26,7 @@ if [ -z "${PROG_HOME-}" ] ; then cd "$saveddir" fi -source "$PROG_HOME/bin/common" +source "$PROG_HOME/libexec/common" [ -z "$PROG_NAME" ] && PROG_NAME=$CompilerMain diff --git a/dist/bin/scalac.bat b/dist/bin/scalac.bat index e2898bdc2890..038c733f24c8 100644 --- a/dist/bin/scalac.bat +++ b/dist/bin/scalac.bat @@ -11,7 +11,7 @@ for %%f in ("%~dp0.") do ( @rem get rid of the trailing slash set "_PROG_HOME=!_PROG_HOME:~0,-1!" ) -call "%_PROG_HOME%\bin\common.bat" +call "%_PROG_HOME%\libexec\common.bat" if not %_EXITCODE%==0 goto end call :args %* diff --git a/dist/bin/scaladoc b/dist/bin/scaladoc index 0af5a2b55acb..e137176e819f 100755 --- a/dist/bin/scaladoc +++ b/dist/bin/scaladoc @@ -28,7 +28,7 @@ if [ -z "${PROG_HOME-}" ] ; then cd "$saveddir" fi -source "$PROG_HOME/bin/common" +source "$PROG_HOME/libexec/common" default_java_opts="-Xmx768m -Xms768m" withCompiler=true diff --git a/dist/bin/scaladoc.bat b/dist/bin/scaladoc.bat index b9e4820b006d..2be13aa628b1 100644 --- a/dist/bin/scaladoc.bat +++ b/dist/bin/scaladoc.bat @@ -11,7 +11,7 @@ for %%f in ("%~dp0.") do ( @rem get rid of the trailing slash set "_PROG_HOME=!_PROG_HOME:~0,-1!" 
) -call "%_PROG_HOME%\bin\common.bat" +call "%_PROG_HOME%\libexec\common.bat" if not %_EXITCODE%==0 goto end set _DEFAULT_JAVA_OPTS=-Xmx768m -Xms768m diff --git a/dist/libexec-native-overrides/cli-common-platform b/dist/libexec-native-overrides/cli-common-platform new file mode 100644 index 000000000000..246cbc58d5c7 --- /dev/null +++ b/dist/libexec-native-overrides/cli-common-platform @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +SCALA_CLI_CMD_BASH=("\"$PROG_HOME/libexec/scala-cli\"") diff --git a/dist/libexec-native-overrides/cli-common-platform.bat b/dist/libexec-native-overrides/cli-common-platform.bat new file mode 100644 index 000000000000..239ab40f1f28 --- /dev/null +++ b/dist/libexec-native-overrides/cli-common-platform.bat @@ -0,0 +1,3 @@ +@echo off + +set SCALA_CLI_CMD_WIN="%_PROG_HOME%\libexec\scala-cli.exe" diff --git a/dist/libexec/cli-common-platform b/dist/libexec/cli-common-platform new file mode 100644 index 000000000000..e56f5221dbf2 --- /dev/null +++ b/dist/libexec/cli-common-platform @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +SCALA_CLI_CMD_BASH=("\"$JAVACMD\"" "-jar \"$PROG_HOME/libexec/scala-cli.jar\"") diff --git a/dist/libexec/cli-common-platform.bat b/dist/libexec/cli-common-platform.bat new file mode 100644 index 000000000000..45b09f3460e6 --- /dev/null +++ b/dist/libexec/cli-common-platform.bat @@ -0,0 +1,5 @@ +@echo off + +@rem we need to escape % in the java command path, for some reason this doesnt work in common.bat +set "_JAVACMD=!_JAVACMD:%%=%%%%!" +set SCALA_CLI_CMD_WIN="%_JAVACMD%" "-jar" "%_PROG_HOME%\libexec\scala-cli.jar" diff --git a/dist/bin/common b/dist/libexec/common similarity index 95% rename from dist/bin/common rename to dist/libexec/common index 2de8bdf9f99a..28b5d66a9ed3 100644 --- a/dist/bin/common +++ b/dist/libexec/common @@ -1,6 +1,6 @@ #!/usr/bin/env bash -source "$PROG_HOME/bin/common-shared" +source "$PROG_HOME/libexec/common-shared" #/*-------------------------------------------------- # * The code below is for Dotty diff --git a/dist/libexec/common-shared b/dist/libexec/common-shared new file mode 100644 index 000000000000..fa1e62c09241 --- /dev/null +++ b/dist/libexec/common-shared @@ -0,0 +1,137 @@ +#!/usr/bin/env bash + +# Common options for both scala-cli and java based launchers + +#/*-------------------------------------------------------------------------- +# * Credits: This script is based on the script generated by sbt-pack. +# *--------------------------------------------------------------------------*/ + +# save terminal settings +saved_stty=$(stty -g 2>/dev/null) +# clear on error so we don't later try to restore them +if [[ ! $? ]]; then + saved_stty="" +fi + +# restore stty settings (echo in particular) +function restoreSttySettings() { + stty $saved_stty + saved_stty="" +} + +scala_exit_status=127 +function onExit() { + [[ "$saved_stty" != "" ]] && restoreSttySettings + exit $scala_exit_status +} + +# to reenable echo if we are interrupted before completing. +trap onExit INT TERM EXIT + +unset cygwin mingw msys darwin + +# COLUMNS is used together with command line option '-pageWidth'. 
+if command -v tput >/dev/null 2>&1; then + export COLUMNS="$(tput -Tdumb cols)" +fi + +case "`uname`" in + CYGWIN*) cygwin=true + ;; + MINGW*) mingw=true + ;; + MSYS*) msys=true + ;; + Darwin*) darwin=true + if [ -z "$JAVA_VERSION" ] ; then + JAVA_VERSION="CurrentJDK" + else + echo "Using Java version: $JAVA_VERSION" 1>&2 + fi + if [ -z "$JAVA_HOME" ] ; then + JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Versions/${JAVA_VERSION}/Home + fi + JAVACMD="`which java`" + ;; +esac + +unset CYGPATHCMD +if [[ ${cygwin-} || ${mingw-} || ${msys-} ]]; then + # cygpath is used by various windows shells: cygwin, git-sdk, gitbash, msys, etc. + CYGPATHCMD=`which cygpath 2>/dev/null` + case "$TERM" in + rxvt* | xterm* | cygwin*) + stty -icanon min 1 -echo + JAVA_OPTS="$JAVA_OPTS -Djline.terminal=unix" + ;; + esac +fi + +# Resolve JAVA_HOME from javac command path +if [ -z "$JAVA_HOME" ]; then + javaExecutable="`which javac`" + if [ -n "$javaExecutable" -a -f "$javaExecutable" -a ! "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then + # readlink(1) is not available as standard on Solaris 10. + readLink=`which readlink` + if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then + javaExecutable="`readlink -f \"$javaExecutable\"`" + javaHome="`dirname \"$javaExecutable\"`" + javaHome=`expr "$javaHome" : '\(.*\)/bin'` + JAVA_HOME="$javaHome" + export JAVA_HOME + fi + fi +fi + +if [ -z "${JAVACMD-}" ] ; then + if [ -n "${JAVA_HOME-}" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + else + JAVACMD="`which java`" + fi +fi + +if [ ! -x "$JAVACMD" ] ; then + echo "Error: JAVA_HOME is not defined correctly." + echo " We cannot execute $JAVACMD" + exit 1 +fi + +if [ -z "$JAVA_HOME" ] ; then + echo "Warning: JAVA_HOME environment variable is not set." +fi + +CLASSPATH_SUFFIX="" +# Path separator used in EXTRA_CLASSPATH +PSEP=":" +PROG_HOME_URI="file://$PROG_HOME" + +# translate paths to Windows-mixed format before running java +if [ -n "${CYGPATHCMD-}" ]; then + [ -n "${PROG_HOME-}" ] && + PROG_HOME=`"$CYGPATHCMD" -am "$PROG_HOME"` + PROG_HOME_URI="file:///$PROG_HOME" # Add extra root dir prefix + [ -n "$JAVA_HOME" ] && + JAVA_HOME=`"$CYGPATHCMD" -am "$JAVA_HOME"` + CLASSPATH_SUFFIX=";" + PSEP=";" +elif [[ ${mingw-} || ${msys-} ]]; then + # For Mingw / Msys, convert paths from UNIX format before anything is touched + [ -n "$PROG_HOME" ] && + PROG_HOME="`(cd "$PROG_HOME"; pwd -W | sed 's|/|\\\\|g')`" + PROG_HOME_URI="file:///$PROG_HOME" # Add extra root dir prefix + [ -n "$JAVA_HOME" ] && + JAVA_HOME="`(cd "$JAVA_HOME"; pwd -W | sed 's|/|\\\\|g')`" + CLASSPATH_SUFFIX=";" + PSEP=";" +fi + +declare -a scala_args +addScala () { + scala_args+=("'$1'") +} diff --git a/dist/bin/common.bat b/dist/libexec/common.bat similarity index 100% rename from dist/bin/common.bat rename to dist/libexec/common.bat diff --git a/docs/_docs/contributing/architecture/phases.md b/docs/_docs/contributing/architecture/phases.md index 8e63de04dadb..1421667922df 100644 --- a/docs/_docs/contributing/architecture/phases.md +++ b/docs/_docs/contributing/architecture/phases.md @@ -63,7 +63,7 @@ Finally are [staging], which ensures that quotes conform to the trees to embedded TASTy strings. 
### `transformPhases` -These phases are concerned with tranformation into lower-level forms +These phases are concerned with transformation into lower-level forms suitable for the runtime system, with two sub-groupings: - High-level transformations: All phases from [firstTransform] to [erasure]. Most of these phases transform syntax trees, expanding high-level constructs diff --git a/docs/_docs/contributing/architecture/types.md b/docs/_docs/contributing/architecture/types.md index ed8995c08643..bf96d33b6a3c 100644 --- a/docs/_docs/contributing/architecture/types.md +++ b/docs/_docs/contributing/architecture/types.md @@ -108,7 +108,7 @@ Ground Type has no meaningful underlying type, typically it is the type of metho definitions, but also union types and intersection types, along with utility types of the compiler. -Here's a diagram, serving as the mental model of the most important and distinct types available after the `typer` phase, derived from [dotty/tools/dotc/core/Types.scala][1]: +Here's a diagram, serving as the mental model of the most important and distinct types available after the `typer` phase, derived from [Types.scala]: ``` Type -+- proxy_type --+- NamedType --------+- TypeRef diff --git a/docs/_docs/contributing/getting-started.md b/docs/_docs/contributing/getting-started.md index 071cbeb0c0a2..b6e3e4fac00a 100644 --- a/docs/_docs/contributing/getting-started.md +++ b/docs/_docs/contributing/getting-started.md @@ -81,6 +81,12 @@ $ scalac tests/pos/HelloWorld.scala $ scala HelloWorld ``` +Note that the `scalac` and `scala` scripts have slow roundtrip times when working on the compiler codebase: whenever +any source file changes they invoke `sbt dist/pack` first. + +As an alternative, run the `buildQuick` task in sbt. It builds the compiler and writes its classpath to the `bin/.cp` +file, which enables the `scalacQ` and `scalaQ` scripts in the `bin/` folder. + ## Starting a REPL ```bash diff --git a/docs/_docs/contributing/setting-up-your-ide.md b/docs/_docs/contributing/setting-up-your-ide.md index a02c1dee63cb..3779ce1c3403 100644 --- a/docs/_docs/contributing/setting-up-your-ide.md +++ b/docs/_docs/contributing/setting-up-your-ide.md @@ -3,16 +3,15 @@ layout: doc-page title: Setting up your IDE --- -You can use either Metals with your favorite editor (VS Code, Neovim, Sublime) -or [IntelliJ IDEA for -Scala](https://www.jetbrains.com/help/idea/discover-intellij-idea-for-scala.html) +You can use either Metals with your favorite editor or +[IntelliJ IDEA for Scala](https://www.jetbrains.com/help/idea/discover-intellij-idea-for-scala.html) to work on the Scala 3 codebase. There are however a few additional considerations to take into account. ## Bootstrapping Projects -The sbt build for dotty implements bootstrapping within the same build, so each component has -two projects: +The sbt build for dotty implements bootstrapping within the same build, so each +component has two projects: ``` sbt:scala3> projects @@ -32,23 +31,24 @@ Normally this is fine, but if you're working on certain modules like `scaladoc` you'll actually want these modules exported. In order to achieve this you'll want to make sure you do two things: -1. You'll want to find and change the following under - `commonBootstrappedSettings` which is found in the - [`Build.scala`](https://github.com/scala/scala3/blob/main/project/Build.scala) - file. +1. 
You'll want to find and change the following above + `commonBootstrappedSettings` which is found in the + [`Build.scala`](https://github.com/scala/scala3/blob/main/project/Build.scala) + file. ```diff -- bspEnabled := false, -+ bspEnabled := true, +- val enableBspAllProjects = false, ++ val enableBspAllProjects = true, ``` -2. Set `sbt` as your build server instead of the default, Bloop. You can achieve - this with the `Metals: Switch Build Server` command and then choosing sbt. In - VSCode, this looks like this: +2. Run `sbt publishLocal` to get the needed presentation compiler jars. -![bsp-switch](https://user-images.githubusercontent.com/777748/241986423-0724ae74-0ebd-42ef-a1b7-4d17678992b4.png) +By default Metals uses the Bloop build server, however you can also use sbt +directly. You can achieve this with the `Metals: Switch Build Server` command +and then choosing sbt. In VSCode, this looks like this: +![bsp-switch](https://user-images.githubusercontent.com/777748/241986423-0724ae74-0ebd-42ef-a1b7-4d17678992b4.png) ### IntelliJ diff --git a/docs/_docs/internals/best-effort-compilation.md b/docs/_docs/internals/best-effort-compilation.md index 2fed951c3fd8..248203883a3c 100644 --- a/docs/_docs/internals/best-effort-compilation.md +++ b/docs/_docs/internals/best-effort-compilation.md @@ -11,6 +11,9 @@ It is composed of two experimental compiler options: * `-Ywith-best-effort-tasty` allows to read Best Effort TASTy files, and if such file is read from the classpath then limits compilation to the frontend phases +IMPORTANT: These options are meant to be used by an IDE and should never be used on the user side, in the project definition. +This is why they are hidden behind a private `-Y` option specifier. + This feature aims to force through to the typer phase regardless of errors, and then serialize tasty-like files obtained from the error trees into the best effort directory (`META-INF/best-effort`) and also serialize semanticdb as normal. diff --git a/docs/_docs/internals/gadts.md b/docs/_docs/internals/gadts.md index 58f511c946c3..9a96043fc299 100644 --- a/docs/_docs/internals/gadts.md +++ b/docs/_docs/internals/gadts.md @@ -70,7 +70,7 @@ Right now, we record GADT constraints for: - function/method type parameters - class type parameters -There is a branch on the way which will also record them for type members (so path-dependent types) and singleton types. It has a paper associated: "Implementing path-depepdent GADTs for Scala 3". +There is a branch on the way which will also record them for type members (so path-dependent types) and singleton types. It has a paper associated: "Implementing path-dependent GADTs for Scala 3". ### What are necessary relationships? Any examples? diff --git a/docs/_docs/internals/overall-structure.md b/docs/_docs/internals/overall-structure.md index a25c287e16c9..6dbe387a7cfb 100644 --- a/docs/_docs/internals/overall-structure.md +++ b/docs/_docs/internals/overall-structure.md @@ -160,7 +160,7 @@ phases. The current list of phases is specified in class [Compiler] as follows: new LetOverApply, // Lift blocks from receivers of applications new ArrayConstructors) :: // Intercept creation of (non-generic) arrays and intrinsify. List(new Erasure) :: // Rewrite types to JVM model, erasing all type parameters, abstract types and refinements.
- List(new ElimErasedValueType, // Expand erased value types to their underlying implmementation types + List(new ElimErasedValueType, // Expand erased value types to their underlying implementation types new PureStats, // Remove pure stats from blocks new VCElideAllocations, // Peep-hole optimization to eliminate unnecessary value class allocations new ArrayApply, // Optimize `scala.Array.apply([....])` and `scala.Array.apply(..., [....])` into `[...]` diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index dd4a3af403ab..d0074bb503c2 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -59,9 +59,10 @@ idrest ::= {letter | digit} [‘_’ op] quoteId ::= ‘'’ alphaid spliceId ::= ‘$’ alphaid ; -integerLiteral ::= (decimalNumeral | hexNumeral) [‘L’ | ‘l’] +integerLiteral ::= (decimalNumeral | hexNumeral | binaryNumeral) [‘L’ | ‘l’] decimalNumeral ::= ‘0’ | digit [{digit | ‘_’} digit] hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit [{hexDigit | ‘_’} hexDigit] +binaryNumeral ::= ‘0’ (‘b’ | ‘B’) binaryDigit [{binaryDigit | ‘_’} binaryDigit] floatingPointLiteral ::= [decimalNumeral] ‘.’ digit [{digit | ‘_’} digit] [exponentPart] [floatType] @@ -176,12 +177,12 @@ ClassQualifier ::= ‘[’ id ‘]’ ### Types ```ebnf Type ::= FunType - | HkTypeParamClause ‘=>>’ Type LambdaTypeTree(ps, t) + | TypTypeParamClause ‘=>>’ Type LambdaTypeTree(ps, t) | FunParamClause ‘=>>’ Type TermLambdaTypeTree(ps, t) | MatchType | InfixType FunType ::= FunTypeArgs (‘=>’ | ‘?=>’) Type Function(ts, t) | FunctionWithMods(ts, t, mods, erasedParams) - | HKTypeParamClause '=>' Type PolyFunction(ps, t) + | TypTypeParamClause '=>' Type PolyFunction(ps, t) FunTypeArgs ::= InfixType | ‘(’ [ FunArgTypes ] ‘)’ | FunParamClause @@ -222,7 +223,9 @@ TypeArgs ::= ‘[’ Types ‘]’ Refinement ::= :<<< [RefineDcl] {semi [RefineDcl]} >>> ds TypeBounds ::= [‘>:’ Type] [‘<:’ Type] TypeBoundsTree(lo, hi) TypeAndCtxBounds ::= TypeBounds [‘:’ ContextBounds] ContextBounds(typeBounds, tps) -ContextBounds ::= ContextBound | '{' ContextBound {',' ContextBound} '}' +ContextBounds ::= ContextBound + | ContextBound `:` ContextBounds -- to be deprecated + | '{' ContextBound {',' ContextBound} '}' ContextBound ::= Type ['as' id] Types ::= Type {‘,’ Type} NamesAndTypes ::= NameAndType {‘,’ NameAndType} @@ -232,10 +235,10 @@ NameAndType ::= id ':' Type ### Expressions ```ebnf Expr ::= FunParams (‘=>’ | ‘?=>’) Expr Function(args, expr), Function(ValDef([implicit], id, TypeTree(), EmptyTree), expr) - | HkTypeParamClause ‘=>’ Expr PolyFunction(ts, expr) + | TypTypeParamClause ‘=>’ Expr PolyFunction(ts, expr) | Expr1 BlockResult ::= FunParams (‘=>’ | ‘?=>’) Block - | HkTypeParamClause ‘=>’ Block + | TypTypeParamClause ‘=>’ Block | Expr1 FunParams ::= Bindings | id @@ -285,7 +288,7 @@ SimpleExpr ::= SimpleRef ColonArgument ::= colon [LambdaStart] indent (CaseClauses | Block) outdent LambdaStart ::= FunParams (‘=>’ | ‘?=>’) - | HkTypeParamClause ‘=>’ + | TypTypeParamClause ‘=>’ Quoted ::= ‘'’ ‘{’ Block ‘}’ | ‘'’ ‘[’ TypeBlock ‘]’ ExprSplice ::= spliceId -- if inside quoted block @@ -363,11 +366,14 @@ ClsTypeParamClause::= ‘[’ ClsTypeParam {‘,’ ClsTypeParam} ‘]’ ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] TypeDef(Modifiers, name, tparams, bounds) id [HkTypeParamClause] TypeAndCtxBounds Bound(below, above, context) +DefTypeParamClause::= [nl] ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ +DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeAndCtxBounds + TypTypeParamClause::= ‘[’ TypTypeParam {‘,’ TypTypeParam} ‘]’ -TypTypeParam 
::= {Annotation} id [HkTypeParamClause] TypeBounds +TypTypeParam ::= {Annotation} (id | ‘_’) [HkTypeParamClause] TypeBounds HkTypeParamClause ::= ‘[’ HkTypeParam {‘,’ HkTypeParam} ‘]’ -HkTypeParam ::= {Annotation} [‘+’ | ‘-’] (id [HkTypeParamClause] | ‘_’) +HkTypeParam ::= {Annotation} [‘+’ | ‘-’] (id | ‘_’) [HkTypeParamClause] TypeBounds ClsParamClauses ::= {ClsParamClause} [[nl] ‘(’ [‘implicit’] ClsParams ‘)’] @@ -384,9 +390,6 @@ DefParamClause ::= DefTypeParamClause TypelessClauses ::= TypelessClause {TypelessClause} TypelessClause ::= DefTermParamClause | UsingParamClause - -DefTypeParamClause::= [nl] ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ -DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeAndCtxBounds DefTermParamClause::= [nl] ‘(’ [DefTermParams] ‘)’ UsingParamClause ::= [nl] ‘(’ ‘using’ (DefTermParams | FunArgTypes) ‘)’ DefImplicitClause ::= [nl] ‘(’ ‘implicit’ DefTermParams ‘)’ @@ -457,25 +460,35 @@ PatDef ::= ids [‘:’ Type] [‘=’ Expr] DefDef ::= DefSig [‘:’ Type] [‘=’ Expr] DefDef(_, name, paramss, tpe, expr) | ‘this’ TypelessClauses [DefImplicitClause] ‘=’ ConstrExpr DefDef(_, , vparamss, EmptyTree, expr | Block) DefSig ::= id [DefParamClauses] [DefImplicitClause] -TypeDef ::= id [TypeParamClause] {FunParamClause} TypeAndCtxBounds TypeDefTree(_, name, tparams, bound +TypeDef ::= id [HkTypeParamClause] {FunParamClause} TypeAndCtxBounds TypeDefTree(_, name, tparams, bound [‘=’ Type] TmplDef ::= ([‘case’] ‘class’ | ‘trait’) ClassDef | [‘case’] ‘object’ ObjectDef | ‘enum’ EnumDef - | ‘given’ GivenDef + | ‘given’ (GivenDef | OldGivenDef) ClassDef ::= id ClassConstr [Template] ClassDef(mods, name, tparams, templ) ClassConstr ::= [ClsTypeParamClause] [ConstrMods] ClsParamClauses with DefDef(_, , Nil, vparamss, EmptyTree, EmptyTree) as first stat ConstrMods ::= {Annotation} [AccessModifier] ObjectDef ::= id [Template] ModuleDef(mods, name, template) // no constructor EnumDef ::= id ClassConstr InheritClauses EnumBody -GivenDef ::= [GivenConditional '=>'] GivenSig -GivenConditional ::= [DefTypeParamClause | UsingParamClause] {UsingParamClause} -GivenSig ::= GivenType ['as' id] ([‘=’ Expr] | TemplateBody) - | ConstrApps ['as' id] TemplateBody +GivenDef ::= [id ':'] GivenSig +GivenSig ::= GivenImpl + | '(' ')' '=>' GivenImpl + | GivenConditional '=>' GivenSig +GivenImpl ::= GivenType ([‘=’ Expr] | TemplateBody) + | ConstrApps TemplateBody +GivenConditional ::= DefTypeParamClause + | DefTermParamClause + | '(' FunArgTypes ')' + | GivenType GivenType ::= AnnotType1 {id [nl] AnnotType1} +OldGivenDef ::= [OldGivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) -- syntax up to Scala 3.5, to be deprecated in the future +OldGivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present +StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] + Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} ‘(’ DefTermParam ‘)’ {UsingParamClause} ExtMethods ExtMethods ::= ExtMethod | [nl] <<< ExtMethod {semi ExtMethod} >>> diff --git a/docs/_docs/reference/contextual/by-name-context-parameters.md b/docs/_docs/reference/contextual/by-name-context-parameters.md index 7c517abe9406..e903ac2642c3 100644 --- a/docs/_docs/reference/contextual/by-name-context-parameters.md +++ b/docs/_docs/reference/contextual/by-name-context-parameters.md @@ -12,7 +12,7 @@ trait Codec[T]: given intCodec: Codec[Int] = ??? 
-given optionCodec[T](using ev: => Codec[T]): Codec[Option[T]] with +given optionCodec: [T] => (ev: => Codec[T]) => Codec[Option[T]]: def write(xo: Option[T]) = xo match case Some(x) => ev.write(x) case None => diff --git a/docs/_docs/reference/contextual/context-bounds.md b/docs/_docs/reference/contextual/context-bounds.md index 11d57c8cbd52..60357b3f098d 100644 --- a/docs/_docs/reference/contextual/context-bounds.md +++ b/docs/_docs/reference/contextual/context-bounds.md @@ -4,50 +4,208 @@ title: "Context Bounds" nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/context-bounds.html --- -A context bound is a shorthand for expressing the common pattern of a context parameter that depends on a type parameter. Using a context bound, the `maximum` function of the last section can be written like this: +A context bound is a shorthand for expressing the common pattern of a context parameter that depends on a type parameter. These patterns are commonplace when modelling type classes in Scala. Using a context bound, the `maximum` function of the [last section](./using-clauses.md) can be written like this: ```scala def maximum[T: Ord](xs: List[T]): T = xs.reduceLeft(max) ``` -A bound like `: Ord` on a type parameter `T` of a method or class indicates a context parameter `using Ord[T]`. The context parameter(s) generated from context bounds -are added as follows: +A bound like `: Ord` on a type parameter `T` of a method or class indicates a context parameter `using Ord[T]`, which is added to the signature of the enclosing method. The generated parameter is called a _witness_ for the context bound. - - If the method parameters end in an implicit parameter list or using clause, - context parameters are added in front of that list. - - Otherwise they are added as a separate parameter clause at the end. - -Example: +For instance the `maximum` method above expands to +```scala +def maximum[T](xs: List[T])(using Ord[T]): T = ... +``` +Context bounds can be combined with subtype bounds. If both are present, subtype bounds come first, e.g. ```scala -def f[T: C1 : C2, U: C3](x: T)(using y: U, z: V): R +def f[T <: B : C](x: T): R = ... ``` -would expand to +## Named Context Bounds + +A context bound can be given a name with an `as` clause. For example, assume the following trait definitions. +```scala + trait SemiGroup[A]: + extension (x: A) def combine(y: A): A + + trait Monoid[A] extends SemiGroup[A]: + def unit: A +``` +We can write `reduce` function over lists of monoid instances like this: +```scala + def reduce[A: Monoid as m](xs: List[A]): A = + xs.foldLeft(m.unit)(_ `combine` _) +``` +We use `as x` after the type of a context bound to bind the instance to `x`. This is analogous to import renaming, which also introduces a new name for something that comes before. +In a context bound with a naming clause the witness parameter carries the given name. For instance the expanded signature of `reduce` would be ```scala -def f[T, U](x: T)(using _: C1[T], _: C2[T], _: C3[U], y: U, z: V): R + def reduce[A](xs: List[A])(using m: Monoid[A]): A ``` +Since the context parameter now has a name, it can be referred +to in the body of `reduce`. An example is the `m.unit` reference in the definition above. -Context bounds can be combined with subtype bounds. If both are present, subtype bounds come first, e.g. +If the context bound does not carry an `as` clause, the generated witness parameter gets a compiler-synthesized name. 
However, a [currently experimental +language extension](../experimental/default-names-context-bounds.md) would in this case give the context parameter the same name as the bound type parameter. +Named context bounds were introduced in Scala 3.6. + +## Aggregate Context Bounds + +A type parameter can have several context bounds. If there are multiple bounds, they are written inside braces `{...}`. Example: ```scala + trait A: + def showMax[X : {Ord, Show}](x: X, y: X): String + class B extends A: + def showMax[X : {Ord as ordering, Show as show}](x: X, y: X): String = + show.asString(ordering.max(x, y)) ``` -## Migration +This syntax is valid from Scala 3.6. The previous syntax used +chains of `:` clauses, as in `[X : Ord : Show]`. This syntax is still available but will be deprecated and removed over time. + +## Placement of Generated Context Parameters -To ease migration, context bounds in Dotty map in Scala 3.0 to old-style implicit parameters -for which arguments can be passed either with a `(using ...)` clause or with a normal application. From Scala 3.1 on, they will map to context parameters instead, as is described above. +The witness context parameter(s) generated from context bounds are added as follows: -If the source version is `future-migration`, any pairing of an evidence + 1. If one of the bounds is referred to by its name in a subsequent parameter clause, the context bounds are mapped to a using clause immediately preceding the first such parameter clause. + 2. Otherwise, if the last parameter clause is a using (or implicit) clause, merge all parameters arising from context bounds in front of that clause, creating a single using clause. + 3. Otherwise, the parameters arising from context bounds form a new using clause at the end. + +Rules (2) and (3) match Scala 2's rules. Rule (1) is new but since context bounds so far could not be referred to, it does not apply to legacy code. Therefore, binary compatibility with Scala 2 and earlier Scala 3 versions is maintained. + +**Examples:** + + 1. By rule 3, + ```scala + def f[T: {C1, C2}](x: T): R + ``` + expands to + ```scala + def f[T](x: T)(using C1[T], C2[T]): R + ``` + Equally by rule 3, + ```scala + def f[T: {C1 as c1, C2 as c2}](x: T): R + ``` + expands to + ```scala + def f[T](x: T)(using c1: C1[T], c2: C2[T]): R + ``` + + 2. By rule 2, + ```scala + def f[T: {C1, C2}, U: C3](x: T)(using y: U, z: V): R + ``` + expands to + ```scala + def f[T, U](x: T)(using _: C1[T], _: C2[T], _: C3[U], y: U, z: V): R + ``` + The same expansion occurs if `y` and `z` are Scala 2 style `implicit` parameters. + 3. Assume the following trait definition: + ```scala + trait Parser[P]: + type Input + type Result + ``` + Here is a method `run` that runs a parser on an input of the required type: + ```scala + def run[P : Parser as p](in: p.Input): p.Result + ``` + By rule 1, this method definition is expanded to: + ```scala + def run[P](using p: Parser[P])(in: p.Input): p.Result + ``` + Note that the `using` clause is placed in front of the explicit parameter clause `(in: p.Input)` so that + the type `p.Input` can legally refer to the context parameter `p`. + +### Migration + +To ease migration, context bounds map in Scala 3.0 - 3.5 to old-style implicit parameters +for which arguments can be passed either with a `(using ...)` clause or with a normal application. From Scala 3.6 on, they will map to context parameters instead, as is described above.
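As a hedged illustration of this change (an editor's sketch, not part of the patch: `Ord`, `IntOrd`, and `maximum` are made-up names, not taken from the diff), here is how a call site that used to pass the witness as a normal argument must switch to a `(using ...)` clause:

```scala
// Sketch only: illustrates the Scala 3.5 -> 3.6 change for context bounds.
trait Ord[T]:
  def compare(x: T, y: T): Int

object IntOrd extends Ord[Int]:
  def compare(x: Int, y: Int) = Integer.compare(x, y)

// A method with a context bound; from Scala 3.6 the bound expands to a
// context (using) parameter rather than an old-style implicit parameter.
def maximum[T: Ord](xs: List[T]): T =
  xs.reduceLeft((a, b) => if summon[Ord[T]].compare(a, b) >= 0 then a else b)

@main def demo(): Unit =
  // Scala 3.0-3.5 accepted passing the witness like a normal argument:
  //   maximum(List(3, 1, 2))(IntOrd)
  // From Scala 3.6 a `using` clause is required at the call site:
  println(maximum(List(3, 1, 2))(using IntOrd))
```

Per the following paragraph, compiling the old call-site style under the migration source version emits a warning, and `-rewrite` can update such call sites automatically.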
+ +If the source version is `3.6-migration`, any pairing of an evidence context parameter stemming from a context bound with a normal argument will give a migration warning. The warning indicates that a `(using ...)` clause is needed instead. The rewrite can be done automatically under `-rewrite`. +## Context Bounds for Polymorphic Functions + +From Scala 3.6 on, context bounds can also be used in polymorphic function types and polymorphic function literals: + +```scala +type Comparer = [X: Ord] => (x: X, y: X) => Boolean +val less: Comparer = [X: Ord as ord] => (x: X, y: X) => + ord.compare(x, y) < 0 +``` + +The expansion of such context bounds is analogous to the expansion in method types, except that instead of adding a using clause in a method, we insert a [context function type](./context-functions.md). + +For instance, the `type` and `val` definitions above would expand to +```scala +type Comparer = [X] => (x: X, y: X) => Ord[X] ?=> Boolean +val less: Comparer = [X] => (x: X, y: X) => (ord: Ord[X]) ?=> + ord.compare(x, y) < 0 +``` + +The expansion of using clauses does look inside alias types. For instance, +here is a variation of the previous example that uses a parameterized type alias: +```scala +type Cmp[X] = (x: X, y: X) => Boolean +type Comparer2 = [X: Ord] => Cmp[X] +``` +The expansion of the right hand side of `Comparer2` expands the `Cmp[X]` alias +and then inserts the context function at the same place as what's done for `Comparer`: +```scala + [X] => (x: X, y: X) => Ord[X] ?=> Boolean +``` + +### Context Bounds for Type Members + +From Scala 3.6 on, context bounds can not only be used for type parameters but also for abstract type members. + +**Example**: + +```scala + class Collection: + type Element: Ord +``` + +These context bounds have to expand differently from context bounds for type parameters since there is no parameter list to accommodate any generated witnesses. Instead, context bounds for abstract types map to +[deferred givens](./deferred-givens.md). + +For instance, the `Collection` class above expands to: +```scala + class Collection: + type Element + given Ord[Element] = deferred +``` +As is explained in the [section on deferred givens](./deferred-givens.md), `deferred` is a special name defined in the `scala.compiletime` package. + + ## Syntax +The new syntax of context bounds is as follows: + ```ebnf -TypeParamBounds ::= [SubtypeBounds] {ContextBound} -ContextBound ::= ‘:’ Type +TypeParamBounds ::= TypeAndCtxBounds +TypeAndCtxBounds ::= TypeBounds [‘:’ ContextBounds] +ContextBounds ::= ContextBound + | '{' ContextBound {',' ContextBound} '}' +ContextBound ::= Type ['as' id] ``` + +The syntax of function types and function literals +is generalized as follows to allow context bounds for generic type parameters.
+ +```ebnf +FunType ::= FunTypeArgs ('=>' | '?=>') Type + | DefTypeParamClause '=>' Type +FunExpr ::= FunParams ('=>' | '?=>') Expr + | DefTypeParamClause '=>' Expr +``` +The syntax for abstract type members is generalized as follows to allow context bounds: + +```scala +TypeDef ::= id [TypeParamClause] TypeAndCtxBounds +``` \ No newline at end of file diff --git a/docs/_docs/reference/contextual/conversions.md b/docs/_docs/reference/contextual/conversions.md index 1ce8d42074e7..cb063b949a71 100644 --- a/docs/_docs/reference/contextual/conversions.md +++ b/docs/_docs/reference/contextual/conversions.md @@ -12,7 +12,7 @@ abstract class Conversion[-T, +U] extends (T => U): ``` For example, here is an implicit conversion from `String` to `Token`: ```scala -given Conversion[String, Token] with +given Conversion[String, Token]: def apply(str: String): Token = new KeyWord(str) ``` Using an alias this can be expressed more concisely as: diff --git a/docs/_docs/reference/contextual/deferred-givens.md b/docs/_docs/reference/contextual/deferred-givens.md new file mode 100644 index 000000000000..e63e26858d29 --- /dev/null +++ b/docs/_docs/reference/contextual/deferred-givens.md @@ -0,0 +1,57 @@ +--- +layout: doc-page +title: "Deferred Givens" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/deferred-givens.html +--- + +Scala 3.6 introduces a new way to implement a given definition in a trait like this: +```scala +given T = deferred +``` +Such givens can be implemented automatically in subclasses. `deferred` is a new method in the `scala.compiletime` package, which can appear only as the right hand side of a given defined in a trait. Any class implementing that trait will provide an implementation of this given. If a definition is not provided explicitly, it will be synthesized by searching for a given of type `T` in the scope of the inheriting class. Specifically, the scope in which this given will be searched is the environment of that class augmented by its parameters but not containing its members (since that would lead to recursive resolutions). If an implementation _is_ provided explicitly, it counts as an override of a concrete definition and needs an `override` modifier. + +Deferred givens allow a clean implementation of context bounds in traits, +as in the following example: +```scala +trait Sorted: + type Element : Ord + +class SortedSet[A : Ord as ord] extends Sorted: + type Element = A +``` +The compiler expands this to the following implementation. +```scala +trait Sorted: + type Element + given Ord[Element] = compiletime.deferred + +class SortedSet[A](using ord: Ord[A]) extends Sorted: + type Element = A + override given Ord[Element] = ord +``` + +The using clause in class `SortedSet` provides an implementation for the deferred given in trait `Sorted`. + +One can also provide an explicit implementation of a deferred given, as in the following example: + +```scala +class SortedString[A] extends Sorted: + type Element = String + override given Ord[String] = ... +``` + +Note that the implementing given needs an `override` modifier since the `deferred` given in class `Sorted` counts as a concrete (i.e. not abstract) definition. In a sense, `deferred` on the right-hand side in `Sorted` is like a (magic, compiler-supported) macro, with the peculiarity that the macro's implementation also affects subclasses. + +## Abstract Givens + +A given may also be an abstract member, with the restriction that it must have an explicit name. 
Example: + +```scala +trait HasOrd[T]: + given ord: Ord[T] +``` +An abstract given has the form `given name: Type` without a right-hand side or arguments to the type. + +Since Scala 3.6, abstract givens are made redundant by deferred givens. Deferred givens have better ergonomics, since they get naturally implemented in inheriting classes, so there is no longer any need for boilerplate to fill in definitions of abstract givens. + +It is therefore recommended that software architectures relying on abstract givens be migrated to use deferred givens instead. Abstract givens are still supported in Scala 3.6, but will likely be deprecated and phased out over time. diff --git a/docs/_docs/reference/contextual/derivation-macro.md b/docs/_docs/reference/contextual/derivation-macro.md index 4b8dcffec846..6540a5d68a4b 100644 --- a/docs/_docs/reference/contextual/derivation-macro.md +++ b/docs/_docs/reference/contextual/derivation-macro.md @@ -135,10 +135,10 @@ trait Eq[T]: def eqv(x: T, y: T): Boolean object Eq: - given Eq[String] with + given Eq[String]: def eqv(x: String, y: String) = x == y - given Eq[Int] with + given Eq[Int]: def eqv(x: Int, y: Int) = x == y def eqProduct[T](body: (T, T) => Boolean): Eq[T] = diff --git a/docs/_docs/reference/contextual/derivation.md b/docs/_docs/reference/contextual/derivation.md index ed0e005c1bd4..cbf736d88034 100644 --- a/docs/_docs/reference/contextual/derivation.md +++ b/docs/_docs/reference/contextual/derivation.md @@ -19,9 +19,9 @@ The `derives` clause generates the following given instances for the `Eq`, `Orde companion object of `Tree`: ```scala -given [T: Eq] : Eq[Tree[T]] = Eq.derived -given [T: Ordering] : Ordering[Tree[T]] = Ordering.derived -given [T: Show] : Show[Tree[T]] = Show.derived +given [T: Eq] => Eq[Tree[T]] = Eq.derived +given [T: Ordering] => Ordering[Tree[T]] = Ordering.derived +given [T: Show] => Show[Tree[T]] = Show.derived ``` We say that `Tree` is the _deriving type_ and that the `Eq`, `Ordering` and `Show` instances are _derived instances_. @@ -29,7 +29,7 @@ We say that `Tree` is the _deriving type_ and that the `Eq`, `Ordering` and `Sho **Note:** `derived` can be used manually, this is useful when you do not have control over the definition. For example we can implement `Ordering` for `Option`s like so: ```scala -given [T: Ordering]: Ordering[Option[T]] = Ordering.derived +given [T: Ordering] => Ordering[Option[T]] = Ordering.derived ``` It is discouraged to directly refer to the `derived` member if you can use a `derives` clause instead. @@ -44,7 +44,7 @@ For a class/trait/object/enum `DerivingType[T_1, ..., T_N] derives TC`, a derive The general "shape" of the derived instance is as follows: ```scala -given [...](using ...): TC[ ... DerivingType[...] ... ] = TC.derived +given [...] => (...) => TC[ ... DerivingType[...] ... ] = TC.derived ``` `TC.derived` should be an expression that conforms to the expected type on the left, potentially elaborated using term and/or type inference. @@ -62,7 +62,7 @@ There are two further cases depending on the kinds of arguments: The generated instance is then: ```scala -given [T_1: TC, ..., T_N: TC]: TC[DerivingType[T_1, ..., T_N]] = TC.derived +given [T_1: TC, ..., T_N: TC] => TC[DerivingType[T_1, ..., T_N]] = TC.derived ``` This is the most common case, and is the one that was highlighted in the introduction. @@ -92,7 +92,7 @@ This section covers cases where you can pair arguments of `F` and `DerivingType` The general shape will then be: ```scala -given [...]: TC[ [...] =>> DerivingType[...] 
] = TC.derived +given [...] => TC[ [...] =>> DerivingType[...] ] = TC.derived ``` Where of course `TC` and `DerivingType` are applied to types of the correct kind. @@ -114,7 +114,7 @@ given TC[ [A_1, ..., A_K] =>> DerivingType ] = TC.derived If `F` takes fewer arguments than `DerivingType` (`K < N`), we fill in the remaining leftmost slots with type parameters of the given: ```scala -given [T_1, ... T_(N-K)]: TC[[A_1, ..., A_K] =>> DerivingType[T_1, ... T_(N-K), A_1, ..., A_K]] = TC.derived +given [T_1, ... T_(N-K)] => TC[[A_1, ..., A_K] =>> DerivingType[T_1, ... T_(N-K), A_1, ..., A_K]] = TC.derived ``` ### `TC` is the `CanEqual` type class @@ -142,7 +142,7 @@ generates the following given instance: ```scala object MyClass: ... - given [A_L, A_R, G_L[_], G_R[_]](using CanEqual[A_L, A_R]): CanEqual[MyClass[A_L, G_L], MyClass[A_R, G_R]] = CanEqual.derived + given [A_L, A_R, G_L[_], G_R[_]] => CanEqual[A_L, A_R] => CanEqual[MyClass[A_L, G_L], MyClass[A_R, G_R]] = CanEqual.derived ``` ### `TC` is not valid for automatic derivation @@ -419,7 +419,7 @@ trait Eq[T]: def eqv(x: T, y: T): Boolean object Eq: - given Eq[Int] with + given Eq[Int]: def eqv(x: Int, y: Int) = x == y def check(x: Any, y: Any, elem: Eq[?]): Boolean = @@ -468,7 +468,7 @@ In this case the code that is generated by the inline expansion for the derived following, after a little polishing, ```scala -given derived$Eq[T](using eqT: Eq[T]): Eq[Lst[T]] = +given derived$Eq[T] => (eqT: Eq[T]) => Eq[Lst[T]] = eqSum(summon[Mirror.Of[Lst[T]]], {/* cached lazily */ List( eqProduct(summon[Mirror.Of[Cns[T]]], {/* cached lazily */ @@ -491,12 +491,12 @@ As a third example, using a higher-level library such as Shapeless, the type cla `derived` method as, ```scala -given eqSum[A](using inst: => K0.CoproductInstances[Eq, A]): Eq[A] with +given eqSum: [A] => (inst: => K0.CoproductInstances[Eq, A]) => Eq[A]: def eqv(x: A, y: A): Boolean = inst.fold2(x, y)(false)( [t] => (eqt: Eq[t], t0: t, t1: t) => eqt.eqv(t0, t1) ) -given eqProduct[A](using inst: => K0.ProductInstances[Eq, A]): Eq[A] with +given eqProduct: [A] => (inst: => K0.ProductInstances[Eq, A]) => Eq[A]: def eqv(x: A, y: A): Boolean = inst.foldLeft2(x, y)(true: Boolean)( [t] => (acc: Boolean, eqt: Eq[t], t0: t, t1: t) => Complete(!eqt.eqv(t0, t1))(false)(true) diff --git a/docs/_docs/reference/contextual/extension-methods.md b/docs/_docs/reference/contextual/extension-methods.md index 8b9a3df5b84c..2aaa6a90e536 100644 --- a/docs/_docs/reference/contextual/extension-methods.md +++ b/docs/_docs/reference/contextual/extension-methods.md @@ -225,7 +225,7 @@ object List: extension [T](xs: List[List[T]]) def flatten: List[T] = xs.foldLeft(List.empty[T])(_ ++ _) - given [T: Ordering]: Ordering[List[T]] with + given [T: Ordering] => Ordering[List[T]]: extension (xs: List[T]) def < (ys: List[T]): Boolean = ... end List diff --git a/docs/_docs/reference/contextual/given-imports.md b/docs/_docs/reference/contextual/given-imports.md index 28442581e408..c9247b01183f 100644 --- a/docs/_docs/reference/contextual/given-imports.md +++ b/docs/_docs/reference/contextual/given-imports.md @@ -61,7 +61,7 @@ For instance, assuming the object ```scala object Instances: given intOrd: Ordering[Int] = ... - given listOrd[T: Ordering]: Ordering[List[T]] = ... + given listOrd: [T: Ordering] => Ordering[List[T]] = ... given ec: ExecutionContext = ... given im: Monoid[Int] = ... 
``` diff --git a/docs/_docs/reference/contextual/givens.md b/docs/_docs/reference/contextual/givens.md index bf018278c9fc..2b360dfc7af0 100644 --- a/docs/_docs/reference/contextual/givens.md +++ b/docs/_docs/reference/contextual/givens.md @@ -14,28 +14,25 @@ trait Ord[T]: def < (y: T) = compare(x, y) < 0 def > (y: T) = compare(x, y) > 0 -given intOrd: Ord[Int] with +given intOrd: Ord[Int]: def compare(x: Int, y: Int) = if x < y then -1 else if x > y then +1 else 0 -given listOrd[T](using ord: Ord[T]): Ord[List[T]] with +given listOrd: [T: Ord] => Ord[List[T]]: def compare(xs: List[T], ys: List[T]): Int = (xs, ys) match case (Nil, Nil) => 0 case (Nil, _) => -1 case (_, Nil) => +1 case (x :: xs1, y :: ys1) => - val fst = ord.compare(x, y) + val fst = summon[Ord[T]].compare(x, y) if fst != 0 then fst else compare(xs1, ys1) ``` This code defines a trait `Ord` with two given instances. `intOrd` defines a given for the type `Ord[Int]` whereas `listOrd[T]` defines givens -for `Ord[List[T]]` for all types `T` that come with a given instance for `Ord[T]` -themselves. The `using` clause in `listOrd` defines a condition: There must be a -given of type `Ord[T]` for a given of type `Ord[List[T]]` to exist. -Such conditions are expanded by the compiler to [context parameters](./using-clauses.md). +for `Ord[List[T]]` for all types `T` that come with a given instance for `Ord[T]`. The clause `[T: Ord]` is a [context bound](./context-bounds.md) which defines a condition: There must be a given of type `Ord[T]` for a given of type `Ord[List[T]]` to exist. Such conditions are expanded by the compiler to [context parameters](./using-clauses.md). ## Anonymous Givens @@ -43,9 +40,9 @@ The name of a given can be left out. So the definitions of the last section can also be expressed like this: ```scala -given Ord[Int] with +given Ord[Int]: ... -given [T](using Ord[T]): Ord[List[T]] with +given [T: Ord] => Ord[List[T]]: ... ``` @@ -60,8 +57,7 @@ given_Ord_List ``` The precise rules for synthesizing names are found [here](./relationship-implicits.html#anonymous-given-instances). These rules do not guarantee absence of name conflicts between -given instances of types that are "too similar". To avoid conflicts one can -use named instances. +given instances of types that are "too similar". To avoid conflicts one can use named instances. **Note:** To ensure robust binary compatibility, publicly available libraries should prefer named instances. @@ -82,113 +78,39 @@ Alias givens can be anonymous as well, e.g. ```scala given Position = enclosingTree.position -given (using config: Config): Factory = MemoizingFactory(config) -``` - -An alias given can have type parameters and context parameters just like any other given, -but it can only implement a single type. - -## Given Macros - -Given aliases can have the `inline` and `transparent` modifiers. -Example: - -```scala -transparent inline given mkAnnotations[A, T]: Annotations[A, T] = ${ - // code producing a value of a subtype of Annotations -} -``` - -Since `mkAnnotations` is `transparent`, the type of an application is the type of its right-hand side, which can be a proper subtype of the declared result type `Annotations[A, T]`. - -Given instances can have the `inline` but not `transparent` modifiers as their type is already known from the signature. -Example: - -```scala -trait Show[T] { - inline def show(x: T): String -} - -inline given Show[Foo] with { - /*transparent*/ inline def show(x: Foo): String = ${ ... 
} -} - -def app = - // inlines `show` method call and removes the call to `given Show[Foo]` - summon[Show[Foo]].show(foo) -``` -Note that the inline methods within the given instances may be `transparent`. - -The inlining of given instances will not inline/duplicate the implementation of the given, it will just inline the instantiation of that instance. -This is used to help dead code elimination of the given instances that are not used after inlining. - - -## Pattern-Bound Given Instances - -Given instances can also appear in patterns. Example: - -```scala -for given Context <- applicationContexts do - -pair match - case (ctx @ given Context, y) => ... -``` - -In the first fragment above, anonymous given instances for class `Context` are established by enumerating over `applicationContexts`. In the second fragment, a given `Context` -instance named `ctx` is established by matching against the first half of the `pair` selector. - -In each case, a pattern-bound given instance consists of `given` and a type `T`. The pattern matches exactly the same selectors as the type ascription pattern `_: T`. - -## Negated Givens - -Scala 2's somewhat puzzling behavior with respect to ambiguity has been exploited to implement the analogue of a "negated" search in implicit resolution, -where a query Q1 fails if some other query Q2 succeeds and Q1 succeeds if Q2 fails. With the new cleaned up behavior these techniques no longer work. -But the new special type [`scala.util.NotGiven`](https://scala-lang.org/api/3.x/scala/util/NotGiven.html) now implements negation directly. - -For any query type `Q`, [`NotGiven[Q]`](https://scala-lang.org/api/3.x/scala/util/NotGiven.html) succeeds if and only if the implicit -search for `Q` fails, for example: - -```scala -import scala.util.NotGiven - -trait Tagged[A] - -case class Foo[A](value: Boolean) -object Foo: - given fooTagged[A](using Tagged[A]): Foo[A] = Foo(true) - given fooNotTagged[A](using NotGiven[Tagged[A]]): Foo[A] = Foo(false) - -@main def test(): Unit = - given Tagged[Int]() - assert(summon[Foo[Int]].value) // fooTagged is found - assert(!summon[Foo[String]].value) // fooNotTagged is found ``` ## Given Instance Initialization -A given instance without type or context parameters is initialized on-demand, the first -time it is accessed. If a given has type or context parameters, a fresh instance -is created for each reference. +An unconditional given instance without parameters is initialized on-demand, the first +time it is accessed. If the given is a mere alias to some immutable value, the given is implemented as a simple forwarder, without incurring the cost of a field to hold a cached value. If a given is conditional, a fresh instance is created for each reference. ## Syntax -Here is the syntax for given instances: +Here is the full syntax for given instances. Some of these forms of givens are explained in a separate page: [Other Forms of Givens](./more-givens.md). ```ebnf -TmplDef ::= ... - | ‘given’ GivenDef -GivenDef ::= [GivenSig] StructuralInstance - | [GivenSig] AnnotType ‘=’ Expr - | [GivenSig] AnnotType -GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ TemplateBody] +TmplDef ::= ... 
| 'given' GivenDef +GivenDef ::= [id ':'] GivenSig +GivenSig ::= GivenImpl + | '(' ')' '=>' GivenImpl + | GivenConditional '=>' GivenSig +GivenImpl ::= GivenType ([‘=’ Expr] | TemplateBody) + | ConstrApps TemplateBody +GivenConditional ::= DefTypeParamClause + | DefTermParamClause + | '(' FunArgTypes ')' + | GivenType +GivenType ::= AnnotType1 {id [nl] AnnotType1} ``` -A given instance starts with the reserved word `given` and an optional _signature_. The signature -defines a name and/or parameters for the instance. It is followed by `:`. There are three kinds -of given instances: +A given instance starts with the reserved keyword `given`, which is followed by + + - An optional name and a colon + - An optional list of conditions. + - The implemented type(s) and their implementation, in two forms: alias givens and structural givens. + - An _alias given_ implements a single type with a right hand side following `=`. + - A _structural given_ implements one or more class constructors with a + list of member definitions in a template body. -- A _structural instance_ contains one or more types or constructor applications, - followed by `with` and a template body that contains member definitions of the instance. -- An _alias instance_ contains a type, followed by `=` and a right-hand side expression. -- An _abstract instance_ contains just the type, which is not followed by anything. +**Note** Parts of the given syntax have changed in Scala 3.6. The original syntax from Scala 3.0 on is described in a separate page [Previous Given Syntax](./previous-givens.md). The original syntax is still supported for now but will be deprecated and phased out over time. diff --git a/docs/_docs/reference/contextual/more-givens.md b/docs/_docs/reference/contextual/more-givens.md new file mode 100644 index 000000000000..2f6dd63f7eab --- /dev/null +++ b/docs/_docs/reference/contextual/more-givens.md @@ -0,0 +1,202 @@ +--- +layout: doc-page +title: "Other Forms Of Givens" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/givens.html +--- + +The concept of given instances is quite general. This page covers forms of givens that were not treated before. + +## Simple Structural Givens + +Some givens simply instantiate a class without needing an alias or additional member declarations. Example: + +```scala +class IntOrd extends Ord[Int]: + def compare(x: Int, y: Int) = + if x < y then -1 else if x > y then +1 else 0 + +given IntOrd() +``` +In this case, the given clause consists of just a class creation expression, such as `IntOrd()` above. + +## Conditional Givens with Parameters + +Conditional givens can also be defined with parameters. Example: +```scala +given (config: Config) => Factory = MemoizingFactory(config) +``` +Here, `(config: Config)` describes a context parameter expressing a condition: We can synthesize a given `Factory` _provided_ we can synthesize a given `config` of type `Config`. + +Type parameters and context parameters can be combined. For instance the `listOrd` instance above could alternatively be expressed like this: +```scala +given listOrd: [T] => Ord[T] => Ord[List[T]]: + ... + def compare(x: List[T], y: List[T]) = ... +``` +As the example shows, each parameter section is followed by an `=>`. + +It is also possible to name context parameters: +```scala +given listOrd: [T] => (ord: Ord[T]) => Ord[List[T]]: + ... +``` + +## By Name Givens + +Though in general we want to avoid re-evaluating a given, there are situations where such a re-evaluation may be necessary. 
For instance, say we have a mutable variable `curCtx` and we want to define a given that returns the current value of that variable. A normal given alias will not do since by default given aliases are mapped to lazy vals. In this case, we can specify a _by-name_ evaluation instead by writing a conditional given with an empty parameter list: +```scala + val curCtx: Context + given context: () => Context = curCtx +``` +With this definition, each time a `Context` is summoned we evaluate the `context` function, which produces the current value of `curCtx`. + +## Given Macros + +Given aliases can have the `inline` and `transparent` modifiers. +Example: + +```scala +transparent inline given mkAnnotations: [A, T] => Annotations[A, T] = ${ + // code producing a value of a subtype of Annotations +} +``` + +Since `mkAnnotations` is `transparent`, the type of an application is the type of its right-hand side, which can be a proper subtype of the declared result type `Annotations[A, T]`. + +Structural givens can also have the `inline` modifier. But the `transparent` modifier is not allowed for them as their type is already known from the signature. + +Example: + +```scala +trait Show[T]: + inline def show(x: T): String + +inline given Show[Foo]: + inline def show(x: Foo): String = ${ ... } + +def app = + // inlines `show` method call and removes the call to `given Show[Foo]` + summon[Show[Foo]].show(foo) +``` +Note that inline methods within given instances may be `transparent`. + + + +## Pattern-Bound Given Instances + +Given instances can also appear in patterns. Example: + +```scala +for given Context <- applicationContexts do + +pair match + case (ctx @ given Context, y) => ... +``` + +In the first fragment above, anonymous given instances for class `Context` are established by enumerating over `applicationContexts`. In the second fragment, a given `Context` +instance named `ctx` is established by matching against the first half of the `pair` selector. + +In each case, a pattern-bound given instance consists of `given` and a type `T`. The pattern matches exactly the same selectors as the type ascription pattern `_: T`. + +## Negated Givens + + +We sometimes want to have an implicit search succeed if a given instance for some other type is _not_ available. There is a special class [`scala.util.NotGiven`](https://scala-lang.org/api/3.x/scala/util/NotGiven.html) that implements this kind of negation. + +For any query type `Q`, [`NotGiven[Q]`](https://scala-lang.org/api/3.x/scala/util/NotGiven.html) succeeds if and only if the implicit +search for `Q` fails, for example: + +```scala +import scala.util.NotGiven + +trait Tagged[A] + +case class Foo[A](value: Boolean) +object Foo: + given fooTagged: [A] => Tagged[A] => Foo[A] = Foo(true) + given fooNotTagged: [A] => NotGiven[Tagged[A]] => Foo[A] = Foo(false) + +@main def test(): Unit = + given Tagged[Int]() + assert(summon[Foo[Int]].value) // fooTagged is found + assert(!summon[Foo[String]].value) // fooNotTagged is found +``` + +## Summary + +Here is a summary of common forms of given clauses: + +```scala + // Simple typeclass + given Ord[Int]: + def compare(x: Int, y: Int) = ... + + // Parameterized typeclass with context bound + given [A: Ord] => Ord[List[A]]: + def compare(x: List[A], y: List[A]) = ... + + // Parameterized typeclass with context parameter + given [A] => Ord[A] => Ord[List[A]]: + def compare(x: List[A], y: List[A]) = ...
+ + // Parameterized typeclass with named context parameter + given [A] => (ord: Ord[A]) => Ord[List[A]]: + def compare(x: List[A], y: List[A]) = ... + + // Simple alias + given Ord[Int] = IntOrd() + + // Parameterized alias with context bound + given [A: Ord] => Ord[List[A]] = + ListOrd[A] + + // Parameterized alias with context parameter + given [A] => Ord[A] => Ord[List[A]] = + ListOrd[A] + + // Deferred given + given Context = deferred + + // By-name given + given () => Context = curCtx +``` + +All of these clauses also exist in named form: +```scala + // Simple typeclass + given intOrd: Ord[Int]: + def compare(x: Int, y: Int) = ... + + // Parameterized typeclass with context bound + given listOrd: [A: Ord] => Ord[List[A]]: + def compare(x: List[A], y: List[A]) = ... + + // Parameterized typeclass with context parameter + given listOrd: [A] => Ord[A] => Ord[List[A]]: + def compare(x: List[A], y: List[A]) = ... + + // Parameterized typeclass with named context parameter + given listOrd: [A] => (ord: Ord[A]) => Ord[List[A]]: + def compare(x: List[A], y: List[A]) = ... + + // Simple alias + given intOrd: Ord[Int] = IntOrd() + + // Parameterized alias with context bound + given listOrd: [A: Ord] => Ord[List[A]] = + ListOrd[A] + + // Parameterized alias with context parameter + given listOrd: [A] => Ord[A] => Ord[List[A]] = + ListOrd[A] + + // Abstract or deferred given + given context: Context = deferred + + // By-name given + given context: () => Context = curCtx +``` diff --git a/docs/_docs/reference/contextual/multiversal-equality.md b/docs/_docs/reference/contextual/multiversal-equality.md index 6258973c0cda..fb980853ea8e 100644 --- a/docs/_docs/reference/contextual/multiversal-equality.md +++ b/docs/_docs/reference/contextual/multiversal-equality.md @@ -109,7 +109,7 @@ By the usual rules of [type class derivation](./derivation.md), this generates the following `CanEqual` instance in the companion object of `Box`: ```scala -given [T, U](using CanEqual[T, U]): CanEqual[Box[T], Box[U]] = +given [T, U] => CanEqual[T, U] => CanEqual[Box[T], Box[U]] = CanEqual.derived ``` diff --git a/docs/_docs/reference/contextual/previous-givens.md b/docs/_docs/reference/contextual/previous-givens.md new file mode 100644 index 000000000000..a78d8a3751ea --- /dev/null +++ b/docs/_docs/reference/contextual/previous-givens.md @@ -0,0 +1,232 @@ +--- +layout: doc-page +title: "Previous Given Syntax" +nightlyOf: https://docs.scala-lang.org/scala3/reference/contextual/previous-givens.html +--- + +Given instances (or, simply, "givens") define "canonical" values of certain types +that serve for synthesizing arguments to [context parameters](./using-clauses.md). Example: + +```scala +trait Ord[T]: + def compare(x: T, y: T): Int + extension (x: T) + def < (y: T) = compare(x, y) < 0 + def > (y: T) = compare(x, y) > 0 + +given intOrd: Ord[Int] with + def compare(x: Int, y: Int) = + if x < y then -1 else if x > y then +1 else 0 + +given listOrd[T](using ord: Ord[T]): Ord[List[T]] with + + def compare(xs: List[T], ys: List[T]): Int = (xs, ys) match + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + val fst = ord.compare(x, y) + if fst != 0 then fst else compare(xs1, ys1) + +``` + +This code defines a trait `Ord` with two given instances. `intOrd` defines +a given for the type `Ord[Int]` whereas `listOrd[T]` defines givens +for `Ord[List[T]]` for all types `T` that come with a given instance for `Ord[T]` +themselves. 
The `using` clause in `listOrd` defines a condition: There must be a +given of type `Ord[T]` for a given of type `Ord[List[T]]` to exist. +Such conditions are expanded by the compiler to [context parameters](./using-clauses.md). + +## Anonymous Givens + +The name of a given can be left out. So the definitions +of the last section can also be expressed like this: + +```scala +given Ord[Int] with + ... +given [T](using Ord[T]): Ord[List[T]] with + ... +``` + +If the name of a given is missing, the compiler will synthesize a name from +the implemented type(s). + +**Note:** The name synthesized by the compiler is chosen to be readable and reasonably concise. For instance, the two instances above would get the names: + +```scala +given_Ord_Int +given_Ord_List +``` + +The precise rules for synthesizing names are found [here](./relationship-implicits.html#anonymous-given-instances). These rules do not guarantee absence of name conflicts between +given instances of types that are "too similar". To avoid conflicts one can +use named instances. + +**Note:** To ensure robust binary compatibility, publicly available libraries should prefer named instances. + +## Alias Givens + +An alias can be used to define a given instance that is equal to some expression. Example: + +```scala +given global: ExecutionContext = ForkJoinPool() +``` + +This creates a given `global` of type `ExecutionContext` that resolves to the right +hand side `ForkJoinPool()`. +The first time `global` is accessed, a new `ForkJoinPool` is created, which is then +returned for this and all subsequent accesses to `global`. This operation is thread-safe. + +Alias givens can be anonymous as well, e.g. + +```scala +given Position = enclosingTree.position +given (using config: Config): Factory = MemoizingFactory(config) +``` + +An alias given can have type parameters and context parameters just like any other given, +but it can only implement a single type. + +## Abstract Givens + +A given may be an abstract member, with the restriction that it must have an explicit name. + +```scala +trait HasOrd[T]: + given ord: Ord[T] +``` + +## More Structural Givens + +If an alias given instance is analogous to a lazy val, +and a structural given instance is analogous to an object, +albeit an object with an explicit type, +then a structural given may also be specified without an explicit type: + +```scala +class IntOrd extends Ord[Int]: + def compare(x: Int, y: Int) = + if x < y then -1 else if x > y then +1 else 0 + +given IntOrd() +``` + +Compare this syntax to: + +```scala +object intOrd extends IntOrd() +``` + +The empty parentheses are optional in the extends clause when defining a class, +but are required when defining a given. + +Further mixins are allowed as usual: + +```scala +given IntOrd() with OrdOps[Int] +``` + +## Given Macros + +Given aliases can have the `inline` and `transparent` modifiers. +Example: + +```scala +transparent inline given mkAnnotations[A, T]: Annotations[A, T] = ${ + // code producing a value of a subtype of Annotations +} +``` + +Since `mkAnnotations` is `transparent`, the type of an application is the type of its right-hand side, which can be a proper subtype of the declared result type `Annotations[A, T]`. + +Given instances can have the `inline` but not `transparent` modifiers as their type is already known from the signature. +Example: + +```scala +trait Show[T] { + inline def show(x: T): String +} + +inline given Show[Foo] with { + /*transparent*/ inline def show(x: Foo): String = ${ ... 
} +} + +def app = + // inlines `show` method call and removes the call to `given Show[Foo]` + summon[Show[Foo]].show(foo) +``` +Note that the inline methods within the given instances may be `transparent`. + +The inlining of given instances will not inline/duplicate the implementation of the given, it will just inline the instantiation of that instance. +This is used to help dead code elimination of the given instances that are not used after inlining. + +## Pattern-Bound Given Instances + +Given instances can also appear in patterns. Example: + +```scala +for given Context <- applicationContexts do + +pair match + case (ctx @ given Context, y) => ... +``` + +In the first fragment above, anonymous given instances for class `Context` are established by enumerating over `applicationContexts`. In the second fragment, a given `Context` +instance named `ctx` is established by matching against the first half of the `pair` selector. + +In each case, a pattern-bound given instance consists of `given` and a type `T`. The pattern matches exactly the same selectors as the type ascription pattern `_: T`. + +## Negated Givens + +Scala 2's somewhat puzzling behavior with respect to ambiguity has been exploited to implement the analogue of a "negated" search in implicit resolution, +where a query Q1 fails if some other query Q2 succeeds and Q1 succeeds if Q2 fails. With the new cleaned up behavior these techniques no longer work. +But the new special type [`scala.util.NotGiven`](https://scala-lang.org/api/3.x/scala/util/NotGiven.html) now implements negation directly. + +For any query type `Q`, [`NotGiven[Q]`](https://scala-lang.org/api/3.x/scala/util/NotGiven.html) succeeds if and only if the implicit +search for `Q` fails, for example: + +```scala +import scala.util.NotGiven + +trait Tagged[A] + +case class Foo[A](value: Boolean) +object Foo: + given fooTagged[A](using Tagged[A]): Foo[A] = Foo(true) + given fooNotTagged[A](using NotGiven[Tagged[A]]): Foo[A] = Foo(false) + +@main def test(): Unit = + given Tagged[Int]() + assert(summon[Foo[Int]].value) // fooTagged is found + assert(!summon[Foo[String]].value) // fooNotTagged is found +``` + +## Given Instance Initialization + +A given instance without type or context parameters is initialized on-demand, the first +time it is accessed. If a given has type or context parameters, a fresh instance +is created for each reference. + +## Syntax + +Here is the syntax for given instances: + +```ebnf +TmplDef ::= ... + | 'given' GivenDef +GivenDef ::= [GivenSig] StructuralInstance + | [GivenSig] AnnotType ‘=’ Expr + | [GivenSig] AnnotType +GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ':' +StructuralInstance ::= ConstrApp {'with' ConstrApp} [‘with’ TemplateBody] +``` + +A given instance starts with the reserved word `given` and an optional _signature_. The signature +defines a name and/or parameters for the instance. It is followed by `:`. There are three kinds +of given instances: + +- A _structural instance_ contains one or more types or constructor applications, + followed by `with` and a template body that contains member definitions of the instance. +- An _alias instance_ contains a type, followed by `=` and a right-hand side expression. +- An _abstract instance_ contains just the name and type, which is not followed by anything. 
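For quick reference, here is a minimal sketch showing all three kinds side by side in this previous syntax. The `Ord`, `lengthOrd`, and `HasOrd` declarations are assumed only for illustration:

```scala
trait Ord[T]:
  def compare(x: T, y: T): Int

// structural instance: constructor application, `with`, and a template body
given intOrd: Ord[Int] with
  def compare(x: Int, y: Int) = Integer.compare(x, y)

// alias instance: a type, `=`, and a right-hand side expression
given lengthOrd: Ord[String] = new Ord[String]:
  def compare(x: String, y: String) = Integer.compare(x.length, y.length)

// abstract instance: just a name and a type, to be implemented elsewhere
trait HasOrd[T]:
  given ord: Ord[T]
```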
diff --git a/docs/_docs/reference/contextual/relationship-implicits.md b/docs/_docs/reference/contextual/relationship-implicits.md index fce07f51151a..4ff38f709200 100644 --- a/docs/_docs/reference/contextual/relationship-implicits.md +++ b/docs/_docs/reference/contextual/relationship-implicits.md @@ -15,7 +15,7 @@ Given instances can be mapped to combinations of implicit objects, classes and i 1. Given instances without parameters are mapped to implicit objects. For instance, ```scala - given intOrd: Ord[Int] with { ... } + given intOrd: Ord[Int] { ... } ``` maps to @@ -27,7 +27,7 @@ Given instances can be mapped to combinations of implicit objects, classes and i 2. Parameterized givens are mapped to combinations of classes and implicit methods. For instance, ```scala - given listOrd[T](using ord: Ord[T]): Ord[List[T]] with { ... } + given listOrd: [T] => (ord: Ord[T]) => Ord[List[T]] { ... } ``` maps to @@ -63,8 +63,8 @@ final implicit def given_Context = ctx Anonymous given instances get compiler synthesized names, which are generated in a reproducible way from the implemented type(s). For example, if the names of the `IntOrd` and `ListOrd` givens above were left out, the following names would be synthesized instead: ```scala -given given_Ord_Int: Ord[Int] with { ... } -given given_Ord_List[T](using ord: Ord[T]): Ord[List[T]] with { ... } +given given_Ord_Int: Ord[Int] { ... } +given given_Ord_List: [T] => (ord: Ord[T]) => Ord[List[T]] { ... } ``` The synthesized type names are formed from @@ -153,7 +153,7 @@ implicit def stringToToken(str: String): Token = new Keyword(str) one can write ```scala -given stringToToken: Conversion[String, Token] with +given stringToToken: Conversion[String, Token]: def apply(str: String): Token = KeyWord(str) ``` diff --git a/docs/_docs/reference/contextual/type-classes.md b/docs/_docs/reference/contextual/type-classes.md index 6a15ac3a83d4..0e1ccdf8d2c8 100644 --- a/docs/_docs/reference/contextual/type-classes.md +++ b/docs/_docs/reference/contextual/type-classes.md @@ -27,7 +27,7 @@ trait Monoid[T] extends SemiGroup[T]: An implementation of this `Monoid` type class for the type `String` can be the following: ```scala -given Monoid[String] with +given Monoid[String]: extension (x: String) def combine (y: String): String = x.concat(y) def unit: String = "" ``` @@ -35,7 +35,7 @@ given Monoid[String] with Whereas for the type `Int` one could write the following: ```scala -given Monoid[Int] with +given Monoid[Int]: extension (x: Int) def combine (y: Int): Int = x + y def unit: Int = 0 ``` @@ -43,22 +43,8 @@ given Monoid[Int] with This monoid can now be used as _context bound_ in the following `combineAll` method: ```scala -def combineAll[T: Monoid](xs: List[T]): T = - xs.foldLeft(summon[Monoid[T]].unit)(_.combine(_)) -``` - -To get rid of the `summon[...]` we can define a `Monoid` object as follows: - -```scala -object Monoid: - def apply[T](using m: Monoid[T]) = m -``` - -Which would allow to re-write the `combineAll` method this way: - -```scala -def combineAll[T: Monoid](xs: List[T]): T = - xs.foldLeft(Monoid[T].unit)(_.combine(_)) +def combineAll[T: Monoid as m](xs: List[T]): T = + xs.foldLeft(m.unit)(_.combine(_)) ``` ## Functors @@ -77,7 +63,7 @@ Which could read as follows: "A `Functor` for the type constructor `F[_]` repres This way, we could define an instance of `Functor` for the `List` type: ```scala -given Functor[List] with +given Functor[List]: def map[A, B](x: List[A], f: A => B): List[B] = x.map(f) // List already has a `map` method ``` @@ 
-109,7 +95,7 @@ trait Functor[F[_]]: The instance of `Functor` for `List` now becomes: ```scala -given Functor[List] with +given Functor[List]: extension [A](xs: List[A]) def map[B](f: A => B): List[B] = xs.map(f) // List already has a `map` method @@ -159,7 +145,7 @@ end Monad A `List` can be turned into a monad via this `given` instance: ```scala -given listMonad: Monad[List] with +given listMonad: Monad[List]: def pure[A](x: A): List[A] = List(x) extension [A](xs: List[A]) @@ -176,7 +162,7 @@ it explicitly. `Option` is an other type having the same kind of behaviour: ```scala -given optionMonad: Monad[Option] with +given optionMonad: Monad[Option]: def pure[A](x: A): Option[A] = Option(x) extension [A](xo: Option[A]) @@ -223,7 +209,7 @@ type ConfigDependent[Result] = Config => Result The monad instance will look like this: ```scala -given configDependentMonad: Monad[ConfigDependent] with +given configDependentMonad: Monad[ConfigDependent]: def pure[A](x: A): ConfigDependent[A] = config => x @@ -244,7 +230,7 @@ type ConfigDependent = [Result] =>> Config => Result Using this syntax would turn the previous `configDependentMonad` into: ```scala -given configDependentMonad: Monad[[Result] =>> Config => Result] with +given configDependentMonad: Monad[[Result] =>> Config => Result]: def pure[A](x: A): Config => A = config => x @@ -259,7 +245,7 @@ end configDependentMonad It is likely that we would like to use this pattern with other kinds of environments than our `Config` trait. The Reader monad allows us to abstract away `Config` as a type _parameter_, named `Ctx` in the following definition: ```scala -given readerMonad[Ctx]: Monad[[X] =>> Ctx => X] with +given readerMonad: [Ctx] => Monad[[X] =>> Ctx => X]: def pure[A](x: A): Ctx => A = ctx => x diff --git a/docs/_docs/reference/contextual/using-clauses.md b/docs/_docs/reference/contextual/using-clauses.md index 9177a2f47dc9..9d03a7d2cec5 100644 --- a/docs/_docs/reference/contextual/using-clauses.md +++ b/docs/_docs/reference/contextual/using-clauses.md @@ -115,7 +115,7 @@ Multiple `using` clauses are matched left-to-right in applications. Example: ```scala object global extends Universe { type Context = ... } -given ctx : global.Context with { type Symbol = ...; type Kind = ... } +given ctx : global.Context { type Symbol = ...; type Kind = ... } given sym : ctx.Symbol given kind: ctx.Kind diff --git a/docs/_docs/reference/enums/enums.md b/docs/_docs/reference/enums/enums.md index 8d4fca3268b0..4cad29cbd76a 100644 --- a/docs/_docs/reference/enums/enums.md +++ b/docs/_docs/reference/enums/enums.md @@ -147,16 +147,13 @@ We now want to deprecate the `Pluto` case. First we add the `scala.deprecated` a Outside the lexical scopes of `enum Planet` or `object Planet`, references to `Planet.Pluto` will produce a deprecation warning, but within those scopes we can still reference it to implement introspection over the deprecated cases: ```scala -trait Deprecations[T <: reflect.Enum] { +trait Deprecations[T <: reflect.Enum]: extension (t: T) def isDeprecatedCase: Boolean -} -object Planet { - given Deprecations[Planet] with { +object Planet: + given Deprecations[Planet]: extension (p: Planet) def isDeprecatedCase = p == Pluto - } -} ``` We could imagine that a library may use [type class derivation](../contextual/derivation.md) to automatically provide an instance for `Deprecations`. 
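As a hedged sketch of how such an instance might be consumed, the extension method provided by `Deprecations` lets client code filter out deprecated cases without naming `Planet.Pluto` directly; the `activePlanets` helper below is purely illustrative and relies on the given `Deprecations[Planet]` shown above:

```scala
def activePlanets: Seq[Planet] =
  // uses the extension method from the given Deprecations[Planet] in object Planet
  Planet.values.toSeq.filterNot(_.isDeprecatedCase)
```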
@@ -167,7 +164,8 @@ If you want to use the Scala-defined enums as [Java enums](https://docs.oracle.c
 the class `java.lang.Enum`, which is imported by default, as follows:
 
 ```scala
-enum Color extends Enum[Color] { case Red, Green, Blue }
+enum Color extends Enum[Color]:
+  case Red, Green, Blue
 ```
 
 The type parameter comes from the Java enum [definition](https://docs.oracle.com/javase/8/docs/api/index.html?java/lang/Enum.html) and should be the same as the type of the enum.
diff --git a/docs/_docs/reference/experimental/better-fors.md b/docs/_docs/reference/experimental/better-fors.md
new file mode 100644
index 000000000000..a4c42c9fb380
--- /dev/null
+++ b/docs/_docs/reference/experimental/better-fors.md
@@ -0,0 +1,79 @@
+---
+layout: doc-page
+title: "Better fors"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/better-fors.html
+---
+
+The `betterFors` language extension improves the usability of `for`-comprehensions.
+
+The extension is enabled by the language import `import scala.language.experimental.betterFors` or by setting the command line option `-language:experimental.betterFors`.
+
+The biggest user facing change is the new ability to start `for`-comprehensions with aliases. This means that the following previously invalid code is now valid:
+
+```scala
+for
+  as = List(1, 2, 3)
+  bs = List(4, 5, 6)
+  a <- as
+  b <- bs
+yield a + b
+```
+
+The desugaring of this code is the same as if the aliases were introduced with `val`:
+
+```scala
+val as = List(1, 2, 3)
+val bs = List(4, 5, 6)
+for
+  a <- as
+  b <- bs
+yield a + b
+```
+
+Additionally, this extension changes the way `for`-comprehensions are desugared. The desugaring is now done in a more intuitive way and the desugared code can be more efficient, because it avoids some unnecessary method calls. There are two main changes in the desugaring:
+
+1. **Simpler Desugaring for Pure Aliases**:
+   When an alias is not followed by a guard, the desugaring is simplified. The last generator and the aliases don't have to be wrapped in a tuple, and instead the aliases are simply introduced as local variables in a block with the next generator.
+   **Current Desugaring**:
+   ```scala
+   for {
+     a <- doSth(arg)
+     b = a
+   } yield a + b
+   ```
+   Desugars to:
+   ```scala
+   doSth(arg).map { a =>
+     val b = a
+     (a, b)
+   }.map { case (a, b) =>
+     a + b
+   }
+   ```
+   **New Desugaring**:
+   ```scala
+   doSth(arg).map { a =>
+     val b = a
+     a + b
+   }
+   ```
+   This change makes the desugaring more intuitive and avoids unnecessary `map` calls when an alias is not followed by a guard.
+
+2. **Avoiding Redundant `map` Calls**:
+   When the result of the `for`-comprehension is the same expression as the last generator pattern, the desugaring avoids an unnecessary `map` call, but the equality of the last pattern and the result has to be checkable syntactically, so it must be either a variable or a tuple of variables.
+   **Current Desugaring**:
+   ```scala
+   for {
+     a <- List(1, 2, 3)
+   } yield a
+   ```
+   Desugars to:
+   ```scala
+   List(1, 2, 3).map(a => a)
+   ```
+   **New Desugaring**:
+   ```scala
+   List(1, 2, 3)
+   ```
+
+For more details on the desugaring scheme see the comment in [`Desugar.scala#makeFor`](https://github.com/scala/scala3/blob/main/compiler/src/dotty/tools/dotc/ast/Desugar.scala#L1928).
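As a self-contained sketch of both improvements in action (the `pairs` and `offset` names are illustrative, and the snippet assumes a compiler in which this experimental extension is available):

```scala
import scala.language.experimental.betterFors

def pairs: List[(Int, Int)] =
  for
    offset = 10        // leading alias, now allowed
    xs = List(1, 2, 3)
    x <- xs
    y = x + offset     // alias with no following guard: no tuple-wrapping in the desugaring
  yield (x, y)

@main def demo(): Unit =
  println(pairs) // List((1,11), (2,12), (3,13))
```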
\ No newline at end of file
diff --git a/docs/_docs/reference/experimental/cc.md b/docs/_docs/reference/experimental/cc.md
index fedc8fe66b65..ff480ffb638b 100644
--- a/docs/_docs/reference/experimental/cc.md
+++ b/docs/_docs/reference/experimental/cc.md
@@ -216,13 +216,13 @@ This widening is called _avoidance_; it is not specific to capture checking but
 
 ## Capability Classes
 
-Classes like `CanThrow` or `FileSystem` have the property that their values are always intended to be capabilities. We can make this intention explicit and save boilerplate by declaring these classes with a `@capability` annotation.
+Classes like `CanThrow` or `FileSystem` have the property that their values are always intended to be capabilities. We can make this intention explicit and save boilerplate by letting these classes extend the `Capability` class defined in object `caps`.
 
-The capture set of a capability class type is always `{cap}`. This means we could equivalently express the `FileSystem` and `Logger` classes as follows:
+The capture set of a `Capability` subclass type is always `{cap}`. This means we could equivalently express the `FileSystem` and `Logger` classes as follows:
 
 ```scala
-import annotation.capability
+import caps.Capability
 
-@capability class FileSystem
+class FileSystem extends Capability
 
 class Logger(using FileSystem):
   def log(s: String): Unit = ???
@@ -290,7 +290,7 @@ The captured references of a class include _local capabilities_ and _argument ca
 the local capabilities of a superclass are also local capabilities of its subclasses. Example:
 
 ```scala
-@capability class Cap
+class Cap extends caps.Capability
 
 def test(a: Cap, b: Cap, c: Cap) =
   class Super(y: Cap):
@@ -317,7 +317,7 @@ The inference observes the following constraints:
 For instance, in
 
 ```scala
-@capability class Cap
+class Cap extends caps.Capability
 
 def test(c: Cap) =
   class A:
     val x: A = this
@@ -502,7 +502,7 @@ Under the language import `language.experimental.captureChecking`, the code is i
 ```
 To integrate exception and capture checking, only two changes are needed:
 
- - `CanThrow` is declared as a `@capability` class, so all references to `CanThrow` instances are tracked.
+ - `CanThrow` is declared as a class extending `Capability`, so all references to `CanThrow` instances are tracked.
 - Escape checking is extended to `try` expressions. The result type of a `try` is not allowed to capture the universal capability.
@@ -635,9 +635,132 @@ To summarize, there are two "sweet spots" of data structure design: strict lists
 side-effecting or resource-aware code and lazy lists in purely functional code. Both are already correctly capture-typed without requiring any explicit annotations. Capture annotations only come into play where the semantics gets more complicated because we deal with delayed effects such as in impure lazy lists or side-effecting iterators over strict lists. This property is probably one of the greatest plus points of our approach to capture checking compared to previous techniques which tend to be more noisy.
 
-## Function Type Shorthands
+## Existential Capabilities
 
-TBD
+In fact, what is written as the top type `cap` can mean different capabilities, depending on scope. For instance, consider the function type
+`() -> Iterator[T]^`. This is taken to mean
+```scala
+  () -> Exists x. Iterator[T]^x
+```
+In other words, it means an `Iterator[T]` capturing an unknown capability `x` that is bound by an "existential" in the scope of the function result.
A `cap` in a function result is therefore different from a `cap` at the top-level or in a function parameter.
+
+Internally, an existential type is represented as a kind of dependent function type. The type above would be modelled as
+```scala
+  () -> (x: Exists) -> Iterator[T]^x
+```
+Here, `Exists` is a sealed trait in the `caps` object that serves to mark
+dependent functions as representations of existentials. It should be noted
+that this is strictly an internal representation. It is explained here because it can show up in error messages. It is generally not recommended to use this syntax in source code. Instead, one should rely on the automatic expansion of `^` and `cap` to existentials, which can be
+influenced by introducing the right alias types. The rules for this expansion are as follows:
+
+ - If a function result type contains covariant occurrences of `cap`,
+   we replace these occurrences with a fresh existential variable which
+   is bound by a quantifier scoping over the result type.
+ - We might want to do the same expansion in function arguments, but right now this is not done.
+ - Occurrences of `cap` elsewhere are not translated. They can be seen as representing an existential at the top-level scope.
+
+**Examples:**
+
+ - `A => B` is an alias type that expands to `(A -> B)^`, therefore
+   `() -> A => B` expands to `() -> Exists c. A ->{c} B`.
+
+ - `() -> Iterator[A => B]` expands to `() -> Exists c. Iterator[A ->{c} B]`
+
+ - `A -> B^` expands to `A -> Exists c.B^{c}`.
+
+ - If we define `type Fun[T] = A -> T`, then `() -> Fun[B^]` expands to `() -> Exists c.Fun[B^{c}]`, which dealiases to `() -> Exists c.A -> B^{c}`. This demonstrates how aliases can be used to force existential binders to be in some specific outer scope.
+
+ - If we define
+   ```scala
+   type F = A -> Fun[B^]
+   ```
+   then the type alias expands to
+   ```scala
+   type F = A -> Exists c.A -> B^{c}
+   ```
+
+**Typing Rules:**
+
+ - When we typecheck the body of a function or method, any covariant occurrences of `cap` in the result type are bound with a fresh existential.
+ - Conversely, when we typecheck the application of a function or method,
+   with an existential result type `Exists ex.T`, the result of the application is `T` where every occurrence of the existentially bound
+   variable `ex` is replaced by `cap`.
+
+## Reach Capabilities
+
+Say you have a method `f` that takes an impure function argument which gets stored in a `var`:
+```scala
+def f(op: A => B) =
+  var x: A ->{op} B = op
+  ...
+```
+This is legal even though `var`s cannot have types with `cap` or existential capabilities. The trick is that the type of the variable `x`
+is not `A => B` (this would be rejected), but is the "narrowed" type
+`A ->{op} B`. In other words, all capabilities retained by values of `x`
+are also referred to by `op`, which justifies the replacement of `cap` by `op`.
+
+A more complicated situation is if we want to store successive values
+held in a list. Example:
+```scala
+def f(ops: List[A => B]) =
+  var xs = ops
+  var x: ??? = xs.head
+  while xs.nonEmpty do
+    xs = xs.tail
+    x = xs.head
+  ...
+```
+Here, `x` cannot be given a type with an `ops` capability. In fact, `ops` is pure, i.e. its capture set is empty, so it cannot be used as the name of a capability. What we would like to express is that `x` refers to
+any operation "reachable" through `ops`. This can be expressed using a
+_reach capability_ `ops*`.
+```scala
+def f(ops: List[A => B]) =
+  var xs = ops
+  var x: A ->{ops*} B = xs.head
+  ...
+```
+Reach capabilities take the form `x*` where `x` is syntactically a regular capability. If `x: T` then `x*` stands for any capability that appears covariantly in `T` and that is accessed through `x`. The least supertype of this capability is the set of all capabilities appearing covariantly in `T`.
+
+## Capability Polymorphism
+
+It is sometimes convenient to write operations that are parameterized with a capture set of capabilities. For instance, consider a type of event sources
+`Source` on which `Listener`s can be registered. Listeners can hold certain capabilities, which show up as a parameter to `Source`:
+```scala
+  class Source[X^]:
+    private var listeners: Set[Listener^{X^}] = Set.empty
+    def register(x: Listener^{X^}): Unit =
+      listeners += x
+
+    def allListeners: Set[Listener^{X^}] = listeners
+```
+The type variable `X^` can be instantiated with a set of capabilities. It can occur in capture sets in its scope. For instance, in the example above
+we see a variable `listeners` that has as type a `Set` of `Listener`s capturing `X^`. The `register` method takes a listener of this type
+and assigns it to the variable.
+
+Capture set variables `X^` are represented as regular type variables with a
+special upper bound `CapSet`. For instance, `Source` could be equivalently
+defined as follows:
+```scala
+  class Source[X <: CapSet^]:
+    ...
+```
+`CapSet` is a sealed trait in the `caps` object. It cannot be instantiated or inherited, so its only purpose is to identify capture set type variables and types. Capture set variables can be inferred like regular type variables. When they should be instantiated explicitly one uses a capturing
+type `CapSet`. For instance:
+```scala
+  class Async extends caps.Capability
+
+  def listener(async: Async): Listener^{async} = ???
+
+  def test1(async1: Async, others: List[Async]) =
+    val src = Source[CapSet^{async1, others*}]
+    ...
+```
+Here, `src` is created as a `Source` on which listeners can be registered that refer to the `async1` capability or to any of the capabilities in list `others`. So we can continue the example code above as follows:
+```scala
+  src.register(listener(async1))
+  others.map(listener).foreach(src.register)
+  val ls: Set[Listener^{async1, others*}] = src.allListeners
+```
 
 ## Compilation Options
diff --git a/docs/_docs/reference/experimental/erased-defs-spec.md b/docs/_docs/reference/experimental/erased-defs-spec.md
index 59dfed92da2a..1861b734bb47 100644
--- a/docs/_docs/reference/experimental/erased-defs-spec.md
+++ b/docs/_docs/reference/experimental/erased-defs-spec.md
@@ -34,9 +34,9 @@ TODO: complete
 3. Functions
    * `(erased x1: T1, x2: T2, ..., xN: TN) => y : (erased T1, T2, ..., TN) => R`
-   * `(given x1: T1, erased x2: T2, ..., xN: TN) => y: (given T1, erased T2, ..., TN) => R`
-   * `(given erased T1) => R <:< erased T1 => R`
-   * `(given T1, erased T2) => R <:< (T1, erased T2) => R`
+   * `(using x1: T1, erased x2: T2, ..., xN: TN) => y: (using T1, erased T2, ..., TN) => R`
+   * `(using erased T1) => R <:< erased T1 => R`
+   * `(using T1, erased T2) => R <:< (T1, erased T2) => R`
    * ...
 
 Note that there is no subtype relation between `(erased T) => R` and `T => R` (or `(given erased T) => R` and `(given T) => R`). The `erased` parameters must match exactly in their respective positions.
diff --git a/docs/_docs/reference/experimental/named-tuples.md b/docs/_docs/reference/experimental/named-tuples.md
index 3867b4d13f15..27d74259725d 100644
--- a/docs/_docs/reference/experimental/named-tuples.md
+++ b/docs/_docs/reference/experimental/named-tuples.md
@@ -17,8 +17,9 @@ val persons: List[Person] = ...
 val minors = persons.filter: p =>
   p.age < 18
 ```
-Named bindings in tuples are similar to function parameters and arguments. We use `name: Type` for element types and `name = value` for element values. It is illegal to mix named and unnamed elements in a tuple, or to use the same same
-name for two different elements.
+Named bindings in tuples are similar to function parameters and arguments.
+We use `name: Type` for element types and `name = value` for element values.
+It is illegal to mix named and unnamed elements in a tuple, or to use the same name for two different elements.
 
 Fields of named tuples can be selected by their name, as in the line `p.age < 18` above.
 
@@ -94,6 +95,24 @@ Bob match
   case (age = x, name = y) => ...
 ```
 
+### Pattern Matching with Named Fields in General
+
+We allow named patterns not just for named tuples but also for case classes. For instance:
+```scala
+city match
+  case c @ City(name = "London") => println(c.population)
+  case City(name = n, zip = 1026, population = pop) => println(pop)
+```
+
+Named constructor patterns are analogous to named tuple patterns. In both cases
+
+ - every name must match the name of some field of the selector,
+ - names can come in any order,
+ - not all fields of the selector need to be matched.
+
+Named patterns are compatible with extensible pattern matching simply because
+`unapply` results can be named tuples.
+
 ### Expansion
 
 Named tuples are in essence just a convenient syntax for regular tuples. In the internal representation, a named tuple type is represented at compile time as a pair of two tuples. One tuple contains the names as literal constant string types, the other contains the element types. The runtime representation of a named tuples consists of just the element values, whereas the names are forgotten. This is achieved by declaring `NamedTuple`
@@ -109,7 +128,7 @@ NamedTuple[("name", "age"), (String, Int)]
 
 A `NamedTuple[N, V]` type is publicly known to be a supertype (but not a subtype) of its value paramater `V`, which means that regular tuples can be assigned to named tuples but not _vice versa_.
 
-The `NamedTuple` object contains a number of extension methods for named tuples hat mirror the same functions in `Tuple`. Examples are
+The `NamedTuple` object contains a number of extension methods for named tuples that mirror the same functions in `Tuple`. Examples are
 `apply`, `head`, `tail`, `take`, `drop`, `++`, `map`, or `zip`.
 Similar to `Tuple`, the `NamedTuple` object also contains types such as `Elem`, `Head`, `Concat`
 that describe the results of these extension methods.
@@ -119,6 +138,47 @@ The translation of named tuples to instances of `NamedTuple` is fixed by the spe
  - All tuple operations also work with named tuples "out of the box".
  - Macro libraries can rely on this expansion.
 
+### Computed Field Names
+
+The `Selectable` trait now has a `Fields` type member that can be instantiated
+to a named tuple.
+
+```scala
+trait Selectable:
+  type Fields <: NamedTuple.AnyNamedTuple
+```
+
+If `Fields` is instantiated in a subclass of `Selectable` to some named tuple type,
+then the available fields and their types will be defined by that type.
Assume `n: T` +is an element of the `Fields` type in some class `C` that implements `Selectable`, +that `c: C`, and that `n` is not otherwise legal as a name of a selection on `c`. +Then `c.n` is a legal selection, which expands to `c.selectDynamic("n").asInstanceOf[T]`. + +It is the task of the implementation of `selectDynamic` in `C` to ensure that its +computed result conforms to the predicted type `T`. + +As an example, assume we have a query type `Q[T]` defined as follows: + +```scala +trait Q[T] extends Selectable: + type Fields = NamedTuple.Map[NamedTuple.From[T], Q] + def selectDynamic(fieldName: String) = ... +``` + +Assume in the user domain: +```scala +case class City(zipCode: Int, name: String, population: Int) +val city: Q[City] +``` +Then +```scala +city.zipCode +``` +has type `Q[Int]` and it expands to +```scala +city.selectDynamic("zipCode").asInstanceOf[Q[Int]] +``` + ### The NamedTuple.From Type The `NamedTuple` object contains a type definition @@ -137,33 +197,36 @@ then `NamedTuple.From[City]` is the named tuple (zip: Int, name: String, population: Int) ``` The same works for enum cases expanding to case classes, abstract types with case classes as upper bound, alias types expanding to case classes -and singleton types with case classes as underlying type. +and singleton types with case classes as underlying type (in terms of the implementation, the `classSymbol` of a type must be a case class). `From` is also defined on named tuples. If `NT` is a named tuple type, then `From[NT] = NT`. +### Operations on Named Tuples + +The operations on named tuples are defined in object [scala.NamedTuple](https://www.scala-lang.org/api/3.x/scala/NamedTuple$.html). + ### Restrictions -The following restrictions apply to named tuple elements: +The following restrictions apply to named tuples and named pattern arguments: - 1. Either all elements of a tuple are named or none are named. It is illegal to mix named and unnamed elements in a tuple. For instance, the following is in error: + 1. Either all elements of a tuple or constructor pattern are named or none are named. It is illegal to mix named and unnamed elements in a tuple. For instance, the following is in error: ```scala val illFormed1 = ("Bob", age = 33) // error ``` - 2. Each element name in a named tuple must be unique. For instance, the following is in error: + 2. Each element name in a named tuple or constructor pattern must be unique. For instance, the following is in error: ```scala val illFormed2 = (name = "", age = 0, name = true) // error ``` - 3. Named tuples can be matched with either named or regular patterns. But regular tuples and other selector types can only be matched with regular tuple patterns. For instance, the following is in error: + 3. Named tuples and case classes can be matched with either named or regular patterns. But regular tuples and other selector types can only be matched with regular tuple patterns. For instance, the following is in error: ```scala (tuple: Tuple) match case (age = x) => // error ``` - 4. Regular selector names `_1`, `_2`, ... are not allowed as names in named tuples. +## Syntax Changes -### Syntax - -The syntax of Scala is extended as follows to support named tuples: +The syntax of Scala is extended as follows to support named tuples and +named constructor arguments: ``` SimpleType ::= ... 
| ‘(’ NameAndType {‘,’ NameAndType} ‘)’ @@ -178,31 +241,11 @@ Patterns ::= Pattern {‘,’ Pattern} NamedPattern ::= id '=' Pattern ``` -### Named Pattern Matching - -We allow named patterns not just for named tuples but also for case classes. -For instance: -```scala -city match - case c @ City(name = "London") => println(p.population) - case City(name = n, zip = 1026, population = pop) => println(pop) -``` - -Named constructor patterns are analogous to named tuple patterns. In both cases - - - either all fields are named or none is, - - every name must match the name some field of the selector, - - names can come in any order, - - not all fields of the selector need to be matched. - -This revives SIP 43, with a much simpler desugaring than originally proposed. -Named patterns are compatible with extensible pattern matching simply because -`unapply` results can be named tuples. - ### Source Incompatibilities There are some source incompatibilities involving named tuples of length one. First, what was previously classified as an assignment could now be interpreted as a named tuple. Example: + ```scala var age: Int (age = 1) @@ -221,43 +264,3 @@ c f (age = 1) ``` will now construct a tuple as second operand instead of passing a named parameter. -### Computed Field Names - -The `Selectable` trait now has a `Fields` type member that can be instantiated -to a named tuple. - -```scala -trait Selectable: - type Fields <: NamedTuple.AnyNamedTuple -``` - -If `Fields` is instantiated in a subclass of `Selectable` to some named tuple type, -then the available fields and their types will be defined by that type. Assume `n: T` -is an element of the `Fields` type in some class `C` that implements `Selectable`, -that `c: C`, and that `n` is not otherwise legal as a name of a selection on `c`. -Then `c.n` is a legal selection, which expands to `c.selectDynamic("n").asInstanceOf[T]`. - -It is the task of the implementation of `selectDynamic` in `C` to ensure that its -computed result conforms to the predicted type `T` - -As an example, assume we have a query type `Q[T]` defined as follows: - -```scala -trait Q[T] extends Selectable: - type Fields = NamedTuple.Map[NamedTuple.From[T], Q] - def selectDynamic(fieldName: String) = ... 
-```
-
-Assume in the user domain:
-```scala
-case class City(zipCode: Int, name: String, population: Int)
-val city: Q[City]
-```
-Then
-```scala
-city.zipCode
-```
-has type `Q[Int]` and it expands to
-```scala
-city.selectDynamic("zipCode").asInstanceOf[Q[Int]]
-```
diff --git a/docs/_docs/reference/experimental/numeric-literals.md b/docs/_docs/reference/experimental/numeric-literals.md
index 8b7aaa23f9e0..8317e9ff83c4 100644
--- a/docs/_docs/reference/experimental/numeric-literals.md
+++ b/docs/_docs/reference/experimental/numeric-literals.md
@@ -168,7 +168,7 @@ To accept `BigFloat` literals, all that's needed in addition is a `given` instan
 `FromDigits.Floating[BigFloat]`:
 
 ```scala
-  given FromDigits: FromDigits.Floating[BigFloat] with
+  given FromDigits: FromDigits.Floating[BigFloat]:
     def fromDigits(digits: String) = apply(digits)
 end BigFloat
 ```
@@ -205,7 +205,7 @@ object BigFloat:
   class FromDigits extends FromDigits.Floating[BigFloat]:
     def fromDigits(digits: String) = apply(digits)
 
-  given FromDigits with
+  given FromDigits:
     override inline def fromDigits(digits: String) = ${
       fromDigitsImpl('digits)
     }
diff --git a/docs/_docs/reference/experimental/quoted-patterns-with-polymorphic-functions.md b/docs/_docs/reference/experimental/quoted-patterns-with-polymorphic-functions.md
new file mode 100644
index 000000000000..0c30a867b189
--- /dev/null
+++ b/docs/_docs/reference/experimental/quoted-patterns-with-polymorphic-functions.md
@@ -0,0 +1,54 @@
+---
+layout: doc-page
+title: "Quoted Patterns with Polymorphic Functions"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/quoted-patterns-with-polymorphic-functions.html
+---
+
+This feature extends the capability of quoted patterns with regard to polymorphic functions. It is not yet part of the Scala language standard. To use this feature, turn on the language feature [`experimental.quotedPatternsWithPolymorphicFunctions`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$experimental$$quotedPatternsWithPolymorphicFunctions$.html). This can be done with a language import
+```scala
+import scala.language.experimental.quotedPatternsWithPolymorphicFunctions
+```
+or by setting the command line option `-language:experimental.quotedPatternsWithPolymorphicFunctions`.
+
+## Background
+Quoted patterns allow us to use quoted code as a pattern. Using quoted patterns, we can check if an expression is equivalent to another, or decompose it. In particular, higher-order patterns are useful when extracting code fragments inside function bodies.
+
+```scala
+def decomposeFunc(x: Expr[Any])(using Quotes): Expr[Int] =
+  x match
+    case '{ (a: Int, b: Int) => $y(a, b) : Int } =>
+      '{ $y(0, 0) }
+    case _ => Expr(0)
+```
+
+In the example above, the first case matches the case where `x` is a function and `y` is bound to the body of the function. The higher-order pattern `$y(a, b)` states that it matches any code with free occurrences of the variables `a` and `b`. If it is `$y(a)` instead, an expression like `(a: Int, b: Int) => a + b` will not match because `a + b` has an occurrence of `b`, which is not included in the higher-order pattern.
+
+## Motivation
+This experimental feature extends this higher-order pattern syntax to allow type variables.
+
+```scala
+def decomposePoly(x: Expr[Any])(using Quotes): Expr[Int] =
+  x match
+    case '{ [A] => (x: List[A]) => $y[A](x) : Int } =>
+      '{ $y[Int](List(1, 2, 3)) }
+    case _ => Expr(0)
+```
+
+Now we can use a higher-order pattern `$y[A](x)` with type variables.
`y` is bound to the body of code with occurrences of `A` and `x`, and has the type `[A] => (x: List[A]) => Int`.
+
+## Type Dependency
+If a higher-order pattern carries a value parameter with a type that has type parameters defined in the quoted pattern, those type parameters should also be captured in the higher-order pattern. For example, the following pattern will not typecheck.
+
+```
+case '{ [A] => (x: List[A]) => $y(x) : Int } =>
+```
+
+In this case, `x` has the type `List[A]`, which includes a type variable `A` that is defined in the pattern. However, the higher-order pattern `$y(x)` does not have any type parameters, so it should be ill-typed. One can always avoid this kind of type error by adding type parameters, like `$y[A](x)`.
+
+## Implementation Restriction
+The current implementation only allows type parameters that do not have bounds, because sound typing rules for such patterns are not yet clear.
+
+```scala
+case '{ [A] => (x: List[A]) => $y(x) : Int } =>        // Allowed
+case '{ [A <: Int] => (x: List[A]) => $y(x) : Int } => // Disallowed
+```
diff --git a/docs/_docs/reference/experimental/runtimeChecked.md b/docs/_docs/reference/experimental/runtimeChecked.md
new file mode 100644
index 000000000000..71fac3ad8728
--- /dev/null
+++ b/docs/_docs/reference/experimental/runtimeChecked.md
@@ -0,0 +1,133 @@
+---
+layout: doc-page
+title: "The runtimeChecked method"
+nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/runtimeChecked.html
+---
+
+The `runtimeChecked` method is an extension method, defined in `scala.Predef`. It can be called on any expression. An expression ending in `.runtimeChecked` is exempt from certain static checks in the compiler, for example pattern match exhaustivity. The idiom is intended to replace a `: @unchecked` type ascription in these cases.
+
+## Example
+
+A common use case for `runtimeChecked` is to assert that a pattern will always match, either for convenience, or because there is a known invariant that the types cannot express.
+
+E.g. looking up an expected entry in a dynamically loaded dictionary-like structure:
+```scala
+// example 1
+trait AppConfig:
+  def get(key: String): Option[String]
+
+val config: AppConfig = ???
+
+val Some(appVersion) = config.get("appVersion").runtimeChecked
+```
+
+or to assert that a value can only match some specific patterns:
+```scala
+// example 2
+enum Day:
+  case Mon, Tue, Wed, Thu, Fri, Sat, Sun
+
+val weekDay: Option[Day] = ???
+
+weekDay.runtimeChecked match
+  case Some(Mon | Tue | Wed | Thu | Fri) => println("got weekday")
+// case Some(Sat | Sun) => // weekend should not appear
+  case None =>
+```
+
+In both of these cases, without `runtimeChecked` there would either be an error (example 1), or a warning (example 2), because statically the compiler knows that there could be other cases at runtime, so it is right to caution the programmer.
+
+```scala
+// warning in example 2 when we don't add `.runtimeChecked`.
+-- [E029] Pattern Match Exhaustivity Warning: ----------------------------------
+6 |weekDay match
+  |^^^^^^^
+  |match may not be exhaustive.
+  |
+  |It would fail on pattern case: Some(Sat), Some(Sun)
+```
+
+## Safety
+
+The `runtimeChecked` method only turns off static checks that can be soundly performed at runtime. This means that patterns with unchecked type-tests will still generate warnings.
For example: +```scala +scala> val xs = List(1: Any) + | xs.runtimeChecked match { + | case is: ::[Int] => is.head + | } +1 warning found +-- Unchecked Warning: --------------------------------------- +3 | case is: ::[Int] => is.head + | ^ + |the type test for ::[Int] cannot be checked at runtime + |because its type arguments can't be determined from List[Any] +val res0: Int = 1 +``` +As the warning hints, the type `::[Int]` can not be tested at runtime on a value of type `List[Any]`, so using `runtimeChecked` still protects the user against assertions that can not be validated. + +To fully avoid warnings, as with previous Scala versions, `@unchecked` should be put on the type argument: +```scala +scala> xs.runtimeChecked match { + | case is: ::[Int @unchecked] => is.head + | } +val res1: Int = 1 +``` + + +## Specification + +We add a new annotation `scala.internal.RuntimeChecked` as a part of the standard Scala 3 library. A programmer is not expected to use this annotation directly. + +```scala +package scala.annotation.internal + +final class RuntimeChecked extends Annotation +``` + +Any term that is the scrutinee of a pattern match, and that has a type annotated with `RuntimeChecked`, is exempt from pattern match exhaustivity checking. + + +The user facing API is augmented with a new extension method `scala.Predef.runtimeChecked`, qualified for any value: +```scala +package scala + +import scala.annotation.internal.RuntimeChecked + +object Predef: + ... + extension [T](x: T) + inline def runtimeChecked: x.type @RuntimeChecked = + x: @RuntimeChecked +``` + +The `runtimeChecked` method returns its argument, refining its type with the `RuntimeChecked` annotation. + +## Motivation + +As described in [Pattern Bindings](../changed-features/pattern-bindings.md), under `-source:future` it is an error for a pattern definition to be refutable. For instance, consider: +```scala +def xs: List[Any] = ??? +val y :: ys = xs +``` + +This compiled without warning in 3.0, became a warning in 3.2, and we would like to make it an error by default in a future 3.x version. +As an escape hatch in 3.2 we recommended to use a type ascription of `: @unchecked`: +``` +-- Warning: ../../new/test.scala:6:16 ----------------------- +6 | val y :: ys = xs + | ^^ + |pattern's type ::[Any] is more specialized than the right + |hand side expression's type List[Any] + | + |If the narrowing is intentional, this can be communicated + |by adding `: @unchecked` after the expression, + |which may result in a MatchError at runtime. +``` + +However, `: @unchecked` is syntactically awkward, and is also a misnomer - in fact in this case the pattern _is_ fully checked, but the necessary checks occur at runtime. The `runtimeChecked` method is intended to replace `@unchecked` for this purpose. + +The `@unchecked` annotation is still retained for silencing warnings on unsound type tests. + +### Restoring Scala 2.13 semantics with runtimeChecked + +In Scala 3, the `: @unchecked` type ascription has the effect of turning off all pattern-match warnings on the match scrutinee - this differs from 2.13 in which it strictly turns off only pattern exhaustivity checking. `runtimeChecked` restores the semantics of Scala 2.13. 
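To make the intended migration concrete, here is a small hedged sketch contrasting the two escape hatches for a refutable pattern definition (the `xs` definition is illustrative, and the snippet assumes a compiler in which the experimental `runtimeChecked` method is available):

```scala
def xs: List[Any] = List(1, 2, 3)

// escape hatch recommended so far:
val y :: ys = xs: @unchecked

// with runtimeChecked, the same intent reads more directly:
val z :: zs = xs.runtimeChecked

// both bindings succeed here; each would throw a MatchError if xs were empty
```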
diff --git a/docs/_docs/reference/experimental/typeclasses.md b/docs/_docs/reference/experimental/typeclasses.md index a78e764bbe7d..add5853e10ba 100644 --- a/docs/_docs/reference/experimental/typeclasses.md +++ b/docs/_docs/reference/experimental/typeclasses.md @@ -4,7 +4,7 @@ title: "Better Support for Type Classes" nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/typeclasses.html --- -Martin Odersky, 8.1.2024, edited 5.4.2024 +Martin Odersky, 8.1.2024, edited 5.4.2024 and 30.9.2024 A type class in Scala is a pattern where we define @@ -18,13 +18,14 @@ a bit cumbersome and limiting for standard generic programming patterns. Much ha This note shows that with some fairly small and reasonable tweaks to Scala's syntax and typing rules we can obtain a much better scheme for working with type classes, or do generic programming in general. The bulk of the suggested improvements has been implemented and is available -under source version `future` if the additional experimental language import `modularity` is present. For instance, using the following command: +in source version `future` if the additional experimental language import `modularity` is present. For instance, using the following command: ``` scala compile -source:future -language:experimental.modularity ``` -It is intended to turn features described here into proposals under the Scala improvement process. A first installment is SIP 64, which covers some syntactic changes, names for context bounds, multiple context bounds and deferred givens. The order of exposition described in this note is different from the planned proposals of SIPs. This doc is not a guide on how to sequence details, but instead wants to present a vision of what is possible. For instance, we start here with a feature (Self types and `is` syntax) that has turned out to be controversial and that will probably be proposed only late in the sequence of SIPs. +It is intended to turn features described here into proposals under the Scala improvement process. A first installment is SIP 64, which covers some syntactic changes, names for context bounds, multiple context bounds and deferred givens. This SIP has been accepted for inclusion in the language and will be released in Scala 3.6. The remaining elements +that concern type classes are described in the following. There is also a separate [page on modularity improvements](../modularity.md) that describes proposed additions not directly related to type classes. ## Generalizing Context Bounds @@ -145,70 +146,6 @@ This makes writing instance definitions and using clauses quite pleasant. Exampl (more examples will follow below) - - -## Naming Context Bounds - -Context bounds are a convenient and legible abbreviation. A problem so far is that they are always anonymous, -one cannot name the using parameter to which a context bound expands. - -For instance, consider a `reduce` method over `Monoid`s defined like this: - -```scala -def reduce[A : Monoid](xs: List[A]): A = ??? -``` -Since we don't have a name for the `Monoid` instance of `A`, we need to resort to `summon` in the body of `reduce`: -```scala -def reduce[A : Monoid](xs: List[A]): A = - xs.foldLeft(summon Monoid[A])(_ `combine` _) -``` -That's generally considered too painful to write and read, hence people usually adopt one of two alternatives. 
Either, eschew context bounds and switch to using clauses: -```scala -def reduce[A](xs: List[A])(using m: Monoid[A]): A = - xs.foldLeft(m)(_ `combine` _) -``` -Or, plan ahead and define a "trampoline" method in `Monoid`'s companion object: -```scala - trait Monoid[A] extends SemiGroup[A]: - def unit: A - object Monoid: - def unit[A](using m: Monoid[A]): A = m.unit - ... - def reduce[A : Monoid](xs: List[A]): A = - xs.foldLeft(Monoid.unit)(_ `combine` _) -``` -This is all accidental complexity which can be avoided by the following proposal. - -**Proposal:** Allow to name a context bound, like this: -```scala - def reduce[A : Monoid as m](xs: List[A]): A = - xs.foldLeft(m.unit)(_ `combine` _) -``` - -We use `as x` after the type to bind the instance to `x`. This is analogous to import renaming, which also introduces a new name for something that comes before. - -**Benefits:** The new syntax is simple and clear. -It avoids the awkward choice between concise context bounds that can't be named and verbose using clauses that can. - -### New Syntax for Aggregate Context Bounds - -Aggregate context bounds like `A : X : Y` are not obvious to read, and it becomes worse when we add names, e.g. `A : X as x : Y as y`. - -**Proposal:** Allow to combine several context bounds inside `{...}`, analogous -to import clauses. Example: - -```scala - trait: - def showMax[X : {Ordering, Show}](x: X, y: X): String - class B extends A: - def showMax[X : {Ordering as ordering, Show as show}](x: X, y: X): String = - show.asString(ordering.max(x, y)) -``` - -The old syntax with multiple `:` should be phased out over time. - -**Benefits:** The new syntax is much clearer than the old one, in particular for newcomers that don't know context bounds well. - ### Better Default Names for Context Bounds So far, an unnamed context bound for a type parameter gets a synthesized fresh name. It would be much more useful if it got the name of the constrained type parameter instead, translated to be a term name. This means our `reduce` method over monoids would not even need an `as` binding. We could simply formulate it as follows: @@ -233,216 +170,7 @@ The default naming convention reduces the need for named context bounds. But nam - They give an explanation what a single unnamed context bound expands to. -### Expansion of Context Bounds - -Context bounds are currently translated to implicit parameters in the last parameter list of a method or class. This is a problem if a context bound is mentioned in one of the preceding parameter types. For example, consider a type class of parsers with associated type members `Input` and `Result` describing the input type on which the parsers operate and the type of results they produce: -```scala -trait Parser[P]: - type Input - type Result -``` -Here is a method `run` that runs a parser on an input of the required type: - -```scala -def run[P : Parser](in: P.Input): P.Result -``` -Or, making clearer what happens by using an explicit name for the context bound: -```scala -def run[P : Parser as p](in: p.Input): p.Result -``` -With the current translation this does not work since it would be expanded to: -```scala - def run[P](x: p.Input)(using p: Parser[P]): p.Result -``` -Note that the `p` in `p.Input` refers to the `p` introduced in the using clause, which comes later. So this is ill-formed. - -This problem would be fixed by changing the translation of context bounds so that they expand to using clauses immediately after the type parameter. 
But such a change is infeasible, for two reasons: - - 1. It would be a binary-incompatible change. - 2. Putting using clauses earlier can impair type inference. A type in - a using clause can be constrained by term arguments coming before that - clause. Moving the using clause first would miss those constraints, which could cause ambiguities in implicit search. - -But there is an alternative which is feasible: - -**Proposal:** Map the context bounds of a method or class as follows: - - 1. If one of the bounds is referred to by its term name in a subsequent parameter clause, the context bounds are mapped to a using clause immediately preceding the first such parameter clause. - 2. Otherwise, if the last parameter clause is a using (or implicit) clause, merge all parameters arising from context bounds in front of that clause, creating a single using clause. - 3. Otherwise, let the parameters arising from context bounds form a new using clause at the end. - -Rules (2) and (3) are the status quo, and match Scala 2's rules. Rule (1) is new but since context bounds so far could not be referred to, it does not apply to legacy code. Therefore, binary compatibility is maintained. - -**Discussion** More refined rules could be envisaged where context bounds are spread over different using clauses so that each comes as late as possible. But it would make matters more complicated and the gain in expressiveness is not clear to me. - -Named (either explicitly, or by default) context bounds in givens that produce classes are mapped to tracked val's of these classes (see #18958). This allows -references to these parameters to be precise, so that information about dependent type members is preserved. - - -## Context Bounds for Type Members - -It's not very orthogonal to allow subtype bounds for both type parameters and abstract type members, but context bounds only for type parameters. What's more, we don't even have the fallback of an explicit using clause for type members. The only alternative is to also introduce a set of abstract givens that get implemented in each subclass. This is extremely heavyweight and opaque to newcomers. - -**Proposal**: Allow context bounds for type members. Example: - -```scala - class Collection: - type Element : Ord -``` - -The question is how these bounds are expanded. Context bounds on type parameters -are expanded into using clauses. But for type members this does not work, since we cannot refer to a member type of a class in a parameter type of that class. What we are after is an equivalent of using parameter clauses but represented as class members. - -**Proposal:** Introduce a new way to implement a given definition in a trait like this: -```scala -given T = deferred -``` -`deferred` is a new method in the `scala.compiletime` package, which can appear only as the right hand side of a given defined in a trait. Any class implementing that trait will provide an implementation of this given. If a definition is not provided explicitly, it will be synthesized by searching for a given of type `T` in the scope of the inheriting class. Specifically, the scope in which this given will be searched is the environment of that class augmented by its parameters but not containing its members (since that would lead to recursive resolutions). If an implementation _is_ provided explicitly, it counts as an override of a concrete definition and needs an `override` modifier. 
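To make the `override` rule concrete, here is a minimal sketch (it assumes the `-source:future -language:experimental.modularity` setup described above; the `Ord` trait and all other names are illustrative, not taken from the standard library):

```scala
import scala.compiletime.deferred

trait Ord[A]:
  def compare(x: A, y: A): Int

trait Sorted:
  type Element
  given Ord[Element] = deferred        // each implementing class must end up with this given

object IntDescending extends Ord[Int]:
  def compare(x: Int, y: Int) = Integer.compare(y, x)

class ReverseSortedInts extends Sorted:
  type Element = Int
  // providing the given explicitly counts as overriding a concrete definition,
  // so it needs the `override` modifier
  override given Ord[Element] = IntDescending
```

A class that leaves the given out entirely would instead get an implementation synthesized from the givens visible where it is defined, as described above.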
- -Deferred givens allow a clean implementation of context bounds in traits, -as in the following example: -```scala -trait Sorted: - type Element : Ord - -class SortedSet[A : Ord] extends Sorted: - type Element = A -``` -The compiler expands this to the following implementation: -```scala -trait Sorted: - type Element - given Ord[Element] = compiletime.deferred - -class SortedSet[A](using A: Ord[A]) extends Sorted: - type Element = A - override given Ord[Element] = A // i.e. the A defined by the using clause -``` - -The using clause in class `SortedSet` provides an implementation for the deferred given in trait `Sorted`. - -**Benefits:** - - - Better orthogonality, type parameters and abstract type members now accept the same kinds of bounds. - - Better ergonomics, since deferred givens get naturally implemented in inheriting classes, no need for boilerplate to fill in definitions of abstract givens. - -**Alternative:** It was suggested that we use a modifier for a deferred given instead of a `= deferred`. Something like `deferred given C[T]`. But a modifier does not suggest the concept that a deferred given will be implemented automatically in subclasses unless an explicit definition is written. In a sense, we can see `= deferred` as the invocation of a magic macro that is provided by the compiler. So from a user's point of view a given with `deferred` right hand side is not abstract. -It is a concrete definition where the compiler will provide the correct implementation. - -## New Given Syntax - -A good language syntax is like a Bach fugue: A small set of motifs is combined in a multitude of harmonic ways. Dissonances and irregularities should be avoided. - -When designing Scala 3, I believe that, by and large, we achieved that goal, except in one area, which is the syntax of givens. There _are_ some glaring dissonances, as seen in this code for defining an ordering on lists: -```scala -given [A](using Ord[A]): Ord[List[A]] with - def compare(x: List[A], y: List[A]) = ... -``` -The `:` feels utterly foreign in this position. It's definitely not a type ascription, so what is its role? Just as bad is the trailing `with`. Everywhere else we use braces or trailing `:` to start a scope of nested definitions, so the need of `with` sticks out like a sore thumb. - -We arrived at that syntax not because of a flight of fancy but because even after trying for about a year to find other solutions it seemed like the least bad alternative. The awkwardness of the given syntax arose because we insisted that givens could be named or anonymous, with the default on anonymous, that we would not use underscore for an anonymous given, and that the name, if present, had to come first, and have the form `name [parameters] :`. In retrospect, that last requirement showed a lack of creativity on our part. - -Sometimes unconventional syntax grows on you and becomes natural after a while. But here it was unfortunately the opposite. The longer I used given definitions in this style the more awkward they felt, in particular since the rest of the language seemed so much better put together by comparison. And I believe many others agree with me on this. Since the current syntax is unnatural and esoteric, this means it's difficult to discover and very foreign even after that. This makes it much harder to learn and apply givens than it need be. - -Things become much simpler if we introduce the optional name instead with an `as name` clause at the end, just like we did for context bounds. 
We can then use a more intuitive syntax for givens like this: -```scala -given String is Ord: - def compare(x: String, y: String) = ... - -given [A : Ord] => List[A] is Ord: - def compare(x: List[A], y: List[A]) = ... - -given Int is Monoid: - extension (x: Int) def combine(y: Int) = x + y - def unit = 0 -``` -Here, the second given can be read as if `A` is an `Ord` then `List[A]` is also an`Ord`. Or: for all `A: Ord`, `List[A]` is `Ord`. The arrow can be seen as an implication, note also the analogy to pattern matching syntax. - -If explicit names are desired, we add them with `as` clauses: -```scala -given String is Ord as intOrd: - def compare(x: String, y: String) = ... - -given [A : Ord] => List[A] is Ord as listOrd: - def compare(x: List[A], y: List[A]) = ... - -given Int is Monoid as intMonoid: - extension (x: Int) def combine(y: Int) = x + y - def unit = 0 -``` - -The underlying principles are: - - - A `given` clause consists of the following elements: - - - An optional _precondition_, which introduces type parameters and/or using clauses and which ends in `=>`, - - the implemented _type_, - - an optional name binding using `as`, - - an implementation which consists of either an `=` and an expression, - or a template body. - - - Since there is no longer a middle `:` separating name and parameters from the implemented type, we can use a `:` to start the class body without looking unnatural, as is done everywhere else. That eliminates the special case where `with` was used before. - -This will be a fairly significant change to the given syntax. I believe there's still a possibility to do this. Not so much code has migrated to new style givens yet, and code that was written can be changed fairly easily. Specifically, there are about a 900K definitions of `implicit def`s -in Scala code on Github and about 10K definitions of `given ... with`. So about 1% of all code uses the Scala 3 syntax, which would have to be changed again. - -Changing something introduced just recently in Scala 3 is not fun, -but I believe these adjustments are preferable to let bad syntax -sit there and fester. The cost of changing should be amortized by improved developer experience over time, and better syntax would also help in migrating Scala 2 style implicits to Scala 3. But we should do it quickly before a lot more code -starts migrating. - -Migration to the new syntax is straightforward, and can be supported by automatic rewrites. For a transition period we can support both the old and the new syntax. It would be a good idea to backport the new given syntax to the LTS version of Scala so that code written in this version can already use it. The current LTS would then support old and new-style givens indefinitely, whereas new Scala 3.x versions would phase out the old syntax over time. - - -### Abolish Abstract Givens - -Another simplification is possible. So far we have special syntax for abstract givens: -```scala -given x: T -``` -The problem is that this syntax clashes with the quite common case where we want to establish a given without any nested definitions. 
For instance -consider a given that constructs a type tag: -```scala -class Tag[T] -``` -Then this works: -```scala -given Tag[String]() -given Tag[String] with {} -``` -But the following more natural syntax fails: -```scala -given Tag[String] -``` -The last line gives a rather cryptic error: -``` -1 |given Tag[String] - | ^ - | anonymous given cannot be abstract -``` -The problem is that the compiler thinks that the last given is intended to be abstract, and complains since abstract givens need to be named. This is another annoying dissonance. Nowhere else in Scala's syntax does adding a -`()` argument to a class cause a drastic change in meaning. And it's also a violation of the principle that it should be possible to define all givens without providing names for them. - -Fortunately, abstract givens are no longer necessary since they are superseded by the new `deferred` scheme. So we can deprecate that syntax over time. Abstract givens are a highly specialized mechanism with a so far non-obvious syntax. We have seen that this syntax clashes with reasonable expectations of Scala programmers. My estimate is that maybe a dozen people world-wide have used abstract givens in anger so far. - -**Proposal** In the future, let the `= deferred` mechanism be the only way to deliver the functionality of abstract givens. - -This is less of a disruption than it might appear at first: - - - `given T` was illegal before since abstract givens could not be anonymous. - It now means a concrete given of class `T` with no member definitions. - - `given x: T` is legacy syntax for an abstract given. - - `given T as x = deferred` is the analogous new syntax, which is more powerful since - it allows for automatic instantiation. - - `given T = deferred` is the anonymous version in the new syntax, which was not expressible before. - -**Benefits:** - - - Simplification of the language since a feature is dropped - - Eliminate non-obvious and misleading syntax. - - -### Bonus: Fixing Singleton +## Fixing Singleton We know the current treatment of `Singleton` as a type bound is broken since `x.type | y.type <: Singleton` holds by the subtyping rules for union types, even though `x.type | y.type` is clearly not a singleton. @@ -464,7 +192,7 @@ def f[X: Singleton](x: X) = ... The context bound is treated specially by the compiler so that no using clause is generated at runtime (this is straightforward, using the erased definitions mechanism). -### Bonus: Precise Typing +## Precise Typing This approach also presents a solution to the problem how to express precise type variables. We can introduce another special type class `Precise` and use it like this: @@ -474,28 +202,6 @@ def f[X: Precise](x: X) = ... Like a `Singleton` bound, a `Precise` bound disables automatic widening of singleton types or union types in inferred instances of type variable `X`. But there is no requirement that the type argument _must_ be a singleton. -## Summary of Syntax Changes - -Here is the complete context-free syntax for all proposed features. -Overall the syntax for givens becomes a lot simpler than what it was before.
- -``` -TmplDef ::= 'given' GivenDef -GivenDef ::= [GivenConditional '=>'] GivenSig -GivenConditional ::= [DefTypeParamClause | UsingParamClause] {UsingParamClause} -GivenSig ::= GivenType ['as' id] ([‘=’ Expr] | TemplateBody) - | ConstrApps ['as' id] TemplateBody -GivenType ::= AnnotType {id [nl] AnnotType} - -TypeDef ::= id [TypeParamClause] TypeAndCtxBounds -TypeParamBounds ::= TypeAndCtxBounds -TypeAndCtxBounds ::= TypeBounds [‘:’ ContextBounds] -ContextBounds ::= ContextBound | '{' ContextBound {',' ContextBound} '}' -ContextBound ::= Type ['as' id] -``` - - - ## Examples @@ -586,7 +292,7 @@ Here are some standard type classes, which were mostly already introduced at the def maximum[T: Ord](xs: List[T]): T = xs.reduce(_ `max` _) - given [T: Ord] => T is Ord as descending: + given descending: [T: Ord] => T is Ord: extension (x: T) def compareTo(y: T) = T.compareTo(y)(x) def minimum[T: Ord](xs: List[T]) = @@ -766,17 +472,12 @@ Pattern2 ::= InfixPattern ['as' id] ## Summary -I have proposed some tweaks to Scala 3, which would greatly increase its usability for modular, type class based, generic programming. The proposed changes are: +I have proposed some tweaks to Scala 3, which would increase its usability for modular, type class based, generic programming. The proposed changes are: 1. Allow context bounds over classes that define a `Self` member type. - 1. Allow context bounds to be named with `as`. Use the bound parameter name as a default name for the generated context bound evidence. - 1. Add a new `{...}` syntax for multiple context bounds. - 1. Make context bounds also available for type members, which expand into a new form of deferred given. Phase out the previous abstract givens in favor of the new form. 1. Add a predefined type alias `is`. - 1. Introduce a new cleaner syntax of given clauses. - -It's interesting that givens, which are a very general concept in Scala, were "almost there" when it comes to full support of concepts and generic programming. We only needed to add a few usability tweaks to context bounds, -alongside two syntactic changes that supersede the previous forms of `given .. with` clauses and abstract givens. Also interesting is that the superseded syntax constructs were the two areas where we collectively felt that the previous solutions were a bit awkward, but we could not think of better ones at the time. It's very nice that more satisfactory solutions are now emerging. + 1. If a type parameter or member `T` has context bound `CB`, use `T` as the default name for the witness of `CB`. + 1. Clean up `Singleton` and add a new trait `Precise` for non-widening instantiation of type variables. ## Conclusion diff --git a/docs/_docs/reference/metaprogramming/macros-spec.md b/docs/_docs/reference/metaprogramming/macros-spec.md index 27a0a2c1bdcb..261f9002e1e6 100644 --- a/docs/_docs/reference/metaprogramming/macros-spec.md +++ b/docs/_docs/reference/metaprogramming/macros-spec.md @@ -121,7 +121,7 @@ Finally, the object defines `valueOfConstant` (and `valueOfTuple`) which can tra ```scala object Type: - given of[T <: AnyKind](using Quotes): Type[T] = ... + given of: [T <: AnyKind] => Quotes => Type[T] = ... def show[T <: AnyKind](using Type[T])(using Quotes): String = ... def valueOfConstant[T](using Type[T])(using Quotes): Option[T] = ... def valueOfTuple[T <: Tuple](using Type[T])(using Quotes): Option[T] = ...
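As a reading aid for rewrites like the one just above: in `given of: [T <: AnyKind] => Quotes => Type[T] = ...` the conditions of the given are listed left to right, a type parameter clause, then an anonymous `Quotes` context parameter, then the implemented type. Here is a hedged sketch of the same shape on made-up types (`Ctx` and `Tag` are illustrative, not library types, and the new given syntax introduced by this PR is assumed to be available):

```scala
trait Ctx
class Tag[T]

// up to Scala 3.5 this would be written:  given tagOf[T](using Ctx): Tag[T] = Tag[T]()
// new conditional syntax: conditions before `=>`, implemented type last
given tagOf: [T] => Ctx => Tag[T] = Tag[T]()

def demo(using Ctx): Tag[String] = summon[Tag[String]]   // resolves tagOf with T = String
```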
diff --git a/docs/_docs/reference/metaprogramming/macros.md b/docs/_docs/reference/metaprogramming/macros.md index b63616185285..43eb5b733aeb 100644 --- a/docs/_docs/reference/metaprogramming/macros.md +++ b/docs/_docs/reference/metaprogramming/macros.md @@ -100,7 +100,7 @@ We can implement a `ToExpr` using a `given` definition that will add the definit In the following example we show how to implement a `ToExpr[Option[T]]` for any liftable type `T`. ```scala -given OptionToExpr[T: Type: ToExpr]: ToExpr[Option[T]] with +given OptionToExpr: [T: {Type, ToExpr}] => ToExpr[Option[T]]: def apply(opt: Option[T])(using Quotes): Expr[Option[T]] = opt match case Some(x) => '{ Some[T]( ${Expr(x)} ) } @@ -420,7 +420,7 @@ These value extraction sub-patterns can be polymorphic using an instance of `Fro In the following example, we show the implementation of `OptionFromExpr` which internally uses the `FromExpr[T]` to extract the value using the `Expr(x)` pattern. ```scala -given OptionFromExpr[T](using Type[T], FromExpr[T]): FromExpr[Option[T]] with +given OptionFromExpr: [T: {Type, FromExpr}] => FromExpr[Option[T]]: def unapply(x: Expr[Option[T]])(using Quotes): Option[Option[T]] = x match case '{ Some( ${Expr(x)} ) } => Some(Some(x)) diff --git a/docs/_docs/reference/metaprogramming/reflection.md b/docs/_docs/reference/metaprogramming/reflection.md index 68cb7dafcfbb..65ae2f733b7a 100644 --- a/docs/_docs/reference/metaprogramming/reflection.md +++ b/docs/_docs/reference/metaprogramming/reflection.md @@ -82,7 +82,8 @@ def macroImpl()(quotes: Quotes): Expr[Unit] = import quotes.reflect.* val pos = Position.ofMacroExpansion - val path = pos.sourceFile.jpath.toString + val jpath = pos.sourceFile.getJPath.getOrElse(report.errorAndAbort("virtual file not supported", pos)) + val path = pos.sourceFile.path // fallback for a virtual file val start = pos.start val end = pos.end val startLine = pos.startLine diff --git a/docs/_docs/reference/experimental/generalized-method-syntax.md b/docs/_docs/reference/other-new-features/generalized-method-syntax.md similarity index 92% rename from docs/_docs/reference/experimental/generalized-method-syntax.md rename to docs/_docs/reference/other-new-features/generalized-method-syntax.md index 072052c1ae10..2dd537cacdd8 100644 --- a/docs/_docs/reference/experimental/generalized-method-syntax.md +++ b/docs/_docs/reference/other-new-features/generalized-method-syntax.md @@ -1,15 +1,9 @@ --- layout: doc-page title: "Generalized Method Syntax" -nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/generalized-method-syntax.html +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/generalized-method-syntax.html --- -This feature is not yet part of the Scala 3 language definition. It can be made available by a language import: - -```scala -import scala.language.experimental.clauseInterleaving -``` - The inclusion of using clauses is not the only way in which methods have been updated, type parameter clauses are now allowed in any number and at any position. ## Syntax Changes @@ -51,7 +45,7 @@ trait DB { } ``` -Note that simply replacing `V` by `k.Value` would not be equivalent. For example, if `k.Value` is `Some[Int]`, only the above allows: +Note that simply replacing `V` by `k.Value` would not be equivalent. For example, if `k.Value` is `Some[Int]`, only the above allows: `getOrElse(k)[Option[Int]](None)`, which returns an `Option[Int]`.
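Since the page is promoted out of `experimental` here, a small self-contained sketch of the call shape it describes may help (the `Key`/`DB` outline follows the page; `demo` is illustrative and assumes a compiler version in which clause interleaving is a standard feature):

```scala
trait Key { type Value }

trait DB:
  // a type parameter clause may sit between two term parameter clauses
  def getOrElse(k: Key)[V >: k.Value](default: V): V

def demo(db: DB, k: Key { type Value = Some[Int] }): Option[Int] =
  db.getOrElse(k)[Option[Int]](None)   // V is instantiated to Option[Int]
```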
## Details diff --git a/docs/_docs/reference/syntax.md b/docs/_docs/reference/syntax.md index 66cf5a18fac9..0f78ff03583e 100644 --- a/docs/_docs/reference/syntax.md +++ b/docs/_docs/reference/syntax.md @@ -60,9 +60,10 @@ idrest ::= {letter | digit} [‘_’ op] quoteId ::= ‘'’ alphaid spliceId ::= ‘$’ alphaid ; -integerLiteral ::= (decimalNumeral | hexNumeral) [‘L’ | ‘l’] +integerLiteral ::= (decimalNumeral | hexNumeral | binaryNumeral) [‘L’ | ‘l’] decimalNumeral ::= ‘0’ | digit [{digit | ‘_’} digit] hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit [{hexDigit | ‘_’} hexDigit] +binaryNumeral ::= ‘0’ (‘b’ | ‘B’) binaryDigit [{binaryDigit | ‘_’} binaryDigit] floatingPointLiteral ::= [decimalNumeral] ‘.’ digit [{digit | ‘_’} digit] [exponentPart] [floatType] @@ -177,12 +178,11 @@ ClassQualifier ::= ‘[’ id ‘]’ ### Types ``` Type ::= FunType - | HkTypeParamClause ‘=>>’ Type - | FunParamClause ‘=>>’ Type + | TypTypeParamClause ‘=>>’ Type | MatchType | InfixType FunType ::= FunTypeArgs (‘=>’ | ‘?=>’) Type - | HKTypeParamClause '=>' Type + | TypTypeParamClause '=>' Type FunTypeArgs ::= InfixType | ‘(’ [ FunArgTypes ] ‘)’ | FunParamClause @@ -214,17 +214,21 @@ ParamValueType ::= Type [‘*’] TypeArgs ::= ‘[’ Types ‘]’ Refinement ::= :<<< [RefineDcl] {semi [RefineDcl]} >>> TypeBounds ::= [‘>:’ Type] [‘<:’ Type] -TypeParamBounds ::= TypeBounds {‘:’ Type} +TypeAndCtxBounds ::= TypeBounds [':' ContextBounds] +ContextBounds ::= ContextBound + | ContextBound ':' ContextBounds -- to be deprecated + | '{' ContextBound {',' ContextBound} '}' +ContextBound ::= Type ['as' id] Types ::= Type {‘,’ Type} ``` ### Expressions ``` Expr ::= FunParams (‘=>’ | ‘?=>’) Expr - | HkTypeParamClause ‘=>’ Expr + | TypTypeParamClause ‘=>’ Expr | Expr1 BlockResult ::= FunParams (‘=>’ | ‘?=>’) Block - | HkTypeParamClause ‘=>’ Block + | TypTypeParamClause ‘=>’ Block | Expr1 FunParams ::= Bindings | id @@ -272,7 +276,7 @@ SimpleExpr ::= SimpleRef ColonArgument ::= colon [LambdaStart] indent (CaseClauses | Block) outdent LambdaStart ::= FunParams (‘=>’ | ‘?=>’) - | HkTypeParamClause ‘=>’ + | TypTypeParamClause ‘=>’ Quoted ::= ‘'’ ‘{’ Block ‘}’ | ‘'’ ‘[’ TypeBlock ‘]’ ExprSplice ::= spliceId -- if inside quoted block @@ -338,13 +342,16 @@ ArgumentPatterns ::= ‘(’ [Patterns] ‘)’ ### Type and Value Parameters ``` ClsTypeParamClause::= ‘[’ ClsTypeParam {‘,’ ClsTypeParam} ‘]’ -ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] id [HkTypeParamClause] TypeParamBounds +ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] id [HkTypeParamClause] TypeAndCtxBounds + +DefTypeParamClause::= [nl] ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ +DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeAndCtxBounds TypTypeParamClause::= ‘[’ TypTypeParam {‘,’ TypTypeParam} ‘]’ -TypTypeParam ::= {Annotation} id [HkTypeParamClause] TypeBounds +TypTypeParam ::= {Annotation} (id | ‘_’) [HkTypeParamClause] TypeBounds HkTypeParamClause ::= ‘[’ HkTypeParam {‘,’ HkTypeParam} ‘]’ -HkTypeParam ::= {Annotation} [‘+’ | ‘-’] (id [HkTypeParamClause] | ‘_’) TypeBounds +HkTypeParam ::= {Annotation} [‘+’ | ‘-’] (id | ‘_’) [HkTypeParamClause] TypeBounds ClsParamClauses ::= {ClsParamClause} [[nl] ‘(’ [‘implicit’] ClsParams ‘)’] ClsParamClause ::= [nl] ‘(’ ClsParams ‘)’ @@ -360,8 +367,6 @@ TypelessClauses ::= TypelessClause {TypelessClause} TypelessClause ::= DefTermParamClause | UsingParamClause -DefTypeParamClause::= [nl] ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ -DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeParamBounds DefTermParamClause::= [nl] ‘(’ [DefTermParams] ‘)’ UsingParamClause ::= [nl] ‘(’ 
‘using’ (DefTermParams | FunArgTypes) ‘)’ DefImplicitClause ::= [nl] ‘(’ ‘implicit’ DefTermParams ‘)’ @@ -430,22 +435,35 @@ PatDef ::= ids [‘:’ Type] [‘=’ Expr] DefDef ::= DefSig [‘:’ Type] [‘=’ Expr] DefDef(_, name, paramss, tpe, expr) | ‘this’ TypelessClauses [DefImplicitClause] ‘=’ ConstrExpr DefDef(_, , vparamss, EmptyTree, expr | Block) DefSig ::= id [DefParamClauses] [DefImplicitClause] -TypeDef ::= id [TypeParamClause] {FunParamClause} TypeBounds TypeDefTree(_, name, tparams, bound +TypeDef ::= id [HkTypeParamClause] {FunParamClause}TypeBounds TypeDefTree(_, name, tparams, bound [‘=’ Type] TmplDef ::= ([‘case’] ‘class’ | ‘trait’) ClassDef | [‘case’] ‘object’ ObjectDef | ‘enum’ EnumDef - | ‘given’ GivenDef + | 'given' (GivenDef | OldGivenDef) ClassDef ::= id ClassConstr [Template] ClassConstr ::= [ClsTypeParamClause] [ConstrMods] ClsParamClauses ConstrMods ::= {Annotation} [AccessModifier] ObjectDef ::= id [Template] EnumDef ::= id ClassConstr InheritClauses EnumBody -GivenDef ::= [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) -GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present -GivenType ::= AnnotType {id [nl] AnnotType} + +GivenDef ::= [id ':'] GivenSig +GivenSig ::= GivenImpl + | '(' ')' '=>' GivenImpl + | GivenConditional '=>' GivenSig +GivenImpl ::= GivenType ([‘=’ Expr] | TemplateBody) + | ConstrApps TemplateBody +GivenConditional ::= DefTypeParamClause + | DefTermParamClause + | '(' FunArgTypes ')' + | GivenType +GivenType ::= AnnotType1 {id [nl] AnnotType1} + +OldGivenDef ::= [OldGivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) -- syntax up to Scala 3.5, to be deprecated in the future +OldGivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] + Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} ‘(’ DefTermParam ‘)’ {UsingParamClause} ExtMethods ExtMethods ::= ExtMethod | [nl] <<< ExtMethod {semi ExtMethod} >>> diff --git a/docs/_spec/06-expressions.md b/docs/_spec/06-expressions.md index 5043e752ebe6..a633c30e0e4b 100644 --- a/docs/_spec/06-expressions.md +++ b/docs/_spec/06-expressions.md @@ -729,8 +729,9 @@ A _for loop_ `for (´\mathit{enums}\,´) ´e´` executes expression ´e´ for ea A _for comprehension_ `for (´\mathit{enums}\,´) yield ´e´` evaluates expression ´e´ for each binding generated by the enumerators ´\mathit{enums}´ and collects the results. An enumerator sequence always starts with a generator; this can be followed by further generators, value definitions, or guards. -A _generator_ `´p´ <- ´e´` produces bindings from an expression ´e´ which is matched in some way against pattern ´p´. -Optionally, `case` can appear in front of a generator pattern, this has no meaning in Scala 2 but will be [required in Scala 3 if `p` is not irrefutable](https://docs.scala-lang.org/scala3/reference/changed-features/pattern-bindings.html). +A _generator_ `´p´ <- ´e´` produces bindings from an expression ´e´ which is deconstructed by the pattern ´p´. +The pattern must be [irrefutable](08-pattern-matching.html#irrefutable-patterns). +A _conditional generator_ `case ´p´ <- ´e´` tests whether elements produced by ´e´ match the pattern and discards the ones that do not match. A _value definition_ `´p´ = ´e´` binds the value name ´p´ (or several names in a pattern ´p´) to the result of evaluating the expression ´e´. 
A _guard_ `if ´e´` contains a boolean expression which restricts enumerated bindings. @@ -738,7 +739,7 @@ The precise meaning of generators and guards is defined by translation to invoca These methods can be implemented in different ways for different carrier types. The translation scheme is as follows. -In a first step, every generator `´p´ <- ´e´`, where ´p´ is not [irrefutable](08-pattern-matching.html#patterns) for the type of ´e´ is replaced by +In a first step, every generator `case ´p´ <- ´e´` is replaced by ```scala ´p´ <- ´e´.withFilter { case ´p´ => true; case _ => false } @@ -772,7 +773,7 @@ Then, the following rules are applied repeatedly until all comprehensions have b ´e´.foreach { case ´p´ => for (´p'´ <- ´e'; ...´) ´e''´ } ``` - - A generator `´p´ <- ´e´` followed by a guard `if ´g´` is translated to a single generator `´p´ <- ´e´.withFilter((´x_1, ..., x_n´) => ´g\,´)` where ´x_1, ..., x_n´ are the free variables of ´p´. + - A generator `´p´ <- ´e´` followed by a guard `if ´g´` is translated to a single generator `´p´ <- ´e´.withFilter({ case ´p´ => ´g\,´ })`. - A generator `´p´ <- ´e´` followed by a value definition `´p'´ = ´e'´` is translated to the following generator of pairs of values, where ´x´ and ´x'´ are fresh names: diff --git a/docs/_spec/13-syntax-summary.md b/docs/_spec/13-syntax-summary.md index 2dc971fc9840..f02b2210bb1d 100644 --- a/docs/_spec/13-syntax-summary.md +++ b/docs/_spec/13-syntax-summary.md @@ -49,9 +49,10 @@ idrest ::= {letter | digit} [‘_’ op] quoteId ::= ‘'’ alphaid spliceId ::= ‘$’ alphaid ; -integerLiteral ::= (decimalNumeral | hexNumeral) [‘L’ | ‘l’] +integerLiteral ::= (decimalNumeral | hexNumeral | binaryNumeral) [‘L’ | ‘l’] decimalNumeral ::= ‘0’ | digit [{digit | ‘_’} digit] hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit [{hexDigit | ‘_’} hexDigit] +binaryNumeral ::= ‘0’ (‘b’ | ‘B’) binaryDigit [{binaryDigit | ‘_’} binaryDigit] floatingPointLiteral ::= [decimalNumeral] ‘.’ digit [{digit | ‘_’} digit] [exponentPart] [floatType] diff --git a/docs/_spec/Gemfile b/docs/_spec/Gemfile index ec15529ceb37..dcb5701fd8e1 100644 --- a/docs/_spec/Gemfile +++ b/docs/_spec/Gemfile @@ -6,4 +6,4 @@ gem "jekyll", "3.6.3" gem "webrick" gem "rouge" # gem 's3_website' -gem "redcarpet", "3.5.1" +gem "redcarpet", "3.6.0" diff --git a/docs/_spec/Gemfile.lock b/docs/_spec/Gemfile.lock index 48efd373725e..c703a87bf993 100644 --- a/docs/_spec/Gemfile.lock +++ b/docs/_spec/Gemfile.lock @@ -33,7 +33,7 @@ GEM rb-fsevent (0.11.2) rb-inotify (0.10.1) ffi (~> 1.0) - redcarpet (3.5.1) + redcarpet (3.6.0) rouge (2.2.1) safe_yaml (1.0.5) sass (3.7.4) @@ -41,7 +41,7 @@ GEM sass-listen (4.0.0) rb-fsevent (~> 0.9, >= 0.9.4) rb-inotify (~> 0.9, >= 0.9.7) - webrick (1.7.0) + webrick (1.8.2) PLATFORMS ruby @@ -49,7 +49,7 @@ PLATFORMS DEPENDENCIES jekyll (= 3.6.3) - redcarpet (= 3.5.1) + redcarpet (= 3.6.0) rouge webrick diff --git a/docs/_spec/TODOreference/metaprogramming/reflection.md b/docs/_spec/TODOreference/metaprogramming/reflection.md index b2d492657a4e..2af1d04d1b32 100644 --- a/docs/_spec/TODOreference/metaprogramming/reflection.md +++ b/docs/_spec/TODOreference/metaprogramming/reflection.md @@ -82,7 +82,8 @@ def macroImpl()(quotes: Quotes): Expr[Unit] = import quotes.reflect.* val pos = Position.ofMacroExpansion - val path = pos.sourceFile.jpath.toString + val jpath = pos.sourceFile.getJPath.getOrElse(report.errorAndAbort("virtual file not supported", pos)) + val path = pos.sourceFile.path // fallback for a virtual file val start = pos.start val end = pos.end val startLine 
= pos.startLine diff --git a/docs/sidebar.yml b/docs/sidebar.yml index efdab80595a6..a306d8bdf274 100644 --- a/docs/sidebar.yml +++ b/docs/sidebar.yml @@ -34,7 +34,11 @@ subsection: - page: reference/contextual/givens.md - page: reference/contextual/using-clauses.md - page: reference/contextual/context-bounds.md + - page: reference/contextual/deferred-givens.md - page: reference/contextual/given-imports.md + - page: reference/contextual/more-givens.md + - page: reference/contextual/previous-givens.md + hidden: true - page: reference/contextual/extension-methods.md - page: reference/contextual/right-associative-extension-methods.md - page: reference/contextual/type-classes.md @@ -157,6 +161,8 @@ subsection: - page: reference/experimental/named-tuples.md - page: reference/experimental/modularity.md - page: reference/experimental/typeclasses.md + - page: reference/experimental/runtimeChecked.md + - page: reference/experimental/better-fors.md - page: reference/syntax.md - title: Language Versions index: reference/language-versions/language-versions.md diff --git a/language-server/test/dotty/tools/languageserver/CompletionTest.scala b/language-server/test/dotty/tools/languageserver/CompletionTest.scala index d64bb44c1a5d..38deb4c40c0f 100644 --- a/language-server/test/dotty/tools/languageserver/CompletionTest.scala +++ b/language-server/test/dotty/tools/languageserver/CompletionTest.scala @@ -987,7 +987,7 @@ class CompletionTest { @Test def importAnnotationAfterImport : Unit = code"""import java.lang.annotation; import annot${m1}""" - .completion(("annotation", Module, "scala.annotation")) + .completion(("annotation", Module, "java.lang.annotation")) @Test def completeTemplateConstrArgType: Unit = { code"""import scala.concurrent.Future @@ -1028,6 +1028,7 @@ class CompletionTest { ("ensuring", Method, "(cond: Boolean): Foo.Bar.type"), ("##", Method, "=> Int"), ("nn", Method, "=> Foo.Bar.type"), + ("runtimeChecked", Method, "=> Foo.Bar.type"), ("==", Method, "(x$0: Any): Boolean"), ("ensuring", Method, "(cond: Boolean, msg: => Any): Foo.Bar.type"), ("ne", Method, "(x$0: Object): Boolean"), @@ -1723,4 +1724,14 @@ class CompletionTest { .completion(m5, Set()) .completion(m6, Set()) + @Test def namedTupleCompletion: Unit = + code"""|import scala.language.experimental.namedTuples + | + |val person: (name: String, city: String) = + | (name = "Jamie", city = "Lausanne") + | + |val n = person.na$m1 + |""" + .completion(m1, Set(("name", Field, "String"))) + } diff --git a/language-server/test/dotty/tools/languageserver/HoverTest.scala b/language-server/test/dotty/tools/languageserver/HoverTest.scala index a2196f4a71f3..91f72e222432 100644 --- a/language-server/test/dotty/tools/languageserver/HoverTest.scala +++ b/language-server/test/dotty/tools/languageserver/HoverTest.scala @@ -227,7 +227,7 @@ class HoverTest { @Test def enums: Unit = { code"""|package example |enum TestEnum3: - | case ${m1}A${m2} // no tooltip + | case ${m1}A${m2} // no tooltip | |""" .hover(m1 to m2, hoverContent("example.TestEnum3")) diff --git a/library-aux/src/scala/AnyKind.scala b/library-aux/src/scala/AnyKind.scala new file mode 100644 index 000000000000..56d51be114ea --- /dev/null +++ b/library-aux/src/scala/AnyKind.scala @@ -0,0 +1,7 @@ +package scala + +/** The super-type of all types. + * + * See [[https://docs.scala-lang.org/scala3/reference/other-new-features/kind-polymorphism.html]]. 
+ */ +final abstract class AnyKind diff --git a/library-aux/src/scala/Matchable.scala b/library-aux/src/scala/Matchable.scala new file mode 100644 index 000000000000..598ded9d3bc3 --- /dev/null +++ b/library-aux/src/scala/Matchable.scala @@ -0,0 +1,7 @@ +package scala + +/** The base trait of types that can be safely pattern matched against. + * + * See [[https://docs.scala-lang.org/scala3/reference/other-new-features/matchable.html]]. + */ +trait Matchable diff --git a/library-aux/src/scala/andType.scala b/library-aux/src/scala/andType.scala new file mode 100644 index 000000000000..de3c3ff36bc5 --- /dev/null +++ b/library-aux/src/scala/andType.scala @@ -0,0 +1,7 @@ +package scala + +/** The intersection of two types. + * + * See [[https://docs.scala-lang.org/scala3/reference/new-types/intersection-types.html]]. + */ +type &[A, B] diff --git a/library-aux/src/scala/orType.scala b/library-aux/src/scala/orType.scala new file mode 100644 index 000000000000..ff1947a9498e --- /dev/null +++ b/library-aux/src/scala/orType.scala @@ -0,0 +1,7 @@ +package scala + +/** The union of two types. + * + * See [[https://docs.scala-lang.org/scala3/reference/new-types/union-types.html]]. + */ +type |[A, B] diff --git a/library/src-non-bootstrapped/scala/annotation/experimental.scala b/library/src-non-bootstrapped/scala/annotation/experimental.scala deleted file mode 100644 index e879b47e12ff..000000000000 --- a/library/src-non-bootstrapped/scala/annotation/experimental.scala +++ /dev/null @@ -1,3 +0,0 @@ -package scala.annotation - -final class experimental extends StaticAnnotation diff --git a/library/src/scala/CanThrow.scala b/library/src/scala/CanThrow.scala index c7f23a393715..91c94229c43c 100644 --- a/library/src/scala/CanThrow.scala +++ b/library/src/scala/CanThrow.scala @@ -6,9 +6,9 @@ import annotation.{implicitNotFound, experimental, capability} * experimental.saferExceptions feature, a `throw Ex()` expression will require * a given of class `CanThrow[Ex]` to be available. */ -@experimental @capability +@experimental @implicitNotFound("The capability to throw exception ${E} is missing.\nThe capability can be provided by one of the following:\n - Adding a using clause `(using CanThrow[${E}])` to the definition of the enclosing method\n - Adding `throws ${E}` clause after the result type of the enclosing method\n - Wrapping this piece of code with a `try` block that catches ${E}") -erased class CanThrow[-E <: Exception] +erased class CanThrow[-E <: Exception] extends caps.Capability @experimental object unsafeExceptions: diff --git a/library/src/scala/NamedTuple.scala b/library/src/scala/NamedTuple.scala index dc6e6c3144f6..6da7f940dc47 100644 --- a/library/src/scala/NamedTuple.scala +++ b/library/src/scala/NamedTuple.scala @@ -19,103 +19,33 @@ object NamedTuple: def unapply[N <: Tuple, V <: Tuple](x: NamedTuple[N, V]): Some[V] = Some(x) + /** A named tuple expression will desugar to a call to `build`. For instance, + * `(name = "Lyra", age = 23)` will desugar to `build[("name", "age")]()(("Lyra", 23))`. 
+ */ + inline def build[N <: Tuple]()[V <: Tuple](x: V): NamedTuple[N, V] = x + extension [V <: Tuple](x: V) inline def withNames[N <: Tuple]: NamedTuple[N, V] = x - export NamedTupleDecomposition.{Names, DropNames} + import NamedTupleDecomposition.{Names, DropNames} + export NamedTupleDecomposition.{ + Names, DropNames, + apply, size, init, head, last, tail, take, drop, splitAt, ++, map, reverse, zip, toList, toArray, toIArray + } extension [N <: Tuple, V <: Tuple](x: NamedTuple[N, V]) + // ALL METHODS DEPENDING ON `toTuple` MUST BE EXPORTED FROM `NamedTupleDecomposition` /** The underlying tuple without the names */ inline def toTuple: V = x - /** The number of elements in this tuple */ - inline def size: Tuple.Size[V] = toTuple.size - // This intentionally works for empty named tuples as well. I think NonEmptyTuple is a dead end // and should be reverted, just like NonEmptyList is also appealing at first, but a bad idea // in the end. - /** The value (without the name) at index `n` of this tuple */ - inline def apply(n: Int): Tuple.Elem[V, n.type] = - inline toTuple match - case tup: NonEmptyTuple => tup(n).asInstanceOf[Tuple.Elem[V, n.type]] - case tup => tup.productElement(n).asInstanceOf[Tuple.Elem[V, n.type]] - - /** The first element value of this tuple */ - inline def head: Tuple.Elem[V, 0] = apply(0) - - /** The tuple consisting of all elements of this tuple except the first one */ - inline def tail: NamedTuple[Tuple.Tail[N], Tuple.Tail[V]] = - toTuple.drop(1).asInstanceOf[NamedTuple[Tuple.Tail[N], Tuple.Tail[V]]] - - /** The last element value of this tuple */ - inline def last: Tuple.Last[V] = apply(size - 1).asInstanceOf[Tuple.Last[V]] - - /** The tuple consisting of all elements of this tuple except the last one */ - inline def init: NamedTuple[Tuple.Init[N], Tuple.Init[V]] = - toTuple.take(size - 1).asInstanceOf[NamedTuple[Tuple.Init[N], Tuple.Init[V]]] - - /** The tuple consisting of the first `n` elements of this tuple, or all - * elements if `n` exceeds `size`. - */ - inline def take(n: Int): NamedTuple[Tuple.Take[N, n.type], Tuple.Take[V, n.type]] = - toTuple.take(n) - - /** The tuple consisting of all elements of this tuple except the first `n` ones, - * or no elements if `n` exceeds `size`. - */ - inline def drop(n: Int): NamedTuple[Tuple.Drop[N, n.type], Tuple.Drop[V, n.type]] = - toTuple.drop(n) - - /** The tuple `(x.take(n), x.drop(n))` */ - inline def splitAt(n: Int): - (NamedTuple[Tuple.Take[N, n.type], Tuple.Take[V, n.type]], - NamedTuple[Tuple.Drop[N, n.type], Tuple.Drop[V, n.type]]) = - // would be nice if this could have type `Split[NamedTuple[N, V]]` instead, but - // we get a type error then. Similar for other methods here. - toTuple.splitAt(n) - - /** The tuple consisting of all elements of this tuple followed by all elements - * of tuple `that`. The names of the two tuples must be disjoint. - */ - inline def ++ [N2 <: Tuple, V2 <: Tuple](that: NamedTuple[N2, V2])(using Tuple.Disjoint[N, N2] =:= true) - : NamedTuple[Tuple.Concat[N, N2], Tuple.Concat[V, V2]] - = toTuple ++ that.toTuple - // inline def :* [L] (x: L): NamedTuple[Append[N, ???], Append[V, L] = ??? // inline def *: [H] (x: H): NamedTuple[??? *: N], H *: V] = ??? - /** The named tuple consisting of all element values of this tuple mapped by - * the polymorphic mapping function `f`. The names of elements are preserved. - * If `x = (n1 = v1, ..., ni = vi)` then `x.map(f) = `(n1 = f(v1), ..., ni = f(vi))`. 
- */ - inline def map[F[_]](f: [t] => t => F[t]): NamedTuple[N, Tuple.Map[V, F]] = - toTuple.map(f).asInstanceOf[NamedTuple[N, Tuple.Map[V, F]]] - - /** The named tuple consisting of all elements of this tuple in reverse */ - inline def reverse: NamedTuple[Tuple.Reverse[N], Tuple.Reverse[V]] = - toTuple.reverse - - /** The named tuple consisting of all elements values of this tuple zipped - * with corresponding element values in named tuple `that`. - * If the two tuples have different sizes, - * the extra elements of the larger tuple will be disregarded. - * The names of `x` and `that` at the same index must be the same. - * The result tuple keeps the same names as the operand tuples. - */ - inline def zip[V2 <: Tuple](that: NamedTuple[N, V2]): NamedTuple[N, Tuple.Zip[V, V2]] = - toTuple.zip(that.toTuple) - - /** A list consisting of all element values */ - inline def toList: List[Tuple.Union[V]] = toTuple.toList.asInstanceOf[List[Tuple.Union[V]]] - - /** An array consisting of all element values */ - inline def toArray: Array[Object] = toTuple.toArray - - /** An immutable array consisting of all element values */ - inline def toIArray: IArray[Object] = toTuple.toIArray - end extension /** The size of a named tuple, represented as a literal constant subtype of Int */ @@ -189,16 +119,16 @@ object NamedTuple: NamedTuple[Names[X], Tuple.Zip[DropNames[X], DropNames[Y]]] /** A type specially treated by the compiler to represent all fields of a - * class argument `T` as a named tuple. Or, if `T` is already a named tyuple, + * class argument `T` as a named tuple. Or, if `T` is already a named tuple, * `From[T]` is the same as `T`. */ type From[T] <: AnyNamedTuple /** The type of the empty named tuple */ - type Empty = EmptyTuple.type + type Empty = NamedTuple[EmptyTuple, EmptyTuple] /** The empty named tuple */ - val Empty: Empty = EmptyTuple.asInstanceOf[Empty] + val Empty: Empty = EmptyTuple end NamedTuple @@ -206,6 +136,79 @@ end NamedTuple @experimental object NamedTupleDecomposition: import NamedTuple.* + extension [N <: Tuple, V <: Tuple](x: NamedTuple[N, V]) + /** The value (without the name) at index `n` of this tuple */ + inline def apply(n: Int): Elem[NamedTuple[N, V], n.type] = + inline x.toTuple match + case tup: NonEmptyTuple => tup(n).asInstanceOf[Elem[NamedTuple[N, V], n.type]] + case tup => tup.productElement(n).asInstanceOf[Elem[NamedTuple[N, V], n.type]] + + /** The number of elements in this tuple */ + inline def size: Size[NamedTuple[N, V]] = x.toTuple.size + + /** The first element value of this tuple */ + inline def head: Head[NamedTuple[N, V]] = apply(0) + + /** The last element value of this tuple */ + inline def last: Last[NamedTuple[N, V]] = apply(size - 1).asInstanceOf[Last[NamedTuple[N, V]]] + + /** The tuple consisting of all elements of this tuple except the last one */ + inline def init: Init[NamedTuple[N, V]] = + x.take(size - 1).asInstanceOf[Init[NamedTuple[N, V]]] + + /** The tuple consisting of all elements of this tuple except the first one */ + inline def tail: Tail[NamedTuple[N, V]] = x.toTuple.drop(1) + + /** The tuple consisting of the first `n` elements of this tuple, or all + * elements if `n` exceeds `size`. + */ + inline def take(n: Int): Take[NamedTuple[N, V], n.type] = x.toTuple.take(n) + + /** The tuple consisting of all elements of this tuple except the first `n` ones, + * or no elements if `n` exceeds `size`. 
+ */ + inline def drop(n: Int): Drop[NamedTuple[N, V], n.type] = x.toTuple.drop(n) + + /** The tuple `(x.take(n), x.drop(n))` */ + inline def splitAt(n: Int): Split[NamedTuple[N, V], n.type] = x.toTuple.splitAt(n) + + /** The tuple consisting of all elements of this tuple followed by all elements + * of tuple `that`. The names of the two tuples must be disjoint. + */ + inline def ++ [N2 <: Tuple, V2 <: Tuple](that: NamedTuple[N2, V2])(using Tuple.Disjoint[N, N2] =:= true) + : Concat[NamedTuple[N, V], NamedTuple[N2, V2]] + = x.toTuple ++ that.toTuple + + /** The named tuple consisting of all element values of this tuple mapped by + * the polymorphic mapping function `f`. The names of elements are preserved. + * If `x = (n1 = v1, ..., ni = vi)` then `x.map(f) = `(n1 = f(v1), ..., ni = f(vi))`. + */ + inline def map[F[_]](f: [t] => t => F[t]): Map[NamedTuple[N, V], F] = + x.toTuple.map[F](f) + + /** The named tuple consisting of all elements of this tuple in reverse */ + inline def reverse: Reverse[NamedTuple[N, V]] = x.toTuple.reverse + + /** The named tuple consisting of all element values of this tuple zipped + * with corresponding element values in named tuple `that`. + * If the two tuples have different sizes, + * the extra elements of the larger tuple will be disregarded. + * The names of `x` and `that` at the same index must be the same. + * The result tuple keeps the same names as the operand tuples. + */ + inline def zip[V2 <: Tuple](that: NamedTuple[N, V2]): Zip[NamedTuple[N, V], NamedTuple[N, V2]] = + x.toTuple.zip(that.toTuple) + + /** A list consisting of all element values */ + inline def toList: List[Tuple.Union[V]] = x.toTuple.toList + + /** An array consisting of all element values */ + inline def toArray: Array[Object] = x.toTuple.toArray + + /** An immutable array consisting of all element values */ + inline def toIArray: IArray[Object] = x.toTuple.toIArray + + end extension /** The names of a named tuple, represented as a tuple of literal string values. */ type Names[X <: AnyNamedTuple] <: Tuple = X match @@ -214,4 +217,3 @@ object NamedTupleDecomposition: /** The value types of a named tuple represented as a regular tuple. */ type DropNames[NT <: AnyNamedTuple] <: Tuple = NT match case NamedTuple[_, x] => x - diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index 8074fe3664e5..d07f2c89e004 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -22,8 +22,8 @@ sealed trait Tuple extends Product { runtime.Tuples.toIArray(this) /** Return a copy of `this` tuple with an element appended */ - inline def :* [This >: this.type <: Tuple, L] (x: L): Append[This, L] = - runtime.Tuples.append(x, this).asInstanceOf[Append[This, L]] + inline def :* [This >: this.type <: Tuple, L] (x: L): This :* L = + runtime.Tuples.append(x, this).asInstanceOf[This :* L] /** Return a new tuple by prepending the element to `this` tuple. * This operation is O(this.size) @@ -31,11 +31,35 @@ sealed trait Tuple extends Product { inline def *: [H, This >: this.type <: Tuple] (x: H): H *: This = runtime.Tuples.cons(x, this).asInstanceOf[H *: This] + /** Get the i-th element of this tuple. + * Equivalent to productElement but with a precise return type. 
+ */ + inline def apply[This >: this.type <: Tuple](n: Int): Elem[This, n.type] = + runtime.Tuples.apply(this, n).asInstanceOf[Elem[This, n.type]] + + /** Get the head of this tuple */ + inline def head[This >: this.type <: Tuple]: Head[This] = + runtime.Tuples.apply(this, 0).asInstanceOf[Head[This]] + + /** Get the initial part of the tuple without its last element */ + inline def init[This >: this.type <: Tuple]: Init[This] = + runtime.Tuples.init(this).asInstanceOf[Init[This]] + + /** Get the last of this tuple */ + inline def last[This >: this.type <: Tuple]: Last[This] = + runtime.Tuples.last(this).asInstanceOf[Last[This]] + + /** Get the tail of this tuple. + * This operation is O(this.size) + */ + inline def tail[This >: this.type <: Tuple]: Tail[This] = + runtime.Tuples.tail(this).asInstanceOf[Tail[This]] + /** Return a new tuple by concatenating `this` tuple with `that` tuple. * This operation is O(this.size + that.size) */ - inline def ++ [This >: this.type <: Tuple](that: Tuple): Concat[This, that.type] = - runtime.Tuples.concat(this, that).asInstanceOf[Concat[This, that.type]] + inline def ++ [This >: this.type <: Tuple](that: Tuple): This ++ that.type = + runtime.Tuples.concat(this, that).asInstanceOf[This ++ that.type] /** Return the size (or arity) of the tuple */ inline def size[This >: this.type <: Tuple]: Size[This] = @@ -94,6 +118,9 @@ object Tuple { case x *: xs => x *: Append[xs, Y] } + /** An infix shorthand for `Append[X, Y]` */ + infix type :*[X <: Tuple, Y] = Append[X, Y] + /** Type of the head of a tuple */ type Head[X <: Tuple] = X match { case x *: _ => x @@ -123,6 +150,9 @@ object Tuple { case x1 *: xs1 => x1 *: Concat[xs1, Y] } + /** An infix shorthand for `Concat[X, Y]` */ + infix type ++[X <: Tuple, +Y <: Tuple] = Concat[X, Y] + /** Type of the element at position N in the tuple X */ type Elem[X <: Tuple, N <: Int] = X match { case x *: xs => @@ -166,7 +196,7 @@ object Tuple { * ``` * @syntax markdown */ - type Filter[Tup <: Tuple, P[_] <: Boolean] <: Tuple = Tup match { + type Filter[Tup <: Tuple, P[_ <: Union[Tup]] <: Boolean] <: Tuple = Tup match { case EmptyTuple => EmptyTuple case h *: t => P[h] match { case true => h *: Filter[t, P] @@ -175,15 +205,12 @@ object Tuple { } /** Given two tuples, `A1 *: ... *: An * At` and `B1 *: ... *: Bn *: Bt` - * where at least one of `At` or `Bt` is `EmptyTuple` or `Tuple`, - * returns the tuple type `(A1, B1) *: ... *: (An, Bn) *: Ct` - * where `Ct` is `EmptyTuple` if `At` or `Bt` is `EmptyTuple`, otherwise `Ct` is `Tuple`. + * where at least one of `At` or `Bt` is `EmptyTuple`, + * returns the tuple type `(A1, B1) *: ... *: (An, Bn) *: EmptyTuple`. */ type Zip[T1 <: Tuple, T2 <: Tuple] <: Tuple = (T1, T2) match { case (h1 *: t1, h2 *: t2) => (h1, h2) *: Zip[t1, t2] - case (EmptyTuple, _) => EmptyTuple - case (_, EmptyTuple) => EmptyTuple - case _ => Tuple + case _ => EmptyTuple } /** Converts a tuple `(F[T1], ..., F[Tn])` to `(T1, ... Tn)` */ @@ -304,33 +331,7 @@ case object EmptyTuple extends Tuple { } /** Tuple of arbitrary non-zero arity */ -sealed trait NonEmptyTuple extends Tuple { - import Tuple.* - - /** Get the i-th element of this tuple. - * Equivalent to productElement but with a precise return type. 
- */ - inline def apply[This >: this.type <: NonEmptyTuple](n: Int): Elem[This, n.type] = - runtime.Tuples.apply(this, n).asInstanceOf[Elem[This, n.type]] - - /** Get the head of this tuple */ - inline def head[This >: this.type <: NonEmptyTuple]: Head[This] = - runtime.Tuples.apply(this, 0).asInstanceOf[Head[This]] - - /** Get the initial part of the tuple without its last element */ - inline def init[This >: this.type <: NonEmptyTuple]: Init[This] = - runtime.Tuples.init(this).asInstanceOf[Init[This]] - - /** Get the last of this tuple */ - inline def last[This >: this.type <: NonEmptyTuple]: Last[This] = - runtime.Tuples.last(this).asInstanceOf[Last[This]] - - /** Get the tail of this tuple. - * This operation is O(this.size) - */ - inline def tail[This >: this.type <: NonEmptyTuple]: Tail[This] = - runtime.Tuples.tail(this).asInstanceOf[Tail[This]] -} +sealed trait NonEmptyTuple extends Tuple @showAsInfix sealed abstract class *:[+H, +T <: Tuple] extends NonEmptyTuple diff --git a/library/src/scala/annotation/capability.scala b/library/src/scala/annotation/capability.scala index 4696ed6a015e..d3453e3c8168 100644 --- a/library/src/scala/annotation/capability.scala +++ b/library/src/scala/annotation/capability.scala @@ -11,4 +11,6 @@ import annotation.experimental * THere, the capture set of any instance of `CanThrow` is assumed to be * `{*}`. */ -@experimental final class capability extends StaticAnnotation +@experimental +@deprecated("To make a class a capability, let it derive from the `Capability` trait instead") +final class capability extends StaticAnnotation diff --git a/library/src-bootstrapped/scala/annotation/experimental.scala b/library/src/scala/annotation/experimental.scala similarity index 100% rename from library/src-bootstrapped/scala/annotation/experimental.scala rename to library/src/scala/annotation/experimental.scala diff --git a/library/src/scala/annotation/internal/RuntimeChecked.scala b/library/src/scala/annotation/internal/RuntimeChecked.scala new file mode 100644 index 000000000000..d2106d720156 --- /dev/null +++ b/library/src/scala/annotation/internal/RuntimeChecked.scala @@ -0,0 +1,11 @@ +package scala.annotation.internal + +import scala.annotation.Annotation +import scala.annotation.experimental + +/**An annotation marking an intention that all checks on a value can be reliably performed at runtime. + * + * The compiler will remove certain static checks except those that can't be performed at runtime. 
+ */ +@experimental +final class RuntimeChecked() extends Annotation diff --git a/library/src/scala/caps.scala b/library/src/scala/caps.scala index c7fc8e7ba584..9911ef920116 100644 --- a/library/src/scala/caps.scala +++ b/library/src/scala/caps.scala @@ -1,19 +1,39 @@ package scala -import annotation.experimental +import annotation.{experimental, compileTimeOnly, retainsCap} @experimental object caps: - class Cap // should be @erased + trait Capability extends Any + + /** The universal capture reference */ + val cap: Capability = new Capability() {} /** The universal capture reference (deprecated) */ @deprecated("Use `cap` instead") - val `*`: Cap = cap + val `*`: Capability = cap - /** The universal capture reference */ - val cap: Cap = Cap() + @deprecated("Use `Capability` instead") + type Cap = Capability + + /** Carrier trait for capture set type parameters */ + trait CapSet extends Any - given Cap = cap + /** A type constraint expressing that the capture set `C` needs to contain + * the capability `R` + */ + sealed trait Contains[C <: CapSet @retainsCap, R <: Singleton] + + /** The only implementation of `Contains`. The constraint that `{R} <: C` is + * added separately by the capture checker. + */ + given containsImpl[C <: CapSet @retainsCap, R <: Singleton]: Contains[C, R]() + + /** A wrapper indicating a type variable in a capture argument list of a + * @retains annotation. E.g. `^{x, Y^}` is represented as `@retains(x, capsOf[Y])`. + */ + @compileTimeOnly("Should be be used only internally by the Scala compiler") + def capsOf[CS]: Any = ??? /** Reach capabilities x* which appear as terms in @retains annotations are encoded * as `caps.reachCapability(x)`. When converted to CaptureRef types in capture sets @@ -21,6 +41,22 @@ import annotation.experimental */ extension (x: Any) def reachCapability: Any = x + /** A trait to allow expressing existential types such as + * + * (x: Exists) => A ->{x} B + */ + sealed trait Exists extends Capability + + /** This should go into annotations. For now it is here, so that we + * can experiment with it quickly between minor releases + */ + final class untrackedCaptures extends annotation.StaticAnnotation + + /** This should go into annotations. For now it is here, so that we + * can experiment with it quickly between minor releases + */ + final class unbox extends annotation.StaticAnnotation + object unsafe: extension [T](x: T) @@ -31,22 +67,19 @@ import annotation.experimental def unsafeAssumePure: T = x /** If argument is of type `cs T`, converts to type `box cs T`. This - * avoids the error that would be raised when boxing `*`. + * avoids the error that would be raised when boxing `cap`. */ - @deprecated(since = "3.3") def unsafeBox: T = x /** If argument is of type `box cs T`, converts to type `cs T`. This - * avoids the error that would be raised when unboxing `*`. + * avoids the error that would be raised when unboxing `cap`. */ - @deprecated(since = "3.3") def unsafeUnbox: T = x extension [T, U](f: T => U) /** If argument is of type `box cs T`, converts to type `cs T`. This - * avoids the error that would be raised when unboxing `*`. + * avoids the error that would be raised when unboxing `cap`. 
*/ - @deprecated(since = "3.3") def unsafeBoxFunArg: T => U = f end unsafe diff --git a/library/src/scala/compiletime/ops/any.scala b/library/src/scala/compiletime/ops/any.scala index e3f030c33634..b3c1930f9715 100644 --- a/library/src/scala/compiletime/ops/any.scala +++ b/library/src/scala/compiletime/ops/any.scala @@ -13,7 +13,7 @@ object any: * ``` * @syntax markdown */ - type ==[X, Y] <: Boolean + infix type ==[X, Y] <: Boolean /** Inequality comparison of two singleton types. * ```scala @@ -26,7 +26,7 @@ object any: * ``` * @syntax markdown */ - type !=[X, Y] <: Boolean + infix type !=[X, Y] <: Boolean /** Tests if a type is a constant. * ```scala diff --git a/library/src/scala/compiletime/ops/boolean.scala b/library/src/scala/compiletime/ops/boolean.scala index f6a8c3d3b37e..3e1b5650a519 100644 --- a/library/src/scala/compiletime/ops/boolean.scala +++ b/library/src/scala/compiletime/ops/boolean.scala @@ -25,7 +25,7 @@ object boolean: * ``` * @syntax markdown */ - type ^[X <: Boolean, Y <: Boolean] <: Boolean + infix type ^[X <: Boolean, Y <: Boolean] <: Boolean /** Conjunction of two `Boolean` singleton types. * ```scala @@ -37,7 +37,7 @@ object boolean: * ``` * @syntax markdown */ - type &&[X <: Boolean, Y <: Boolean] <: Boolean + infix type &&[X <: Boolean, Y <: Boolean] <: Boolean /** Disjunction of two `Boolean` singleton types. * ```scala @@ -49,4 +49,4 @@ object boolean: * ``` * @syntax markdown */ - type ||[X <: Boolean, Y <: Boolean] <: Boolean + infix type ||[X <: Boolean, Y <: Boolean] <: Boolean diff --git a/library/src/scala/compiletime/ops/double.scala b/library/src/scala/compiletime/ops/double.scala index 0e038904221e..4bb4527f14a9 100644 --- a/library/src/scala/compiletime/ops/double.scala +++ b/library/src/scala/compiletime/ops/double.scala @@ -11,7 +11,7 @@ object double: * ``` * @syntax markdown */ - type +[X <: Double, Y <: Double] <: Double + infix type +[X <: Double, Y <: Double] <: Double /** Subtraction of two `Double` singleton types. * ```scala @@ -22,7 +22,7 @@ object double: * ``` * @syntax markdown */ - type -[X <: Double, Y <: Double] <: Double + infix type -[X <: Double, Y <: Double] <: Double /** Multiplication of two `Double` singleton types. * ```scala @@ -33,7 +33,7 @@ object double: * ``` * @syntax markdown */ - type *[X <: Double, Y <: Double] <: Double + infix type *[X <: Double, Y <: Double] <: Double /** Integer division of two `Double` singleton types. * ```scala @@ -44,7 +44,7 @@ object double: * ``` * @syntax markdown */ - type /[X <: Double, Y <: Double] <: Double + infix type /[X <: Double, Y <: Double] <: Double /** Remainder of the division of `X` by `Y`. * ```scala @@ -55,7 +55,7 @@ object double: * ``` * @syntax markdown */ - type %[X <: Double, Y <: Double] <: Double + infix type %[X <: Double, Y <: Double] <: Double /** Less-than comparison of two `Double` singleton types. * ```scala @@ -67,7 +67,7 @@ object double: * ``` * @syntax markdown */ - type <[X <: Double, Y <: Double] <: Boolean + infix type <[X <: Double, Y <: Double] <: Boolean /** Greater-than comparison of two `Double` singleton types. * ```scala @@ -79,7 +79,7 @@ object double: * ``` * @syntax markdown */ - type >[X <: Double, Y <: Double] <: Boolean + infix type >[X <: Double, Y <: Double] <: Boolean /** Greater-or-equal comparison of two `Double` singleton types. 
* ```scala @@ -91,7 +91,7 @@ object double: * ``` * @syntax markdown */ - type >=[X <: Double, Y <: Double] <: Boolean + infix type >=[X <: Double, Y <: Double] <: Boolean /** Less-or-equal comparison of two `Double` singleton types. * ```scala @@ -103,7 +103,7 @@ object double: * ``` * @syntax markdown */ - type <=[X <: Double, Y <: Double] <: Boolean + infix type <=[X <: Double, Y <: Double] <: Boolean /** Absolute value of an `Double` singleton type. * ```scala @@ -114,7 +114,7 @@ object double: * ``` * @syntax markdown */ - type Abs[X <: Double] <: Double + infix type Abs[X <: Double] <: Double /** Negation of an `Double` singleton type. * ```scala @@ -181,4 +181,4 @@ object double: * ``` * @syntax markdown */ - type ToFloat[X <: Double] <: Float \ No newline at end of file + type ToFloat[X <: Double] <: Float diff --git a/library/src/scala/compiletime/ops/float.scala b/library/src/scala/compiletime/ops/float.scala index d7be87be3d9c..bd9b5c75f1f1 100644 --- a/library/src/scala/compiletime/ops/float.scala +++ b/library/src/scala/compiletime/ops/float.scala @@ -11,7 +11,7 @@ object float: * ``` * @syntax markdown */ - type +[X <: Float, Y <: Float] <: Float + infix type +[X <: Float, Y <: Float] <: Float /** Subtraction of two `Float` singleton types. * ```scala @@ -22,7 +22,7 @@ object float: * ``` * @syntax markdown */ - type -[X <: Float, Y <: Float] <: Float + infix type -[X <: Float, Y <: Float] <: Float /** Multiplication of two `Float` singleton types. * ```scala @@ -33,7 +33,7 @@ object float: * ``` * @syntax markdown */ - type *[X <: Float, Y <: Float] <: Float + infix type *[X <: Float, Y <: Float] <: Float /** Integer division of two `Float` singleton types. * ```scala @@ -44,7 +44,7 @@ object float: * ``` * @syntax markdown */ - type /[X <: Float, Y <: Float] <: Float + infix type /[X <: Float, Y <: Float] <: Float /** Remainder of the division of `X` by `Y`. * ```scala @@ -55,7 +55,7 @@ object float: * ``` * @syntax markdown */ - type %[X <: Float, Y <: Float] <: Float + infix type %[X <: Float, Y <: Float] <: Float /** Less-than comparison of two `Float` singleton types. * ```scala @@ -67,7 +67,7 @@ object float: * ``` * @syntax markdown */ - type <[X <: Float, Y <: Float] <: Boolean + infix type <[X <: Float, Y <: Float] <: Boolean /** Greater-than comparison of two `Float` singleton types. * ```scala @@ -79,7 +79,7 @@ object float: * ``` * @syntax markdown */ - type >[X <: Float, Y <: Float] <: Boolean + infix type >[X <: Float, Y <: Float] <: Boolean /** Greater-or-equal comparison of two `Float` singleton types. * ```scala @@ -91,7 +91,7 @@ object float: * ``` * @syntax markdown */ - type >=[X <: Float, Y <: Float] <: Boolean + infix type >=[X <: Float, Y <: Float] <: Boolean /** Less-or-equal comparison of two `Float` singleton types. * ```scala @@ -103,7 +103,7 @@ object float: * ``` * @syntax markdown */ - type <=[X <: Float, Y <: Float] <: Boolean + infix type <=[X <: Float, Y <: Float] <: Boolean /** Absolute value of an `Float` singleton type. * ```scala diff --git a/library/src/scala/compiletime/ops/int.scala b/library/src/scala/compiletime/ops/int.scala index ed4a3c3c3261..b8ec370421e2 100644 --- a/library/src/scala/compiletime/ops/int.scala +++ b/library/src/scala/compiletime/ops/int.scala @@ -29,7 +29,7 @@ object int: * ``` * @syntax markdown */ - type +[X <: Int, Y <: Int] <: Int + infix type +[X <: Int, Y <: Int] <: Int /** Subtraction of two `Int` singleton types. 
* ```scala @@ -40,7 +40,7 @@ object int: * ``` * @syntax markdown */ - type -[X <: Int, Y <: Int] <: Int + infix type -[X <: Int, Y <: Int] <: Int /** Multiplication of two `Int` singleton types. * ```scala @@ -51,7 +51,7 @@ object int: * ``` * @syntax markdown */ - type *[X <: Int, Y <: Int] <: Int + infix type *[X <: Int, Y <: Int] <: Int /** Integer division of two `Int` singleton types. * ```scala @@ -62,7 +62,7 @@ object int: * ``` * @syntax markdown */ - type /[X <: Int, Y <: Int] <: Int + infix type /[X <: Int, Y <: Int] <: Int /** Remainder of the division of `X` by `Y`. * ```scala @@ -73,7 +73,7 @@ object int: * ``` * @syntax markdown */ - type %[X <: Int, Y <: Int] <: Int + infix type %[X <: Int, Y <: Int] <: Int /** Binary left shift of `X` by `Y`. * ```scala @@ -84,7 +84,7 @@ object int: * ``` * @syntax markdown */ - type <<[X <: Int, Y <: Int] <: Int + infix type <<[X <: Int, Y <: Int] <: Int /** Binary right shift of `X` by `Y`. * ```scala @@ -95,7 +95,7 @@ object int: * ``` * @syntax markdown */ - type >>[X <: Int, Y <: Int] <: Int + infix type >>[X <: Int, Y <: Int] <: Int /** Binary right shift of `X` by `Y`, filling the left with zeros. * ```scala @@ -106,7 +106,7 @@ object int: * ``` * @syntax markdown */ - type >>>[X <: Int, Y <: Int] <: Int + infix type >>>[X <: Int, Y <: Int] <: Int /** Bitwise xor of `X` and `Y`. * ```scala @@ -117,7 +117,7 @@ object int: * ``` * @syntax markdown */ - type ^[X <: Int, Y <: Int] <: Int + infix type ^[X <: Int, Y <: Int] <: Int /** Less-than comparison of two `Int` singleton types. * ```scala @@ -129,7 +129,7 @@ object int: * ``` * @syntax markdown */ - type <[X <: Int, Y <: Int] <: Boolean + infix type <[X <: Int, Y <: Int] <: Boolean /** Greater-than comparison of two `Int` singleton types. * ```scala @@ -141,7 +141,7 @@ object int: * ``` * @syntax markdown */ - type >[X <: Int, Y <: Int] <: Boolean + infix type >[X <: Int, Y <: Int] <: Boolean /** Greater-or-equal comparison of two `Int` singleton types. * ```scala @@ -153,7 +153,7 @@ object int: * ``` * @syntax markdown */ - type >=[X <: Int, Y <: Int] <: Boolean + infix type >=[X <: Int, Y <: Int] <: Boolean /** Less-or-equal comparison of two `Int` singleton types. * ```scala @@ -165,7 +165,7 @@ object int: * ``` * @syntax markdown */ - type <=[X <: Int, Y <: Int] <: Boolean + infix type <=[X <: Int, Y <: Int] <: Boolean /** Bitwise and of `X` and `Y`. * ```scala diff --git a/library/src/scala/compiletime/ops/long.scala b/library/src/scala/compiletime/ops/long.scala index 25563ac70367..3bda31e4979c 100644 --- a/library/src/scala/compiletime/ops/long.scala +++ b/library/src/scala/compiletime/ops/long.scala @@ -27,7 +27,7 @@ object long: * ``` * @syntax markdown */ - type +[X <: Long, Y <: Long] <: Long + infix type +[X <: Long, Y <: Long] <: Long /** Subtraction of two `Long` singleton types. * ```scala @@ -38,7 +38,7 @@ object long: * ``` * @syntax markdown */ - type -[X <: Long, Y <: Long] <: Long + infix type -[X <: Long, Y <: Long] <: Long /** Multiplication of two `Long` singleton types. * ```scala @@ -49,7 +49,7 @@ object long: * ``` * @syntax markdown */ - type *[X <: Long, Y <: Long] <: Long + infix type *[X <: Long, Y <: Long] <: Long /** Integer division of two `Long` singleton types. * ```scala @@ -60,7 +60,7 @@ object long: * ``` * @syntax markdown */ - type /[X <: Long, Y <: Long] <: Long + infix type /[X <: Long, Y <: Long] <: Long /** Remainder of the division of `X` by `Y`. 
* ```scala @@ -71,7 +71,7 @@ object long: * ``` * @syntax markdown */ - type %[X <: Long, Y <: Long] <: Long + infix type %[X <: Long, Y <: Long] <: Long /** Binary left shift of `X` by `Y`. * ```scala @@ -82,7 +82,7 @@ object long: * ``` * @syntax markdown */ - type <<[X <: Long, Y <: Long] <: Long + infix type <<[X <: Long, Y <: Long] <: Long /** Binary right shift of `X` by `Y`. * ```scala @@ -93,7 +93,7 @@ object long: * ``` * @syntax markdown */ - type >>[X <: Long, Y <: Long] <: Long + infix type >>[X <: Long, Y <: Long] <: Long /** Binary right shift of `X` by `Y`, filling the left with zeros. * ```scala @@ -104,7 +104,7 @@ object long: * ``` * @syntax markdown */ - type >>>[X <: Long, Y <: Long] <: Long + infix type >>>[X <: Long, Y <: Long] <: Long /** Bitwise xor of `X` and `Y`. * ```scala @@ -115,7 +115,7 @@ object long: * ``` * @syntax markdown */ - type ^[X <: Long, Y <: Long] <: Long + infix type ^[X <: Long, Y <: Long] <: Long /** Less-than comparison of two `Long` singleton types. * ```scala @@ -127,7 +127,7 @@ object long: * ``` * @syntax markdown */ - type <[X <: Long, Y <: Long] <: Boolean + infix type <[X <: Long, Y <: Long] <: Boolean /** Greater-than comparison of two `Long` singleton types. * ```scala @@ -139,7 +139,7 @@ object long: * ``` * @syntax markdown */ - type >[X <: Long, Y <: Long] <: Boolean + infix type >[X <: Long, Y <: Long] <: Boolean /** Greater-or-equal comparison of two `Long` singleton types. * ```scala @@ -151,7 +151,7 @@ object long: * ``` * @syntax markdown */ - type >=[X <: Long, Y <: Long] <: Boolean + infix type >=[X <: Long, Y <: Long] <: Boolean /** Less-or-equal comparison of two `Long` singleton types. * ```scala @@ -163,7 +163,7 @@ object long: * ``` * @syntax markdown */ - type <=[X <: Long, Y <: Long] <: Boolean + infix type <=[X <: Long, Y <: Long] <: Boolean /** Bitwise and of `X` and `Y`. * ```scala diff --git a/library/src/scala/compiletime/ops/string.scala b/library/src/scala/compiletime/ops/string.scala index 63caa9ae6371..90515c62e55b 100644 --- a/library/src/scala/compiletime/ops/string.scala +++ b/library/src/scala/compiletime/ops/string.scala @@ -11,7 +11,7 @@ object string: * ``` * @syntax markdown */ - type +[X <: String, Y <: String] <: String + infix type +[X <: String, Y <: String] <: String /** Length of a `String` singleton type. * ```scala diff --git a/library/src/scala/compiletime/package.scala b/library/src/scala/compiletime/package.scala index a3896a1eeb06..8215ae2452a3 100644 --- a/library/src/scala/compiletime/package.scala +++ b/library/src/scala/compiletime/package.scala @@ -52,7 +52,6 @@ def uninitialized: Nothing = ??? * that implement the enclosing trait and that do not contain an explicit overriding * definition of that given. */ -@experimental @compileTimeOnly("`deferred` can only be used as the right hand side of a given definition in a trait") def deferred: Nothing = ??? 
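The `compiletime.ops` hunks above only change the affected declarations from `type` to `infix type`, recording explicitly that these symbolic aliases are meant to be written in operator position; the final hunk also drops `@experimental` from `compiletime.deferred`. A minimal usage sketch (illustrative only, not part of the diff; `InfixOpsExample` is an invented name):

```scala
import scala.compiletime.ops.int.{+, <, <=}
import scala.compiletime.ops.boolean.&&

object InfixOpsExample:
  // `2 + 3` is the infix spelling of `+[2, 3]`; the compiler reduces it to the literal type `6`.
  val sum: 2 + 3 = 6
  // Comparisons and boolean ops compose the same way at the type level.
  val ok: (2 < 3) && (1 <= 1) = true
```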
diff --git a/library/src/scala/quoted/Expr.scala b/library/src/scala/quoted/Expr.scala index f1045e5bdaca..d1385a0193d6 100644 --- a/library/src/scala/quoted/Expr.scala +++ b/library/src/scala/quoted/Expr.scala @@ -256,7 +256,7 @@ object Expr { private def tupleTypeFromSeq(seq: Seq[Expr[Any]])(using Quotes): quotes.reflect.TypeRepr = import quotes.reflect.* val consRef = Symbol.classSymbol("scala.*:").typeRef - seq.foldLeft(TypeRepr.of[EmptyTuple]) { (ts, expr) => + seq.foldRight(TypeRepr.of[EmptyTuple]) { (expr, ts) => AppliedType(consRef, expr.asTerm.tpe :: ts :: Nil) } diff --git a/library/src/scala/quoted/Quotes.scala b/library/src/scala/quoted/Quotes.scala index d048d8d728d5..7a98d6f6f761 100644 --- a/library/src/scala/quoted/Quotes.scala +++ b/library/src/scala/quoted/Quotes.scala @@ -2640,7 +2640,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => // ----- Types ---------------------------------------------------- /** A type, type constructors, type bounds or NoPrefix */ - type TypeRepr + type TypeRepr <: Matchable /** Module object of `type TypeRepr` */ val TypeRepr: TypeReprModule @@ -3963,6 +3963,42 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => // Keep: `flags` doc aligned with QuotesImpl's `validBindFlags` def newBind(parent: Symbol, name: String, flags: Flags, tpe: TypeRepr): Symbol + /** Generate a new type symbol for a type alias with the given parent, name and type + * + * This symbol starts without an accompanying definition. + * It is the meta-programmer's responsibility to provide exactly one corresponding definition by passing + * this symbol to the TypeDef constructor. + * + * @param parent The owner of the type + * @param name The name of the type + * @param flags extra flags to with which symbol can be constructed. Can be `Private` | `Protected` | `Override` | `Final` | `Infix` | `Local` + * @param tpe The rhs the type alias + * @param privateWithin the symbol within which this new type symbol should be private. May be noSymbol. + * @note As a macro can only splice code into the point at which it is expanded, all generated symbols must be + * direct or indirect children of the reflection context's owner. + */ + @experimental + // Keep: `flags` doc aligned with QuotesImpl's `validTypeAliasFlags` + def newTypeAlias(parent: Symbol, name: String, flags: Flags, tpe: TypeRepr, privateWithin: Symbol): Symbol + + /** Generate a new type symbol for a type bounds with the given parent, name and type + * + * This symbol starts without an accompanying definition. + * It is the meta-programmer's responsibility to provide exactly one corresponding definition by passing + * this symbol to the TypeDef constructor. + * + * @param parent The owner of the type + * @param name The name of the type + * @param flags extra flags to with which symbol can be constructed. `Deferred` flag will be added. Can be `Private` | `Protected` | `Override` | `Deferred` | `Final` | `Infix` | `Local` + * @param tpe The bounds of the type + * @param privateWithin the symbol within which this new type symbol should be private. May be noSymbol. + * @note As a macro can only splice code into the point at which it is expanded, all generated symbols must be + * direct or indirect children of the reflection context's owner. 
+ */ + @experimental + // Keep: `flags` doc aligned with QuotesImpl's `validBoundedTypeFlags` + def newBoundedType(parent: Symbol, name: String, flags: Flags, tpe: TypeBounds, privateWithin: Symbol): Symbol + /** Definition not available */ def noSymbol: Symbol diff --git a/library/src/scala/quoted/runtime/Patterns.scala b/library/src/scala/quoted/runtime/Patterns.scala index 91ad23c62a98..f8e172d30f62 100644 --- a/library/src/scala/quoted/runtime/Patterns.scala +++ b/library/src/scala/quoted/runtime/Patterns.scala @@ -1,6 +1,7 @@ package scala.quoted.runtime import scala.annotation.{Annotation, compileTimeOnly} +import scala.annotation.experimental @compileTimeOnly("Illegal reference to `scala.quoted.runtime.Patterns`") object Patterns { @@ -26,6 +27,14 @@ object Patterns { @compileTimeOnly("Illegal reference to `scala.quoted.runtime.Patterns.higherOrderHole`") def higherOrderHole[U](args: Any*): U = ??? + /** A higher order splice in a quoted pattern is desugared by the compiler into a call to this method. + * + * Calling this method in source has undefined behavior at compile-time + */ + @experimental + @compileTimeOnly("Illegal reference to `scala.quoted.runtime.Patterns.higherOrderHoleWithTypes`") + def higherOrderHoleWithTypes[U, T](args: Any*): U = ??? + /** A splice of a name in a quoted pattern is that marks the definition of a type splice. * * Adding this annotation in source has undefined behavior at compile-time diff --git a/library/src/scala/runtime/LazyVals.scala b/library/src/scala/runtime/LazyVals.scala index e38e016f5182..15220ea2410a 100644 --- a/library/src/scala/runtime/LazyVals.scala +++ b/library/src/scala/runtime/LazyVals.scala @@ -52,13 +52,29 @@ object LazyVals { * Used to indicate the state of a lazy val that is being * evaluated and of which other threads await the result. */ - final class Waiting extends CountDownLatch(1) with LazyValControlState + final class Waiting extends CountDownLatch(1) with LazyValControlState { + /* #20856 If not fully evaluated yet, serialize as if not-evaluat*ing* yet. + * This strategy ensures the "serializability" condition of parallel + * programs--not to be confused with the data being `java.io.Serializable`. + * Indeed, if thread A is evaluating the lazy val while thread B attempts + * to serialize its owner object, there is also an alternative schedule + * where thread B serializes the owner object *before* A starts evaluating + * the lazy val. Therefore, forcing B to see the non-evaluating state is + * correct. + */ + private def writeReplace(): Any = null + } /** * Used to indicate the state of a lazy val that is currently being * evaluated with no other thread awaiting its result. */ - object Evaluating extends LazyValControlState + object Evaluating extends LazyValControlState { + /* #20856 If not fully evaluated yet, serialize as if not-evaluat*ing* yet. + * See longer comment in `Waiting.writeReplace()`. 
+ */ + private def writeReplace(): Any = null + } /** * Used to indicate the state of a lazy val that has been evaluated to diff --git a/library/src-bootstrapped/scala/runtime/TupledFunctions.scala b/library/src/scala/runtime/TupledFunctions.scala similarity index 100% rename from library/src-bootstrapped/scala/runtime/TupledFunctions.scala rename to library/src/scala/runtime/TupledFunctions.scala diff --git a/library/src/scala/runtime/Tuples.scala b/library/src/scala/runtime/Tuples.scala index efb54c54d50b..66dc486d2a1d 100644 --- a/library/src/scala/runtime/Tuples.scala +++ b/library/src/scala/runtime/Tuples.scala @@ -350,7 +350,7 @@ object Tuples { } } - def tail(self: NonEmptyTuple): Tuple = (self: Any) match { + def tail(self: Tuple): Tuple = (self: Any) match { case xxl: TupleXXL => xxlTail(xxl) case _ => specialCaseTail(self) } @@ -558,16 +558,16 @@ object Tuples { } } - def init(self: NonEmptyTuple): Tuple = (self: Any) match { + def init(self: Tuple): Tuple = (self: Any) match { case xxl: TupleXXL => xxlInit(xxl) case _ => specialCaseInit(self) } - def last(self: NonEmptyTuple): Any = (self: Any) match { + def last(self: Tuple): Any = (self: Any) match { case self: Product => self.productElement(self.productArity - 1) } - def apply(self: NonEmptyTuple, n: Int): Any = + def apply(self: Tuple, n: Int): Any = self.productElement(n) // Benchmarks showed that this is faster than doing (it1 zip it2).copyToArray(...) diff --git a/library/src/scala/runtime/stdLibPatches/Predef.scala b/library/src/scala/runtime/stdLibPatches/Predef.scala index 77b014b80466..996f68d4e122 100644 --- a/library/src/scala/runtime/stdLibPatches/Predef.scala +++ b/library/src/scala/runtime/stdLibPatches/Predef.scala @@ -1,6 +1,7 @@ package scala.runtime.stdLibPatches import scala.annotation.experimental +import scala.annotation.internal.RuntimeChecked object Predef: import compiletime.summonFrom @@ -80,4 +81,19 @@ object Predef: @experimental infix type is[A <: AnyKind, B <: Any{type Self <: AnyKind}] = B { type Self = A } + extension [T](x: T) + /**Asserts that a term should be exempt from static checks that can be reliably checked at runtime. 
+ * @example {{{ + * val xs: Option[Int] = Option(1) + * xs.runtimeChecked match + * case Some(x) => x // `Some(_)` can be checked at runtime, so no warning + * }}} + * @example {{{ + * val xs: List[Int] = List(1,2,3) + * val y :: ys = xs.runtimeChecked // `_ :: _` can be checked at runtime, so no warning + * }}} + */ + @experimental + inline def runtimeChecked: x.type @RuntimeChecked = x: @RuntimeChecked + end Predef diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala index b9f9d47bb0b1..547710d55293 100644 --- a/library/src/scala/runtime/stdLibPatches/language.scala +++ b/library/src/scala/runtime/stdLibPatches/language.scala @@ -64,9 +64,10 @@ object language: /** Adds support for clause interleaving: * Methods can now have as many type clauses as they like, this allows to have type bounds depend on terms: `def f(x: Int)[A <: x.type]: A` * - * @see [[http://dotty.epfl.ch/docs/reference/other-new-features/explicit-nulls.html]] + * @see [[https://github.com/scala/improvement-proposals/blob/main/content/clause-interleaving.md]] */ @compileTimeOnly("`clauseInterleaving` can only be used at compile time in import statements") + @deprecated("`clauseInterleaving` is now standard, no language import is needed", since = "3.6") object clauseInterleaving /** Experimental support for pure function type syntax @@ -93,7 +94,7 @@ object language: /** Experimental support for named tuples. * - * @see [[https://dotty.epfl.ch/docs/reference/experimental/into-modifier]] + * @see [[https://dotty.epfl.ch/docs/reference/experimental/named-tuples]] */ @compileTimeOnly("`namedTuples` can only be used at compile time in import statements") object namedTuples @@ -123,7 +124,22 @@ object language: * @see [[https://github.com/scala/improvement-proposals/pull/84]] */ @compileTimeOnly("`betterMatchTypeExtractors` can only be used at compile time in import statements") + @deprecated("The experimental.betterMatchTypeExtractors language import is no longer needed since the feature is now standard. It now has no effect, including when setting an older source version.", since = "3.6") object betterMatchTypeExtractors + + /** Experimental support for quote pattern matching with polymorphic functions + * + * @see [[https://dotty.epfl.ch/docs/reference/experimental/quoted-patterns-with-polymorphic-functions]] + */ + @compileTimeOnly("`quotedPatternsWithPolymorphicFunctions` can only be used at compile time in import statements") + object quotedPatternsWithPolymorphicFunctions + + /** Experimental support for improvements in `for` comprehensions + * + * @see [[https://github.com/scala/improvement-proposals/pull/79]] + */ + @compileTimeOnly("`betterFors` can only be used at compile time in import statements") + object betterFors end experimental /** The deprecated object contains features that are no longer officially suypported in Scala. diff --git a/pkgs/README.md b/pkgs/README.md new file mode 100644 index 000000000000..9369fb822da1 --- /dev/null +++ b/pkgs/README.md @@ -0,0 +1,16 @@ +

+# Configuration for Chocolatey

+ +Official support for Chocolatey started by the release of Scala 3.6.0 + +> [!IMPORTANT] +> This folder contains the templates to generate the configuration for Chocolatey. +> The `scala.nuspec` and `chocolateyInstall.ps1` files needs to be rewritten by changing the following placeholders: +> - @LAUNCHER_VERSION@ : Placeholder for the current scala version to deploy +> - @LAUNCHER_URL@ : Placeholder for the URL to the windows zip released on GitHub +> - @LAUNCHER_SHA256@ : Placeholder for the SHA256 of the msi file released on GitHub + +## Important information + +- How to create a *Chocolatey* package: https://docs.chocolatey.org/en-us/create/create-packages/ +- Guidelines to follow for the package icon: https://docs.chocolatey.org/en-us/create/create-packages/#package-icon-guidelines +- `.nuspec` format specification: https://learn.microsoft.com/en-gb/nuget/reference/nuspec diff --git a/pkgs/chocolatey/README.md b/pkgs/chocolatey/README.md new file mode 100644 index 000000000000..fac301082bac --- /dev/null +++ b/pkgs/chocolatey/README.md @@ -0,0 +1,10 @@ +

+# Configuration for Chocolatey

+ +Official support for Chocolatey started by the release of Scala 3.6.0 + +> [!IMPORTANT] +> This folder contains the templates to generate the configuration for Chocolatey. +> The `scala.nuspec` and `chocolateyInstall.ps1` files needs to be rewritten by changing the following placeholders: +> - @LAUNCHER_VERSION@ : Placeholder for the current scala version to deploy +> - @LAUNCHER_URL@ : Placeholder for the URL to the windows zip released on GitHub +> - @LAUNCHER_SHA256@ : Placeholder for the SHA256 of the msi file released on GitHub diff --git a/pkgs/chocolatey/icon.svg b/pkgs/chocolatey/icon.svg new file mode 100644 index 000000000000..0ccb404b5624 --- /dev/null +++ b/pkgs/chocolatey/icon.svg @@ -0,0 +1,30 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/pkgs/chocolatey/scala.nuspec b/pkgs/chocolatey/scala.nuspec new file mode 100644 index 000000000000..83033fe4b349 --- /dev/null +++ b/pkgs/chocolatey/scala.nuspec @@ -0,0 +1,25 @@ + + + + scala + @LAUNCHER_VERSION@ + Scala + scala + scala + scala + Scala + Official release of the Scala Programming Language on Chocolatey. + https://github.com/scala/scala3/tree/main/pkgs/chocolatey + https://github.com/scala/scala3 + https://scala-lang.org/ + https://github.com/scala/scala3/issues + © 2002-2025, LAMP/EPFL + https://cdn.jsdelivr.net/gh/scala/scala3@a046b0014ffd9536144d67a48f8759901b96d12f/pkgs/chocolatey/icon.svg + https://github.com/scala/scala3/blob/main/LICENSE + true + https://github.com/scala/scala3/releases + + + + + diff --git a/pkgs/chocolatey/tools/chocolateyInstall.ps1 b/pkgs/chocolatey/tools/chocolateyInstall.ps1 new file mode 100644 index 000000000000..3117efadaf0e --- /dev/null +++ b/pkgs/chocolatey/tools/chocolateyInstall.ps1 @@ -0,0 +1,46 @@ +$ErrorActionPreference = 'Stop'; + +$unzipLocation = Split-Path -Parent $MyInvocation.MyCommand.Definition # Get the root of chocolatey folder +$unzipLocation = Join-Path $unzipLocation "$($env:chocolateyPackageName)" # Append the package's name +$unzipLocation = Join-Path $unzipLocation "$($env:chocolateyPackageVersion)" # Append the package's version + +# Configure the installation arguments +$packageArgs = @{ + packageName = 'scala' + Url64 = '@LAUNCHER_URL@' + UnzipLocation = $unzipLocation + Checksum64 = '@LAUNCHER_SHA256@' + ChecksumType64 = 'SHA256' +} + +## In case we are running in the CI, add the authorisation header to fetch the zip +## See: https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#download-an-artifact +if ($env:DOTTY_CI_INSTALLATION) { + Write-Host "Installing the Chocolatey package in Scala 3's CI" + $packageArgs += @{ + Options = @{ + Headers = @{ + Accept = 'application/vnd.github+json' + Authorization = "Bearer $env:DOTTY_CI_INSTALLATION" + } + } + } +} + +Install-ChocolateyZipPackage @packageArgs + +# Find the path to the bin directory to create the shims +if($env:DOTTY_CI_INSTALLATION) { + $scalaBinPath = Join-Path $unzipLocation 'bin' # Update this path if the structure inside the ZIP changes +} else { + $extractedDir = Get-ChildItem -Path $unzipLocation | Where-Object { $_.PSIsContainer } | Select-Object -First 1 + $scalaBinPath = Join-Path $unzipLocation $extractedDir | Join-Path -ChildPath 'bin' +} + +# Iterate through the .bat files in the bin directory and create shims +Write-Host "Creating shims for .bat file from $scalaBinPath" +Get-ChildItem -Path $scalaBinPath -Filter '*.bat' | ForEach-Object { + $file = $_.FullName + Write-Host "Creating shim for $file..." 
+ Install-BinFile -Name $_.BaseName -Path $file +} diff --git a/pkgs/chocolatey/tools/chocolateyUninstall.ps1 b/pkgs/chocolatey/tools/chocolateyUninstall.ps1 new file mode 100644 index 000000000000..387914af5d09 --- /dev/null +++ b/pkgs/chocolatey/tools/chocolateyUninstall.ps1 @@ -0,0 +1,21 @@ +$ErrorActionPreference = 'Stop'; + +$unzipLocation = Split-Path -Parent $MyInvocation.MyCommand.Definition # Get the root of chocolatey folder +$unzipLocation = Join-Path $unzipLocation "$($env:chocolateyPackageName)" # Append the package's name +$unzipLocation = Join-Path $unzipLocation "$($env:chocolateyPackageVersion)" # Append the package's version + +# Find the path to the bin directory to create the shims +if($env:DOTTY_CI_INSTALLATION) { + $scalaBinPath = Join-Path $unzipLocation 'bin' # Update this path if the structure inside the ZIP changes + } else { + $extractedDir = Get-ChildItem -Path $unzipLocation | Where-Object { $_.PSIsContainer } | Select-Object -First 1 + $scalaBinPath = Join-Path $unzipLocation $extractedDir | Join-Path -ChildPath 'bin' + } + +# Iterate through the .bat files in the bin directory and remove shims +Write-Host "Removing shims for .bat file from $scalaBinPath" +Get-ChildItem -Path $scalaBinPath -Filter '*.bat' | ForEach-Object { + $file = $_.FullName + Write-Host "Removing shim for $file..." + Uninstall-BinFile -Name $_.BaseName -Path $file +} diff --git a/pkgs/msi/README.md b/pkgs/msi/README.md new file mode 100644 index 000000000000..7904ef383277 --- /dev/null +++ b/pkgs/msi/README.md @@ -0,0 +1,9 @@ + +## Important information + +- We can only build `msi` packages with stable version number (no RCs, nor Nightlies). +Example of the error message when building with an RC + +``` +error CNDL0108 : The Product/@Version attribute's value, '3.5.1-RC1', is not a valid version. 
Legal version values should look like 'x.x.x.x' where x is an integer from 0 to 65534 +``` diff --git a/presentation-compiler/src/main/dotty/tools/pc/AutoImports.scala b/presentation-compiler/src/main/dotty/tools/pc/AutoImports.scala index bf814ef682e0..1b44dce8c642 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/AutoImports.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/AutoImports.scala @@ -269,7 +269,14 @@ object AutoImports: private def importName(sym: Symbol): String = if indexedContext.importContext.toplevelClashes(sym) then s"_root_.${sym.fullNameBackticked(false)}" - else sym.fullNameBackticked(false) + else + sym.ownersIterator.zipWithIndex.foldLeft((List.empty[String], false)) { case ((acc, isDone), (sym, idx)) => + if(isDone || sym.isEmptyPackage || sym.isRoot) (acc, true) + else indexedContext.rename(sym) match + case Some(renamed) => (renamed :: acc, true) + case None if !sym.isPackageObject => (sym.nameBackticked(false) :: acc, false) + case None => (acc, false) + }._1.mkString(".") end AutoImportsGenerator private def autoImportPosition( @@ -313,13 +320,14 @@ object AutoImports: case _ => None - def skipUsingDirectivesOffset( - firstObjectPos: Int = firstMemberDefinitionStart(tree).getOrElse(0) - ): Int = + def skipUsingDirectivesOffset(firstObjectPos: Int = firstMemberDefinitionStart(tree).getOrElse(0)): Int = val firstObjectLine = pos.source.offsetToLine(firstObjectPos) + comments .takeWhile(comment => - !comment.isDocComment && pos.source.offsetToLine(comment.span.end) + 1 < firstObjectLine + val commentLine = pos.source.offsetToLine(comment.span.end) + val isFirstObjectComment = commentLine + 1 == firstObjectLine && !comment.raw.startsWith("//>") + commentLine < firstObjectLine && !isFirstObjectComment ) .lastOption .fold(0)(_.span.end + 1) diff --git a/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala index ded7845ffa4e..e35556ad11c9 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala @@ -13,7 +13,6 @@ import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.util.SourceFile -import dotty.tools.pc.AutoImports.* import dotty.tools.pc.completions.CompletionPos import dotty.tools.pc.utils.InteractiveEnrichments.* @@ -67,7 +66,8 @@ final class AutoImportsProvider( val results = symbols.result.filter(isExactMatch(_, name)) if results.nonEmpty then - val correctedPos = CompletionPos.infer(pos, params, path).toSourcePosition + val correctedPos = + CompletionPos.infer(pos, params, path, wasCursorApplied = false).toSourcePosition val mkEdit = path match // if we are in import section just specify full name diff --git a/presentation-compiler/src/main/dotty/tools/pc/CachingDriver.scala b/presentation-compiler/src/main/dotty/tools/pc/CachingDriver.scala new file mode 100644 index 000000000000..f5715c2780a9 --- /dev/null +++ b/presentation-compiler/src/main/dotty/tools/pc/CachingDriver.scala @@ -0,0 +1,56 @@ +package dotty.tools.pc + +import java.net.URI +import java.util as ju + +import dotty.tools.dotc.interactive.InteractiveDriver +import dotty.tools.dotc.reporting.Diagnostic +import dotty.tools.dotc.util.SourceFile + +import scala.compiletime.uninitialized + +/** + * CachingDriver is a wrapper class that provides a compilation cache for 
InteractiveDriver. + * CachingDriver skips running compilation if + * - the target URI of `run` is the same as the previous target URI + * - the content didn't change since the last compilation. + * + * This compilation cache enables Metals to skip compilation and re-use + * the typed tree under the situation like developers + * sequentially hover on the symbols in the same file without any changes. + * + * Note: we decided to cache only if the target URI is the same as in the previous run + * because of `InteractiveDriver.currentCtx` that should return the context that + * refers to the last compiled source file. + * It would be ideal if we could update currentCtx even when we skip the compilation, + * but we struggled to do that. See the discussion https://github.com/scalameta/metals/pull/4225#discussion_r941138403 + * To avoid the complexity related to currentCtx, + * we decided to cache only when the target URI only if the same as the previous run. + */ +class CachingDriver(override val settings: List[String]) extends InteractiveDriver(settings): + + @volatile private var lastCompiledURI: URI = uninitialized + + private def alreadyCompiled(uri: URI, content: Array[Char]): Boolean = + compilationUnits.get(uri) match + case Some(unit) + if lastCompiledURI == uri && + ju.Arrays.equals(unit.source.content(), content) => + true + case _ => false + + override def run(uri: URI, source: SourceFile): List[Diagnostic] = + val diags = + if alreadyCompiled(uri, source.content) then Nil + else super.run(uri, source) + lastCompiledURI = uri + diags + + override def run(uri: URI, sourceCode: String): List[Diagnostic] = + val diags = + if alreadyCompiled(uri, sourceCode.toCharArray().nn) then Nil + else super.run(uri, sourceCode) + lastCompiledURI = uri + diags + +end CachingDriver diff --git a/presentation-compiler/src/main/dotty/tools/pc/CompilerSearchVisitor.scala b/presentation-compiler/src/main/dotty/tools/pc/CompilerSearchVisitor.scala index 231960ec5116..9fb84ee1f513 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/CompilerSearchVisitor.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/CompilerSearchVisitor.scala @@ -12,6 +12,7 @@ import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Flags import dotty.tools.dotc.core.Names.* import dotty.tools.dotc.core.Symbols.* +import dotty.tools.pc.utils.InteractiveEnrichments.companion class CompilerSearchVisitor( visitSymbol: Symbol => Boolean @@ -27,7 +28,7 @@ class CompilerSearchVisitor( owner.isStatic && owner.isPublic private def isAccessible(sym: Symbol): Boolean = try - sym != NoSymbol && sym.isPublic && sym.isStatic || isAccessibleImplicitClass(sym) + (sym != NoSymbol && sym.isAccessibleFrom(ctx.owner.info) && sym.isStatic) || isAccessibleImplicitClass(sym) catch case err: AssertionError => logger.log(Level.WARNING, err.getMessage()) @@ -91,11 +92,12 @@ class CompilerSearchVisitor( range: org.eclipse.lsp4j.Range ): Int = val gsym = SemanticdbSymbols.inverseSemanticdbSymbol(symbol).headOption - gsym - .filter(isAccessible) - .map(visitSymbol) - .map(_ => 1) - .getOrElse(0) + val matching = for + sym0 <- gsym.toList + sym <- if sym0.companion.is(Flags.Synthetic) then List(sym0, sym0.companion) else List(sym0) + if isAccessible(sym) + yield visitSymbol(sym) + matching.size def shouldVisitPackage(pkg: String): Boolean = isAccessible(requiredPackage(normalizePackage(pkg))) diff --git a/presentation-compiler/src/main/dotty/tools/pc/ExtractMethodProvider.scala 
b/presentation-compiler/src/main/dotty/tools/pc/ExtractMethodProvider.scala index 4416d0c0d000..c72a0602f1ce 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/ExtractMethodProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/ExtractMethodProvider.scala @@ -13,6 +13,7 @@ import dotty.tools.dotc.ast.Trees.* import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.ast.tpd.DeepFolder import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Flags import dotty.tools.dotc.core.Symbols.Symbol import dotty.tools.dotc.core.Types.MethodType import dotty.tools.dotc.core.Types.PolyType @@ -116,9 +117,15 @@ final class ExtractMethodProvider( typeParams.toList.sortBy(_.decodedName), ) end localRefs + val optEnclosing = + path.dropWhile(src => !src.sourcePos.encloses(range)) match + case Nil => None + case _ :: (app @ Apply(fun, args)) :: _ if args.exists(ImplicitParameters.isSyntheticArg(_)) => Some(app) + case found :: _ => Some(found) + val edits = for - enclosing <- path.find(src => src.sourcePos.encloses(range)) + enclosing <- optEnclosing extracted = extractFromBlock(enclosing) head <- extracted.headOption expr <- extracted.lastOption @@ -131,11 +138,14 @@ final class ExtractMethodProvider( val exprType = prettyPrint(expr.typeOpt.widen) val name = genName(indexedCtx.scopeSymbols.map(_.decodedName).toSet, "newMethod") - val (methodParams, typeParams) = + val (allMethodParams, typeParams) = localRefs(extracted, stat.sourcePos, extractedPos) - val methodParamsText = methodParams - .map(sym => s"${sym.decodedName}: ${prettyPrint(sym.info)}") - .mkString(", ") + val (methodParams, implicitParams) = allMethodParams.partition(!_.isOneOf(Flags.GivenOrImplicit)) + def toParamText(params: List[Symbol]) = + params.map(sym => s"${sym.decodedName}: ${prettyPrint(sym.info)}") + .mkString(", ") + val methodParamsText = toParamText(methodParams) + val implicitParamsText = if implicitParams.nonEmpty then s"(given ${toParamText(implicitParams)})" else "" val typeParamsText = typeParams .map(_.decodedName) match case Nil => "" @@ -155,7 +165,7 @@ final class ExtractMethodProvider( if noIndent && extracted.length > 1 then (" {", s"$newIndent}") else ("", "") val defText = - s"def $name$typeParamsText($methodParamsText): $exprType =$obracket\n${toExtract}\n$cbracket\n$newIndent" + s"def $name$typeParamsText($methodParamsText)$implicitParamsText: $exprType =$obracket\n${toExtract}\n$cbracket\n$newIndent" val replacedText = s"$name($exprParamsText)" List( new l.TextEdit( diff --git a/presentation-compiler/src/main/dotty/tools/pc/IndexedContext.scala b/presentation-compiler/src/main/dotty/tools/pc/IndexedContext.scala index 6b74e3aa2ec1..7c2c34cf5ebb 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/IndexedContext.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/IndexedContext.scala @@ -36,8 +36,8 @@ sealed trait IndexedContext: Result.InScope // when all the conflicting symbols came from an old version of the file case Some(symbols) if symbols.nonEmpty && symbols.forall(_.isStale) => Result.Missing - case Some(_) => Result.Conflict - case None => Result.Missing + case Some(symbols) if symbols.exists(rename(_).isEmpty) => Result.Conflict + case _ => Result.Missing end lookupSym /** diff --git a/presentation-compiler/src/main/dotty/tools/pc/InferExpectedType.scala b/presentation-compiler/src/main/dotty/tools/pc/InferExpectedType.scala new file mode 100644 index 000000000000..260a28392093 --- /dev/null +++ 
b/presentation-compiler/src/main/dotty/tools/pc/InferExpectedType.scala @@ -0,0 +1,134 @@ +package dotty.tools.pc + +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.ast.tpd.* +import dotty.tools.dotc.core.Constants.Constant +import dotty.tools.dotc.core.Contexts.Context +import dotty.tools.dotc.core.Flags +import dotty.tools.dotc.core.StdNames +import dotty.tools.dotc.core.Symbols +import dotty.tools.dotc.core.Types.* +import dotty.tools.dotc.core.Types.Type +import dotty.tools.dotc.interactive.Interactive +import dotty.tools.dotc.interactive.InteractiveDriver +import dotty.tools.dotc.typer.Applications.UnapplyArgs +import dotty.tools.dotc.util.NoSourcePosition +import dotty.tools.dotc.util.SourceFile +import dotty.tools.dotc.util.Spans.Span +import dotty.tools.pc.IndexedContext +import dotty.tools.pc.printer.ShortenedTypePrinter +import dotty.tools.pc.printer.ShortenedTypePrinter.IncludeDefaultParam +import dotty.tools.pc.utils.InteractiveEnrichments.* + +import scala.meta.internal.metals.ReportContext +import scala.meta.pc.OffsetParams +import scala.meta.pc.SymbolSearch + +class InferExpectedType( + search: SymbolSearch, + driver: InteractiveDriver, + params: OffsetParams +)(implicit rc: ReportContext): + val uri = params.uri().nn + val code = params.text().nn + + val sourceFile = SourceFile.virtual(uri, code) + driver.run(uri, sourceFile) + + val ctx = driver.currentCtx + val pos = driver.sourcePosition(params) + + def infer() = + driver.compilationUnits.get(uri) match + case Some(unit) => + val path = + Interactive.pathTo(driver.openedTrees(uri), pos)(using ctx) + val newctx = ctx.fresh.setCompilationUnit(unit) + val tpdPath = + Interactive.pathTo(newctx.compilationUnit.tpdTree, pos.span)(using + newctx + ) + val locatedCtx = + Interactive.contextOfPath(tpdPath)(using newctx) + val indexedCtx = IndexedContext(locatedCtx) + val printer = + ShortenedTypePrinter(search, IncludeDefaultParam.ResolveLater)(using indexedCtx) + InterCompletionType.inferType(path)(using newctx).map{ + tpe => printer.tpe(tpe) + } + case None => None + +object InterCompletionType: + def inferType(path: List[Tree])(using Context): Option[Type] = + path match + case (lit: Literal) :: Select(Literal(_), _) :: Apply(Select(Literal(_), _), List(Literal(Constant(null)))) :: rest => inferType(rest, lit.span) + case ident :: rest => inferType(rest, ident.span) + case _ => None + + def inferType(path: List[Tree], span: Span)(using Context): Option[Type] = + path match + case Typed(expr, tpt) :: _ if expr.span.contains(span) && !tpt.tpe.isErroneous => Some(tpt.tpe) + case Block(_, expr) :: rest if expr.span.contains(span) => + inferType(rest, span) + case Bind(_, body) :: rest if body.span.contains(span) => inferType(rest, span) + case Alternative(_) :: rest => inferType(rest, span) + case Try(block, _, _) :: rest if block.span.contains(span) => inferType(rest, span) + case CaseDef(_, _, body) :: Try(_, cases, _) :: rest if body.span.contains(span) && cases.exists(_.span.contains(span)) => inferType(rest, span) + case If(cond, _, _) :: rest if !cond.span.contains(span) => inferType(rest, span) + case If(cond, _, _) :: rest if cond.span.contains(span) => Some(Symbols.defn.BooleanType) + case CaseDef(_, _, body) :: Match(_, cases) :: rest if body.span.contains(span) && cases.exists(_.span.contains(span)) => + inferType(rest, span) + case NamedArg(_, arg) :: rest if arg.span.contains(span) => inferType(rest, span) + // x match + // case @@ + case CaseDef(pat, _, _) :: Match(sel, cases) :: rest if 
pat.span.contains(span) && cases.exists(_.span.contains(span)) && !sel.tpe.isErroneous => + sel.tpe match + case tpe: TermRef => Some(tpe.symbol.info).filterNot(_.isErroneous) + case tpe => Some(tpe) + // List(@@) + case SeqLiteral(_, tpe) :: _ if !tpe.tpe.isErroneous => + Some(tpe.tpe) + // val _: T = @@ + // def _: T = @@ + case (defn: ValOrDefDef) :: rest if !defn.tpt.tpe.isErroneous => Some(defn.tpt.tpe) + case UnApply(fun, _, pats) :: _ => + val ind = pats.indexWhere(_.span.contains(span)) + if ind < 0 then None + else Some(UnapplyArgs(fun.tpe.finalResultType, fun, pats, NoSourcePosition).argTypes(ind)) + // f(@@) + case (app: Apply) :: rest => + val param = + for { + ind <- app.args.zipWithIndex.collectFirst { + case (arg, id) if arg.span.contains(span) => id + } + params <- app.symbol.paramSymss.find(!_.exists(_.isTypeParam)) + param <- params.get(ind) + } yield param.info + param match + // def f[T](a: T): T = ??? + // f[Int](@@) + // val _: Int = f(@@) + case Some(t : TypeRef) if t.symbol.is(Flags.TypeParam) => + for { + (typeParams, args) <- + app match + case Apply(TypeApply(fun, args), _) => + val typeParams = fun.symbol.paramSymss.headOption.filter(_.forall(_.isTypeParam)) + typeParams.map((_, args.map(_.tpe))) + // val f: (j: "a") => Int + // f(@@) + case Apply(Select(v, StdNames.nme.apply), _) => + v.symbol.info match + case AppliedType(des, args) => + Some((des.typeSymbol.typeParams, args)) + case _ => None + case _ => None + ind = typeParams.indexOf(t.symbol) + tpe <- args.get(ind) + if !tpe.isErroneous + } yield tpe + case Some(tpe) => Some(tpe) + case _ => None + case _ => None + diff --git a/presentation-compiler/src/main/dotty/tools/pc/MetalsDriver.scala b/presentation-compiler/src/main/dotty/tools/pc/MetalsDriver.scala deleted file mode 100644 index 819c3f2fc9c9..000000000000 --- a/presentation-compiler/src/main/dotty/tools/pc/MetalsDriver.scala +++ /dev/null @@ -1,58 +0,0 @@ -package dotty.tools.pc - -import java.net.URI -import java.util as ju - -import dotty.tools.dotc.interactive.InteractiveDriver -import dotty.tools.dotc.reporting.Diagnostic -import dotty.tools.dotc.util.SourceFile - -import scala.compiletime.uninitialized - -/** - * MetalsDriver is a wrapper class that provides a compilation cache for InteractiveDriver. - * MetalsDriver skips running compilation if - * - the target URI of `run` is the same as the previous target URI - * - the content didn't change since the last compilation. - * - * This compilation cache enables Metals to skip compilation and re-use - * the typed tree under the situation like developers - * sequentially hover on the symbols in the same file without any changes. - * - * Note: we decided to cache only if the target URI is the same as in the previous run - * because of `InteractiveDriver.currentCtx` that should return the context that - * refers to the last compiled source file. - * It would be ideal if we could update currentCtx even when we skip the compilation, - * but we struggled to do that. See the discussion https://github.com/scalameta/metals/pull/4225#discussion_r941138403 - * To avoid the complexity related to currentCtx, - * we decided to cache only when the target URI only if the same as the previous run. 
- */ -class MetalsDriver( - override val settings: List[String] -) extends InteractiveDriver(settings): - - @volatile private var lastCompiledURI: URI = uninitialized - - private def alreadyCompiled(uri: URI, content: Array[Char]): Boolean = - compilationUnits.get(uri) match - case Some(unit) - if lastCompiledURI == uri && - ju.Arrays.equals(unit.source.content(), content) => - true - case _ => false - - override def run(uri: URI, source: SourceFile): List[Diagnostic] = - val diags = - if alreadyCompiled(uri, source.content) then Nil - else super.run(uri, source) - lastCompiledURI = uri - diags - - override def run(uri: URI, sourceCode: String): List[Diagnostic] = - val diags = - if alreadyCompiled(uri, sourceCode.toCharArray().nn) then Nil - else super.run(uri, sourceCode) - lastCompiledURI = uri - diags - -end MetalsDriver diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala b/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala index c447123c8725..1ebfd405768e 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala @@ -2,6 +2,7 @@ package dotty.tools.pc import java.nio.file.Paths +import dotty.tools.pc.PcSymbolSearch.* import scala.meta.internal.metals.CompilerOffsetParams import scala.meta.pc.OffsetParams import scala.meta.pc.VirtualFileParams @@ -28,312 +29,13 @@ import dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.util.Spans.Span import dotty.tools.pc.utils.InteractiveEnrichments.* -abstract class PcCollector[T]( - driver: InteractiveDriver, - params: VirtualFileParams -): - private val caseClassSynthetics: Set[Name] = Set(nme.apply, nme.copy) - val uri = params.uri().nn - val filePath = Paths.get(uri).nn - val sourceText = params.text().nn - val text = sourceText.toCharArray().nn - val source = - SourceFile.virtual(filePath.toString(), sourceText) - driver.run(uri, source) - given ctx: Context = driver.currentCtx - - val unit = driver.currentCtx.run.nn.units.head - val compilatonUnitContext = ctx.fresh.setCompilationUnit(unit) - val offset = params match - case op: OffsetParams => op.offset() - case _ => 0 - val offsetParams = - params match - case op: OffsetParams => op - case _ => CompilerOffsetParams(uri, sourceText, 0, params.token().nn) - val pos = driver.sourcePosition(offsetParams) - val rawPath = - Interactive - .pathTo(driver.openedTrees(uri), pos)(using driver.currentCtx) - .dropWhile(t => // NamedArg anyway doesn't have symbol - t.symbol == NoSymbol && !t.isInstanceOf[NamedArg] || - // same issue https://github.com/scala/scala3/issues/15937 as below - t.isInstanceOf[TypeTree] - ) - - val path = rawPath match - // For type it will sometimes go into the wrong tree since TypeTree also contains the same span - // https://github.com/scala/scala3/issues/15937 - case TypeApply(sel: Select, _) :: tail if sel.span.contains(pos.span) => - Interactive.pathTo(sel, pos.span) ::: rawPath - case _ => rawPath +trait PcCollector[T]: + self: WithCompilationUnit => def collect( parent: Option[Tree] )(tree: Tree| EndMarker, pos: SourcePosition, symbol: Option[Symbol]): T - def symbolAlternatives(sym: Symbol) = - def member(parent: Symbol) = parent.info.member(sym.name).symbol - def primaryConstructorTypeParam(owner: Symbol) = - for - typeParams <- owner.primaryConstructor.paramSymss.headOption - param <- typeParams.find(_.name == sym.name) - if (param.isType) - yield param - def additionalForEnumTypeParam(enumClass: Symbol) = - if 
enumClass.is(Flags.Enum) then - val enumOwner = - if enumClass.is(Flags.Case) - then - Option.when(member(enumClass).is(Flags.Synthetic))( - enumClass.maybeOwner.companionClass - ) - else Some(enumClass) - enumOwner.toSet.flatMap { enumOwner => - val symsInEnumCases = enumOwner.children.toSet.flatMap(enumCase => - if member(enumCase).is(Flags.Synthetic) - then primaryConstructorTypeParam(enumCase) - else None - ) - val symsInEnumOwner = - primaryConstructorTypeParam(enumOwner).toSet + member(enumOwner) - symsInEnumCases ++ symsInEnumOwner - } - else Set.empty - val all = - if sym.is(Flags.ModuleClass) then - Set(sym, sym.companionModule, sym.companionModule.companion) - else if sym.isClass then - Set(sym, sym.companionModule, sym.companion.moduleClass) - else if sym.is(Flags.Module) then - Set(sym, sym.companionClass, sym.moduleClass) - else if sym.isTerm && (sym.owner.isClass || sym.owner.isConstructor) - then - val info = - if sym.owner.isClass then sym.owner.info else sym.owner.owner.info - Set( - sym, - info.member(sym.asTerm.name.setterName).symbol, - info.member(sym.asTerm.name.getterName).symbol - ) ++ sym.allOverriddenSymbols.toSet - // type used in primary constructor will not match the one used in the class - else if sym.isTypeParam && sym.owner.isPrimaryConstructor then - Set(sym, member(sym.maybeOwner.maybeOwner)) - ++ additionalForEnumTypeParam(sym.maybeOwner.maybeOwner) - else if sym.isTypeParam then - primaryConstructorTypeParam(sym.maybeOwner).toSet - ++ additionalForEnumTypeParam(sym.maybeOwner) + sym - else Set(sym) - all.filter(s => s != NoSymbol && !s.isError) - end symbolAlternatives - - private def isGeneratedGiven(df: NamedDefTree)(using Context) = - val nameSpan = df.nameSpan - df.symbol.is(Flags.Given) && sourceText.substring( - nameSpan.start, - nameSpan.end - ) != df.name.toString() - - // First identify the symbol we are at, comments identify @@ as current cursor position - def soughtSymbols(path: List[Tree]): Option[(Set[Symbol], SourcePosition)] = - val sought = path match - /* reference of an extension paramter - * extension [EF](<>: List[EF]) - * def double(ys: List[EF]) = <> ++ ys - */ - case (id: Ident) :: _ - if id.symbol - .is(Flags.Param) && id.symbol.owner.is(Flags.ExtensionMethod) => - Some(findAllExtensionParamSymbols(id.sourcePos, id.name, id.symbol)) - /** - * Workaround for missing symbol in: - * class A[T](a: T) - * val x = new <>(1) - */ - case t :: (n: New) :: (sel: Select) :: _ - if t.symbol == NoSymbol && sel.symbol.isConstructor => - Some(symbolAlternatives(sel.symbol.owner), namePos(t)) - /** - * Workaround for missing symbol in: - * class A[T](a: T) - * val x = <>[Int](1) - */ - case (sel @ Select(New(t), _)) :: (_: TypeApply) :: _ - if sel.symbol.isConstructor => - Some(symbolAlternatives(sel.symbol.owner), namePos(t)) - /* simple identifier: - * val a = val@@ue + value - */ - case (id: Ident) :: _ => - Some(symbolAlternatives(id.symbol), id.sourcePos) - /* simple selector: - * object.val@@ue - */ - case (sel: Select) :: _ if selectNameSpan(sel).contains(pos.span) => - Some(symbolAlternatives(sel.symbol), pos.withSpan(sel.nameSpan)) - /* named argument: - * foo(nam@@e = "123") - */ - case (arg: NamedArg) :: (appl: Apply) :: _ => - val realName = arg.name.stripModuleClassSuffix.lastPart - if pos.span.start > arg.span.start && pos.span.end < arg.span.point + realName.length - then - val length = realName.toString.backticked.length() - val pos = arg.sourcePos.withSpan( - arg.span - .withEnd(arg.span.start + length) - 
.withPoint(arg.span.start) - ) - appl.symbol.paramSymss.flatten.find(_.name == arg.name).map { s => - // if it's a case class we need to look for parameters also - if caseClassSynthetics(s.owner.name) && s.owner.is(Flags.Synthetic) - then - ( - Set( - s, - s.owner.owner.companion.info.member(s.name).symbol, - s.owner.owner.info.member(s.name).symbol - ) - .filter(_ != NoSymbol), - pos, - ) - else (Set(s), pos) - } - else None - end if - /* all definitions: - * def fo@@o = ??? - * class Fo@@o = ??? - * etc. - */ - case (df: NamedDefTree) :: _ - if df.nameSpan.contains(pos.span) && !isGeneratedGiven(df) => - Some(symbolAlternatives(df.symbol), pos.withSpan(df.nameSpan)) - /* enum cases with params - * enum Foo: - * case B@@ar[A](i: A) - */ - case (df: NamedDefTree) :: Template(_, _, self, _) :: _ - if (df.name == nme.apply || df.name == nme.unapply) && df.nameSpan.isZeroExtent => - Some(symbolAlternatives(self.tpt.symbol), self.sourcePos) - /** - * For traversing annotations: - * @JsonNo@@tification("") - * def params() = ??? - */ - case (df: MemberDef) :: _ if df.span.contains(pos.span) => - val annotTree = df.mods.annotations.find { t => - t.span.contains(pos.span) - } - collectTrees(annotTree).flatMap { t => - soughtSymbols( - Interactive.pathTo(t, pos.span) - ) - }.headOption - - /* Import selectors: - * import scala.util.Tr@@y - */ - case (imp: Import) :: _ if imp.span.contains(pos.span) => - imp - .selector(pos.span) - .map(sym => (symbolAlternatives(sym), sym.sourcePos)) - - case _ => None - - sought match - case None => seekInExtensionParameters() - case _ => sought - - end soughtSymbols - - lazy val extensionMethods = - NavigateAST - .untypedPath(pos.span)(using compilatonUnitContext) - .collectFirst { case em @ ExtMethods(_, _) => em } - - private def findAllExtensionParamSymbols( - pos: SourcePosition, - name: Name, - sym: Symbol - ) = - val symbols = - for - methods <- extensionMethods.map(_.methods) - symbols <- collectAllExtensionParamSymbols( - unit.tpdTree, - ExtensionParamOccurence(name, pos, sym, methods) - ) - yield symbols - symbols.getOrElse((symbolAlternatives(sym), pos)) - end findAllExtensionParamSymbols - - private def seekInExtensionParameters() = - def collectParams( - extMethods: ExtMethods - ): Option[ExtensionParamOccurence] = - NavigateAST - .pathTo(pos.span, extMethods.paramss.flatten)(using - compilatonUnitContext - ) - .collectFirst { - case v: untpd.ValOrTypeDef => - ExtensionParamOccurence( - v.name, - v.namePos, - v.symbol, - extMethods.methods - ) - case i: untpd.Ident => - ExtensionParamOccurence( - i.name, - i.sourcePos, - i.symbol, - extMethods.methods - ) - } - - for - extensionMethodScope <- extensionMethods - occurrence <- collectParams(extensionMethodScope) - symbols <- collectAllExtensionParamSymbols( - path.headOption.getOrElse(unit.tpdTree), - occurrence - ) - yield symbols - end seekInExtensionParameters - - private def collectAllExtensionParamSymbols( - tree: tpd.Tree, - occurrence: ExtensionParamOccurence - ): Option[(Set[Symbol], SourcePosition)] = - occurrence match - case ExtensionParamOccurence(_, namePos, symbol, _) - if symbol != NoSymbol && !symbol.isError && !symbol.owner.is( - Flags.ExtensionMethod - ) => - Some((symbolAlternatives(symbol), namePos)) - case ExtensionParamOccurence(name, namePos, _, methods) => - val symbols = - for - method <- methods.toSet - symbol <- - Interactive.pathTo(tree, method.span) match - case (d: DefDef) :: _ => - d.paramss.flatten.collect { - case param if param.name.decoded == name.decoded => - 
param.symbol - } - case _ => Set.empty[Symbol] - if (symbol != NoSymbol && !symbol.isError) - withAlt <- symbolAlternatives(symbol) - yield withAlt - if symbols.nonEmpty then Some((symbols, namePos)) else None - end collectAllExtensionParamSymbols - - def result(): List[T] = - params match - case _: OffsetParams => resultWithSought() - case _ => resultAllOccurences().toList + def allowZeroExtentImplicits: Boolean = false def resultAllOccurences(): Set[T] = def noTreeFilter = (_: Tree) => true @@ -341,55 +43,56 @@ abstract class PcCollector[T]( traverseSought(noTreeFilter, noSoughtFilter) - def resultWithSought(): List[T] = - soughtSymbols(path) match - case Some((sought, _)) => - lazy val owners = sought - .flatMap { s => Set(s.owner, s.owner.companionModule) } - .filter(_ != NoSymbol) - lazy val soughtNames: Set[Name] = sought.map(_.name) - - /* - * For comprehensions have two owners, one for the enumerators and one for - * yield. This is a heuristic to find that out. - */ - def isForComprehensionOwner(named: NameTree) = - soughtNames(named.name) && - scala.util - .Try(named.symbol.owner) - .toOption - .exists(_.isAnonymousFunction) && - owners.exists(o => - o.span.exists && o.span.point == named.symbol.owner.span.point - ) - - def soughtOrOverride(sym: Symbol) = - sought(sym) || sym.allOverriddenSymbols.exists(sought(_)) + def resultWithSought(sought: Set[Symbol]): List[T] = + lazy val owners = sought + .flatMap { s => Set(s.owner, s.owner.companionModule) } + .filter(_ != NoSymbol) + lazy val soughtNames: Set[Name] = sought.map(_.name) + + /* + * For comprehensions have two owners, one for the enumerators and one for + * yield. This is a heuristic to find that out. + */ + def isForComprehensionOwner(named: NameTree) = + soughtNames(named.name) && + scala.util + .Try(named.symbol.owner) + .toOption + .exists(_.isAnonymousFunction) && + owners.exists(o => + o.span.exists && o.span.point == named.symbol.owner.span.point + ) - def soughtTreeFilter(tree: Tree): Boolean = - tree match - case ident: Ident - if soughtOrOverride(ident.symbol) || - isForComprehensionOwner(ident) => - true - case sel: Select if soughtOrOverride(sel.symbol) => true - case df: NamedDefTree - if soughtOrOverride(df.symbol) && !df.symbol.isSetter => - true - case imp: Import if owners(imp.expr.symbol) => true - case _ => false + def soughtOrOverride(sym: Symbol) = + sought(sym) || sym.allOverriddenSymbols.exists(sought(_)) - def soughtFilter(f: Symbol => Boolean): Boolean = - sought.exists(f) + def soughtTreeFilter(tree: Tree): Boolean = + tree match + case ident: Ident + if soughtOrOverride(ident.symbol) || + isForComprehensionOwner(ident) => + true + case sel: Select if soughtOrOverride(sel.symbol) => true + case df: NamedDefTree + if soughtOrOverride(df.symbol) && !df.symbol.isSetter => + true + case imp: Import if owners(imp.expr.symbol) => true + case _ => false - traverseSought(soughtTreeFilter, soughtFilter).toList + def soughtFilter(f: Symbol => Boolean): Boolean = + sought.exists(f) - case None => Nil + traverseSought(soughtTreeFilter, soughtFilter).toList + end resultWithSought extension (span: Span) def isCorrect = !span.isZeroExtent && span.exists && span.start < sourceText.size && span.end <= sourceText.size + extension (tree: Tree) + def isCorrectSpan = + tree.span.isCorrect || (allowZeroExtentImplicits && tree.symbol.is(Flags.Implicit)) + def traverseSought( filter: Tree => Boolean, soughtFilter: (Symbol => Boolean) => Boolean @@ -410,7 +113,7 @@ abstract class PcCollector[T]( * All indentifiers such 
as: * val a = <> */ - case ident: Ident if ident.span.isCorrect && filter(ident) => + case ident: Ident if ident.isCorrectSpan && filter(ident) => // symbols will differ for params in different ext methods, but source pos will be the same if soughtFilter(_.sourcePos == ident.symbol.sourcePos) then @@ -425,7 +128,7 @@ abstract class PcCollector[T]( * val x = new <>(1) */ case sel @ Select(New(t), _) - if sel.span.isCorrect && + if sel.isCorrectSpan && sel.symbol.isConstructor && t.symbol == NoSymbol => if soughtFilter(_ == sel.symbol.owner) then @@ -440,7 +143,7 @@ abstract class PcCollector[T]( * val a = hello.<> */ case sel: Select - if sel.span.isCorrect && filter(sel) && + if sel.isCorrectSpan && filter(sel) && !sel.isForComprehensionMethod => occurrences + collect( sel, @@ -453,7 +156,7 @@ abstract class PcCollector[T]( */ case df: NamedDefTree if df.span.isCorrect && df.nameSpan.isCorrect && - filter(df) && !isGeneratedGiven(df) => + filter(df) && !isGeneratedGiven(df, sourceText) => def collectEndMarker = EndMarker.getPosition(df, pos, sourceText).map: collect(EndMarker(df.symbol), _) @@ -572,35 +275,9 @@ abstract class PcCollector[T]( val traverser = new PcCollector.DeepFolderWithParent[Set[T]](collectNamesWithParent) - val all = traverser(Set.empty[T], unit.tpdTree) - all + traverser(Set.empty[T], unit.tpdTree) end traverseSought - // @note (tgodzik) Not sure currently how to get rid of the warning, but looks to correctly - // @nowarn - private def collectTrees(trees: Iterable[Positioned]): Iterable[Tree] = - trees.collect { case t: Tree => - t - } - - // NOTE: Connected to https://github.com/scala/scala3/issues/16771 - // `sel.nameSpan` is calculated incorrectly in (1 + 2).toString - // See test DocumentHighlightSuite.select-parentheses - private def selectNameSpan(sel: Select): Span = - val span = sel.span - if span.exists then - val point = span.point - if sel.name.toTermName == nme.ERROR then Span(point) - else if sel.qualifier.span.start > span.point then // right associative - val realName = sel.name.stripModuleClassSuffix.lastPart - Span(span.start, span.start + realName.length, point) - else Span(point, span.end, point) - else span - - private def namePos(tree: Tree): SourcePosition = - tree match - case sel: Select => sel.sourcePos.withSpan(selectNameSpan(sel)) - case _ => tree.sourcePos end PcCollector object PcCollector: @@ -656,3 +333,21 @@ object EndMarker: ) end getPosition end EndMarker + +abstract class WithSymbolSearchCollector[T]( + driver: InteractiveDriver, + params: OffsetParams, +) extends WithCompilationUnit(driver, params) + with PcSymbolSearch + with PcCollector[T]: + def result(): List[T] = + soughtSymbols.toList.flatMap { case (sought, _) => + resultWithSought(sought) + } + +abstract class SimpleCollector[T]( + driver: InteractiveDriver, + params: VirtualFileParams, +) extends WithCompilationUnit(driver, params) + with PcCollector[T]: + def result(): List[T] = resultAllOccurences().toList diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala index fc97dd1f1176..3b2284bef1d0 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala @@ -131,13 +131,13 @@ class PcDefinitionProvider( otherDefs.headOption.orElse(exportedDefs.headOption) match case Some(srcTree) => val pos = srcTree.namePos - pos.toLocation match - case None => 
DefinitionResultImpl.empty - case Some(loc) => + if pos.exists then + val loc = new Location(params.uri().toString(), pos.toLsp) DefinitionResultImpl( SemanticdbSymbols.symbolName(sym), - List(loc).asJava + List(loc).asJava, ) + else DefinitionResultImpl.empty case None => DefinitionResultImpl.empty else diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcDocumentHighlightProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcDocumentHighlightProvider.scala index d9b94ebb82a3..0c1af215b7f7 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcDocumentHighlightProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcDocumentHighlightProvider.scala @@ -14,7 +14,7 @@ import org.eclipse.lsp4j.DocumentHighlightKind final class PcDocumentHighlightProvider( driver: InteractiveDriver, params: OffsetParams -) extends PcCollector[DocumentHighlight](driver, params): +) extends WithSymbolSearchCollector[DocumentHighlight](driver, params): def collect( parent: Option[Tree] diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala index c4fdb97c0418..9c0e6bcfa9d8 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala @@ -3,6 +3,8 @@ package dotty.tools.pc import java.nio.file.Paths +import scala.annotation.tailrec + import scala.meta.internal.metals.ReportContext import dotty.tools.pc.utils.InteractiveEnrichments.* import dotty.tools.pc.printer.ShortenedTypePrinter @@ -12,7 +14,6 @@ import scala.meta.internal.pc.LabelPart.* import scala.meta.pc.InlayHintsParams import scala.meta.pc.SymbolSearch -import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.ast.tpd.* import dotty.tools.dotc.core.Contexts.Context import dotty.tools.dotc.core.Flags @@ -194,10 +195,10 @@ object ImplicitConversion: def unapply(tree: Tree)(using params: InlayHintsParams, ctx: Context) = if (params.implicitConversions()) { tree match - case Apply(fun: Ident, args) if isSynthetic(fun) => + case Apply(fun: Ident, args) if isSynthetic(fun) && args.exists(!_.span.isZeroExtent) => implicitConversion(fun, args) case Apply(Select(fun, name), args) - if name == nme.apply && isSynthetic(fun) => + if name == nme.apply && isSynthetic(fun) && args.exists(!_.span.isZeroExtent) => implicitConversion(fun, args) case _ => None } else None @@ -218,7 +219,7 @@ object ImplicitParameters: if (params.implicitParameters()) { tree match case Apply(fun, args) - if args.exists(isSyntheticArg) && !tree.sourcePos.span.isZeroExtent => + if args.exists(isSyntheticArg) && !tree.sourcePos.span.isZeroExtent && !args.exists(isQuotes(_)) => val (implicitArgs, providedArgs) = args.partition(isSyntheticArg) val allImplicit = providedArgs.isEmpty || providedArgs.forall { case Ident(name) => name == nme.MISSING @@ -229,10 +230,12 @@ object ImplicitParameters: case _ => None } else None - private def isSyntheticArg(tree: Tree)(using Context) = tree match + @tailrec + def isSyntheticArg(tree: Tree)(using Context): Boolean = tree match case tree: Ident => - tree.span.isSynthetic && tree.symbol.isOneOf(Flags.GivenOrImplicit) && - !isQuotes(tree) + tree.span.isSynthetic && tree.symbol.isOneOf(Flags.GivenOrImplicit) + case Apply(fun, _ ) if tree.span.isZeroExtent => isSyntheticArg(fun) + case TypeApply(fun, _ ) if tree.span.isZeroExtent => isSyntheticArg(fun) case _ => false // Decorations for Quotes are rarely useful diff --git 
a/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala b/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala index 38b5e8d0069b..bbba44d0d84f 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala @@ -22,9 +22,9 @@ import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j as l final class PcInlineValueProviderImpl( - val driver: InteractiveDriver, + driver: InteractiveDriver, val params: OffsetParams -) extends PcCollector[Option[Occurence]](driver, params) +) extends WithSymbolSearchCollector[Option[Occurence]](driver, params) with InlineValueProvider: val position: l.Position = pos.toLsp.getStart().nn diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcReferencesProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcReferencesProvider.scala new file mode 100644 index 000000000000..49ed313faec4 --- /dev/null +++ b/presentation-compiler/src/main/dotty/tools/pc/PcReferencesProvider.scala @@ -0,0 +1,69 @@ +package dotty.tools.pc + +import scala.language.unsafeNulls + +import scala.jdk.CollectionConverters.* + +import scala.meta.internal.metals.CompilerOffsetParams +import scala.meta.pc.ReferencesRequest +import scala.meta.pc.ReferencesResult + +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.ast.tpd.* +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.interactive.InteractiveDriver +import dotty.tools.dotc.util.SourcePosition +import org.eclipse.lsp4j +import org.eclipse.lsp4j.Location +import dotty.tools.pc.utils.InteractiveEnrichments.* +import scala.meta.internal.pc.PcReferencesResult + +class PcReferencesProvider( + driver: InteractiveDriver, + request: ReferencesRequest, +) extends WithCompilationUnit(driver, request.file()) with PcCollector[Option[(String, Option[lsp4j.Range])]]: + + override def allowZeroExtentImplicits: Boolean = true + + private def soughtSymbols = + if(request.offsetOrSymbol().isLeft()) { + val offsetParams = CompilerOffsetParams( + request.file().uri(), + request.file().text(), + request.offsetOrSymbol().getLeft() + ) + val symbolSearch = new WithCompilationUnit(driver, offsetParams) with PcSymbolSearch + symbolSearch.soughtSymbols.map(_._1) + } else { + SymbolProvider.compilerSymbol(request.offsetOrSymbol().getRight()).map(symbolAlternatives(_)) + } + + def collect(parent: Option[Tree])( + tree: Tree | EndMarker, + toAdjust: SourcePosition, + symbol: Option[Symbol], + ): Option[(String, Option[lsp4j.Range])] = + val (pos, _) = toAdjust.adjust(text) + tree match + case t: DefTree if !request.includeDefinition() => + val sym = symbol.getOrElse(t.symbol) + Some(SemanticdbSymbols.symbolName(sym), None) + case t: Tree => + val sym = symbol.getOrElse(t.symbol) + Some(SemanticdbSymbols.symbolName(sym), Some(pos.toLsp)) + case _ => None + + def references(): List[ReferencesResult] = + soughtSymbols match + case Some(sought) if sought.nonEmpty => + resultWithSought(sought) + .flatten + .groupMap(_._1) { case (_, optRange) => + optRange.map(new Location(request.file().uri().toString(), _)) + } + .map { case (symbol, locs) => + PcReferencesResult(symbol, locs.flatten.asJava) + } + .toList + case _ => Nil +end PcReferencesProvider diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcRenameProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcRenameProvider.scala index 94482767f917..666ccf9c614f 100644 --- 
a/presentation-compiler/src/main/dotty/tools/pc/PcRenameProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcRenameProvider.scala @@ -16,7 +16,7 @@ final class PcRenameProvider( driver: InteractiveDriver, params: OffsetParams, name: Option[String] -) extends PcCollector[l.TextEdit](driver, params): +) extends WithSymbolSearchCollector[l.TextEdit](driver, params): private val forbiddenMethods = Set("equals", "hashCode", "unapply", "unary_!", "!") def canRenameSymbol(sym: Symbol)(using Context): Boolean = @@ -25,7 +25,7 @@ final class PcRenameProvider( || sym.source.path.isWorksheet) def prepareRename(): Option[l.Range] = - soughtSymbols(path).flatMap((symbols, pos) => + soughtSymbols.flatMap((symbols, pos) => if symbols.forall(canRenameSymbol) then Some(pos.toLsp) else None ) @@ -42,13 +42,10 @@ final class PcRenameProvider( ) end collect - def rename( - ): List[l.TextEdit] = - val (symbols, _) = soughtSymbols(path).getOrElse(Set.empty, pos) + def rename(): List[l.TextEdit] = + val (symbols, _) = soughtSymbols.getOrElse(Set.empty, pos) if symbols.nonEmpty && symbols.forall(canRenameSymbol(_)) - then - val res = result() - res + then result() else Nil end rename end PcRenameProvider diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcSemanticTokensProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcSemanticTokensProvider.scala index a5332f1e4ff6..216d9318197b 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcSemanticTokensProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcSemanticTokensProvider.scala @@ -60,7 +60,7 @@ final class PcSemanticTokensProvider( case _ => !df.rhs.isEmpty case _ => false - object Collector extends PcCollector[Option[Node]](driver, params): + object Collector extends SimpleCollector[Option[Node]](driver, params): override def collect( parent: Option[Tree] )(tree: Tree | EndMarker, pos: SourcePosition, symbol: Option[Symbol]): Option[Node] = diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcSymbolSearch.scala b/presentation-compiler/src/main/dotty/tools/pc/PcSymbolSearch.scala new file mode 100644 index 000000000000..fd3d74f16c16 --- /dev/null +++ b/presentation-compiler/src/main/dotty/tools/pc/PcSymbolSearch.scala @@ -0,0 +1,275 @@ +package dotty.tools.pc + +import dotty.tools.pc.PcSymbolSearch.* + +import dotty.tools.dotc.ast.NavigateAST +import dotty.tools.dotc.ast.Positioned +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.ast.tpd.* +import dotty.tools.dotc.ast.untpd +import dotty.tools.dotc.ast.untpd.ExtMethods +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Flags +import dotty.tools.dotc.core.NameOps.* +import dotty.tools.dotc.core.Names.* +import dotty.tools.dotc.core.StdNames.* +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.Types.* +import dotty.tools.dotc.interactive.Interactive +import dotty.tools.dotc.util.SourcePosition +import dotty.tools.dotc.util.Spans.Span +import dotty.tools.pc.utils.InteractiveEnrichments.* + +trait PcSymbolSearch: + self: WithCompilationUnit => + + private val caseClassSynthetics: Set[Name] = Set(nme.apply, nme.copy) + + lazy val rawPath = + Interactive + .pathTo(driver.openedTrees(uri), pos)(using driver.currentCtx) + .dropWhile(t => // NamedArg anyway doesn't have symbol + t.symbol == NoSymbol && !t.isInstanceOf[NamedArg] || + // same issue https://github.com/lampepfl/dotty/issues/15937 as below + t.isInstanceOf[TypeTree] + ) + + lazy val extensionMethods = + NavigateAST + 
.untypedPath(pos.span)(using compilatonUnitContext) + .collectFirst { case em @ ExtMethods(_, _) => em } + + lazy val path = rawPath match + // For type it will sometimes go into the wrong tree since TypeTree also contains the same span + // https://github.com/lampepfl/dotty/issues/15937 + case TypeApply(sel: Select, _) :: tail if sel.span.contains(pos.span) => + Interactive.pathTo(sel, pos.span) ::: rawPath + case _ => rawPath + + lazy val soughtSymbols: Option[(Set[Symbol], SourcePosition)] = + soughtSymbols(path) + + def soughtSymbols(path: List[Tree]): Option[(Set[Symbol], SourcePosition)] = + val sought = path match + /* reference of an extension paramter + * extension [EF](<>: List[EF]) + * def double(ys: List[EF]) = <> ++ ys + */ + case (id: Ident) :: _ + if id.symbol + .is(Flags.Param) && id.symbol.owner.is(Flags.ExtensionMethod) => + Some(findAllExtensionParamSymbols(id.sourcePos, id.name, id.symbol)) + /** + * Workaround for missing symbol in: + * class A[T](a: T) + * val x = new <>(1) + */ + case t :: (n: New) :: (sel: Select) :: _ + if t.symbol == NoSymbol && sel.symbol.isConstructor => + Some(symbolAlternatives(sel.symbol.owner), namePos(t)) + /** + * Workaround for missing symbol in: + * class A[T](a: T) + * val x = <>[Int](1) + */ + case (sel @ Select(New(t), _)) :: (_: TypeApply) :: _ + if sel.symbol.isConstructor => + Some(symbolAlternatives(sel.symbol.owner), namePos(t)) + /* simple identifier: + * val a = val@@ue + value + */ + case (id: Ident) :: _ => + Some(symbolAlternatives(id.symbol), id.sourcePos) + /* simple selector: + * object.val@@ue + */ + case (sel: Select) :: _ if selectNameSpan(sel).contains(pos.span) => + Some(symbolAlternatives(sel.symbol), pos.withSpan(sel.nameSpan)) + /* named argument: + * foo(nam@@e = "123") + */ + case (arg: NamedArg) :: (appl: Apply) :: _ => + val realName = arg.name.stripModuleClassSuffix.lastPart + if pos.span.start > arg.span.start && pos.span.end < arg.span.point + realName.length + then + val length = realName.toString.backticked.length() + val pos = arg.sourcePos.withSpan( + arg.span + .withEnd(arg.span.start + length) + .withPoint(arg.span.start) + ) + appl.symbol.paramSymss.flatten.find(_.name == arg.name).map { s => + // if it's a case class we need to look for parameters also + if caseClassSynthetics(s.owner.name) && s.owner.is(Flags.Synthetic) + then + ( + Set( + s, + s.owner.owner.companion.info.member(s.name).symbol, + s.owner.owner.info.member(s.name).symbol + ) + .filter(_ != NoSymbol), + pos, + ) + else (Set(s), pos) + } + else None + end if + /* all definitions: + * def fo@@o = ??? + * class Fo@@o = ??? + * etc. + */ + case (df: NamedDefTree) :: _ + if df.nameSpan.contains(pos.span) && !isGeneratedGiven(df, sourceText) => + Some(symbolAlternatives(df.symbol), pos.withSpan(df.nameSpan)) + /* enum cases with params + * enum Foo: + * case B@@ar[A](i: A) + */ + case (df: NamedDefTree) :: Template(_, _, self, _) :: _ + if (df.name == nme.apply || df.name == nme.unapply) && df.nameSpan.isZeroExtent => + Some(symbolAlternatives(self.tpt.symbol), self.sourcePos) + /** + * For traversing annotations: + * @JsonNo@@tification("") + * def params() = ??? 
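+       *
+       * The annotation tree containing the cursor is re-resolved with
+       * `Interactive.pathTo`, so the sought symbol can be the annotation class
+       * itself or any identifier used inside the annotation's arguments.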
+ */ + case (df: MemberDef) :: _ if df.span.contains(pos.span) => + val annotTree = df.mods.annotations.find { t => + t.span.contains(pos.span) + } + collectTrees(annotTree).flatMap { t => + soughtSymbols( + Interactive.pathTo(t, pos.span) + ) + }.headOption + + /* Import selectors: + * import scala.util.Tr@@y + */ + case (imp: Import) :: _ if imp.span.contains(pos.span) => + imp + .selector(pos.span) + .map(sym => (symbolAlternatives(sym), sym.sourcePos)) + + case _ => None + + sought match + case None => seekInExtensionParameters() + case _ => sought + + end soughtSymbols + + private def seekInExtensionParameters() = + def collectParams( + extMethods: ExtMethods + ): Option[ExtensionParamOccurence] = + NavigateAST + .pathTo(pos.span, extMethods.paramss.flatten)(using + compilatonUnitContext + ) + .collectFirst { + case v: untpd.ValOrTypeDef => + ExtensionParamOccurence( + v.name, + v.namePos, + v.symbol, + extMethods.methods + ) + case i: untpd.Ident => + ExtensionParamOccurence( + i.name, + i.sourcePos, + i.symbol, + extMethods.methods + ) + } + + for + extensionMethodScope <- extensionMethods + occurrence <- collectParams(extensionMethodScope) + symbols <- collectAllExtensionParamSymbols( + path.headOption.getOrElse(unit.tpdTree), + occurrence + ) + yield symbols + end seekInExtensionParameters + + private def collectAllExtensionParamSymbols( + tree: tpd.Tree, + occurrence: ExtensionParamOccurence, + ): Option[(Set[Symbol], SourcePosition)] = + occurrence match + case ExtensionParamOccurence(_, namePos, symbol, _) + if symbol != NoSymbol && !symbol.isError && !symbol.owner.is( + Flags.ExtensionMethod + ) => + Some((symbolAlternatives(symbol), namePos)) + case ExtensionParamOccurence(name, namePos, _, methods) => + val symbols = + for + method <- methods.toSet + symbol <- + Interactive.pathTo(tree, method.span) match + case (d: DefDef) :: _ => + d.paramss.flatten.collect { + case param if param.name.decoded == name.decoded => + param.symbol + } + case _ => Set.empty[Symbol] + if (symbol != NoSymbol && !symbol.isError) + withAlt <- symbolAlternatives(symbol) + yield withAlt + if symbols.nonEmpty then Some((symbols, namePos)) else None + end collectAllExtensionParamSymbols + + private def findAllExtensionParamSymbols( + pos: SourcePosition, + name: Name, + sym: Symbol, + ) = + val symbols = + for + methods <- extensionMethods.map(_.methods) + symbols <- collectAllExtensionParamSymbols( + unit.tpdTree, + ExtensionParamOccurence(name, pos, sym, methods), + ) + yield symbols + symbols.getOrElse((symbolAlternatives(sym), pos)) + end findAllExtensionParamSymbols +end PcSymbolSearch + +object PcSymbolSearch: + // NOTE: Connected to https://github.com/lampepfl/dotty/issues/16771 + // `sel.nameSpan` is calculated incorrectly in (1 + 2).toString + // See test DocumentHighlightSuite.select-parentheses + def selectNameSpan(sel: Select): Span = + val span = sel.span + if span.exists then + val point = span.point + if sel.name.toTermName == nme.ERROR then Span(point) + else if sel.qualifier.span.start > span.point then // right associative + val realName = sel.name.stripModuleClassSuffix.lastPart + Span(span.start, span.start + realName.length, point) + else Span(point, span.end, point) + else span + + def collectTrees(trees: Iterable[Positioned]): Iterable[Tree] = + trees.collect { case t: Tree => t } + + def namePos(tree: Tree)(using Context): SourcePosition = + tree match + case sel: Select => sel.sourcePos.withSpan(selectNameSpan(sel)) + case _ => tree.sourcePos + + def isGeneratedGiven(df: 
NamedDefTree, sourceText: String)(using Context) = + val nameSpan = df.nameSpan + df.symbol.is(Flags.Given) && sourceText.substring( + nameSpan.start, + nameSpan.end, + ) != df.name.toString() + +end PcSymbolSearch + diff --git a/presentation-compiler/src/main/dotty/tools/pc/Scala3CompilerAccess.scala b/presentation-compiler/src/main/dotty/tools/pc/Scala3CompilerAccess.scala index ef5aaf4e5ed0..1443fbcf37cc 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/Scala3CompilerAccess.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/Scala3CompilerAccess.scala @@ -8,13 +8,14 @@ import scala.meta.internal.pc.CompilerAccess import scala.meta.pc.PresentationCompilerConfig import dotty.tools.dotc.reporting.StoreReporter +import dotty.tools.dotc.interactive.InteractiveDriver class Scala3CompilerAccess( config: PresentationCompilerConfig, sh: Option[ScheduledExecutorService], newCompiler: () => Scala3CompilerWrapper )(using ec: ExecutionContextExecutor, rc: ReportContext) - extends CompilerAccess[StoreReporter, MetalsDriver]( + extends CompilerAccess[StoreReporter, InteractiveDriver]( config, sh, newCompiler, diff --git a/presentation-compiler/src/main/dotty/tools/pc/Scala3CompilerWrapper.scala b/presentation-compiler/src/main/dotty/tools/pc/Scala3CompilerWrapper.scala index de4fb282edc9..968c144625a3 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/Scala3CompilerWrapper.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/Scala3CompilerWrapper.scala @@ -4,11 +4,12 @@ import scala.meta.internal.pc.CompilerWrapper import scala.meta.internal.pc.ReporterAccess import dotty.tools.dotc.reporting.StoreReporter +import dotty.tools.dotc.interactive.InteractiveDriver -class Scala3CompilerWrapper(driver: MetalsDriver) - extends CompilerWrapper[StoreReporter, MetalsDriver]: +class Scala3CompilerWrapper(driver: InteractiveDriver) + extends CompilerWrapper[StoreReporter, InteractiveDriver]: - override def compiler(): MetalsDriver = driver + override def compiler(): InteractiveDriver = driver override def resetReporter(): Unit = val ctx = driver.currentCtx diff --git a/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala b/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala index 86aa895cb4fc..218d92c38ffa 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala @@ -30,13 +30,16 @@ import scala.meta.pc.{PcSymbolInformation as IPcSymbolInformation} import dotty.tools.dotc.reporting.StoreReporter import dotty.tools.pc.completions.CompletionProvider +import dotty.tools.pc.InferExpectedType import dotty.tools.pc.completions.OverrideCompletions import dotty.tools.pc.buildinfo.BuildInfo +import dotty.tools.pc.SymbolInformationProvider +import dotty.tools.dotc.interactive.InteractiveDriver import org.eclipse.lsp4j.DocumentHighlight import org.eclipse.lsp4j.TextEdit import org.eclipse.lsp4j as l -import scala.meta.internal.pc.SymbolInformationProvider + case class ScalaPresentationCompiler( buildTargetIdentifier: String = "", @@ -48,7 +51,8 @@ case class ScalaPresentationCompiler( sh: Option[ScheduledExecutorService] = None, config: PresentationCompilerConfig = PresentationCompilerConfigImpl(), folderPath: Option[Path] = None, - reportsLevel: ReportLevel = ReportLevel.Info + reportsLevel: ReportLevel = ReportLevel.Info, + completionItemPriority: CompletionItemPriority = (_: String) => 0, ) extends PresentationCompiler: def 
this() = this("", None, Nil, Nil) @@ -56,27 +60,38 @@ case class ScalaPresentationCompiler( val scalaVersion = BuildInfo.scalaVersion private val forbiddenOptions = Set("-print-lines", "-print-tasty") - private val forbiddenDoubleOptions = Set("-release") + private val forbiddenDoubleOptions = Set.empty[String] given ReportContext = folderPath .map(StdReportContext(_, _ => buildTargetName, reportsLevel)) .getOrElse(EmptyReportContext) + override def withCompletionItemPriority( + priority: CompletionItemPriority + ): PresentationCompiler = + copy(completionItemPriority = priority) + override def withBuildTargetName(buildTargetName: String) = copy(buildTargetName = Some(buildTargetName)) override def withReportsLoggerLevel(level: String): PresentationCompiler = copy(reportsLevel = ReportLevel.fromString(level)) - val compilerAccess: CompilerAccess[StoreReporter, MetalsDriver] = + val compilerAccess: CompilerAccess[StoreReporter, InteractiveDriver] = Scala3CompilerAccess( config, sh, - () => new Scala3CompilerWrapper(newDriver) - )(using - ec - ) + () => new Scala3CompilerWrapper(CachingDriver(driverSettings)) + )(using ec) + + val driverSettings = + val implicitSuggestionTimeout = List("-Ximport-suggestion-timeout", "0") + val defaultFlags = List("-color:never") + val filteredOptions = removeDoubleOptions(options.filterNot(forbiddenOptions)) + + filteredOptions ::: defaultFlags ::: implicitSuggestionTimeout ::: "-classpath" :: classpath + .mkString(File.pathSeparator) :: Nil private def removeDoubleOptions(options: List[String]): List[String] = options match @@ -85,19 +100,6 @@ case class ScalaPresentationCompiler( case head :: tail => head :: removeDoubleOptions(tail) case Nil => options - def newDriver: MetalsDriver = - val implicitSuggestionTimeout = List("-Ximport-suggestion-timeout", "0") - val defaultFlags = List("-color:never") - val filteredOptions = removeDoubleOptions( - options.filterNot(forbiddenOptions) - ) - val settings = - filteredOptions ::: defaultFlags ::: implicitSuggestionTimeout ::: "-classpath" :: classpath - .mkString( - File.pathSeparator - ) :: Nil - new MetalsDriver(settings) - override def semanticTokens( params: VirtualFileParams ): CompletableFuture[ju.List[Node]] = @@ -139,10 +141,12 @@ case class ScalaPresentationCompiler( new CompletionProvider( search, driver, + () => InteractiveDriver(driverSettings), params, config, buildTargetIdentifier, - folderPath + folderPath, + completionItemPriority ).completions() } @@ -178,6 +182,28 @@ case class ScalaPresentationCompiler( PcDocumentHighlightProvider(driver, params).highlights.asJava } + override def references( + params: ReferencesRequest + ): CompletableFuture[ju.List[ReferencesResult]] = + compilerAccess.withNonInterruptableCompiler(Some(params.file()))( + List.empty[ReferencesResult].asJava, + params.file().token, + ) { access => + val driver = access.compiler() + PcReferencesProvider(driver, params) + .references() + .asJava + } + + def inferExpectedType(params: OffsetParams): CompletableFuture[ju.Optional[String]] = + compilerAccess.withInterruptableCompiler(Some(params))( + Optional.empty(), + params.token, + ) { access => + val driver = access.compiler() + new InferExpectedType(search, driver, params).infer().asJava + } + def shutdown(): Unit = compilerAccess.shutdown() diff --git a/presentation-compiler/src/main/dotty/tools/pc/ScriptFirstImportPosition.scala b/presentation-compiler/src/main/dotty/tools/pc/ScriptFirstImportPosition.scala index 2bb8023cee08..5a4c135fdc4c 100644 --- 
a/presentation-compiler/src/main/dotty/tools/pc/ScriptFirstImportPosition.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/ScriptFirstImportPosition.scala @@ -1,6 +1,5 @@ package dotty.tools.pc -import dotty.tools.dotc.ast.tpd.* import dotty.tools.dotc.core.Comments.Comment object ScriptFirstImportPosition: diff --git a/presentation-compiler/src/main/dotty/tools/pc/SignatureHelpProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/SignatureHelpProvider.scala index edfd9c95fa84..bd16d2ce2aa9 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/SignatureHelpProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/SignatureHelpProvider.scala @@ -1,6 +1,5 @@ package dotty.tools.pc -import dotty.tools.dotc.ast.tpd.* import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Flags import dotty.tools.dotc.core.Symbols.* diff --git a/presentation-compiler/src/main/dotty/tools/pc/SymbolInformationProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/SymbolInformationProvider.scala index 0743361f255d..ccda618078b8 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/SymbolInformationProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/SymbolInformationProvider.scala @@ -1,13 +1,12 @@ -package scala.meta.internal.pc +package dotty.tools.pc +import scala.collection.mutable import scala.util.control.NonFatal import scala.meta.pc.PcSymbolKind import scala.meta.pc.PcSymbolProperty import dotty.tools.dotc.core.Contexts.Context -import dotty.tools.dotc.core.Denotations.Denotation -import dotty.tools.dotc.core.Denotations.MultiDenotation import dotty.tools.dotc.core.Flags import dotty.tools.dotc.core.Names.* import dotty.tools.dotc.core.StdNames.nme @@ -15,58 +14,20 @@ import dotty.tools.dotc.core.Symbols.* import dotty.tools.pc.utils.InteractiveEnrichments.deepDealias import dotty.tools.pc.SemanticdbSymbols import dotty.tools.pc.utils.InteractiveEnrichments.allSymbols +import dotty.tools.pc.utils.InteractiveEnrichments.stripBackticks +import scala.meta.internal.pc.PcSymbolInformation +import scala.meta.internal.pc.SymbolInfo +import dotty.tools.dotc.core.Denotations.{Denotation, MultiDenotation} class SymbolInformationProvider(using Context): - private def toSymbols( - pkg: String, - parts: List[(String, Boolean)], - ): List[Symbol] = - def loop( - owners: List[Symbol], - parts: List[(String, Boolean)], - ): List[Symbol] = - parts match - case (head, isClass) :: tl => - val foundSymbols = - owners.flatMap { owner => - val next = - if isClass then owner.info.member(typeName(head)) - else owner.info.member(termName(head)) - next.allSymbols - } - if foundSymbols.nonEmpty then loop(foundSymbols, tl) - else Nil - case Nil => owners - - val pkgSym = - if pkg == "_empty_" then requiredPackage(nme.EMPTY_PACKAGE) - else requiredPackage(pkg) - loop(List(pkgSym), parts) - end toSymbols def info(symbol: String): Option[PcSymbolInformation] = - val index = symbol.lastIndexOf("/") - val pkg = normalizePackage(symbol.take(index + 1)) - - def loop( - symbol: String, - acc: List[(String, Boolean)], - ): List[(String, Boolean)] = - if symbol.isEmpty() then acc.reverse - else - val newSymbol = symbol.takeWhile(c => c != '.' 
&& c != '#') - val rest = symbol.drop(newSymbol.size) - loop(rest.drop(1), (newSymbol, rest.headOption.exists(_ == '#')) :: acc) - val names = - loop(symbol.drop(index + 1).takeWhile(_ != '('), List.empty) - - val foundSymbols = - try toSymbols(pkg, names) - catch case NonFatal(e) => Nil + val foundSymbols = SymbolProvider.compilerSymbols(symbol) val (searchedSymbol, alternativeSymbols) = - foundSymbols.partition: compilerSymbol => + foundSymbols.partition(compilerSymbol => SemanticdbSymbols.symbolName(compilerSymbol) == symbol + ) searchedSymbol match case Nil => None @@ -76,11 +37,25 @@ class SymbolInformationProvider(using Context): if classSym.isClass then classSym.asClass.parentSyms.map(SemanticdbSymbols.symbolName) else Nil + val allParents = + val visited = mutable.Set[Symbol]() + def collect(sym: Symbol): Unit = { + visited += sym + if sym.isClass + then sym.asClass.parentSyms.foreach { + case parent if !visited(parent) => + collect(parent) + case _ => + } + } + collect(classSym) + visited.toList.map(SemanticdbSymbols.symbolName) val dealisedSymbol = if sym.isAliasType then sym.info.deepDealias.typeSymbol else sym val classOwner = sym.ownersIterator.drop(1).find(s => s.isClass || s.is(Flags.Module)) val overridden = sym.denot.allOverriddenSymbols.toList + val memberDefAnnots = sym.info.membersBasedOnFlags(Flags.Method, Flags.EmptyFlags).flatMap(_.allSymbols).flatMap(_.denot.annotations) val pcSymbolInformation = PcSymbolInformation( @@ -95,6 +70,9 @@ class SymbolInformationProvider(using Context): properties = if sym.is(Flags.Abstract) then List(PcSymbolProperty.ABSTRACT) else Nil, + recursiveParents = allParents, + annotations = sym.denot.annotations.map(_.symbol.showFullName), + memberDefsAnnotations = memberDefAnnots.map(_.symbol.showFullName).toList ) Some(pcSymbolInformation) @@ -115,8 +93,50 @@ class SymbolInformationProvider(using Context): else if sym.is(Flags.TypeParam) then PcSymbolKind.TYPE_PARAMETER else if sym.isType then PcSymbolKind.TYPE else PcSymbolKind.UNKNOWN_KIND +end SymbolInformationProvider + +object SymbolProvider: + + def compilerSymbol(symbol: String)(using Context): Option[Symbol] = + compilerSymbols(symbol).find(sym => SemanticdbSymbols.symbolName(sym) == symbol) + + def compilerSymbols(symbol: String)(using Context): List[Symbol] = + try toSymbols(SymbolInfo.getPartsFromSymbol(symbol)) + catch case NonFatal(e) => Nil private def normalizePackage(pkg: String): String = pkg.replace("/", ".").nn.stripSuffix(".") -end SymbolInformationProvider + private def toSymbols(info: SymbolInfo.SymbolParts)(using Context): List[Symbol] = + def collectSymbols(denotation: Denotation): List[Symbol] = + denotation match + case MultiDenotation(denot1, denot2) => + collectSymbols(denot1) ++ collectSymbols(denot2) + case denot => List(denot.symbol) + + def loop( + owners: List[Symbol], + parts: List[(String, Boolean)], + ): List[Symbol] = + parts match + case (head, isClass) :: tl => + val foundSymbols = + owners.flatMap { owner => + val name = head.stripBackticks + val next = + if isClass then owner.info.member(typeName(name)) + else owner.info.member(termName(name)) + collectSymbols(next).filter(_.exists) + } + if foundSymbols.nonEmpty then loop(foundSymbols, tl) + else Nil + case Nil => owners + + val pkgSym = + if info.packagePart == "_empty_/" then requiredPackage(nme.EMPTY_PACKAGE) + else requiredPackage(normalizePackage(info.packagePart)) + val found = loop(List(pkgSym), info.names) + info.paramName match + case Some(name) => 
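+          // e.g. a semanticdb parameter symbol such as `a/Foo#bar().(x)` (illustrative)
+          // resolves to the `x` parameter of the method found in the previous step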
found.flatMap(_.paramSymss.flatten.find(_.showName == name)) + case _ => found + end toSymbols diff --git a/presentation-compiler/src/main/dotty/tools/pc/WithCompilationUnit.scala b/presentation-compiler/src/main/dotty/tools/pc/WithCompilationUnit.scala new file mode 100644 index 000000000000..8110db269b3b --- /dev/null +++ b/presentation-compiler/src/main/dotty/tools/pc/WithCompilationUnit.scala @@ -0,0 +1,105 @@ +package dotty.tools.pc + +import scala.language.unsafeNulls + +import java.nio.file.Paths + +import scala.meta as m + +import scala.meta.internal.metals.CompilerOffsetParams +import scala.meta.pc.OffsetParams +import scala.meta.pc.VirtualFileParams + +import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Flags +import dotty.tools.dotc.core.NameOps.* +import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.interactive.InteractiveDriver +import dotty.tools.dotc.util.SourceFile +import dotty.tools.pc.utils.InteractiveEnrichments.* + +class WithCompilationUnit( + val driver: InteractiveDriver, + params: VirtualFileParams, +): + val uri = params.uri() + val filePath = Paths.get(uri) + val sourceText = params.text + val text = sourceText.toCharArray() + val source = + SourceFile.virtual(filePath.toString, sourceText) + driver.run(uri, source) + given ctx: Context = driver.currentCtx + + private val run = driver.currentCtx.run + val unit = run.units.head + val compilatonUnitContext = ctx.fresh.setCompilationUnit(unit) + val offset = params match + case op: OffsetParams => op.offset() + case _ => 0 + val offsetParams = + params match + case op: OffsetParams => op + case _ => + CompilerOffsetParams(params.uri(), params.text(), 0, params.token()) + val pos = driver.sourcePosition(offsetParams) + + // First identify the symbol we are at, comments identify @@ as current cursor position + def symbolAlternatives(sym: Symbol)(using Context) = + def member(parent: Symbol) = parent.info.member(sym.name).symbol + def primaryConstructorTypeParam(owner: Symbol) = + for + typeParams <- owner.primaryConstructor.paramSymss.headOption + param <- typeParams.find(_.name == sym.name) + if (param.isType) + yield param + def additionalForEnumTypeParam(enumClass: Symbol) = + if enumClass.is(Flags.Enum) then + val enumOwner = + if enumClass.is(Flags.Case) + then + // we check that the type parameter is the one from enum class + // and not an enum case type parameter with the same name + Option.when(member(enumClass).is(Flags.Synthetic))( + enumClass.maybeOwner.companionClass + ) + else Some(enumClass) + enumOwner.toSet.flatMap { enumOwner => + val symsInEnumCases = enumOwner.children.toSet.flatMap(enumCase => + if member(enumCase).is(Flags.Synthetic) + then primaryConstructorTypeParam(enumCase) + else None + ) + val symsInEnumOwner = + primaryConstructorTypeParam(enumOwner).toSet + member(enumOwner) + symsInEnumCases ++ symsInEnumOwner + } + else Set.empty + val all = + if sym.is(Flags.ModuleClass) then + Set(sym, sym.companionModule, sym.companionModule.companion) + else if sym.isClass then + Set(sym, sym.companionModule, sym.companion.moduleClass) + else if sym.is(Flags.Module) then + Set(sym, sym.companionClass, sym.moduleClass) + else if sym.isTerm && (sym.owner.isClass || sym.owner.isConstructor) + then + val info = + if sym.owner.isClass then sym.owner.info else sym.owner.owner.info + Set( + sym, + info.member(sym.asTerm.name.setterName).symbol, + info.member(sym.asTerm.name.getterName).symbol, + ) ++ sym.allOverriddenSymbols.toSet + // type used in primary constructor will not 
match the one used in the class + else if sym.isTypeParam && sym.owner.isPrimaryConstructor then + Set(sym, member(sym.maybeOwner.maybeOwner)) + ++ additionalForEnumTypeParam(sym.maybeOwner.maybeOwner) + else if sym.isTypeParam then + primaryConstructorTypeParam(sym.maybeOwner).toSet + ++ additionalForEnumTypeParam(sym.maybeOwner) + sym + else Set(sym) + all.filter(s => s != NoSymbol && !s.isError) + end symbolAlternatives + +end WithCompilationUnit diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionPos.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionPos.scala index ad571ff843c3..6d89cb663b9c 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionPos.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionPos.scala @@ -22,7 +22,8 @@ case class CompletionPos( identEnd: Int, query: String, originalCursorPosition: SourcePosition, - sourceUri: URI + sourceUri: URI, + withCURSOR: Boolean ): def queryEnd: Int = originalCursorPosition.point def stripSuffixEditRange: l.Range = new l.Range(originalCursorPosition.offsetToPos(queryStart), originalCursorPosition.offsetToPos(identEnd)) @@ -34,17 +35,19 @@ object CompletionPos: def infer( sourcePos: SourcePosition, offsetParams: OffsetParams, - adjustedPath: List[Tree] + adjustedPath: List[Tree], + wasCursorApplied: Boolean )(using Context): CompletionPos = val identEnd = adjustedPath match case (refTree: RefTree) :: _ if refTree.name.toString.contains(Cursor.value) => refTree.span.end - Cursor.value.length + case (refTree: RefTree) :: _ => refTree.span.end case _ => sourcePos.end val query = Completion.completionPrefix(adjustedPath, sourcePos) val start = sourcePos.end - query.length() - CompletionPos(start, identEnd, query.nn, sourcePos, offsetParams.uri.nn) + CompletionPos(start, identEnd, query.nn, sourcePos, offsetParams.uri.nn, wasCursorApplied) /** * Infer the indentation by counting the number of spaces in the given line. 
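The `symbolAlternatives` helper introduced above is what lets rename and document highlight treat compiler-generated members as one logical symbol. A minimal sketch of the kind of user code it has to reconcile (hypothetical example, for illustration only):

// Hypothetical user code: `count` is one logical symbol, but the compiler
// splits it into a getter `count` and a synthetic setter `count_=`.
class Counter:
  var count: Int = 0

val c = Counter()
c.count = 1            // resolves to the synthetic setter `count_=`
println(c.count)       // resolves to the getter

Because `symbolAlternatives` returns the getter, the setter and all overridden symbols together, a rename or highlight of `count` covers every occurrence above.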
diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala index 9cd98de33141..adaeadb12978 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala @@ -14,9 +14,14 @@ import dotty.tools.dotc.ast.tpd.* import dotty.tools.dotc.core.Constants.Constant import dotty.tools.dotc.core.Contexts.Context import dotty.tools.dotc.core.Phases -import dotty.tools.dotc.core.StdNames +import dotty.tools.dotc.core.StdNames.nme +import dotty.tools.dotc.core.Flags +import dotty.tools.dotc.core.Names.DerivedName import dotty.tools.dotc.interactive.Interactive +import dotty.tools.dotc.interactive.Completion import dotty.tools.dotc.interactive.InteractiveDriver +import dotty.tools.dotc.parsing.Tokens +import dotty.tools.dotc.profile.Profiler import dotty.tools.dotc.util.SourceFile import dotty.tools.pc.AutoImports.AutoImportEdits import dotty.tools.pc.AutoImports.AutoImportsGenerator @@ -32,36 +37,79 @@ import org.eclipse.lsp4j.InsertTextFormat import org.eclipse.lsp4j.InsertTextMode import org.eclipse.lsp4j.Range as LspRange import org.eclipse.lsp4j.TextEdit +import scala.meta.pc.CompletionItemPriority + +object CompletionProvider: + val allKeywords = + val softKeywords = Tokens.softModifierNames + nme.as + nme.derives + nme.extension + nme.throws + nme.using + Tokens.keywords.toList.map(Tokens.tokenString) ++ softKeywords.map(_.toString) class CompletionProvider( search: SymbolSearch, - driver: InteractiveDriver, + cachingDriver: InteractiveDriver, + freshDriver: () => InteractiveDriver, params: OffsetParams, config: PresentationCompilerConfig, buildTargetIdentifier: String, - folderPath: Option[Path] + folderPath: Option[Path], + referenceCounter: CompletionItemPriority )(using reports: ReportContext): def completions(): CompletionList = val uri = params.uri().nn val text = params.text().nn - val code = applyCompletionCursor(params) + val (wasCursorApplied, code) = applyCompletionCursor(params) val sourceFile = SourceFile.virtual(uri, code) + + /** Creating a new fresh driver is way slower than reusing existing one, + * but runnig a compilation has side effects that modifies the state of the driver. + * We don't want to affect cachingDriver state with compilation including "CURSOR" suffix. + * + * We could in theory save this fresh driver for reuse, but it is a choice between extra memory usage and speed. + * The scenario in which "CURSOR" is applied (empty query or query equal to any keyword) has a slim chance of happening. 
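+     * For example (illustrative): `List(1).@@` (empty query after the dot) and
+     * `List(1) match@@` (query equal to the `match` keyword) both get the CURSOR
+     * marker and therefore a fresh driver, while `List(1).fil@@` completes on the
+     * caching driver as before.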
+ */ + + val driver = if wasCursorApplied then freshDriver() else cachingDriver driver.run(uri, sourceFile) - val ctx = driver.currentCtx + given ctx: Context = driver.currentCtx val pos = driver.sourcePosition(params) val (items, isIncomplete) = driver.compilationUnits.get(uri) match case Some(unit) => + val newctx = ctx.fresh + .setCompilationUnit(unit) + .setProfiler(Profiler()(using ctx)) + .withPhase(Phases.typerPhase(using ctx)) + val tpdPath0 = Interactive.pathTo(unit.tpdTree, pos.span)(using newctx) + val adjustedPath = Interactive.resolveTypedOrUntypedPath(tpdPath0, pos)(using newctx) + + val tpdPath = tpdPath0 match + case Select(qual, name) :: tail + /** If for any reason we end up in param after lifting, we want to inline the synthetic val: + * List(1).iterator.sliding@@ will be transformed into: + * + * 1| val $1$: Iterator[Int] = List.apply[Int]([1 : Int]*).iterator + * 2| { + * 3| def $anonfun(size: Int, step: Int): $1$.GroupedIterator[Int] = + * 4| $1$.sliding[Int](size, step) + * 5| closure($anonfun) + * 6| }:((Int, Int) => Iterator[Int]#GroupedIterator[Int]) + * + * With completion being run at line 4 at @@: + * 4| $1$.sliding@@[Int](size, step) + * + */ + if qual.symbol.is(Flags.Synthetic) && qual.symbol.name.isInstanceOf[DerivedName] => + qual.symbol.defTree match + case valdef: ValDef => Select(valdef.rhs, name) :: tail + case _ => tpdPath0 + case _ => tpdPath0 - val newctx = ctx.fresh.setCompilationUnit(unit).withPhase(Phases.typerPhase(using ctx)) - val tpdPath = Interactive.pathTo(newctx.compilationUnit.tpdTree, pos.span)(using newctx) - val adjustedPath = Interactive.resolveTypedOrUntypedPath(tpdPath, pos)(using newctx) val locatedCtx = Interactive.contextOfPath(tpdPath)(using newctx) val indexedCtx = IndexedContext(locatedCtx) - val completionPos = CompletionPos.infer(pos, params, adjustedPath)(using locatedCtx) + val completionPos = CompletionPos.infer(pos, params, adjustedPath, wasCursorApplied)(using locatedCtx) val autoImportsGen = AutoImports.generator( completionPos.toSourcePosition, @@ -86,7 +134,8 @@ class CompletionProvider( folderPath, autoImportsGen, unit.comments, - driver.settings + driver.settings, + referenceCounter ).completions() val items = completions.zipWithIndex.map { case (item, idx) => @@ -123,23 +172,30 @@ class CompletionProvider( * Otherwise, completion poisition doesn't point at any tree * because scala parser trim end position to the last statement pos. */ - private def applyCompletionCursor(params: OffsetParams): String = + private def applyCompletionCursor(params: OffsetParams): (Boolean, String) = val text = params.text().nn val offset = params.offset().nn + val query = Completion.naiveCompletionPrefix(text, offset) - val isStartMultilineComment = - val i = params.offset() - i >= 3 && (text.charAt(i - 1) match - case '*' => - text.charAt(i - 2) == '*' && - text.charAt(i - 3) == '/' - case _ => false - ) - if isStartMultilineComment then - // Insert potentially missing `*/` to avoid comment out all codes after the "/**". 
- text.substring(0, offset).nn + Cursor.value + "*/" + text.substring(offset) + if offset > 0 && text.charAt(offset - 1).isUnicodeIdentifierPart + && !CompletionProvider.allKeywords.contains(query) then false -> text else - text.substring(0, offset).nn + Cursor.value + text.substring(offset) + val isStartMultilineComment = + + val i = params.offset() + i >= 3 && (text.charAt(i - 1) match + case '*' => + text.charAt(i - 2) == '*' && + text.charAt(i - 3) == '/' + case _ => false + ) + true -> ( + if isStartMultilineComment then + // Insert potentially missing `*/` to avoid comment out all codes after the "/**". + text.substring(0, offset).nn + Cursor.value + "*/" + text.substring(offset) + else + text.substring(0, offset).nn + Cursor.value + text.substring(offset) + ) end applyCompletionCursor private def completionItems( @@ -172,7 +228,7 @@ class CompletionProvider( Select(Apply(Select(Select(_, name), _), _), _), _ ) :: _ => - name == StdNames.nme.StringContext + name == nme.StringContext // "My name is $name" case Literal(Constant(_: String)) :: _ => true diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala index 9071b2cd2a23..90b285bffb3a 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala @@ -101,13 +101,13 @@ object CompletionValue: )(using Context): String = if symbol.isConstructor then s"${snippetAffix.toPrefix}${label}${description(printer)}" else if symbol.is(Method) then s"${label}${description(printer)}" - else if symbol.is(Mutable) then s"$label: ${description(printer)}" + else if symbol.is(Mutable) then s"$label${description(printer)}" else if symbol.is(Package) || symbol.is(Module) || symbol.isClass then s"${labelWithSuffix(printer)}${description(printer)}" else if symbol.isType then labelWithSuffix(printer) else if symbol.isTerm && symbol.info.typeSymbol.is(Module) then s"${label}${description(printer)}" - else s"$label: ${description(printer)}" + else s"$label${description(printer)}" protected def labelWithSuffix(printer: ShortenedTypePrinter)(using Context): String = if snippetAffix.addLabelSnippet @@ -119,7 +119,10 @@ object CompletionValue: else label override def description(printer: ShortenedTypePrinter)(using Context): String = - printer.completionSymbol(denotation) + def info = denotation.info.widenTermRefExpr + val isVal = !(symbol.is(Module) || symbol.is(Method) || symbol.isType || info.typeSymbol.is(Module)) + val prefix = if isVal then ": " else "" + prefix ++ printer.completionSymbol(denotation) end Symbolic @@ -178,9 +181,10 @@ object CompletionValue: override def completionItemDataKind: Integer = CompletionSource.WorkspaceKind.ordinal override def labelWithDescription(printer: ShortenedTypePrinter)(using Context): String = + def isMethodOrValue = !(symbol.isType || symbol.is(Module)) if symbol.isConstructor || symbol.name == nme.apply then s"${snippetAffix.toPrefix}${label}${description(printer)} - ${printer.fullNameString(importSymbol.effectiveOwner)}" - else if symbol.is(Method) then + else if isMethodOrValue then s"${labelWithSuffix(printer)} - ${printer.fullNameString(symbol.effectiveOwner)}" else if symbol.is(Package) || symbol.is(Module) || symbol.isClass then s"${labelWithSuffix(printer)} -${description(printer)}" @@ -199,7 +203,7 @@ object CompletionValue: CompletionItemKind.Method override def 
completionItemDataKind: Integer = CompletionSource.ImplicitClassKind.ordinal override def description(printer: ShortenedTypePrinter)(using Context): String = - s"${printer.completionSymbol(denotation)} (implicit)" + s"${super.description(printer)} (implicit)" /** * CompletionValue for extension methods via SymbolSearch @@ -339,6 +343,9 @@ object CompletionValue: override def labelWithDescription(printer: ShortenedTypePrinter)(using Context): String = label + + override def description(printer: ShortenedTypePrinter)(using Context): String = + printer.completionSymbol(denotation) end CaseKeyword case class Document(label: String, doc: String, description: String) @@ -354,6 +361,15 @@ object CompletionValue: description override def insertMode: Option[InsertTextMode] = Some(InsertTextMode.AsIs) + case class SingletonValue(label: String, info: Type, override val range: Option[Range]) + extends CompletionValue: + override def insertText: Option[String] = Some(label) + override def labelWithDescription(printer: ShortenedTypePrinter)(using Context): String = + s"$label: ${printer.tpe(info)}" + + override def completionItemKind(using Context): CompletionItemKind = + CompletionItemKind.Constant + def namedArg(label: String, sym: ParamSymbol)(using Context ): CompletionValue = diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala index fb39102399ba..05dbe1ef5a43 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala @@ -5,7 +5,6 @@ import java.nio.file.Path import java.nio.file.Paths import scala.collection.mutable -import scala.meta.internal.metals.Fuzzy import scala.meta.internal.metals.ReportContext import scala.meta.internal.mtags.CoursierComplete import scala.meta.internal.pc.{IdentifierComparator, MemberOrdering, CompletionFuzzy} @@ -27,15 +26,12 @@ import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.interactive.Completion import dotty.tools.dotc.interactive.Completion.Mode -import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.util.SrcPos import dotty.tools.pc.AutoImports.AutoImportsGenerator import dotty.tools.pc.buildinfo.BuildInfo import dotty.tools.pc.completions.OverrideCompletions.OverrideExtractor import dotty.tools.pc.utils.InteractiveEnrichments.* -import dotty.tools.dotc.core.Denotations.SingleDenotation -import dotty.tools.dotc.interactive.Interactive class Completions( text: String, @@ -50,7 +46,8 @@ class Completions( workspace: Option[Path], autoImports: AutoImportsGenerator, comments: List[Comment], - options: List[String] + options: List[String], + completionItemPriority: CompletionItemPriority )(using ReportContext): given context: Context = ctx @@ -60,18 +57,25 @@ class Completions( private lazy val shouldAddSnippet = path match - /* In case of `method@@()` we should not add snippets and the path - * will contain apply as the parent of the current tree. 
- */ - case (fun) :: (appl: GenericApply) :: _ if appl.fun == fun => - false - case _ :: (withcursor @ Select(fun, name)) :: (appl: GenericApply) :: _ - if appl.fun == withcursor && name.decoded == Cursor.value => - false case (_: (Import | Export)) :: _ => false case _ :: (_: (Import | Export)) :: _ => false + // UnApply has patterns included in MatchCaseCompletions + case _ :: (_: UnApply) :: _ => false case _ => true + private lazy val shouldAddSuffix = shouldAddSnippet && + (path match + /* In case of `method@@()` we should not add snippets and the path + * will contain apply as the parent of the current tree. + */ + case (fun) :: (appl: GenericApply) :: _ if appl.fun == fun => false + /* In case of `T@@[]` we should not add snippets. + */ + case tpe :: (appl: AppliedTypeTree) :: _ if appl.tpt == tpe => false + case sel :: (funSel @ Select(fun, name)) :: (appl: GenericApply) :: _ + if appl.fun == funSel && sel == fun => false + case _ => true) + private lazy val isNew: Boolean = Completion.isInNewContext(adjustedPath) def includeSymbol(sym: Symbol)(using Context): Boolean = @@ -192,12 +196,12 @@ class Completions( private def findSuffix(symbol: Symbol): CompletionAffix = CompletionAffix.empty .chain { suffix => // for [] suffix - if shouldAddSnippet && symbol.info.typeParams.nonEmpty then + if shouldAddSuffix && symbol.info.typeParams.nonEmpty then suffix.withNewSuffixSnippet(Affix(SuffixKind.Bracket)) else suffix } .chain { suffix => // for () suffix - if shouldAddSnippet && symbol.is(Flags.Method) then + if shouldAddSuffix && symbol.is(Flags.Method) then val paramss = getParams(symbol) paramss match case Nil => suffix @@ -218,7 +222,7 @@ class Completions( else suffix } .chain { suffix => // for {} suffix - if shouldAddSnippet && isNew && isAbstractType(symbol) then + if shouldAddSuffix && isNew && isAbstractType(symbol) then if suffix.hasSnippet then suffix.withNewSuffix(Affix(SuffixKind.Template)) else suffix.withNewSuffixSnippet(Affix(SuffixKind.Template)) else suffix @@ -271,7 +275,6 @@ class Completions( val affix = if methodDenot.symbol.isConstructor && existsApply then adjustedPath match case (select @ Select(qual, _)) :: _ => - val start = qual.span.start val insertRange = select.sourcePos.startPos.withEnd(completionPos.queryEnd).toLsp suffix @@ -317,7 +320,7 @@ class Completions( val ScalaCliCompletions = new ScalaCliCompletions(coursierComplete, pos, text) - path match + val (advanced, exclusive) = path match case ScalaCliCompletions(dependency) => (ScalaCliCompletions.contribute(dependency), true) @@ -405,6 +408,36 @@ class Completions( true, ) + // unapply pattern + case Ident(name) :: (unapp : UnApply) :: _ => + ( + CaseKeywordCompletion.contribute( + EmptyTree, // no selector + completionPos, + indexedContext, + config, + search, + parent = unapp, + autoImports, + patternOnly = Some(name.decoded) + ), + false, + ) + case Select(_, name) :: (unapp : UnApply) :: _ => + ( + CaseKeywordCompletion.contribute( + EmptyTree, // no selector + completionPos, + indexedContext, + config, + search, + parent = unapp, + autoImports, + patternOnly = Some(name.decoded) + ), + false, + ) + // class FooImpl extends Foo: // def x| case OverrideExtractor(td, completing, start, exhaustive, fallbackName) => @@ -479,21 +512,18 @@ class Completions( if tree.selectors.exists(_.renamed.sourcePos.contains(pos)) => (List.empty, true) - // From Scala 3.1.3-RC3 (as far as I know), path contains - // `Literal(Constant(null))` on head for an incomplete program, in this case, just ignore the head. 
- case Literal(Constant(null)) :: tl => - advancedCompletions(tl, completionPos) - case _ => val args = NamedArgCompletions.contribute( - pos, path, adjustedPath, indexedContext, config.isCompletionSnippetsEnabled() ) (args, false) - end match + val singletonCompletions = InterCompletionType.inferType(path).map( + SingletonCompletions.contribute(path, _, completionPos) + ).getOrElse(Nil) + (singletonCompletions ++ advanced, exclusive) end advancedCompletions private def isAmmoniteCompletionPosition( @@ -627,7 +657,7 @@ class Completions( .collect { case symbolic: CompletionValue.Symbolic => symbolic } .groupBy(_.symbol.fullName) // we somehow have to ignore proxy type - val filteredSymbolicCompletions = symbolicCompletionsMap.filter: (name, denots) => + val filteredSymbolicCompletions = symbolicCompletionsMap.filter: (name, _) => lazy val existsTypeWithoutSuffix: Boolean = !symbolicCompletionsMap .get(name.toTypeName) .forall(_.forall(sym => sym.snippetAffix.suffixes.nonEmpty)) @@ -672,6 +702,7 @@ class Completions( case fileSysMember: CompletionValue.FileSystemMember => (fileSysMember.label, true) case ii: CompletionValue.IvyImport => (ii.label, true) + case sv: CompletionValue.SingletonValue => (sv.label, true) if !alreadySeen(id) && include then alreadySeen += id @@ -864,6 +895,20 @@ class Completions( else 0 end compareLocalSymbols + private def workspaceMemberPriority(symbol: Symbol): Int = + completionItemPriority + .workspaceMemberPriority( + SemanticdbSymbols.symbolName(symbol), + ).nn + + def compareFrequency(o1: CompletionValue, o2: CompletionValue): Int = + (o1, o2) match + case (w1: CompletionValue.Workspace, w2: CompletionValue.Workspace) => + workspaceMemberPriority(w1.symbol) + .compareTo(workspaceMemberPriority(w2.symbol)) + case _ => 0 + end compareFrequency + def compareByRelevance(o1: CompletionValue, o2: CompletionValue): Int = Integer.compare( computeRelevancePenalty(o1, application), @@ -879,38 +924,19 @@ class Completions( else 2 } ) - - /** - * This one is used for the following case: - * ```scala - * def foo(argument: Int): Int = ??? - * val argument = 42 - * foo(arg@@) // completions should be ordered as : - * // - argument (local val) - actual value comes first - * // - argument = ... (named arg) - named arg after - * // - ... 
all other options - * ``` - */ - def compareInApplyParams(o1: CompletionValue, o2: CompletionValue): Int = + def prioritizeByClass(o1: CompletionValue, o2: CompletionValue): Int = def priority(v: CompletionValue): Int = v match - case _: CompletionValue.Compiler => 0 - case CompletionValue.ExtraMethod(_, _: CompletionValue.Compiler) => 0 - case _ => 1 + case _: CompletionValue.SingletonValue => 0 + case _: CompletionValue.Compiler => 1 + case CompletionValue.ExtraMethod(_, _: CompletionValue.Compiler) => 1 + case _: CompletionValue.CaseKeyword => 2 + case _: CompletionValue.NamedArg => 3 + case _: CompletionValue.Keyword => 4 + case _ => 5 priority(o1) - priority(o2) - end compareInApplyParams - - def prioritizeKeywords(o1: CompletionValue, o2: CompletionValue): Int = - def priority(v: CompletionValue): Int = - v match - case _: CompletionValue.CaseKeyword => 0 - case _: CompletionValue.NamedArg => 1 - case _: CompletionValue.Keyword => 2 - case _ => 3 - - priority(o1) - priority(o2) - end prioritizeKeywords + end prioritizeByClass /** * Some completion values should be shown first such as CaseKeyword and * NamedArg @@ -992,29 +1018,29 @@ class Completions( ) if byIdentifier != 0 then byIdentifier else - val byOwner = - s1.owner.fullName.toString - .compareTo(s2.owner.fullName.toString) - if byOwner != 0 then byOwner + val byFrequency = compareFrequency(o1, o2) + if byFrequency != 0 then byFrequency else - val byParamCount = Integer.compare( - s1.paramSymss.flatten.size, - s2.paramSymss.flatten.size - ) - if byParamCount != 0 then byParamCount - else s1.detailString.compareTo(s2.detailString) + val byOwner = + s1.owner.fullName.toString + .compareTo(s2.owner.fullName.toString) + if byOwner != 0 then byOwner + else + val byParamCount = Integer.compare( + s1.paramSymss.flatten.size, + s2.paramSymss.flatten.size + ) + if byParamCount != 0 then byParamCount + else s1.detailString.compareTo(s2.detailString) end if end if end if end if end if case _ => - val byApplyParams = compareInApplyParams(o1, o2) - if byApplyParams != 0 then byApplyParams - else - val keywords = prioritizeKeywords(o1, o2) - if keywords != 0 then keywords - else compareByRelevance(o1, o2) + val byClass = prioritizeByClass(o1, o2) + if byClass != 0 then byClass + else compareByRelevance(o1, o2) end compare end Completions diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/InterpolatorCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/InterpolatorCompletions.scala index 2e39c17b24b3..da46e5167834 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/InterpolatorCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/InterpolatorCompletions.scala @@ -224,7 +224,7 @@ object InterpolatorCompletions: buildTargetIdentifier: String )(using ctx: Context, reportsContext: ReportContext): List[CompletionValue] = val litStartPos = lit.span.start - val litEndPos = lit.span.end - Cursor.value.length() + val litEndPos = lit.span.end - (if completionPos.withCURSOR then Cursor.value.length else 0) val position = completionPos.originalCursorPosition val span = position.span val nameStart = diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala index 48c6bcfe8317..2efcba48e82d 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala +++ 
b/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala @@ -27,6 +27,8 @@ import dotty.tools.dotc.core.Types.NoType import dotty.tools.dotc.core.Types.OrType import dotty.tools.dotc.core.Types.Type import dotty.tools.dotc.core.Types.TypeRef +import dotty.tools.dotc.core.Types.AppliedType +import dotty.tools.dotc.typer.Applications.UnapplyArgs import dotty.tools.dotc.util.SourcePosition import dotty.tools.pc.AutoImports.AutoImportsGenerator import dotty.tools.pc.AutoImports.SymbolImport @@ -75,10 +77,24 @@ object CaseKeywordCompletion: patternOnly, hasBind ) + val printer = ShortenedTypePrinter(search, IncludeDefaultParam.Never)(using indexedContext) val selTpe = selector match case EmptyTree => parent match + /* Parent is an unapply pattern */ + case UnApply(fn, implicits, patterns) if !fn.tpe.isErroneous => + patternOnly match + case None => None + case Some(value) => + val argPts = UnapplyArgs(fn.tpe.widen.finalResultType, fn, patterns, parent.srcPos).argTypes + patterns.zipWithIndex + .find: + case (Ident(v), tpe) => v.decoded == value + case (Select(_, v), tpe) => v.decoded == value + case t => false + .map((_, id) => argPts(id).widen.deepDealias) + /* Parent is a function expecting a case match expression */ case TreeApply(fun, _) if !fun.tpe.isErroneous => fun.tpe.paramInfoss match case (head :: Nil) :: _ @@ -105,7 +121,8 @@ object CaseKeywordCompletion: if patternOnly.isEmpty then val selectorTpe = selTpe.show val tpeLabel = - if !selectorTpe.contains("x$1") then selectorTpe + if !selectorTpe.contains("x$1") /* selector of a function type? */ then + selectorTpe else selector.symbol.info.show val label = s"case ${tpeLabel} =>" List( diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala index 647b151a635b..dd3a910beb4f 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala @@ -27,7 +27,6 @@ import dotty.tools.dotc.core.Types.TermRef import dotty.tools.dotc.core.Types.Type import dotty.tools.dotc.core.Types.TypeBounds import dotty.tools.dotc.core.Types.WildcardType -import dotty.tools.dotc.util.SourcePosition import dotty.tools.pc.IndexedContext import dotty.tools.pc.utils.InteractiveEnrichments.* import scala.annotation.tailrec @@ -35,9 +34,8 @@ import scala.annotation.tailrec object NamedArgCompletions: def contribute( - pos: SourcePosition, path: List[Tree], - untypedPath: => List[untpd.Tree], + untypedPath: List[untpd.Tree], indexedContext: IndexedContext, clientSupportsSnippets: Boolean, )(using ctx: Context): List[CompletionValue] = @@ -64,12 +62,13 @@ object NamedArgCompletions: for app <- getApplyForContextFunctionParam(rest) if !app.fun.isInfix - yield contribute( - Some(ident), - app, - indexedContext, - clientSupportsSnippets, - ) + yield + contribute( + Some(ident), + app, + indexedContext, + clientSupportsSnippets, + ) contribution.getOrElse(Nil) case (app: Apply) :: _ => /** @@ -156,10 +155,11 @@ object NamedArgCompletions: case _ => None val matchingMethods = for - (name, indxContext) <- maybeNameAndIndexedContext(method) - potentialMatches <- indxContext.findSymbol(name) - yield potentialMatches.collect { - case m + (name, indexedContext) <- maybeNameAndIndexedContext(method) + potentialMatches <- indexedContext.findSymbol(name) + yield + potentialMatches.collect { + case m if 
m.is(Flags.Method) && m.vparamss.length >= argss.length && Try(m.isAccessibleFrom(apply.symbol.info)).toOption @@ -170,7 +170,7 @@ object NamedArgCompletions: .zipWithIndex .forall { case (pair, index) => FuzzyArgMatcher(m.tparams) - .doMatch(allArgsProvided = index != 0) + .doMatch(allArgsProvided = index != 0, ident) .tupled(pair) } => m @@ -179,8 +179,7 @@ object NamedArgCompletions: end fallbackFindMatchingMethods val matchingMethods: List[Symbols.Symbol] = - if method.symbol.paramSymss.nonEmpty - then + if method.symbol.paramSymss.nonEmpty then val allArgsAreSupplied = val vparamss = method.symbol.vparamss vparamss.length == argss.length && vparamss @@ -386,12 +385,13 @@ class FuzzyArgMatcher(tparams: List[Symbols.Symbol])(using Context): * We check the args types not the result type. */ def doMatch( - allArgsProvided: Boolean + allArgsProvided: Boolean, + ident: Option[Ident] )(expectedArgs: List[Symbols.Symbol], actualArgs: List[Tree]) = (expectedArgs.length == actualArgs.length || (!allArgsProvided && expectedArgs.length >= actualArgs.length)) && actualArgs.zipWithIndex.forall { - case (Ident(name), _) if name.endsWith(Cursor.value) => true + case (arg: Ident, _) if ident.contains(arg) => true case (NamedArg(name, arg), _) => expectedArgs.exists { expected => expected.name == name && (!arg.hasType || arg.typeOpt.unfold diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala index df0bb70b596c..f5c15ca6df0e 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala @@ -279,7 +279,14 @@ object OverrideCompletions: else "" (indent, indent, lastIndent) end calcIndent - val abstractMembers = defn.typeOpt.abstractTermMembers.map(_.symbol) + val abstractMembers = + defn.tpe.abstractTermMembers.map(_.symbol).groupBy(_.owner).map { + case (owner, members) => (owner, members.sortWith{ (sym1, sym2) => + if(sym1.sourcePos.exists && sym2.sourcePos.exists) + sym1.sourcePos.start <= sym2.sourcePos.start + else !sym2.sourcePos.exists + }) + }.toSeq.sortBy(_._1.name.decoded).flatMap(_._2) val caseClassOwners = Set("Product", "Equals") val overridables = @@ -506,6 +513,8 @@ object OverrideCompletions: defn match case td: TypeDef if text.charAt(td.rhs.span.end) == ':' => Some(td.rhs.span.end) + case TypeDef(_, temp : Template) => + temp.parentsOrDerived.lastOption.map(_.span.end).filter(text.charAt(_) == ':') case _ => None private def fallbackFromParent(parent: Tree, name: String)(using Context) = @@ -521,8 +530,11 @@ object OverrideCompletions: object OverrideExtractor: def unapply(path: List[Tree])(using Context) = path match - // class FooImpl extends Foo: - // def x| + // abstract class Val: + // def hello: Int = 2 + // + // class Main extends Val: + // def h| case (dd: (DefDef | ValDef)) :: (t: Template) :: (td: TypeDef) :: _ if t.parents.nonEmpty => val completing = @@ -538,12 +550,13 @@ object OverrideCompletions: ) ) - // class FooImpl extends Foo: + // abstract class Val: + // def hello: Int = 2 + // + // class Main extends Val: // ov| case (ident: Ident) :: (t: Template) :: (td: TypeDef) :: _ - if t.parents.nonEmpty && "override".startsWith( - ident.name.show.replace(Cursor.value, "") - ) => + if t.parents.nonEmpty && "override".startsWith(ident.name.show.replace(Cursor.value, "")) => Some( ( td, @@ -554,15 +567,13 @@ object 
OverrideCompletions: ) ) + // abstract class Val: + // def hello: Int = 2 + // // class Main extends Val: // def@@ case (id: Ident) :: (t: Template) :: (td: TypeDef) :: _ - if t.parents.nonEmpty && "def".startsWith( - id.name.decoded.replace( - Cursor.value, - "", - ) - ) => + if t.parents.nonEmpty && "def".startsWith(id.name.decoded.replace(Cursor.value, "")) => Some( ( td, @@ -572,8 +583,12 @@ object OverrideCompletions: None, ) ) + + // abstract class Val: + // def hello: Int = 2 + // // class Main extends Val: - // he@@ + // he@@ case (id: Ident) :: (t: Template) :: (td: TypeDef) :: _ if t.parents.nonEmpty => Some( @@ -586,6 +601,23 @@ object OverrideCompletions: ) ) + // abstract class Val: + // def hello: Int = 2 + // + // class Main extends Val: + // hello@ // this transforms into this.hello, thus is a Select + case (sel @ Select(th: This, name)) :: (t: Template) :: (td: TypeDef) :: _ + if t.parents.nonEmpty && th.qual.name == td.name => + Some( + ( + td, + None, + sel.sourcePos.start, + false, + Some(name.show), + ) + ) + case _ => None end OverrideExtractor diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/SingletonCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/SingletonCompletions.scala new file mode 100644 index 000000000000..6e59c9afca3a --- /dev/null +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/SingletonCompletions.scala @@ -0,0 +1,133 @@ +package dotty.tools.pc.completions + +import scala.meta.internal.metals.Fuzzy +import dotty.tools.pc.utils.InteractiveEnrichments.* +import dotty.tools.pc.completions.CompletionValue.SingletonValue + +import dotty.tools.dotc.ast.tpd.* +import dotty.tools.dotc.core.Constants.Constant +import dotty.tools.dotc.core.Contexts.Context +import dotty.tools.dotc.core.Flags +import dotty.tools.dotc.core.StdNames +import dotty.tools.dotc.core.Symbols +import dotty.tools.dotc.core.Types.AndType +import dotty.tools.dotc.core.Types.AppliedType +import dotty.tools.dotc.core.Types.ConstantType +import dotty.tools.dotc.core.Types.OrType +import dotty.tools.dotc.core.Types.TermRef +import dotty.tools.dotc.core.Types.Type +import dotty.tools.dotc.core.Types.TypeRef +import dotty.tools.dotc.util.Spans.Span +import dotty.tools.dotc.core.Symbols.defn + +object SingletonCompletions: + def contribute( + path: List[Tree], + tpe0: Type, + completionPos: CompletionPos + )(using ctx: Context): List[CompletionValue] = + for { + (name, span) <- + path match + case (i @ Ident(name)) :: _ => List(name.toString() -> i.span) + case (l @ Literal(const)) :: _ => List(const.show -> l.span) + case _ => Nil + query = name.replace(Cursor.value, "").nn + tpe = tpe0 match + // for Tuple 2 we want to suggest first arg completion + case AppliedType(t: TypeRef, args) if t.classSymbol == Symbols.defn.Tuple2 && args.nonEmpty => + args.head + case t => t + singletonValues = collectSingletons(tpe).map(_.show) + range = completionPos.originalCursorPosition.withStart(span.start).withEnd(span.start + query.length).toLsp + value <- singletonValues.collect { + case name if Fuzzy.matches(query, name) => + SingletonValue(name, tpe, Some(range)) + } + } yield value + + private def collectSingletons(tpe: Type)(using Context): List[Constant] = + tpe.deepDealias match + case ConstantType(value) => List(value) + case OrType(tpe1, tpe2) => + collectSingletons(tpe1) ++ collectSingletons(tpe2) + case AndType(tpe1, tpe2) => + collectSingletons(tpe1).intersect(collectSingletons(tpe2)) + case _ => Nil + +object InterCompletionType: + def 
inferType(path: List[Tree])(using Context): Option[Type] = + path match + case (lit: Literal) :: Select(Literal(_), _) :: Apply(Select(Literal(_), _), List(s: Select)) :: rest if s.symbol == defn.Predef_undefined => + inferType(rest, lit.span) + case ident :: rest => inferType(rest, ident.span) + case _ => None + + def inferType(path: List[Tree], span: Span)(using Context): Option[Type] = + path match + case Apply(head, List(p : Select)) :: rest if p.name == StdNames.nme.??? && p.qualifier.symbol.name == StdNames.nme.Predef && p.span.isSynthetic => + inferType(rest, span) + case Block(_, expr) :: rest if expr.span.contains(span) => + inferType(rest, span) + case If(cond, _, _) :: rest if !cond.span.contains(span) => + inferType(rest, span) + case Typed(expr, tpt) :: _ if expr.span.contains(span) && !tpt.tpe.isErroneous => Some(tpt.tpe) + case Block(_, expr) :: rest if expr.span.contains(span) => + inferType(rest, span) + case Bind(_, body) :: rest if body.span.contains(span) => inferType(rest, span) + case Alternative(_) :: rest => inferType(rest, span) + case Try(block, _, _) :: rest if block.span.contains(span) => inferType(rest, span) + case CaseDef(_, _, body) :: Try(_, cases, _) :: rest if body.span.contains(span) && cases.exists(_.span.contains(span)) => inferType(rest, span) + case If(cond, _, _) :: rest if !cond.span.contains(span) => inferType(rest, span) + case CaseDef(_, _, body) :: Match(_, cases) :: rest if body.span.contains(span) && cases.exists(_.span.contains(span)) => + inferType(rest, span) + case NamedArg(_, arg) :: rest if arg.span.contains(span) => inferType(rest, span) + // x match + // case @@ + case CaseDef(pat, _, _) :: Match(sel, cases) :: rest if pat.span.contains(span) && cases.exists(_.span.contains(span)) && !sel.tpe.isErroneous => + sel.tpe match + case tpe: TermRef => Some(tpe.symbol.info).filterNot(_.isErroneous) + case tpe => Some(tpe) + // List(@@) + case SeqLiteral(_, tpe) :: _ if !tpe.tpe.isErroneous => + Some(tpe.tpe) + // val _: T = @@ + // def _: T = @@ + case (defn: ValOrDefDef) :: rest if !defn.tpt.tpe.isErroneous => Some(defn.tpt.tpe) + // f(@@) + case (app: Apply) :: rest => + val param = + for { + ind <- app.args.zipWithIndex.collectFirst { + case (arg, id) if arg.span.contains(span) => id + } + params <- app.symbol.paramSymss.find(!_.exists(_.isTypeParam)) + param <- params.get(ind) + } yield param.info + param match + // def f[T](a: T): T = ??? 
+ // f[Int](@@) + // val _: Int = f(@@) + case Some(t : TypeRef) if t.symbol.is(Flags.TypeParam) => + for { + (typeParams, args) <- + app match + case Apply(TypeApply(fun, args), _) => + val typeParams = fun.symbol.paramSymss.headOption.filter(_.forall(_.isTypeParam)) + typeParams.map((_, args.map(_.tpe))) + // val f: (j: "a") => Int + // f(@@) + case Apply(Select(v, StdNames.nme.apply), _) => + v.symbol.info match + case AppliedType(des, args) => + Some((des.typeSymbol.typeParams, args)) + case _ => None + case _ => None + ind = typeParams.indexOf(t.symbol) + tpe <- args.get(ind) + if !tpe.isErroneous + } yield tpe + case Some(tpe) => Some(tpe) + case _ => None + case _ => None + diff --git a/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala b/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala index 559e199f3449..b66fbe56fb9b 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala @@ -3,6 +3,7 @@ package dotty.tools.pc.printer import scala.collection.mutable import scala.meta.internal.jdk.CollectionConverters.* import scala.meta.internal.metals.ReportContext +import scala.meta.internal.mtags.KeywordWrapper import scala.meta.pc.SymbolDocumentation import scala.meta.pc.SymbolSearch @@ -24,8 +25,6 @@ import dotty.tools.dotc.printing.RefinedPrinter import dotty.tools.dotc.printing.Texts.Text import dotty.tools.pc.AutoImports.AutoImportsGenerator import dotty.tools.pc.AutoImports.ImportSel -import dotty.tools.pc.AutoImports.ImportSel.Direct -import dotty.tools.pc.AutoImports.ImportSel.Rename import dotty.tools.pc.IndexedContext import dotty.tools.pc.IndexedContext.Result import dotty.tools.pc.Params @@ -66,6 +65,11 @@ class ShortenedTypePrinter( private val foundRenames = collection.mutable.LinkedHashMap.empty[Symbol, String] + override def nameString(name: Name): String = + val nameStr = super.nameString(name) + if (nameStr.nonEmpty) KeywordWrapper.Scala3Keywords.backtickWrap(nameStr) + else nameStr + def getUsedRenames: Map[Symbol, String] = foundRenames.toMap.filter { case (k, v) => k.showName != v } @@ -296,7 +300,7 @@ class ShortenedTypePrinter( val (methodParams, extParams) = splitExtensionParamss(gsym) val paramss = methodParams ++ extParams lazy val implicitParams: List[Symbol] = - paramss.flatMap(params => params.filter(p => p.is(Flags.Implicit))) + paramss.flatMap(params => params.filter(p => p.isOneOf(Flags.GivenOrImplicit))) lazy val implicitEvidenceParams: Set[Symbol] = implicitParams @@ -419,7 +423,9 @@ class ShortenedTypePrinter( if gsym.is(Flags.ExtensionMethod) then val filteredParams = if gsym.name.isRightAssocOperatorName then - val (leadingTyParamss, rest1) = paramss.span(isTypeParamClause) + val (leadingTyParamss, rest1) = paramss match + case fst :: tail if isTypeParamClause(fst) => (List(fst), tail) + case other => (List(), other) val (leadingUsing, rest2) = rest1.span(isUsingClause) val (rightTyParamss, rest3) = rest2.span(isTypeParamClause) val (rightParamss, rest4) = rest3.splitAt(1) @@ -527,7 +533,8 @@ class ShortenedTypePrinter( else if includeDefaultParam == ShortenedTypePrinter.IncludeDefaultParam.ResolveLater && isDefaultParam then " = ..." 
else "" // includeDefaultParam == Never or !isDefaultParam - s"$keywordName: ${paramTypeString}$default" + val inline = if(param.is(Flags.Inline)) "inline " else "" + s"$inline$keywordName: ${paramTypeString}$default" end if end paramLabel diff --git a/presentation-compiler/src/main/dotty/tools/pc/utils/InteractiveEnrichments.scala b/presentation-compiler/src/main/dotty/tools/pc/utils/InteractiveEnrichments.scala index dd2fb3107c49..66080a363d51 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/utils/InteractiveEnrichments.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/utils/InteractiveEnrichments.scala @@ -99,12 +99,6 @@ object InteractiveEnrichments extends CommonMtagsEnrichments: def focusAt(point: Int): SourcePosition = pos.withSpan(pos.span.withPoint(point).focus) - def toLocation: Option[l.Location] = - for - uri <- InteractiveDriver.toUriOption(pos.source) - range <- if pos.exists then Some(pos.toLsp) else None - yield new l.Location(uri.toString(), range) - def encloses(other: SourcePosition): Boolean = pos.start <= other.start && pos.end >= other.end @@ -412,4 +406,7 @@ object InteractiveEnrichments extends CommonMtagsEnrichments: RefinedType(parent.dealias, name, refinedInfo.deepDealias) case dealised => dealised + extension[T] (list: List[T]) + def get(n: Int): Option[T] = if 0 <= n && n < list.size then Some(list(n)) else None + end InteractiveEnrichments diff --git a/presentation-compiler/test/dotty/tools/pc/base/BaseInlayHintsSuite.scala b/presentation-compiler/test/dotty/tools/pc/base/BaseInlayHintsSuite.scala index 78635e540c43..7d29e6c4dda9 100644 --- a/presentation-compiler/test/dotty/tools/pc/base/BaseInlayHintsSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/base/BaseInlayHintsSuite.scala @@ -8,9 +8,7 @@ import scala.meta.internal.metals.CompilerRangeParams import scala.language.unsafeNulls import dotty.tools.pc.utils.TestInlayHints -import dotty.tools.pc.utils.TextEdits -import org.eclipse.lsp4j.TextEdit class BaseInlayHintsSuite extends BasePCSuite { @@ -55,4 +53,4 @@ class BaseInlayHintsSuite extends BasePCSuite { obtained, ) -} \ No newline at end of file +} diff --git a/presentation-compiler/test/dotty/tools/pc/base/BasePCSuite.scala b/presentation-compiler/test/dotty/tools/pc/base/BasePCSuite.scala index a1fec0af3e8f..1158e433e732 100644 --- a/presentation-compiler/test/dotty/tools/pc/base/BasePCSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/base/BasePCSuite.scala @@ -22,6 +22,7 @@ import dotty.tools.pc.utils._ import org.eclipse.lsp4j.MarkupContent import org.eclipse.lsp4j.jsonrpc.messages.Either as JEither import org.junit.runner.RunWith +import scala.meta.pc.CompletionItemPriority object TestResources: val scalaLibrary = BuildInfo.ideTestsDependencyClasspath.map(_.toPath).toSeq @@ -30,6 +31,7 @@ object TestResources: @RunWith(classOf[ReusableClassRunner]) abstract class BasePCSuite extends PcAssertions: + val completionItemPriority: CompletionItemPriority = (_: String) => 0 private val isDebug = ManagementFactory.getRuntimeMXBean.getInputArguments.toString.contains("-agentlib:jdwp") val tmp = Files.createTempDirectory("stable-pc-tests") @@ -53,6 +55,7 @@ abstract class BasePCSuite extends PcAssertions: .withExecutorService(executorService) .withScheduledExecutorService(executorService) .withSearch(search) + .withCompletionItemPriority(completionItemPriority) .newInstance("", myclasspath.asJava, scalacOpts.asJava) protected def config: PresentationCompilerConfig = diff --git 
a/presentation-compiler/test/dotty/tools/pc/base/ReusableClassRunner.scala b/presentation-compiler/test/dotty/tools/pc/base/ReusableClassRunner.scala index 82e697e6e9a1..4999e0ddbc69 100644 --- a/presentation-compiler/test/dotty/tools/pc/base/ReusableClassRunner.scala +++ b/presentation-compiler/test/dotty/tools/pc/base/ReusableClassRunner.scala @@ -13,22 +13,17 @@ class ReusableClassRunner(testClass: Class[BasePCSuite]) testClass.getDeclaredConstructor().newInstance() override def createTest(): AnyRef = instance - override def withBefores( - method: FrameworkMethod, - target: Object, - statement: Statement - ): Statement = - statement override def withAfters( method: FrameworkMethod, target: Object, statement: Statement ): Statement = + val newStatement = super.withAfters(method, target, statement) new Statement(): override def evaluate(): Unit = try - statement.evaluate() + newStatement.evaluate() finally if (isLastTestCase(method)) then instance.clean() diff --git a/presentation-compiler/test/dotty/tools/pc/tests/CompilerCachingSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/CompilerCachingSuite.scala new file mode 100644 index 000000000000..5e13c07b9e5f --- /dev/null +++ b/presentation-compiler/test/dotty/tools/pc/tests/CompilerCachingSuite.scala @@ -0,0 +1,163 @@ +package dotty.tools.pc.tests + +import dotty.tools.dotc.core.Contexts.Context +import dotty.tools.pc.base.BasePCSuite +import dotty.tools.pc.ScalaPresentationCompiler +import org.junit.{Before, Test} + +import scala.language.unsafeNulls +import scala.meta.internal.metals.EmptyCancelToken +import scala.meta.internal.metals.CompilerOffsetParams +import scala.meta.pc.OffsetParams +import scala.concurrent.Future +import scala.concurrent.Await +import scala.meta.pc.VirtualFileParams +import scala.concurrent.duration.* + +import java.util.Collections +import java.nio.file.Paths +import java.util.concurrent.CompletableFuture + + +class CompilerCachingSuite extends BasePCSuite: + + val timeout = 5.seconds + + private def checkCompilationCount(expected: Int): Unit = + presentationCompiler match + case pc: ScalaPresentationCompiler => + val compilations = pc.compilerAccess.withNonInterruptableCompiler(None)(-1, EmptyCancelToken) { driver => + driver.compiler().currentCtx.runId + }.get(timeout.length, timeout.unit) + assertEquals(expected, compilations, s"Expected $expected compilations but got $compilations") + case _ => throw IllegalStateException("Presentation compiler should always be of type of ScalaPresentationCompiler") + + private def getContext(): Context = + presentationCompiler match + case pc: ScalaPresentationCompiler => + pc.compilerAccess.withNonInterruptableCompiler(None)(null, EmptyCancelToken) { driver => + driver.compiler().currentCtx + }.get(timeout.length, timeout.unit) + case _ => throw IllegalStateException("Presentation compiler should always be of type of ScalaPresentationCompiler") + + @Before + def beforeEach: Unit = + presentationCompiler.restart() + + // We want to run at least one compilation, so runId points at 3. 
+ // This will ensure that we use the same driver, not recreate fresh one on each call + val dryRunParams = CompilerOffsetParams(Paths.get("Test.scala").toUri(), "dryRun", 1, EmptyCancelToken) + checkCompilationCount(2) + val freshContext = getContext() + presentationCompiler.complete(dryRunParams).get(timeout.length, timeout.unit) + checkCompilationCount(3) + val dryRunContext = getContext() + assert(freshContext != dryRunContext) + + + @Test + def `cursor-compilation-does-not-corrupt-cache`: Unit = + val contextPreCompilation = getContext() + + val fakeParams = CompilerOffsetParams(Paths.get("Test.scala").toUri(), "def hello = ne", 14, EmptyCancelToken) + presentationCompiler.complete(fakeParams).get(timeout.length, timeout.unit) + val contextPostFirst = getContext() + assert(contextPreCompilation != contextPostFirst) + checkCompilationCount(4) + + val fakeParamsCursor = CompilerOffsetParams(Paths.get("Test.scala").toUri(), "def hello = new", 15, EmptyCancelToken) + presentationCompiler.complete(fakeParamsCursor).get(timeout.length, timeout.unit) + val contextPostCursor = getContext() + assert(contextPreCompilation != contextPostCursor) + assert(contextPostFirst == contextPostCursor) + checkCompilationCount(4) + + presentationCompiler.complete(fakeParams).get(timeout.length, timeout.unit) + val contextPostSecond = getContext() + assert(contextPreCompilation != contextPostSecond) + assert(contextPostFirst == contextPostCursor) + assert(contextPostCursor == contextPostSecond) + checkCompilationCount(4) + + @Test + def `compilation-for-same-snippet-is-cached`: Unit = + val contextPreCompilation = getContext() + + val fakeParams = CompilerOffsetParams(Paths.get("Test.scala").toUri(), "def hello = ne", 14, EmptyCancelToken) + presentationCompiler.complete(fakeParams).get(timeout.length, timeout.unit) + val contextPostFirst = getContext() + assert(contextPreCompilation != contextPostFirst) + checkCompilationCount(4) + + presentationCompiler.complete(fakeParams).get(timeout.length, timeout.unit) + val contextPostSecond = getContext() + assert(contextPreCompilation != contextPostFirst) + assert(contextPostSecond == contextPostFirst) + checkCompilationCount(4) + + @Test + def `compilation-for-different-snippet-is-not-cached`: Unit = + + + checkCompilationCount(3) + val fakeParams = CompilerOffsetParams(Paths.get("Test.scala").toUri(), "def hello = prin", 16, EmptyCancelToken) + presentationCompiler.complete(fakeParams).get(timeout.length, timeout.unit) + checkCompilationCount(4) + + val fakeParams2 = CompilerOffsetParams(Paths.get("Test2.scala").toUri(), "def hello = prin", 16, EmptyCancelToken) + presentationCompiler.complete(fakeParams2).get(timeout.length, timeout.unit) + checkCompilationCount(5) + + val fakeParams3 = CompilerOffsetParams(Paths.get("Test2.scala").toUri(), "def hello = print", 17, EmptyCancelToken) + presentationCompiler.complete(fakeParams3).get(timeout.length, timeout.unit) + checkCompilationCount(6) + + + private val testFunctions: List[OffsetParams => CompletableFuture[_]] = List( + presentationCompiler.complete(_), + presentationCompiler.convertToNamedArguments(_, Collections.emptyList()), + presentationCompiler.autoImports("a", _, false), + presentationCompiler.definition(_), + presentationCompiler.didChange(_), + presentationCompiler.documentHighlight(_), + presentationCompiler.hover(_), + presentationCompiler.implementAbstractMembers(_), + presentationCompiler.insertInferredType(_), + presentationCompiler.semanticTokens(_), + presentationCompiler.prepareRename(_), + 
presentationCompiler.rename(_, "a"), + presentationCompiler.signatureHelp(_), + presentationCompiler.typeDefinition(_) + ) + + + @Test + def `different-api-calls-reuse-cache`: Unit = + val fakeParams = CompilerOffsetParams(Paths.get("Test.scala").toUri(), "def hello = ne", 13, EmptyCancelToken) + presentationCompiler.complete(fakeParams).get(timeout.length, timeout.unit) + + val contextBefore = getContext() + + val differentContexts = testFunctions.map: f => + f(fakeParams).get(timeout.length, timeout.unit) + checkCompilationCount(4) + getContext() + .toSet + + assert(differentContexts == Set(contextBefore)) + + @Test + def `different-api-calls-reuse-cache-parallel`: Unit = + import scala.jdk.FutureConverters.* + import scala.concurrent.ExecutionContext.Implicits.global + + val fakeParams = CompilerOffsetParams(Paths.get("Test.scala").toUri(), "def hello = ne", 13, EmptyCancelToken) + presentationCompiler.complete(fakeParams).get(timeout.length, timeout.unit) + + val contextBefore = getContext() + + val futures = testFunctions.map: f => + f(fakeParams).asScala.map(_ => getContext()) + + val res = Await.result(Future.sequence(futures), timeout).toSet + assert(res == Set(contextBefore)) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/InferExpectedTypeSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/InferExpectedTypeSuite.scala new file mode 100644 index 000000000000..ccdc68ef1cad --- /dev/null +++ b/presentation-compiler/test/dotty/tools/pc/tests/InferExpectedTypeSuite.scala @@ -0,0 +1,292 @@ +package dotty.tools.pc.tests + +import scala.language.unsafeNulls +import dotty.tools.pc.base.BasePCSuite +import scala.meta.internal.metals.CompilerOffsetParams +import java.nio.file.Paths +import scala.meta.internal.metals.EmptyCancelToken +import dotty.tools.pc.ScalaPresentationCompiler +import scala.meta.internal.mtags.CommonMtagsEnrichments.* + +import org.junit.Test +import org.junit.Ignore + +class InferExpectedTypeSuite extends BasePCSuite: + def check( + original: String, + expectedType: String, + fileName: String = "A.scala" + ): Unit = + presentationCompiler.restart() + val (code, offset) = params(original.replace("@@", "CURSOR@@"), fileName) + val offsetParams = CompilerOffsetParams( + Paths.get(fileName).toUri(), + code, + offset, + EmptyCancelToken + ) + presentationCompiler.asInstanceOf[ScalaPresentationCompiler].inferExpectedType(offsetParams).get().asScala match { + case Some(value) => assertNoDiff(expectedType, value) + case None => fail("Empty result.") + } + + @Test def basic = + check( + """|def doo: Double = @@ + |""".stripMargin, + """|Double + |""".stripMargin + ) + + @Test def `basic-param` = + check( + """|def paint(c: Int) = ??? 
+ |val _ = paint(@@) + |""".stripMargin, + """|Int + |""".stripMargin + ) + + @Test def `type-ascription` = + check( + """|def doo = (@@ : Double) + |""".stripMargin, + """|Double + |""".stripMargin + ) + + @Test def list = + check( + """|val i: List[Int] = List(@@) + |""".stripMargin, + """|Int + |""".stripMargin + ) + + @Test def `list-singleton` = + check( + """|val i: List["foo"] = List("@@") + |""".stripMargin, + """|"foo" + |""".stripMargin + ) + + @Test def option = + check( + """|val i: Option[Int] = Option(@@) + |""".stripMargin, + """|Int + |""".stripMargin + ) + +// some structures + @Test def `with-block` = + check( + """|def c: Double = + | @@ + |""".stripMargin, + """|Double + |""".stripMargin + ) + + @Test def `if-statement` = + check( + """|def c(shouldBeBlue: Boolean): Int = + | if(shouldBeBlue) @@ + | else 2 + |""".stripMargin, + """|Int + |""".stripMargin + ) + + @Test def `if-statement-2` = + check( + """|def c(shouldBeBlue: Boolean): Int = + | if(shouldBeBlue) 1 + | else @@ + |""".stripMargin, + """|Int + |""".stripMargin + ) + + @Test def `if-statement-3` = + check( + """|def c(shouldBeBlue: Boolean): Int = + | if(@@) 3 + | else 2 + |""".stripMargin, + """|Boolean + |""".stripMargin + ) + + @Test def `try` = + check( + """|val _: Int = + | try { + | @@ + | } catch { + | case _ => + | } + |""".stripMargin, + """|Int + |""".stripMargin + ) + + @Test def `try-catch` = + check( + """|val _: Int = + | try { + | } catch { + | case _ => @@ + | } + |""".stripMargin, + """|Int + |""".stripMargin + ) + + @Test def `if-condition` = + check( + """|val _ = if @@ then 1 else 2 + |""".stripMargin, + """|Boolean + |""".stripMargin + ) + + @Test def `inline-if` = + check( + """|inline def o: Int = inline if ??? then @@ else ??? + |""".stripMargin, + """|Int + |""".stripMargin + ) + +// pattern matching + + @Test def `pattern-match` = + check( + """|val _ = + | List(1) match + | case @@ + |""".stripMargin, + """|List[Int] + |""".stripMargin + ) + + @Test def bind = + check( + """|val _ = + | List(1) match + | case name @ @@ + |""".stripMargin, + """|List[Int] + |""".stripMargin + ) + + @Test def alternative = + check( + """|val _ = + | List(1) match + | case Nil | @@ + |""".stripMargin, + """|List[Int] + |""".stripMargin + ) + + @Test def unapply = + check( + """|val _ = + | List(1) match + | case @@ :: _ => + |""".stripMargin, + """|Int + |""".stripMargin + ) + +// generic functions + + @Test def `any-generic` = + check( + """|val _ : List[Int] = identity(@@) + |""".stripMargin, + """|List[Int] + |""".stripMargin + ) + + @Test def `eq-generic` = + check( + """|def eq[T](a: T, b: T): Boolean = ??? + |val _ = eq(1, @@) + |""".stripMargin, + """|Int + |""".stripMargin + ) + + @Test def flatmap = + check( + """|val _ : List[Int] = List().flatMap(_ => @@) + |""".stripMargin, + """|IterableOnce[Nothing] + |""".stripMargin // ideally IterableOnce[Int], but can't change interpolateTypeVars + ) + + @Test def map = + check( + """|val _ : List[Int] = List().map(_ => @@) + |""".stripMargin, + """|Nothing + |""".stripMargin // ideally Int, but can't change interpolateTypeVars + ) + + @Test def `for-comprehension` = + check( + """|val _ : List[Int] = + | for { + | _ <- List("a", "b") + | } yield @@ + |""".stripMargin, + """|Nothing + |""".stripMargin // ideally Int, but can't change interpolateTypeVars + ) + +// bounds + @Test def any = + check( + """|trait Foo + |def foo[T](a: T): Boolean = ??? 
+ |val _ = foo(@@) + |""".stripMargin, + """|Any + |""".stripMargin + ) + + @Test def `bounds-1` = + check( + """|trait Foo + |def foo[T <: Foo](a: T): Boolean = ??? + |val _ = foo(@@) + |""".stripMargin, + """|Foo + |""".stripMargin + ) + + @Test def `bounds-2` = + check( + """|trait Foo + |def foo[T >: Foo](a: T): Boolean = ??? + |val _ = foo(@@) + |""".stripMargin, + """|Foo + |""".stripMargin // ideally Any (maybe?) + ) + + @Test def `bounds-3` = + check( + """|trait A + |class B extends A + |class C extends B + |def roo[F >: C <: A](f: F) = ??? + |val kjk = roo(@@) + |""".stripMargin, + """|C + |""".stripMargin // ideally A + ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/PcReferencesSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/PcReferencesSuite.scala new file mode 100644 index 000000000000..15ee35928872 --- /dev/null +++ b/presentation-compiler/test/dotty/tools/pc/tests/PcReferencesSuite.scala @@ -0,0 +1,90 @@ +package dotty.tools.pc.tests + +import scala.language.unsafeNulls + +import dotty.tools.pc.base.BasePCSuite +import dotty.tools.pc.utils.RangeReplace + +import java.net.URI +import org.eclipse.lsp4j.jsonrpc.messages.{Either => JEither} +import scala.meta.internal.jdk.CollectionConverters.* +import scala.meta.internal.metals.CompilerVirtualFileParams +import scala.meta.internal.metals.EmptyCancelToken +import scala.meta.internal.pc.PcReferencesRequest + +import org.junit.Test + +class PcReferencesSuite extends BasePCSuite with RangeReplace { + def check( + original: String, + ): Unit = + val edit = original.replaceAll("(<<|>>)", "") + val expected = original.replaceAll("@@", "") + val base = original.replaceAll("(<<|>>|@@)", "") + + val (code, offset) = params(edit, "Highlight.scala") + val ranges = presentationCompiler + .references( + PcReferencesRequest( + CompilerVirtualFileParams( + URI.create("file:/Highlight.scala"), + code, + EmptyCancelToken + ), + includeDefinition = false, + offsetOrSymbol = JEither.forLeft(offset) + ) + ) + .get() + .asScala + .flatMap(_.locations().asScala.map(_.getRange())) + .toList + + assertEquals( + renderRangesAsString(base, ranges), + expected, + "references should match" + ) + + @Test def `implicit-args` = + check( + """|package example + | + |class Bar(i: Int) + | + |object Hello { + | def m(i: Int)(implicit b: Bar) = ??? 
+ | val foo = { + | implicit val ba@@rr: Bar = new Bar(1) + | m(3)<<>> + | } + |} + |""".stripMargin + ) + + @Test def `implicit-args-2` = + check( + """|package example + | + |class Bar(i: Int) + |class Foo(implicit b: Bar) + | + |object Hello { + | implicit val ba@@rr: Bar = new Bar(1) + | val foo = new Foo<<>> + |} + |""".stripMargin + ) + + @Test def `case-class` = + check( + """|case class Ma@@in(i: Int) + |""".stripMargin + ) + + @Test def `case-class-with-implicit` = + check( + """"|case class A()(implicit val fo@@o: Int) + |""".stripMargin + ) +} diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionArgSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionArgSuite.scala index f4bfc806dbb3..dc81d2596c6f 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionArgSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionArgSuite.scala @@ -583,7 +583,7 @@ class CompletionArgSuite extends BaseCompletionSuite: |""".stripMargin ) - @Test def `contructor-param` = + @Test def `constructor-param` = check( """|class Foo (xxx: Int) | @@ -595,7 +595,7 @@ class CompletionArgSuite extends BaseCompletionSuite: |""".stripMargin ) - @Test def `contructor-param2` = + @Test def `constructor-param2` = check( """|class Foo () | @@ -614,8 +614,9 @@ class CompletionArgSuite extends BaseCompletionSuite: check( s"""|case class Context() | - |def foo(arg1: (Context) ?=> Int, arg2: Int): String = ??? - |val m = foo(ar@@) + |object Main: + | def foo(arg1: (Context) ?=> Int, arg2: Int): String = ??? + | val m = foo(ar@@) |""".stripMargin, """|arg1 = : (Context) ?=> Int |arg2 = : Int @@ -627,8 +628,9 @@ class CompletionArgSuite extends BaseCompletionSuite: check( s"""|case class Context() | - |def foo(arg1: Context ?=> Int, arg2: Context ?=> Int): String = ??? - |val m = foo(arg1 = ???, a@@) + |object Main: + | def foo(arg1: Context ?=> Int, arg2: Context ?=> Int): String = ??? + | val m = foo(arg1 = ???, a@@) |""".stripMargin, """|arg2 = : (Context) ?=> Int |""".stripMargin, @@ -639,8 +641,9 @@ class CompletionArgSuite extends BaseCompletionSuite: check( s"""|case class Context() | - |def foo(arg1: (Boolean, Context) ?=> Int ?=> String, arg2: (Boolean, Context) ?=> Int ?=> String): String = ??? - |val m = foo(arg1 = ???, a@@) + |object Main: + | def foo(arg1: (Boolean, Context) ?=> Int ?=> String, arg2: (Boolean, Context) ?=> Int ?=> String): String = ??? + | val m = foo(arg1 = ???, a@@) |""".stripMargin, """|arg2 = : (Boolean, Context) ?=> (Int) ?=> String |""".stripMargin, @@ -786,33 +789,35 @@ class CompletionArgSuite extends BaseCompletionSuite: @Test def `overloaded-with-param` = check( - """|def m(idd : String, abb: Int): Int = ??? - |def m(inn : Int, uuu: Option[Int]): Int = ??? - |def m(inn : Int, aaa: Int): Int = ??? - |def k: Int = m(1, a@@) + """|object Main: + | def m(idd : String, abb: Int): Int = ??? + | def m(inn : Int, uuu: Option[Int]): Int = ??? + | def m(inn : Int, aaa: Int): Int = ??? + | def k: Int = m(1, a@@) |""".stripMargin, """|aaa = : Int - |assert(assertion: Boolean): Unit + |assert(inline assertion: Boolean): Unit |""".stripMargin, topLines = Some(2), ) @Test def `overloaded-with-named-param` = check( - """|def m(idd : String, abb: Int): Int = ??? - |def m(inn : Int, uuu: Option[Int]): Int = ??? - |def m(inn : Int, aaa: Int): Int = ??? - |def k: Int = m(inn = 1, a@@) + """|object Main: + | def m(idd : String, abb: Int): Int = ??? 
+ | def m(inn : Int, uuu: Option[Int]): Int = ??? + | def m(inn : Int, aaa: Int): Int = ??? + | def k: Int = m(inn = 1, a@@) |""".stripMargin, """|aaa = : Int - |assert(assertion: Boolean): Unit + |assert(inline assertion: Boolean): Unit |""".stripMargin, topLines = Some(2), ) @Test def `overloaded-generic` = check( - """|object M: + """|object Main: | val g = 3 | val l : List[Int] = List(1,2,3) | def m[T](inn : List[T], yy: Int, aaa: Int, abb: Option[Int]): Int = ??? @@ -899,28 +904,30 @@ class CompletionArgSuite extends BaseCompletionSuite: @Test def `overloaded-function-param` = check( - """|def m[T](i: Int)(inn: T => Int, abb: Option[Int]): Int = ??? - |def m[T](i: Int)(inn: T => Int, aaa: Int): Int = ??? - |def m[T](i: Int)(inn: T => String, acc: List[Int]): Int = ??? - |def k = m(1)(inn = identity[Int], a@@) + """|object Main: + | def m[T](i: Int)(inn: T => Int, abb: Option[Int]): Int = ??? + | def m[T](i: Int)(inn: T => Int, aaa: Int): Int = ??? + | def m[T](i: Int)(inn: T => String, acc: List[Int]): Int = ??? + | def k = m(1)(inn = identity[Int], a@@) |""".stripMargin, """|aaa = : Int |abb = : Option[Int] - |assert(assertion: Boolean): Unit + |assert(inline assertion: Boolean): Unit |""".stripMargin, topLines = Some(3), ) @Test def `overloaded-function-param2` = check( - """|def m[T](i: Int)(inn: T => Int, abb: Option[Int]): Int = ??? - |def m[T](i: Int)(inn: T => Int, aaa: Int): Int = ??? - |def m[T](i: String)(inn: T => Int, acc: List[Int]): Int = ??? - |def k = m(1)(inn = identity[Int], a@@) + """|object Main: + | def m[T](i: Int)(inn: T => Int, abb: Option[Int]): Int = ??? + | def m[T](i: Int)(inn: T => Int, aaa: Int): Int = ??? + | def m[T](i: String)(inn: T => Int, acc: List[Int]): Int = ??? + | def k = m(1)(inn = identity[Int], a@@) |""".stripMargin, """|aaa = : Int |abb = : Option[Int] - |assert(assertion: Boolean): Unit + |assert(inline assertion: Boolean): Unit |""".stripMargin, topLines = Some(3), ) @@ -938,7 +945,7 @@ class CompletionArgSuite extends BaseCompletionSuite: |""".stripMargin, """|aaa = : Int |abb = : Option[Int] - |assert(assertion: Boolean): Unit + |assert(inline assertion: Boolean): Unit |""".stripMargin, topLines = Some(3), ) @@ -956,7 +963,7 @@ class CompletionArgSuite extends BaseCompletionSuite: |""".stripMargin, """|abb = : Option[Int] |acc = : List[Int] - |assert(assertion: Boolean): Unit + |assert(inline assertion: Boolean): Unit |""".stripMargin, topLines = Some(3), ) @@ -978,9 +985,10 @@ class CompletionArgSuite extends BaseCompletionSuite: @Test def `overloaded-function-param3` = check( - """|def m[T](inn: Int => T, abb: Option[Int]): Int = ??? - |def m[T](inn: String => T, aaa: Int): Int = ??? - |def k = m(identity[Int], a@@) + """|object Main: + | def m[T](inn: Int => T, abb: Option[Int]): Int = ??? + | def m[T](inn: String => T, aaa: Int): Int = ??? 
+ | def k = m(identity[Int], a@@) |""".stripMargin, """|abb = : Option[Int] |""".stripMargin, @@ -1109,7 +1117,7 @@ class CompletionArgSuite extends BaseCompletionSuite: @Test def `comparison` = check( - """package a + """ |object w { | abstract class T(x: Int) { | def met(x: Int): Unit = { diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionCancelSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionCancelSuite.scala index 4746eb93f25d..c1d0e017def7 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionCancelSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionCancelSuite.scala @@ -90,8 +90,8 @@ class CompletionCancelSuite extends BaseCompletionSuite: | val x = asser@@ |} """.stripMargin, - """|assert(assertion: Boolean): Unit - |assert(assertion: Boolean, message: => Any): Unit + """|assert(inline assertion: Boolean): Unit + |assert(inline assertion: Boolean, inline message: => Any): Unit |""".stripMargin ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionContextSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionContextSuite.scala new file mode 100644 index 000000000000..5314a61ab599 --- /dev/null +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionContextSuite.scala @@ -0,0 +1,27 @@ +package dotty.tools.pc.tests.completion + +import dotty.tools.pc.base.BaseCompletionSuite +import scala.meta.pc.CompletionItemPriority +import org.junit.Test + +class CompletionContextSuite extends BaseCompletionSuite: + override val completionItemPriority: CompletionItemPriority = { + case "scala/concurrent/Future." => -1 + case _ => 0 + } + // scala.concurrent.Future should be ranked higher than java.util.concurrent.Future + val futureCompletionResult: List[String] = + List("Future - scala.concurrent", "Future - java.util.concurrent") + + @Test + def `context` = + check( + """package fut + |object A { + | Futur@@ + |}""".stripMargin, + """Future - scala.concurrent + |Future - java.util.concurrent + |""".stripMargin, + filter = futureCompletionResult.contains + ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionExtraConstructorSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionExtraConstructorSuite.scala index 010d0b14fa90..6a8759d0a0c9 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionExtraConstructorSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionExtraConstructorSuite.scala @@ -1,14 +1,10 @@ package dotty.tools.pc.tests.completion -import scala.meta.pc.SymbolDocumentation import scala.language.unsafeNulls import dotty.tools.pc.base.BaseCompletionSuite -import dotty.tools.pc.utils.MockEntries import org.junit.Test -import org.junit.Ignore -import scala.collection.immutable.ListMapBuilder class CompletionExtraConstructorSuite extends BaseCompletionSuite: diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionInterpolatorSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionInterpolatorSuite.scala index 08cc1535fd56..50019928a2f3 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionInterpolatorSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionInterpolatorSuite.scala @@ -112,7 +112,7 @@ class CompletionInterpolatorSuite extends BaseCompletionSuite: 
|""".stripMargin.triplequoted, """|object Main { | val myName = "" - | s"$myName $$" + | s"$myName$0 $$" |} |""".stripMargin.triplequoted, filterText = "myName" diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionKeywordSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionKeywordSuite.scala index bf7077d47b3f..5db0cf96d9ef 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionKeywordSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionKeywordSuite.scala @@ -433,6 +433,7 @@ class CompletionKeywordSuite extends BaseCompletionSuite: |given |extension |type + |opaque type |class |enum |case class diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionRelease11Suite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionRelease11Suite.scala new file mode 100644 index 000000000000..76015a588387 --- /dev/null +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionRelease11Suite.scala @@ -0,0 +1,32 @@ +package dotty.tools.pc.tests.completion + +import dotty.tools.pc.base.BaseCompletionSuite + +import org.junit.Test +import org.junit.Before +import java.nio.file.Path +import dotty.tools.pc.utils.JRE + +class CompletionRelease11Suite extends BaseCompletionSuite: + + override protected def scalacOptions(classpath: Seq[Path]): Seq[String] = + "-release:11" +: super.scalacOptions(classpath) + + @Before + def beforeMethod(): Unit = + org.junit.Assume.assumeTrue(JRE.getJavaMajorVersion >= 11) + + @Test def java11Symbols = + check( + """ + |object A { + | "".repea@@ + |}""".stripMargin, + """repeat(x$0: Int): String + |replaceAll(x$0: String, x$1: String): String + |prependedAll[B >: A](prefix: IterableOnce[B]): IndexedSeq[B] + |prependedAll(prefix: String): String + |prependedAll[B >: Char](prefix: IterableOnce[B]): IndexedSeq[B] + |replaceAllLiterally(literal: String, replacement: String): String + |""".stripMargin + ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionRelease8Suite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionRelease8Suite.scala new file mode 100644 index 000000000000..587cd5a53073 --- /dev/null +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionRelease8Suite.scala @@ -0,0 +1,31 @@ +package dotty.tools.pc.tests.completion + +import dotty.tools.pc.base.BaseCompletionSuite + +import org.junit.Test +import org.junit.Before +import java.nio.file.Path +import dotty.tools.pc.utils.JRE + +class CompletionRelease8Suite extends BaseCompletionSuite: + + override protected def scalacOptions(classpath: Seq[Path]): Seq[String] = + "-release:8" +: super.scalacOptions(classpath) + + @Before + def beforeMethod(): Unit = + org.junit.Assume.assumeTrue(JRE.getJavaMajorVersion >= 8) + + @Test def noJvm11Symbols = + check( + """ + |object A { + | "".repea@@ + |}""".stripMargin, + """replaceAll(x$0: String, x$1: String): String + |prependedAll[B >: A](prefix: IterableOnce[B]): IndexedSeq[B] + |prependedAll(prefix: String): String + |prependedAll[B >: Char](prefix: IterableOnce[B]): IndexedSeq[B] + |replaceAllLiterally(literal: String, replacement: String): String + |""".stripMargin + ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala index 79d35944c84d..b542e4ba84e3 100644 --- 
a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala @@ -8,7 +8,7 @@ import org.junit.Ignore class CompletionScalaCliSuite extends BaseCompletionSuite: @Test def `simple` = - check( + checkSubset( """|//> using lib "io.cir@@ |package A |""".stripMargin, @@ -30,11 +30,12 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: |//> using lib io.circe::circe-core_native0.4 |package A |""".stripMargin, - assertSingleItem = false + assertSingleItem = false, + filter = _.contains("circe-core_native0.4") ) @Test def `version-sort` = - check( + checkSubset( """|//> using dep "com.lihaoyi::pprint:0.7@@" |package A |""".stripMargin, @@ -42,12 +43,12 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: |0.7.2 |0.7.1 |0.7.0 - |""".stripMargin, + |""".stripMargin ) @Ignore @Test def `single-colon` = - check( + checkSubset( """|//> using lib "io.circe:circe-core_na@@ |package A |""".stripMargin, @@ -58,27 +59,28 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: ) @Test def `version` = - check( - """|//> using lib "io.circe::circe-core_sjs1:0.14.1@@" + checkSubset( + """|//> using lib "io.circe::circe-core_sjs1:0.14.10@@" |package A |""".stripMargin, - "0.14.1" + "0.14.10" ) // We don't to add `::` before version if `sjs1` is specified @Test def `version-edit` = checkEdit( - """|//> using lib "io.circe::circe-core_sjs1:0.14.1@@" + """|//> using lib "io.circe::circe-core_sjs1:0.14.10@@" |package A |""".stripMargin, - """|//> using lib "io.circe::circe-core_sjs1:0.14.1" + """|//> using lib "io.circe::circe-core_sjs1:0.14.10" |package A |""".stripMargin, + filter = _.endsWith("0.14.10") ) @Ignore @Test def `multiple-libs` = - check( + checkSubset( """|//> using lib "io.circe::circe-core:0.14.0", "io.circe::circe-core_na@@" |package A |""".stripMargin, @@ -87,7 +89,7 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: @Ignore @Test def `script` = - check( + checkSubset( scriptWrapper( """|//> using lib "io.circe:circe-core_na@@ | @@ -103,7 +105,7 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: ) @Test def `closing-quote` = - check( + checkSubset( """|//> using lib "io.circe::circe-core:0.14.0"@@ |package A |""".stripMargin, @@ -111,7 +113,7 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: ) @Test def `whitespace` = - check( + checkSubset( """|//> using lib "io.circe::circe-co @@ |package A |""".stripMargin, @@ -130,7 +132,7 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: ) @Test def `dep` = - check( + checkSubset( """|//> using dep "io.cir@@ |package A |""".stripMargin, @@ -140,13 +142,29 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: @Ignore @Test def `multiple-deps2` = - check( + checkSubset( """|//> using libs "io.circe::circe-core:0.14.0", "io.circe::circe-core_na@@" |package A |""".stripMargin, "circe-core_native0.4" ) + def checkSubset( + original: String, + expected: String, + filename: String = "A.scala", + enablePackageWrap: Boolean = true + ) = { + val expectedAtLeast = expected.linesIterator.toSet + check( + original, + expected, + filter = expectedAtLeast, + filename = filename, + enablePackageWrap = enablePackageWrap + ) + } + private def scriptWrapper(code: String, filename: String): String = // Vaguely looks like a scala file that ScalaCLI generates // from a sc file. 
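A minimal, hedged sketch of what the checkSubset helper introduced above amounts to: passing filter = expectedAtLeast restricts the compared completions to the labels listed in expected, so the assertion only requires those labels to be present rather than an exhaustive, exact match. The value names and the artifact strings below are assumptions for illustration only, not part of the change set.

object CheckSubsetSketch:
  // Hypothetical expected completion labels (stand-ins for the expected lines of a test).
  val expectedAtLeast: Set[String] = Set("io.circe", "io.circul")
  // Hypothetical completion labels returned by the presentation compiler.
  val obtained: List[String] = List("io.circe", "io.circul", "org.other")
  // A Set[String] is a String => Boolean, so it can be used directly as the filter:
  // only labels contained in the expected set survive the comparison.
  val compared: List[String] = obtained.filter(expectedAtLeast)
  assert(compared == List("io.circe", "io.circul"))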
diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala index 5769304919ca..a002e722f1f0 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala @@ -289,7 +289,8 @@ class CompletionSnippetSuite extends BaseCompletionSuite: |} |""".stripMargin, "scala.util.Try@@(1)", - "scala.util.Try(1)" + "scala.util.Try(1)", + assertSingleItem = false ) @Test def `case-class` = @@ -300,7 +301,8 @@ class CompletionSnippetSuite extends BaseCompletionSuite: |""".stripMargin, "scala.util.Tr@@(1)", "scala.util.Try(1)", - filter = str => str.contains("Try") + filter = str => str.contains("Try"), + assertSingleItem = false ) @Test def `case-class2` = @@ -383,6 +385,29 @@ class CompletionSnippetSuite extends BaseCompletionSuite: ) @Test def `no-apply` = + checkSnippet( + s"""|package example + | + |object Widget{} + |object Main { + | Wi@@ + |} + |""".stripMargin, + """|Widget - example + |Window - java.awt + |WindowPeer - java.awt.peer + |WithFilter - [A](p: A => Boolean, xs: Array[A]): WithFilter[A] + |WithFilter - [A, CC[_$$2]](self: IterableOps[A, CC, ?], p: A => Boolean): WithFilter[A, CC] + |WithFilter - [K, V, IterableCC[_$$3], CC[_$$4,_$$5] <: IterableOps[?, AnyConstr, ?]](self: MapOps[K, V, CC, ?] & IterableOps[(K, V), IterableCC, ?], p: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, CC] + |WithFilter - [K, V, IterableCC[_$$1], MapCC[X,Y] <: scala.collection.Map[X, Y], CC[X,Y] <: scala.collection.Map[X, Y] & SortedMapOps[X, Y, CC, ?]](self: SortedMapOps[K, V, CC, ?] & MapOps[K, V, MapCC, ?] & IterableOps[(K, V), IterableCC, ?], p: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, MapCC, CC] + |WithFilter - [A, IterableCC[_$$1], CC[X] <: SortedSet[X]](self: SortedSetOps[A, CC, ?] & IterableOps[A, IterableCC, ?], p: A => Boolean): WithFilter[A, IterableCC, CC] + |WithFilter - (p: Char => Boolean, s: String): WithFilter + |WithFilter - [A](l: Stream[A] @uncheckedVariance, p: A => Boolean): WithFilter[A] + |""".stripMargin, + includeDetail = true, + ) + + @Test def `no-apply2` = checkSnippet( s"""|package example | @@ -451,3 +476,34 @@ class CompletionSnippetSuite extends BaseCompletionSuite: """.stripMargin, filter = _.contains("bar: Int") ) + + @Test def `brackets-already-present` = + check( + """|package a + |case class AAA[T]() + |object O { + | val l: AA@@[Int] = ??? + |} + |""".stripMargin, + """|AAA a + |ArrowAssoc scala.Predef + |""".stripMargin, + ) + + @Test def `brackets-already-present-edit` = + checkEdit( + """|package a + |case class AAA[T]() + |object O { + | val l: AA@@[Int] = ??? + |} + |""".stripMargin, + """|package a + |case class AAA[T]() + |object O { + | val l: AAA[Int] = ??? 
+ |} + |""".stripMargin, + assertSingleItem = false, + ) + diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala index b5db258601bc..ab28baea994b 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala @@ -117,6 +117,7 @@ class CompletionSuite extends BaseCompletionSuite: |fromSpecific(from: Any)(it: IterableOnce[Nothing]): List[Nothing] |fromSpecific(it: IterableOnce[Nothing]): List[Nothing] |nn: List.type & List.type + |runtimeChecked scala.collection.immutable |toFactory(from: Any): Factory[Nothing, List[Nothing]] |formatted(fmtstr: String): String |→[B](y: B): (List.type, B) @@ -529,8 +530,6 @@ class CompletionSuite extends BaseCompletionSuite: """.stripMargin, """|until(end: Int): Range |until(end: Int, step: Int): Range - |until(end: Long): Exclusive[Long] - |until(end: Long, step: Long): Exclusive[Long] |""".stripMargin, stableOrder = false ) @@ -634,6 +633,49 @@ class CompletionSuite extends BaseCompletionSuite: |""".stripMargin ) + @Test def patRecursive = + check( + s"""|object Main { + | Option(List(Option(1))) match { + | case Some(List(None, Som@@)) + |} + |""".stripMargin, + """|Some(value) scala + |Some scala + |""".stripMargin + ) + check( + s"""|object Main { + | (null: Option[Option[Option[Option[Int]]]]) match + | case Some(Some(Some(Som@@)))) + |} + |""".stripMargin, + """|Some(value) scala + |Some scala + |""".stripMargin + ) + check( + s"""|object Main { + | Option(Option(1)) match { + | case Some(Som@@) + |} + |""".stripMargin, + """|Some(value) scala + |Some scala + |""".stripMargin + ) + check( + s"""|object Test: + | case class NestedClass(x: Int) + |object TestRun: + | Option(Test.NestedClass(5)) match + | case Some(Test.Neste@@) + |""".stripMargin, + """|NestedClass(x) test.Test + |NestedClass test.Test + |""".stripMargin + ) + @Test def pat1 = check( s"""|object Main { @@ -641,7 +683,8 @@ class CompletionSuite extends BaseCompletionSuite: | case List(Som@@) |} |""".stripMargin, - """|Some[A](value: A): Some[A] + """|Some(value) scala + |Some scala |Some scala |""".stripMargin ) @@ -1093,7 +1136,7 @@ class CompletionSuite extends BaseCompletionSuite: | scala@@ |} |""".stripMargin, - """|scala + """|scala `` |""".stripMargin ) @@ -1561,7 +1604,7 @@ class CompletionSuite extends BaseCompletionSuite: @Test def `multi-export` = check( - """export scala.collection.{AbstractMap, Set@@} + """export scala.collection.{AbstractMap, Se@@} |""".stripMargin, """Set scala.collection |SetOps scala.collection @@ -1574,7 +1617,9 @@ class CompletionSuite extends BaseCompletionSuite: |StrictOptimizedSetOps scala.collection |StrictOptimizedSortedSetOps scala.collection |GenSet = scala.collection.Set[X] - |""".stripMargin + |""".stripMargin, + filter = _.contains("Set") + ) @Test def `multi-imports` = @@ -1593,6 +1638,7 @@ class CompletionSuite extends BaseCompletionSuite: |StrictOptimizedSortedSetOps scala.collection |GenSet = scala.collection.Set[X] |""".stripMargin, + filter = _.contains("Set") ) @@ -1680,8 +1726,8 @@ class CompletionSuite extends BaseCompletionSuite: check( """|import @@ |""".stripMargin, - """|java - |javax + """|java `` + |javax `` |""".stripMargin, filter = _.startsWith("java") ) @@ -1699,8 +1745,8 @@ class CompletionSuite extends BaseCompletionSuite: check( """|export @@ |""".stripMargin, - """|java - |javax + 
"""|java `` + |javax `` |""".stripMargin, filter = _.startsWith("java") ) @@ -1908,3 +1954,199 @@ class CompletionSuite extends BaseCompletionSuite: """TestEnum test |""".stripMargin, ) + + @Test def `i6477-1` = + checkEdit( + """|package a + |import a.b.SomeClass as SC + | + |package b { + | class SomeClass + |} + |package c { + | class SomeClass + |} + | + |val bar: SC = ??? + |val foo: SomeClass@@ + |""".stripMargin, + """|package a + |import a.b.SomeClass as SC + |import a.c.SomeClass + | + |package b { + | class SomeClass + |} + |package c { + | class SomeClass + |} + | + |val bar: SC = ??? + |val foo: SomeClass + |""".stripMargin, + ) + + @Test def `namedTuple completions` = + check( + """|import scala.language.experimental.namedTuples + |import scala.NamedTuple.* + | + |val person = (name = "Jamie", city = "Lausanne") + | + |val n = person.na@@""".stripMargin, + "name: String", + filter = _.contains("name") + ) + + @Test def `Selectable with namedTuple Fields member` = + check( + """|import scala.language.experimental.namedTuples + |import scala.NamedTuple.* + | + |class NamedTupleSelectable extends Selectable { + | type Fields <: AnyNamedTuple + | def selectDynamic(name: String): Any = ??? + |} + | + |val person2 = new NamedTupleSelectable { + | type Fields = (name: String, city: String) + |} + | + |val n = person2.na@@""".stripMargin, + """|name: String + |selectDynamic(name: String): Any + """.stripMargin, + filter = _.contains("name") + ) + + @Test def `Selectable without namedTuple Fields mamber` = + check( + """|class NonNamedTupleSelectable extends Selectable { + | def selectDynamic(name: String): Any = ??? + |} + | + |val person2 = new NonNamedTupleSelectable {} + | + |val n = person2.na@@""".stripMargin, + """|selectDynamic(name: String): Any + """.stripMargin, + filter = _.contains("name") + ) + + @Test def `with-parenthesis` = + check( + """|package a + |class MyClass + |val i = MyClass@@() + |""".stripMargin, + """|MyClass(): MyClass (Constructor) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `def-arg` = + check( + """|package a + |object W { + | val aaaaaa = 1 + |} + |object O { + | def foo(aa@@) + |} + |""".stripMargin, + "" + ) + + @Test def conflict = + check( + """|package a + |object O { + | val foofoo: Int = 123 + | def method = { + | val foofoo: String = "abc" + | foofoo@@ + | } + |} + |""".stripMargin, + """|foofoo: String + |foofoo - a.O: Int + |""".stripMargin + ) + + @Test def `conflict-2` = + check( + """|package a + |object A { + | val foo = 1 + |} + |object B { + | val foo = 1 + |} + |object O { + | val x: Int = foo@@ + |} + |""".stripMargin, + """|foo - a.A: Int + |foo - a.B: Int + |""".stripMargin + ) + + @Test def `conflict-3` = + check( + """|package a + |object A { + | var foo = 1 + |} + |object B { + | var foo = 1 + |} + |object O { + | val x: Int = foo@@ + |} + |""".stripMargin, + """|foo - a.A: Int + |foo - a.B: Int + |""".stripMargin + ) + + @Test def `shadowing` = + check( + """|package pkg + |object Main { + | val x = ListBuff@@ + |} + |""".stripMargin, + """|ListBuffer[A](elems: A*): ListBuffer[A] - scala.collection.mutable + |new ListBuffer[A]: ListBuffer[A] - scala.collection.mutable + |ListBuffer - scala.collection.mutable + |""".stripMargin + ) + + @Test def `conflict-edit-2` = + checkEdit( + """|package a + |object A { + | val foo = 1 + |} + |object B { + | val foo = 1 + |} + |object O { + | val x: Int = foo@@ + |} + |""".stripMargin, + """|package a + | + |import a.A.foo + |object A { + | val foo = 1 + |} + |object 
B { + | val foo = 1 + |} + |object O { + | val x: Int = foo + |} + |""".stripMargin, + assertSingleItem = false + ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionWorkspaceSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionWorkspaceSuite.scala index c8cfbd178f32..488ae0923ea4 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionWorkspaceSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionWorkspaceSuite.scala @@ -767,7 +767,7 @@ class CompletionWorkspaceSuite extends BaseCompletionSuite: |package b: | def main: Unit = incre@@ |""".stripMargin, - """|increment3: Int + """|increment3 - d: Int |increment - a: Int |increment2 - a.c: Int |""".stripMargin @@ -810,7 +810,7 @@ class CompletionWorkspaceSuite extends BaseCompletionSuite: |} |""".stripMargin, """|fooBar: String - |fooBar: List[Int] + |fooBar - test.A: List[Int] |""".stripMargin, ) @@ -937,3 +937,13 @@ class CompletionWorkspaceSuite extends BaseCompletionSuite: |""".stripMargin, "" ) + + @Test def `metals-i6593` = + check( + """|package a: + | class UniqueObject + |package b: + | val i = Uniq@@ + |""".stripMargin, + "UniqueObject(): UniqueObject - a" + ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/SingletonCompletionsSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/SingletonCompletionsSuite.scala new file mode 100644 index 000000000000..25d1418900fd --- /dev/null +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/SingletonCompletionsSuite.scala @@ -0,0 +1,300 @@ +package dotty.tools.pc.tests.completion + +import dotty.tools.pc.base.BaseCompletionSuite + +import org.junit.Test + +class SingletonCompletionsSuite extends BaseCompletionSuite { + + @Test def `basic` = + check( + """|val k: 1 = @@ + |""".stripMargin, + "1: 1", + topLines = Some(1) + ) + + @Test def `literal` = + check( + """|val k: 1 = 1@@ + |""".stripMargin, + "1: 1", + topLines = Some(1) + ) + + @Test def `string` = + check( + """|val k: "aaa" = "@@" + |""".stripMargin, + """|"aaa": "aaa" + |""".stripMargin + ) + + @Test def `string-edit` = + checkEdit( + """|val k: "aaa" = "@@" + |""".stripMargin, + """|val k: "aaa" = "aaa" + |""".stripMargin, + assertSingleItem = false + ) + + @Test def `string-edit-2` = + checkEdit( + """|val k: "aaa" = @@ //something + |""".stripMargin, + """|val k: "aaa" = "aaa" //something + |""".stripMargin, + assertSingleItem = false + ) + + @Test def `union` = + check( + """|val k: "aaa" | "bbb" = "@@" + |""".stripMargin, + """|"aaa": "aaa" | "bbb" + |"bbb": "aaa" | "bbb" + |""".stripMargin + ) + + @Test def `type-alias-union` = + check( + """|type Color = "red" | "green" | "blue" + |val c: Color = "r@@" + |""".stripMargin, + """|"red": Color + |""".stripMargin + ) + + @Test def `param` = + check( + """|type Color = "red" | "green" | "blue" + |def paint(c: Color) = ??? 
+ |val _ = paint(@@) + |""".stripMargin, + """|"red": Color + |"green": Color + |"blue": Color + |c = : Color + |""".stripMargin, + topLines = Some(4) + ) + + @Test def `with-block` = + check( + """|type Color = "red" | "green" | "blue" + |def c: Color = { + | "r@@" + |} + |""".stripMargin, + """|"red": Color + |""".stripMargin + ) + + @Test def `if-statement` = + check( + """|type Color = "red" | "green" | "blue" + |def c(shouldBeBlue: Boolean): Color = { + | if(shouldBeBlue) "b@@" + | else "red" + |} + |""".stripMargin, + """|"blue": Color + |""".stripMargin + ) + + @Test def `if-statement-2` = + check( + """|type Color = "red" | "green" | "blue" + |def c(shouldBeBlue: Boolean): Color = { + | if(shouldBeBlue) { + | println("is blue") + | "b@@" + | } else "red" + |} + |""".stripMargin, + """|"blue": Color + |""".stripMargin + ) + + @Test def `if-statement-3` = + check( + """|type Color = "red" | "green" | "blue" + |def c(shouldBeBlue: Boolean): Color = { + | if(shouldBeBlue) { + | "b@@" + | println("is blue") + | "blue" + | } else "red" + |} + |""".stripMargin, + """""".stripMargin + ) + + @Test def `middle-of-a-block` = + check( + """|type Color = "red" | "green" | "blue" + |def c: Color = { + | "r@@" + | ??? + |} + |""".stripMargin, + "" + ) + + @Test def overloaded = + check( + """| + |type Color = "red" | "green" | "blue" + |def foo(i: Int) = ??? + |def foo(c: Color) = ??? + | + |def c = foo(@@) + |""".stripMargin, + """|c = : Color + |i = : Int + |""".stripMargin, + topLines = Some(2) + ) + + @Test def `and-type` = + check( + """|type Color = "red" | "green" | "blue" | "black" + |type FordColor = Color & "black" + |val i: FordColor = "@@" + |""".stripMargin, + """|"black": FordColor + |""".stripMargin + ) + + @Test def list = + check( + """|type Color = "red" | "green" | "blue" + |val i: List[Color] = List("@@") + |""".stripMargin, + """|"red": "red" | "green" | "blue" + |"green": "red" | "green" | "blue" + |"blue": "red" | "green" | "blue" + |""".stripMargin + ) + + @Test def option = + check( + """|type Color = "red" | "green" | "blue" + |val i: Option[Color] = Some("@@") + |""".stripMargin, + """|"red": "red" | "green" | "blue" + |"green": "red" | "green" | "blue" + |"blue": "red" | "green" | "blue" + |""".stripMargin + ) + + @Test def map = + check( + """|type Color = "red" | "green" | "blue" + |val i: Option[Int] = Some(1) + |val g: Option[Color] = i.map { _ => "@@" } + |""".stripMargin, + """|"red": "red" | "green" | "blue" + |"green": "red" | "green" | "blue" + |"blue": "red" | "green" | "blue" + |""".stripMargin + ) + + @Test def `some-for-comp` = + check( + """|type Color = "red" | "green" | "blue" + |val i: Option[Int] = Some(1) + |val g: Option[Color] = + | for + | _ <- i + | yield "@@" + |""".stripMargin, + """|"red": "red" | "green" | "blue" + |"green": "red" | "green" | "blue" + |"blue": "red" | "green" | "blue" + |""".stripMargin + ) + + @Test def `some-for-comp-1` = + check( + """|type Color = "red" | "green" | "blue" + |val i: Option[Int] = Some(1) + |val g: Option[Color] = + | for + | _ <- i + | _ <- i + | if i > 2 + | yield "@@" + |""".stripMargin, + """|"red": "red" | "green" | "blue" + |"green": "red" | "green" | "blue" + |"blue": "red" | "green" | "blue" + |""".stripMargin + ) + + @Test def lambda = + check( + """|def m = + | val j = (f: "foo") => 1 + | j("f@@") + |""".stripMargin, + """|"foo": "foo" + |""".stripMargin + ) + + @Test def `match-case-result` = + check( + """|val h: "foo" = + | 1 match + | case _ => "@@" + |""".stripMargin, + """|"foo": "foo" + 
|""".stripMargin + ) + + @Test def `dont-show-on-select` = + check( + """|val f: "foo" = List(1,2,3).@@ + |""".stripMargin, + "", + filter = _ == "\"foo\": \"foo\"" + ) + + @Test def `match-case` = + check( + """|def h(foo: "foo") = + | foo match + | case "@@" => + |""".stripMargin, + """|"foo": "foo" + |""".stripMargin + ) + + @Test def `match-case2` = + check( + """|def h = + | ("foo" : "foo") match + | case "@@" => + |""".stripMargin, + """|"foo": "foo" + |""".stripMargin + ) + + @Test def `named-args` = + check( + """|def h(foo: "foo") = ??? + |def k = h(foo = "@@") + |""".stripMargin, + """|"foo": "foo" + |""".stripMargin + ) + + @Test def `map-type` = + check( + """|def m = Map["foo", Int]("@@") + |""".stripMargin, + """|"foo": "foo" + |""".stripMargin + ) +} diff --git a/presentation-compiler/test/dotty/tools/pc/tests/definition/PcDefinitionSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/definition/PcDefinitionSuite.scala index c7c9b9979404..fab21ffdee0a 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/definition/PcDefinitionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/definition/PcDefinitionSuite.scala @@ -28,7 +28,7 @@ class PcDefinitionSuite extends BasePcDefinitionSuite: MockLocation("scala/Predef.Ensuring#ensuring(+2).", "Predef.scala"), MockLocation("scala/Predef.Ensuring#ensuring(+3).", "Predef.scala"), MockLocation("scala/collection/immutable/List#`::`().", "List.scala"), - MockLocation("scala/collection/IterableFactory#apply().", "Factory.scala") + MockLocation("scala/package.List.", "package.scala") ) override def definitions(offsetParams: OffsetParams): List[Location] = @@ -123,7 +123,7 @@ class PcDefinitionSuite extends BasePcDefinitionSuite: check( """| |object Main { - | /*scala/collection/IterableFactory#apply(). Factory.scala*/@@List(1) + | /*scala/package.List. package.scala*/@@List(1) |} |""".stripMargin ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImplementAbstractMembersSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImplementAbstractMembersSuite.scala index 04c3f8a018e9..2df69cc85af2 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImplementAbstractMembersSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImplementAbstractMembersSuite.scala @@ -345,10 +345,10 @@ class AutoImplementAbstractMembersSuite extends BaseCodeActionSuite: |object Main { | class Baz extends Bar { | - | override def foo: Int = ??? - | | override def bar: Int = ??? | + | override def foo: Int = ??? + | | } |} |""".stripMargin @@ -1089,11 +1089,13 @@ class AutoImplementAbstractMembersSuite extends BaseCodeActionSuite: | def foo(x: Int): Int | def bar(x: String): String | - |given Foo with + |given Foo { | | override def foo(x: Int): Int = ??? | | override def bar(x: String): String = ??? + | + |} |""".stripMargin ) @@ -1243,7 +1245,6 @@ class AutoImplementAbstractMembersSuite extends BaseCodeActionSuite: | |object A { | trait Base: - | def foo(x: Int): Int | def bar(x: String): String | | class <>(x: Int, y: String) extends Base: @@ -1256,13 +1257,10 @@ class AutoImplementAbstractMembersSuite extends BaseCodeActionSuite: | |object A { | trait Base: - | def foo(x: Int): Int | def bar(x: String): String | | class Concrete(x: Int, y: String) extends Base: | - | override def foo(x: Int): Int = ??? - | | override def bar(x: String): String = ??? 
| | @@ -1272,6 +1270,35 @@ class AutoImplementAbstractMembersSuite extends BaseCodeActionSuite: |""".stripMargin, ) + @Test def `braceless-case-class` = + checkEdit( + """|package a + | + |trait Base: + | def foo(x: Int): Int + | def bar(x: String): String + | + |case class <>() extends Base: + | def aaa = "aaa" + |end Concrete + |""".stripMargin, + """|package a + | + |trait Base: + | def foo(x: Int): Int + | def bar(x: String): String + | + |case class Concrete() extends Base: + | + | override def foo(x: Int): Int = ??? + | + | override def bar(x: String): String = ??? + | + | def aaa = "aaa" + |end Concrete + |""".stripMargin + ) + def checkEdit( original: String, expected: String diff --git a/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImportsSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImportsSuite.scala index a862df975d0b..3bb5bfea7bc0 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImportsSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/edit/AutoImportsSuite.scala @@ -405,6 +405,50 @@ class AutoImportsSuite extends BaseAutoImportsSuite: |""".stripMargin, ) + @Test def `i6477` = + checkEdit( + """|package a + |import a.b.SomeClass as SC + | + |package b { + | class SomeClass + |} + |package c { + | class SomeClass + |} + | + |val bar: SC = ??? + |val foo: <> = ??? + |""".stripMargin, + """|package a + |import a.b.SomeClass as SC + |import a.c.SomeClass + | + |package b { + | class SomeClass + |} + |package c { + | class SomeClass + |} + | + |val bar: SC = ??? + |val foo: SomeClass = ??? + |""".stripMargin + ) + + @Test def `use-packages-in-scope` = + checkEdit( + """|import scala.collection.mutable as mut + | + |val l = <>(2) + |""".stripMargin, + """|import scala.collection.mutable as mut + |import mut.ListBuffer + | + |val l = ListBuffer(2) + |""".stripMargin + ) + private def ammoniteWrapper(code: String): String = // Vaguely looks like a scala file that Ammonite generates // from a sc file. 
@@ -456,3 +500,57 @@ class AutoImportsSuite extends BaseAutoImportsSuite: |object Main{ val obj = ABC } |""".stripMargin ) + + @Test def scalaCliNoEmptyLineAfterDirective = + checkEdit( + """|//> using scala 3.5.0 + |object Main: + | <> + |""".stripMargin, + """|//> using scala 3.5.0 + |import java.nio.file.Files + |object Main: + | Files + |""".stripMargin + ) + + @Test def scalaCliNoEmptyLineAfterLicense = + checkEdit( + """|/** + | * Some license text + | */ + | + |object Main: + | <> + |""".stripMargin, + """|/** + | * Some license text + | */ + |import java.nio.file.Files + | + |object Main: + | Files + |""".stripMargin + ) + + @Test def scalaCliNoEmptyLineAfterLicenseWithPackage = + checkEdit( + """|/** + | * Some license text + | */ + |package test + | + |object Main: + | <> + |""".stripMargin, + """|/** + | * Some license text + | */ + |package test + | + |import java.nio.file.Files + | + |object Main: + | Files + |""".stripMargin + ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/edit/ExtractMethodSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/edit/ExtractMethodSuite.scala index 2bb896660123..bc8b91fed5e8 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/edit/ExtractMethodSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/edit/ExtractMethodSuite.scala @@ -446,3 +446,95 @@ class ExtractMethodSuite extends BaseExtractMethodSuite: | } |}""".stripMargin ) + + @Test def `i6476` = + checkEdit( + """|object O { + | class C + | def foo(i: Int)(implicit o: C) = i + | + | @@val o = { + | implicit val c = new C + | <> + | ??? + | } + |} + |""".stripMargin, + """|object O { + | class C + | def foo(i: Int)(implicit o: C) = i + | + | def newMethod()(given c: C): Int = + | foo(2) + | + | val o = { + | implicit val c = new C + | newMethod() + | ??? + | } + |} + |""".stripMargin + ) + + + @Test def `i6476-2` = + checkEdit( + """|object O { + | class C + | def foo(i: Int)(implicit o: C) = i + | + | @@val o = { + | <> + | ??? + | } + |} + |""".stripMargin, + """|object O { + | class C + | def foo(i: Int)(implicit o: C) = i + | + | def newMethod(): Int = + | foo(2)(new C) + | + | val o = { + | newMethod() + | ??? + | } + |} + |""".stripMargin + ) + + @Test def `i6476-3` = + checkEdit( + """|object O { + | class C + | class D + | def foo(i: Int)(using o: C)(x: Int)(using d: D) = i + | + | @@val o = { + | given C = new C + | given D = new D + | val w = 2 + | <> + | ??? + | } + |} + |""".stripMargin, + """|object O { + | class C + | class D + | def foo(i: Int)(using o: C)(x: Int)(using d: D) = i + | + | def newMethod(w: Int)(given given_C: C, given_D: D): Int = + | foo(w)(w) + | + | val o = { + | given C = new C + | given D = new D + | val w = 2 + | newMethod(w) + | ??? 
+ | } + |} + |""".stripMargin + ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/edit/InsertInferredTypeSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/edit/InsertInferredTypeSuite.scala index f12cab7e65ef..a96dd78be138 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/edit/InsertInferredTypeSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/edit/InsertInferredTypeSuite.scala @@ -597,6 +597,78 @@ class InsertInferredTypeSuite extends BaseCodeActionSuite: |""".stripMargin ) + @Test def `backticks-4` = + checkEdit( + """|case class `Foo-Foo`(i: Int) + |object O{ + | val <> = `Foo-Foo`(1) + |}""".stripMargin, + """|case class `Foo-Foo`(i: Int) + |object O{ + | val foo: `Foo-Foo` = `Foo-Foo`(1) + |} + |""".stripMargin + ) + + + @Test def `backticks-5` = + checkEdit( + """|object A{ + | case class `Foo-Foo`(i: Int) + |} + |object O{ + | val <> = A.`Foo-Foo`(1) + |}""".stripMargin, + """|import A.`Foo-Foo` + |object A{ + | case class `Foo-Foo`(i: Int) + |} + |object O{ + | val foo: `Foo-Foo` = A.`Foo-Foo`(1) + |} + |""".stripMargin + ) + + + @Test def `backticks-6` = + checkEdit( + """|object A{ + | case class `Foo-Foo`[A](i: A) + |} + |object O{ + | val <> = A.`Foo-Foo`(1) + |}""".stripMargin, + """|import A.`Foo-Foo` + |object A{ + | case class `Foo-Foo`[A](i: A) + |} + |object O{ + | val foo: `Foo-Foo`[Int] = A.`Foo-Foo`(1) + |} + |""".stripMargin + ) + + @Test def `backticks-7` = + checkEdit( + """|object A{ + | class `x-x` + | case class Foo[A](i: A) + |} + |object O{ + | val <> = A.Foo(new A.`x-x`) + |}""".stripMargin, + """|import A.Foo + |import A.`x-x` + |object A{ + | class `x-x` + | case class Foo[A](i: A) + |} + |object O{ + | val foo: Foo[`x-x`] = A.Foo(new A.`x-x`) + |} + |""".stripMargin + ) + @Test def `literal-types1` = checkEdit( """|object O { diff --git a/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverTermSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverTermSuite.scala index b51974b00fb0..3e7a2549cbe0 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverTermSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverTermSuite.scala @@ -269,9 +269,9 @@ class HoverTermSuite extends BaseHoverSuite: | } yield x |} |""".stripMargin, - """|Option[Int] - |override def headOption: Option[A] - |""".stripMargin.hover + """|```scala + |override def headOption: Option[Int] + |```""".stripMargin.hover ) @Test def `object` = @@ -683,3 +683,22 @@ class HoverTermSuite extends BaseHoverSuite: |""".stripMargin, """yy: A{type T = Int}""".stripMargin.hover ) + + @Test def `right-assoc-extension`: Unit = + check( + """ + |case class Wrap[+T](x: T) + | + |extension [T](a: T) + | def <<*@@:>>[U <: Tuple](b: Wrap[U]): Wrap[T *: U] = Wrap(a *: b.x) + |""".stripMargin, + "extension [T](a: T) def *:[U <: Tuple](b: Wrap[U]): Wrap[T *: U]".hover + ) + + @Test def `dont-ignore-???-in-path`: Unit = + check( + """object Obj: + | val x = ?@@?? 
+ |""".stripMargin, + """def ???: Nothing""".stripMargin.hover + ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala index 8ce7cdce4382..fac30bc757b7 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala @@ -920,4 +920,24 @@ class InlayHintsSuite extends BaseInlayHintsSuite { | case '[field *: fields] => ??? |""".stripMargin ) + + @Test def `arg-apply` = + check( + """|object Main: + | case class A() + | case class B[T]() + | given A = A() + | implicit def bar(using a: A): B[A] = B[A]() + | def foo(using b: B[A]): String = "aaa" + | val g: String = foo + |""".stripMargin, + """|object Main: + | case class A() + | case class B[T]() + | given A = A() + | implicit def bar(using a: A): B[A] = B[A]() + | def foo(using b: B[A]): String = "aaa" + | val g: String = foo/*(using bar<<(5:15)>>)*/ + |""".stripMargin + ) } diff --git a/presentation-compiler/test/dotty/tools/pc/tests/signaturehelp/SignatureHelpInterleavingSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/signaturehelp/SignatureHelpInterleavingSuite.scala index 15546d086033..735a2eb13fab 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/signaturehelp/SignatureHelpInterleavingSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/signaturehelp/SignatureHelpInterleavingSuite.scala @@ -8,9 +8,6 @@ import java.nio.file.Path class SignatureHelpInterleavingSuite extends BaseSignatureHelpSuite: - override protected def scalacOptions(classpath: Seq[Path]): Seq[String] = - List("-language:experimental.clauseInterleaving") - @Test def `proper-position-1` = check( """ diff --git a/presentation-compiler/test/dotty/tools/pc/tests/signaturehelp/SignatureHelpSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/signaturehelp/SignatureHelpSuite.scala index 2b458ced9683..bd9f8edeef49 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/signaturehelp/SignatureHelpSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/signaturehelp/SignatureHelpSuite.scala @@ -2,7 +2,7 @@ package dotty.tools.pc.tests.signaturehelp import dotty.tools.pc.base.BaseSignatureHelpSuite -import org.junit.Test +import org.junit.{ Ignore, Test } class SignatureHelpSuite extends BaseSignatureHelpSuite: @@ -253,6 +253,20 @@ class SignatureHelpSuite extends BaseSignatureHelpSuite: ) @Test def `tparam5` = + check( + """ + |object a { + | List[Int](1).lengthCompare(@@) + |} + """.stripMargin, + """|lengthCompare(len: Int): Int + | ^^^^^^^^ + |lengthCompare(that: Iterable[?]): Int + |""".stripMargin + ) + + @Ignore("See if applyCallInfo can still inform on lengthCompare's sig, even if recv is in error") + @Test def `tparam5_TypeMismatch` = check( """ |object a { @@ -265,6 +279,31 @@ class SignatureHelpSuite extends BaseSignatureHelpSuite: |""".stripMargin ) + @Test def `tparam5_nonvarargs` = + check( + """ + |object a { + | Option[Int](1).getOrElse(@@) + |} + """.stripMargin, + """|getOrElse[B >: Int](default: => B): B + | ^^^^^^^^^^^^^ + |""".stripMargin + ) + + @Ignore("Similar to `tparam5_TypeMismatch`") + @Test def `tparam5_nonvarargs_TypeMismatch` = + check( + """ + |object a { + | Option[String](1).getOrElse(@@) + |} + """.stripMargin, + """|getOrElse[B >: String](default: => B): B + | ^^^^^^^^^^^^^ + |""".stripMargin + ) + @Test def `error1` = check( """ @@ -547,6 +586,19 
@@ class SignatureHelpSuite extends BaseSignatureHelpSuite: ) @Test def `last-arg1` = + check( + """ + |object A { + | List[Int](1).map(a => @@) + |} + """.stripMargin, + """|map[B](f: Int => B): List[B] + | ^^^^^^^^^^^ + |""".stripMargin + ) + + @Ignore("Similar to `tparam5_TypeMismatch`") + @Test def `last-arg1_TypeMismatch` = check( """ |object A { diff --git a/presentation-compiler/test/dotty/tools/pc/utils/DefSymbolCollector.scala b/presentation-compiler/test/dotty/tools/pc/utils/DefSymbolCollector.scala index 0171d2a0d76d..3dabcded4e45 100644 --- a/presentation-compiler/test/dotty/tools/pc/utils/DefSymbolCollector.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/DefSymbolCollector.scala @@ -3,17 +3,17 @@ package dotty.tools.pc.utils import scala.meta.pc.VirtualFileParams import dotty.tools.dotc.ast.tpd.* -import dotty.tools.dotc.ast.{Trees, tpd} +import dotty.tools.dotc.ast.Trees import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.util.SourcePosition -import dotty.tools.pc.PcCollector +import dotty.tools.pc.SimpleCollector import dotty.tools.pc.EndMarker final class DefSymbolCollector( driver: InteractiveDriver, params: VirtualFileParams -) extends PcCollector[Option[Symbol]](driver, params): +) extends SimpleCollector[Option[Symbol]](driver, params): def collect(parent: Option[Tree])( tree: Tree | EndMarker, diff --git a/presentation-compiler/test/dotty/tools/pc/utils/JRE.scala b/presentation-compiler/test/dotty/tools/pc/utils/JRE.scala new file mode 100644 index 000000000000..d082258c255b --- /dev/null +++ b/presentation-compiler/test/dotty/tools/pc/utils/JRE.scala @@ -0,0 +1,13 @@ +package dotty.tools.pc.utils + +object JRE: + + def getJavaMajorVersion: Int = + val javaVersion = sys.env.get("java.specification.version").filter(!_.isEmpty()) + + javaVersion match + case Some(version) if version.startsWith("1.8") => 8 + case Some(version) => version.toInt // it is better to crash during tests than to run incorrect suite + case None => 8 + + diff --git a/presentation-compiler/test/dotty/tools/pc/utils/MockSymbolSearch.scala b/presentation-compiler/test/dotty/tools/pc/utils/MockSymbolSearch.scala index 9015a39ba9e7..459c41e3c8e5 100644 --- a/presentation-compiler/test/dotty/tools/pc/utils/MockSymbolSearch.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/MockSymbolSearch.scala @@ -8,7 +8,6 @@ import scala.jdk.CollectionConverters.* import scala.jdk.OptionConverters.* import scala.meta.internal.metals.{ClasspathSearch, WorkspaceSymbolQuery} import scala.meta.pc.ContentType -import scala.meta.pc.SymbolSearch.Result import scala.meta.pc.{ ParentSymbols, SymbolDocumentation, diff --git a/presentation-compiler/test/dotty/tools/pc/utils/PcAssertions.scala b/presentation-compiler/test/dotty/tools/pc/utils/PcAssertions.scala index ef15121c6702..af4502d66b4b 100644 --- a/presentation-compiler/test/dotty/tools/pc/utils/PcAssertions.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/PcAssertions.scala @@ -4,7 +4,6 @@ import scala.language.unsafeNulls import dotty.tools.pc.completions.CompletionSource import dotty.tools.dotc.util.DiffUtil -import dotty.tools.pc.utils.InteractiveEnrichments.* import org.hamcrest import org.hamcrest.* @@ -127,7 +126,6 @@ trait PcAssertions: def getDetailedMessage(diff: String): String = val lines = diff.linesIterator.toList val sources = completionSources.padTo(lines.size, CompletionSource.Empty) - val maxLength = lines.map(_.length).maxOption.getOrElse(0) var 
completionIndex = 0 lines.map: line => if line.startsWith(Console.BOLD + Console.RED) || line.startsWith(" ") then diff --git a/presentation-compiler/test/dotty/tools/pc/utils/RangeReplace.scala b/presentation-compiler/test/dotty/tools/pc/utils/RangeReplace.scala index 0b41b106eb02..deafad4987ce 100644 --- a/presentation-compiler/test/dotty/tools/pc/utils/RangeReplace.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/RangeReplace.scala @@ -12,14 +12,21 @@ trait RangeReplace: def renderHighlightsAsString( code: String, highlights: List[DocumentHighlight] + ): String = renderRangesAsString(code, highlights.map(_.getRange())) + + def renderRangesAsString( + code: String, + highlights: List[Range], + alreadyAddedMarkings: List[(Int, Int)] = Nil, + currentBase: Option[String] = None ): String = highlights - .foldLeft((code, immutable.List.empty[(Int, Int)])) { - case ((base, alreadyAddedMarkings), location) => - replaceInRangeWithAdjustmens( + .foldLeft((currentBase.getOrElse(code), alreadyAddedMarkings)) { + case ((base, alreadyAddedMarkings), range) => + replaceInRangeWithAdjustments( code, base, - location.getRange, + range, alreadyAddedMarkings ) } @@ -31,9 +38,9 @@ trait RangeReplace: prefix: String = "<<", suffix: String = ">>" ): String = - replaceInRangeWithAdjustmens(base, base, range, List(), prefix, suffix)._1 + replaceInRangeWithAdjustments(base, base, range, List(), prefix, suffix)._1 - protected def replaceInRangeWithAdjustmens( + protected def replaceInRangeWithAdjustments( code: String, currentBase: String, range: Range, diff --git a/presentation-compiler/test/dotty/tools/pc/utils/TestInlayHints.scala b/presentation-compiler/test/dotty/tools/pc/utils/TestInlayHints.scala index a923b76b955c..b9d3fd411dcc 100644 --- a/presentation-compiler/test/dotty/tools/pc/utils/TestInlayHints.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/TestInlayHints.scala @@ -4,7 +4,6 @@ import scala.collection.mutable.ListBuffer import scala.meta.internal.jdk.CollectionConverters._ import dotty.tools.pc.utils.InteractiveEnrichments.* -import dotty.tools.pc.utils.TextEdits import org.eclipse.lsp4j.InlayHint import org.eclipse.lsp4j.TextEdit @@ -67,4 +66,4 @@ object TestInlayHints { def removeInlayHints(text: String): String = text.replaceAll(raw"\/\*(.*?)\*\/", "").nn -} \ No newline at end of file +} diff --git a/presentation-compiler/test/dotty/tools/pc/utils/TestingWorkspaceSearch.scala b/presentation-compiler/test/dotty/tools/pc/utils/TestingWorkspaceSearch.scala index 0b49bdf8bca8..27b9a49f9555 100644 --- a/presentation-compiler/test/dotty/tools/pc/utils/TestingWorkspaceSearch.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/TestingWorkspaceSearch.scala @@ -1,25 +1,63 @@ package dotty.tools.pc.utils +import dotty.tools.dotc.ast.untpd.* +import dotty.tools.dotc.core.Contexts.Context +import dotty.tools.dotc.core.Flags +import dotty.tools.dotc.interactive.InteractiveDriver +import dotty.tools.pc.CompilerSearchVisitor +import dotty.tools.pc.utils.InteractiveEnrichments.decoded + import java.io.File import java.nio.file.Paths - import scala.collection.mutable -import scala.meta.internal.metals.{ - CompilerVirtualFileParams, - Fuzzy, - WorkspaceSymbolQuery -} -import scala.meta.pc.SymbolSearchVisitor import scala.language.unsafeNulls +import scala.meta.internal.metals.CompilerVirtualFileParams +import scala.meta.internal.metals.Fuzzy +import scala.meta.internal.metals.WorkspaceSymbolQuery +import scala.meta.pc.SymbolSearchVisitor -import 
dotty.tools.dotc.core.Contexts.Context -import dotty.tools.dotc.core.Symbols.* -import dotty.tools.dotc.interactive.InteractiveDriver -import dotty.tools.dotc.semanticdb.SemanticSymbolBuilder -import dotty.tools.pc.CompilerSearchVisitor +import TestingWorkspaceSearch.* object TestingWorkspaceSearch: def empty: TestingWorkspaceSearch = new TestingWorkspaceSearch(Nil) + class Disambiguator: + val nameMap = mutable.Map[String, Int]() + def methodPart(name: String) = + val i = nameMap.getOrElse(name, 0) + nameMap.put(name, i + 1) + if i == 0 then "()." + else s"(+$i)." + + case class ParentSymbol(symbol: SearchSymbol, fileName: String): + private val dis: Disambiguator = new Disambiguator + private def isPackage = symbol.lastOption.exists(_.suffix == "/") + private def isMethod = symbol.lastOption.exists(_.suffix.endsWith(").")) + private def isInit = symbol.lastOption.exists(_.name == "") + private def filePackage = SymbolPart(fileName, "$package.") + private def member(part: SymbolPart)= + if isPackage then Some(symbol :+ filePackage :+ part) + else if isMethod then + if isInit then Some(symbol.dropRight(1) :+ part) + else None + else Some(symbol :+ part) + def makeMethod(newPart: String) = member(SymbolPart(newPart, dis.methodPart(newPart))) + def makeVal(newPart: String) = + member(SymbolPart(newPart, ".")) + def makeTypeAlias(newPart: String) = member(SymbolPart(newPart, "#")) + def makeType(newPart: String) = symbol :+ SymbolPart(newPart, "#") + def makeTerm(newPart: String) = symbol :+ SymbolPart(newPart, ".") + def makePackage(parts: List[String], isPackageObject: Boolean = false) = + val suffix = if isPackageObject then "/package." else "/" + parts match + case "" :: Nil => List(SymbolPart("_empty_", suffix)) + case list if symbol.map(_.name) == List("_empty_") => list.map(SymbolPart(_, suffix)) + case list => symbol ++ list.map(SymbolPart(_, suffix)) + + object ParentSymbol: + def empty(fileName: String) = ParentSymbol(Nil, fileName) + + case class SymbolPart(name: String, suffix: String) + type SearchSymbol = List[SymbolPart] class TestingWorkspaceSearch(classpath: Seq[String]): val inputs: mutable.Map[String, String] = mutable.Map.empty[String, String] @@ -30,8 +68,41 @@ class TestingWorkspaceSearch(classpath: Seq[String]): defaultFlags ++ List("-classpath", classpath.mkString(File.pathSeparator)) + private class SymbolCollector extends UntypedTreeAccumulator[List[Tree]]: + override def apply(x: List[Tree], tree: Tree)(using Context): List[Tree] = tree :: x + + private def newSymbol(tree: Tree, parent: ParentSymbol)(using Context): Option[SearchSymbol] = + tree match + case PackageDef(name, _) => + Some(parent.makePackage(namesFromSelect(name).reverse)) + case m @ ModuleDef(name, _) if m.mods.is(Flags.Package) => + Some(parent.makePackage(List(name.decoded), isPackageObject = true)) + case ModuleDef(name, _) => + Some(parent.makeTerm(name.decoded)) + case ValDef(name, _, _) => + parent.makeVal(name.decoded) + case t @ TypeDef(name, _: Template) if !t.mods.is(Flags.Implicit) => + Some(parent.makeType(name.decoded)) + case TypeDef(name, _) => + parent.makeTypeAlias(name.decoded) + case DefDef(name, _, _, _) => + parent.makeMethod(name.decoded) + case _ => None + + def traverse(acc: List[SearchSymbol], tree: Tree, parent: ParentSymbol)(using Context): List[SearchSymbol] = + val symbol = newSymbol(tree, parent) + val res = symbol.filter(_.lastOption.exists(_.suffix != "/")).map(_ :: acc).getOrElse(acc) + val children = foldOver(Nil, tree).reverse + val newParent = 
symbol.map(ParentSymbol(_, parent.fileName)).getOrElse(parent) + children.foldLeft(res)((a, c) => traverse(a, c, newParent)) + val driver = new InteractiveDriver(settings) + private def namesFromSelect(select: Tree)(using Context): List[String] = + select match + case Select(qual, name) => name.decoded :: namesFromSelect(qual) + case Ident(name) => List(name.decoded) + def search( query: WorkspaceSymbolQuery, visitor: SymbolSearchVisitor, @@ -41,21 +112,17 @@ class TestingWorkspaceSearch(classpath: Seq[String]): visitor match case visitor: CompilerSearchVisitor => - inputs.map { (path, text) => - - val nioPath = Paths.get(path) - val uri = nioPath.toUri() - val symbols = DefSymbolCollector(driver, CompilerVirtualFileParams(uri, text)).namedDefSymbols - - // We have to map symbol from this Context, to one in PresentationCompiler - // To do it we are searching it with semanticdb symbol - val semanticSymbolBuilder = SemanticSymbolBuilder() - symbols - .filter((symbol, _) => filter(symbol)) - .filter((_, name) => Fuzzy.matches(query.query, name)) - .map(symbol => semanticSymbolBuilder.symbolName(symbol._1)) - .map( - visitor.visitWorkspaceSymbol(Paths.get(""), _, null, null) - ) - } + inputs.map: (path, text) => + val nio = Paths.get(path) + val uri = nio.toUri() + driver.run(uri, text) + val run = driver.currentCtx.run + val unit = run.units.head + val symbols = SymbolCollector().traverse(Nil, unit.untpdTree, ParentSymbol.empty(nio.getFileName().toString().stripSuffix(".scala"))) + symbols.foreach: sym => + val name = sym.last.name + if Fuzzy.matches(query.query, name) + then + val symbolsString = sym.map{ case SymbolPart(name, suffix) => name ++ suffix}.mkString + visitor.visitWorkspaceSymbol(Paths.get(""), symbolsString, null, null) case _ => diff --git a/project/Build.scala b/project/Build.scala index 047f2c0c22ea..7f98e87fcaaa 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -1,27 +1,32 @@ import java.io.File import java.nio.file._ - import Process._ import Modes._ import ScaladocGeneration._ import com.jsuereth.sbtpgp.PgpKeys -import sbt.Keys._ -import sbt._ +import sbt.Keys.* +import sbt.* import complete.DefaultParsers._ import pl.project13.scala.sbt.JmhPlugin import pl.project13.scala.sbt.JmhPlugin.JmhKeys.Jmh +import com.gradle.develocity.agent.sbt.DevelocityPlugin.autoImport._ +import com.typesafe.sbt.packager.Keys._ +import com.typesafe.sbt.packager.MappingsHelper.directory +import com.typesafe.sbt.packager.universal.UniversalPlugin +import com.typesafe.sbt.packager.universal.UniversalPlugin.autoImport.Universal +import com.typesafe.sbt.packager.windows.WindowsPlugin +import com.typesafe.sbt.packager.windows.WindowsPlugin.autoImport.Windows import sbt.Package.ManifestAttributes import sbt.PublishBinPlugin.autoImport._ import dotty.tools.sbtplugin.RepublishPlugin import dotty.tools.sbtplugin.RepublishPlugin.autoImport._ import sbt.plugins.SbtPlugin import sbt.ScriptedPlugin.autoImport._ -import xerial.sbt.pack.PackPlugin -import xerial.sbt.pack.PackPlugin.autoImport._ import xerial.sbt.Sonatype.autoImport._ import com.typesafe.tools.mima.plugin.MimaPlugin.autoImport._ import org.scalajs.sbtplugin.ScalaJSPlugin import org.scalajs.sbtplugin.ScalaJSPlugin.autoImport._ + import sbtbuildinfo.BuildInfoPlugin import sbtbuildinfo.BuildInfoPlugin.autoImport._ import sbttastymima.TastyMiMaPlugin @@ -86,9 +91,47 @@ object DottyJSPlugin extends AutoPlugin { object Build { import ScaladocConfigs._ - val referenceVersion = "3.4.2" + /** Version of the Scala compiler used to build the 
artifacts.
+   * Reference version should track the latest version pushed to Maven:
+   *  - In the main branch it should be the last RC version (using experimental TASTy required for non-bootstrapped tests)
+   *  - In the release branch it should be the last stable release
+   * 3.6.0-RC1 was released as 3.6.0 - it uses an experimental TASTy version
+   */
+  val referenceVersion = "3.6.2"
+
+  /** Version of the Scala compiler targeted in the current release cycle
+   * Contains a version without RC/SNAPSHOT/NIGHTLY specific suffixes
+   * Should be updated ONLY after the release or cutoff for the previous release cycle.
+   *
+   * Should only be referred to from `dottyVersion` or settings/tasks requiring a simplified version string,
+   * e.g. `compatMode` or the Windows native distribution version.
+   */
+  val developedVersion = "3.6.3"
+
+  /** The version of the compiler including the RC prefix.
+   * Defined as the common base before calculating environment-specific suffixes in `dottyVersion`
+   *
+   * By default, during the development cycle it is defined as `${developedVersion}-RC1`;
+   * during the release candidate cycle it is incremented by the release officer before publishing a subsequent RC version;
+   * during the final, stable release it is set exactly to `developedVersion`.
+   */
+  val baseVersion = developedVersion
-  val baseVersion = "3.5.0"
+  /** Final version of the Scala compiler, controlled by environment variables. */
+  val dottyVersion = {
+    if (isRelease) baseVersion
+    else if (isNightly) s"${baseVersion}-bin-${VersionUtil.commitDate}-${VersionUtil.gitHash}-NIGHTLY"
+    else s"${baseVersion}-bin-SNAPSHOT"
+  }
+  def isRelease = sys.env.get("RELEASEBUILD").contains("yes")
+  def isNightly = sys.env.get("NIGHTLYBUILD").contains("yes")
+
+  /** Version calculated for `nonbootstrapped` projects */
+  val dottyNonBootstrappedVersion = {
+    // Make sure sbt always computes the scalaBinaryVersion correctly
+    val bin = if (!dottyVersion.contains("-bin")) "-bin" else ""
+    dottyVersion + bin + "-nonbootstrapped"
+  }
   // LTS or Next
   val versionLine = "Next"
@@ -103,11 +146,12 @@ object Build {
   /** Minor version against which we check binary compatibility.
    *
    * This must be the earliest published release in the same versioning line.
-   * For a baseVersion `3.M.P` the mimaPreviousDottyVersion should be set to:
+   * For a developedVersion `3.M.P` the mimaPreviousDottyVersion should be set to:
    *  - `3.M.0` if `P > 0`
    *  - `3.(M-1).0` if `P = 0`
+   * 3.6.1 is an exception to this rule - 3.6.0 was a broken release
    */
-  val mimaPreviousDottyVersion = "3.4.0"
+  val mimaPreviousDottyVersion = "3.6.2"
   /** LTS version against which we check binary compatibility.
    *
@@ -118,11 +162,9 @@ object Build {
   val mimaPreviousLTSDottyVersion = "3.3.0"
   /** Version of Scala CLI to download */
-  val scalaCliLauncherVersion = "1.4.0"
-  /** Version of Scala CLI to download (on Windows - last known validated version) */
-  val scalaCliLauncherVersionWindows = "1.4.0"
+  val scalaCliLauncherVersion = "1.5.4"
   /** Version of Coursier to download for initializing the local maven repo of Scala command */
-  val coursierJarVersion = "2.1.10"
+  val coursierJarVersion = "2.1.18"
   object CompatMode {
     final val BinaryCompatible = 0
@@ -130,8 +172,8 @@
   }
   val compatMode = {
-    val VersionRE = """^\d+\.(\d+).(\d+).*""".r
-    baseVersion match {
+    val VersionRE = """^\d+\.(\d+)\.(\d+)""".r
+    developedVersion match {
       case VersionRE(_, "0") => CompatMode.BinaryCompatible
       case _ => CompatMode.SourceAndBinaryCompatible
     }
@@ -144,8 +186,8 @@
    * scala-library. 
*/ def stdlibVersion(implicit mode: Mode): String = mode match { - case NonBootstrapped => "2.13.14" - case Bootstrapped => "2.13.14" + case NonBootstrapped => "2.13.15" + case Bootstrapped => "2.13.15" } /** Version of the scala-library for which we will generate TASTy. @@ -155,30 +197,12 @@ object Build { * We can use nightly versions to tests the future compatibility in development. * Nightly versions: https://scala-ci.typesafe.com/ui/native/scala-integration/org/scala-lang */ - val stdlibBootstrappedVersion = "2.13.14" + val stdlibBootstrappedVersion = "2.13.15" val dottyOrganization = "org.scala-lang" val dottyGithubUrl = "https://github.com/scala/scala3" val dottyGithubRawUserContentUrl = "https://raw.githubusercontent.com/scala/scala3" - - val isRelease = sys.env.get("RELEASEBUILD") == Some("yes") - - val dottyVersion = { - def isNightly = sys.env.get("NIGHTLYBUILD") == Some("yes") - if (isRelease) - baseVersion - else if (isNightly) - baseVersion + "-bin-" + VersionUtil.commitDate + "-" + VersionUtil.gitHash + "-NIGHTLY" - else - baseVersion + "-bin-SNAPSHOT" - } - val dottyNonBootstrappedVersion = { - // Make sure sbt always computes the scalaBinaryVersion correctly - val bin = if (!dottyVersion.contains("-bin")) "-bin" else "" - dottyVersion + bin + "-nonbootstrapped" - } - val sbtCommunityBuildVersion = "0.1.0-SNAPSHOT" val agentOptions = List( @@ -220,6 +244,8 @@ object Build { val repl = taskKey[Unit]("spawns a repl with the correct classpath") + val buildQuick = taskKey[Unit]("builds the compiler and writes the classpath to bin/.cp to enable the bin/scalacQ and bin/scalaQ scripts") + // Compiles the documentation and static site val genDocs = inputKey[Unit]("run scaladoc to generate static documentation site") @@ -262,6 +288,50 @@ object Build { // enable verbose exception messages for JUnit (Test / testOptions) += Tests.Argument(TestFrameworks.JUnit, "-a", "-v", "-s"), + + // Configuration to publish build scans to develocity.scala-lang.org + develocityConfiguration := { + val isInsideCI = insideCI.value + val config = develocityConfiguration.value + val buildScan = config.buildScan + val buildCache = config.buildCache + // disable test retry on compilation test classes + val noRetryTestClasses = Set( + "dotty.tools.dotc.BestEffortOptionsTests", + "dotty.tools.dotc.CompilationTests", + "dotty.tools.dotc.FromTastyTests", + "dotty.tools.dotc.IdempotencyTests", + "dotty.tools.dotc.ScalaJSCompilationTests", + "dotty.tools.dotc.TastyBootstrapTests", + "dotty.tools.dotc.coverage.CoverageTests", + "dotty.tools.dotc.transform.PatmatExhaustivityTest", + "dotty.tools.repl.ScriptedTests" + ) + config + .withProjectId(ProjectId("scala3")) + .withServer(config.server.withUrl(Some(url("https://develocity.scala-lang.org")))) + .withBuildScan( + buildScan + .withPublishing(Publishing.onlyIf(_.authenticated)) + .withBackgroundUpload(!isInsideCI) + .tag(if (isInsideCI) "CI" else "Local") + .withLinks(buildScan.links ++ GithubEnv.develocityLinks) + .withValues(buildScan.values ++ GithubEnv.develocityValues) + .withObfuscation(buildScan.obfuscation.withIpAddresses(_.map(_ => "0.0.0.0"))) + ) + .withBuildCache( + buildCache + .withLocal(buildCache.local.withEnabled(false)) + .withRemote(buildCache.remote.withEnabled(false)) + ) + .withTestRetryConfiguration( + config.testRetryConfiguration + .withFlakyTestPolicy(FlakyTestPolicy.Fail) + .withMaxRetries(1) + .withMaxFailures(10) + .withClassesFilter((className, _) => !noRetryTestClasses.contains(className)) + ) + } ) // Settings shared globally 
(scoped in Global). Used in build.sbt @@ -313,7 +383,7 @@ object Build { ), // This is used to download nightly builds of the Scala 2 library in `scala2-library-bootstrapped` - resolvers += "scala-integration" at "https://scala-ci.typesafe.com/artifactory/scala-integration/", + resolvers += "scala-integration" at "https://scala-ci.typesafe.com/artifactory/scala-integration/" ) lazy val disableDocSetting = @@ -417,10 +487,20 @@ object Build { ) ++ extMap } + /* These projects are irrelevant from IDE point of view and do not compile with Bloop*/ + val fullyDisabledProjects = Set( + "scala2-library-cc", + "scala2-library-bootstrapped", + "scala2-library-cc-tasty", + "scala2-library-tasty" + ) + + val enableBspAllProjects = false + // Settings used when compiling dotty with a non-bootstrapped dotty - lazy val commonBootstrappedSettings = commonDottySettings ++ NoBloopExport.settings ++ Seq( - // To enable support of scaladoc and language-server projects you need to change this to true and use sbt as your build server - bspEnabled := false, + lazy val commonBootstrappedSettings = commonDottySettings ++ Seq( + // To enable support of scaladoc and language-server projects you need to change this to true + bspEnabled := { if(fullyDisabledProjects(name.value)) false else enableBspAllProjects }, (Compile / unmanagedSourceDirectories) += baseDirectory.value / "src-bootstrapped", version := dottyVersion, @@ -565,18 +645,36 @@ object Build { def findArtifactPath(classpath: Def.Classpath, name: String): String = findArtifact(classpath, name).getAbsolutePath + /** Replace package names in package definitions, for shading. + * It assumes the full package def is written on a single line. + * It does not adapt the imports accordingly. + */ + def replacePackage(lines: List[String])(replace: PartialFunction[String, String]): List[String] = { + def recur(lines: List[String]): List[String] = + lines match { + case head :: tail => + if (head.startsWith("package ")) { + val packageName = head.stripPrefix("package ").trim + val newPackageName = replace.applyOrElse(packageName, (_: String) => packageName) + s"package $newPackageName" :: tail + } else head :: recur(tail) + case _ => lines + } + recur(lines) + } + /** Insert UnsafeNulls Import after package */ - def insertUnsafeNullsImport(lines: Seq[String]): Seq[String] = { - def recur(ls: Seq[String], foundPackage: Boolean): Seq[String] = ls match { - case Seq(l, rest @ _*) => + def insertUnsafeNullsImport(lines: List[String]): List[String] = { + def recur(ls: List[String], foundPackage: Boolean): List[String] = ls match { + case l :: rest => val lt = l.trim() if (foundPackage) { if (!(lt.isEmpty || lt.startsWith("package "))) - "import scala.language.unsafeNulls" +: ls - else l +: recur(rest, foundPackage) + "import scala.language.unsafeNulls" :: ls + else l :: recur(rest, foundPackage) } else { if (lt.startsWith("package ")) l +: recur(rest, true) - else l +: recur(rest, foundPackage) + else l :: recur(rest, foundPackage) } case _ => ls } @@ -613,7 +711,7 @@ object Build { // Settings shared between scala3-compiler and scala3-compiler-bootstrapped lazy val commonDottyCompilerSettings = Seq( // Note: bench/profiles/projects.yml should be updated accordingly. 
- Compile / scalacOptions ++= Seq("-Yexplicit-nulls"), + Compile / scalacOptions ++= Seq("-Yexplicit-nulls", "-Wsafe-init"), // Use source 3.3 to avoid fatal migration warnings on scalajs-ir scalacOptions ++= Seq("-source", "3.3"), @@ -641,11 +739,11 @@ object Build { // get libraries onboard libraryDependencies ++= Seq( - "org.scala-lang.modules" % "scala-asm" % "9.6.0-scala-1", // used by the backend + "org.scala-lang.modules" % "scala-asm" % "9.7.0-scala-2", // used by the backend Dependencies.compilerInterface, - "org.jline" % "jline-reader" % "3.25.1", // used by the REPL - "org.jline" % "jline-terminal" % "3.25.1", - "org.jline" % "jline-terminal-jna" % "3.25.1", // needed for Windows + "org.jline" % "jline-reader" % "3.27.1", // used by the REPL + "org.jline" % "jline-terminal" % "3.27.1", + "org.jline" % "jline-terminal-jni" % "3.27.1", // needed for Windows ("io.get-coursier" %% "coursier" % "2.0.16" % Test).cross(CrossVersion.for3Use2_13), ), @@ -838,7 +936,8 @@ object Build { extraClasspath ++= Seq(dottyCompiler, dottyInterfaces, asm, dottyStaging, dottyTastyInspector, tastyCore, compilerInterface) } - val fullArgs = main :: defaultOutputDirectory ::: (if (printTasty) args else insertClasspathInArgs(args, extraClasspath.mkString(File.pathSeparator))) + val wrappedArgs = if (printTasty) args else insertClasspathInArgs(args, extraClasspath.mkString(File.pathSeparator)) + val fullArgs = main :: (defaultOutputDirectory ::: wrappedArgs).map("\""+ _ + "\"").map(_.replace("\\", "\\\\")) (Compile / runMain).toTask(fullArgs.mkString(" ", " ", "")) }.evaluated, @@ -876,11 +975,18 @@ object Build { val sjsSources = (trgDir ** "*.scala").get.toSet sjsSources.foreach(f => { val lines = IO.readLines(f) - IO.writeLines(f, insertUnsafeNullsImport(lines)) + val linesWithPackage = replacePackage(lines) { + case "org.scalajs.ir" => "dotty.tools.sjs.ir" + } + IO.writeLines(f, insertUnsafeNullsImport(linesWithPackage)) }) sjsSources } (Set(scalaJSIRSourcesJar)).toSeq }.taskValue, + + // Develocity's Build Cache does not work with our compilation tests + // at the moment: it does not take compilation files as inputs. 
+ Test / develocityBuildCacheClient := None, ) def insertClasspathInArgs(args: List[String], cp: String): List[String] = { @@ -890,8 +996,6 @@ object Build { } lazy val nonBootstrappedDottyCompilerSettings = commonDottyCompilerSettings ++ Seq( - // FIXME revert this to commonDottyCompilerSettings, when we bump reference version to 3.5.0 - scalacOptions += "-Ysafe-init", // packageAll packages all and then returns a map with the abs location packageAll := Def.taskDyn { // Use a dynamic task to avoid loops when loading the settings Def.task { @@ -919,8 +1023,6 @@ object Build { ) lazy val bootstrappedDottyCompilerSettings = commonDottyCompilerSettings ++ Seq( - // FIXME revert this to commonDottyCompilerSettings, when we bump reference version to 3.5.0 - scalacOptions += "-Wsafe-init", javaOptions ++= { val jars = packageAll.value Seq( @@ -985,7 +1087,11 @@ object Build { "-sourcepath", (Compile / sourceDirectories).value.map(_.getAbsolutePath).distinct.mkString(File.pathSeparator), "-Yexplicit-nulls", ), - (Compile / doc / scalacOptions) ++= ScaladocConfigs.DefaultGenerationSettings.value.settings + (Compile / doc / scalacOptions) ++= ScaladocConfigs.DefaultGenerationSettings.value.settings, + (Compile / packageSrc / mappings) ++= { + val auxBase = (ThisBuild / baseDirectory).value / "library-aux/src" + auxBase ** "*.scala" pair io.Path.relativeTo(auxBase) + }, ) lazy val `scala3-library` = project.in(file("library")).asDottyLibrary(NonBootstrapped) @@ -1318,25 +1424,21 @@ object Build { ) lazy val `scala3-presentation-compiler` = project.in(file("presentation-compiler")) - .asScala3PresentationCompiler(NonBootstrapped) - lazy val `scala3-presentation-compiler-bootstrapped` = project.in(file("presentation-compiler")) - .asScala3PresentationCompiler(Bootstrapped) + .withCommonSettings(Bootstrapped) + .dependsOn(`scala3-compiler-bootstrapped`, `scala3-library-bootstrapped`) + .settings(presentationCompilerSettings) + .settings(scala3PresentationCompilerBuildInfo) .settings( // Add `-Yno-flexible-types` flag for bootstrap, see comments for `bootstrappedDottyCompilerSettings` Compile / scalacOptions += "-Yno-flexible-types" ) - def scala3PresentationCompiler(implicit mode: Mode): Project = mode match { - case NonBootstrapped => `scala3-presentation-compiler` - case Bootstrapped => `scala3-presentation-compiler-bootstrapped` - } - - def scala3PresentationCompilerBuildInfo(implicit mode: Mode) = + def scala3PresentationCompilerBuildInfo = Seq( ideTestsDependencyClasspath := { - val dottyLib = (dottyLibrary / Compile / classDirectory).value + val dottyLib = (`scala3-library-bootstrapped` / Compile / classDirectory).value val scalaLib = - (dottyLibrary / Compile / dependencyClasspath) + (`scala3-library-bootstrapped` / Compile / dependencyClasspath) .value .map(_.data) .filter(_.getName.matches("scala-library.*\\.jar")) @@ -1352,25 +1454,22 @@ object Build { BuildInfoPlugin.buildInfoScopedSettings(Test) ++ BuildInfoPlugin.buildInfoDefaultSettings - def presentationCompilerSettings(implicit mode: Mode) = { - val mtagsVersion = "1.3.0+56-a06a024d-SNAPSHOT" - + lazy val presentationCompilerSettings = { + val mtagsVersion = "1.4.1" Seq( - resolvers ++= Resolver.sonatypeOssRepos("snapshots"), libraryDependencies ++= Seq( "org.lz4" % "lz4-java" % "1.8.0", "io.get-coursier" % "interface" % "1.0.18", - "org.scalameta" % "mtags-interfaces" % mtagsVersion, + ("org.scalameta" % "mtags-interfaces" % mtagsVersion) + .exclude("org.eclipse.lsp4j","org.eclipse.lsp4j") + 
.exclude("org.eclipse.lsp4j","org.eclipse.lsp4j.jsonrpc"), + "org.eclipse.lsp4j" % "org.eclipse.lsp4j" % "0.20.1", ), - libraryDependencies += ("org.scalameta" % "mtags-shared_2.13.14" % mtagsVersion % SourceDeps), + libraryDependencies += ("org.scalameta" % "mtags-shared_2.13.15" % mtagsVersion % SourceDeps), ivyConfigurations += SourceDeps.hide, transitiveClassifiers := Seq("sources"), scalacOptions ++= Seq("-source", "3.3"), // To avoid fatal migration warnings - // FIXME change this to just Seq("-Yexplicit-nulls, "-Wsafe-init") when reference is set to 3.5.0 - Compile / scalacOptions ++= (mode match { - case Bootstrapped => Seq("-Yexplicit-nulls", "-Wsafe-init") - case NonBootstrapped => Seq("-Yexplicit-nulls", "-Ysafe-init") - }), + Compile / scalacOptions ++= Seq("-Yexplicit-nulls", "-Wsafe-init"), Compile / sourceGenerators += Def.task { val s = streams.value val cacheDir = s.cacheDirectory @@ -2125,26 +2224,36 @@ object Build { ) lazy val commonDistSettings = Seq( - packMain := Map(), publishArtifact := false, - packGenerateMakefile := false, republishRepo := target.value / "republish", - packResourceDir += (republishRepo.value / "bin" -> "bin"), - packResourceDir += (republishRepo.value / "maven2" -> "maven2"), - packResourceDir += (republishRepo.value / "lib" -> "lib"), - republishCommandLibs += - ("scala" -> List("scala3-interfaces", "scala3-compiler", "scala3-library", "tasty-core")), - republishCommandLibs += - ("with_compiler" -> List("scala3-staging", "scala3-tasty-inspector", "^!scala3-interfaces", "^!scala3-compiler", "^!scala3-library", "^!tasty-core")), - republishCommandLibs += - ("scaladoc" -> List("scala3-interfaces", "scala3-compiler", "scala3-library", "tasty-core", "scala3-tasty-inspector", "scaladoc")), - Compile / pack := republishPack.value, + Universal / packageName := packageName.value, + // ======== + Universal / stage := (Universal / stage).dependsOn(republish).value, + Universal / packageBin := (Universal / packageBin).dependsOn(republish).value, + Universal / packageZipTarball := (Universal / packageZipTarball).dependsOn(republish) + .map { archiveFile => + // Rename .tgz to .tar.gz for consistency with previous versions + val renamedFile = archiveFile.getParentFile() / archiveFile.getName.replaceAll("\\.tgz$", ".tar.gz") + IO.move(archiveFile, renamedFile) + renamedFile + } + .value, + // ======== + Universal / mappings ++= directory(dist.base / "bin"), + Universal / mappings ++= directory(republishRepo.value / "maven2"), + Universal / mappings ++= directory(republishRepo.value / "lib"), + Universal / mappings ++= directory(republishRepo.value / "libexec"), + Universal / mappings += (republishRepo.value / "VERSION") -> "VERSION", + // ======== + republishCommandLibs += ("scala" -> List("scala3-interfaces", "scala3-compiler", "scala3-library", "tasty-core")), + republishCommandLibs += ("with_compiler" -> List("scala3-staging", "scala3-tasty-inspector", "^!scala3-interfaces", "^!scala3-compiler", "^!scala3-library", "^!tasty-core")), + republishCommandLibs += ("scaladoc" -> List("scala3-interfaces", "scala3-compiler", "scala3-library", "tasty-core", "scala3-tasty-inspector", "scaladoc")), ) lazy val dist = project.asDist(Bootstrapped) + .settings(packageName := "scala3-" + dottyVersion) .settings( - packArchiveName := "scala3-" + dottyVersion, - republishBinDir := baseDirectory.value / "bin", + republishLibexecDir := baseDirectory.value / "libexec", republishCoursier += ("coursier.jar" -> 
s"https://github.com/coursier/coursier/releases/download/v$coursierJarVersion/coursier.jar"), republishLaunchers += @@ -2152,52 +2261,67 @@ object Build { ) lazy val `dist-mac-x86_64` = project.in(file("dist/mac-x86_64")).asDist(Bootstrapped) + .settings(packageName := (dist / packageName).value + "-x86_64-apple-darwin") .settings( - republishBinDir := (dist / republishBinDir).value, - packArchiveName := (dist / packArchiveName).value + "-x86_64-apple-darwin", - republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", + republishLibexecDir := (dist / republishLibexecDir).value, + republishLibexecOverrides += (dist / baseDirectory).value / "libexec-native-overrides", republishFetchCoursier := (dist / republishFetchCoursier).value, republishLaunchers += ("scala-cli" -> s"gz+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersion/scala-cli-x86_64-apple-darwin.gz") ) lazy val `dist-mac-aarch64` = project.in(file("dist/mac-aarch64")).asDist(Bootstrapped) + .settings(packageName := (dist / packageName).value + "-aarch64-apple-darwin") .settings( - republishBinDir := (dist / republishBinDir).value, - packArchiveName := (dist / packArchiveName).value + "-aarch64-apple-darwin", - republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", + republishLibexecDir := (dist / republishLibexecDir).value, + republishLibexecOverrides += (dist / baseDirectory).value / "libexec-native-overrides", republishFetchCoursier := (dist / republishFetchCoursier).value, republishLaunchers += ("scala-cli" -> s"gz+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersion/scala-cli-aarch64-apple-darwin.gz") ) lazy val `dist-win-x86_64` = project.in(file("dist/win-x86_64")).asDist(Bootstrapped) + .enablePlugins(WindowsPlugin) // TO GENERATE THE `.msi` installer + .settings(packageName := (dist / packageName).value + "-x86_64-pc-win32") .settings( - republishBinDir := (dist / republishBinDir).value, - packArchiveName := (dist / packArchiveName).value + "-x86_64-pc-win32", - republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", + republishLibexecDir := (dist / republishLibexecDir).value, + republishLibexecOverrides += (dist / baseDirectory).value / "libexec-native-overrides", republishFetchCoursier := (dist / republishFetchCoursier).value, - republishExtraProps += ("cli_version" -> scalaCliLauncherVersion), - mappings += (republishRepo.value / "EXTRA_PROPERTIES" -> "EXTRA_PROPERTIES"), republishLaunchers += - ("scala-cli.exe" -> s"zip+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersionWindows/scala-cli-x86_64-pc-win32.zip!/scala-cli.exe") + ("scala-cli.exe" -> s"zip+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersion/scala-cli-x86_64-pc-win32.zip!/scala-cli.exe") + ) + .settings( + Windows / name := "scala", + // Windows/version is used to create ProductInfo - it requires a version without any -RC suffixes + // If not explicitly overriden it would try to use `dottyVersion` assigned to `dist-win-x86_64/version` + Windows / version := developedVersion, + Windows / mappings := (Universal / mappings).value, + Windows / packageBin := (Windows / packageBin).dependsOn(republish).value, + Windows / wixFiles := (Windows / wixFiles).dependsOn(republish).value, + // Additional information: https://wixtoolset.org/docs/schema/wxs/package/ + maintainer := "The Scala Programming Language", // The displayed maintainer of the package + packageSummary := s"Scala 
$dottyVersion", // The displayed name of the package + packageDescription := """Installer for the Scala Programming Language""", // The displayed description of the package + wixProductId := "*", // Unique ID for each generated MSI; will change for each generated msi + wixProductUpgradeId := "3E5A1A82-CA67-4353-94FE-5BDD400AF66B", // Unique ID to identify the package; used to manage the upgrades + wixProductLicense := Some(dist.base / "LICENSE.rtf") // Link to the LICENSE to show during the installation (keep in sync with ../LICENSE) ) lazy val `dist-linux-x86_64` = project.in(file("dist/linux-x86_64")).asDist(Bootstrapped) + .settings(packageName := (dist / packageName).value + "-x86_64-pc-linux") .settings( - republishBinDir := (dist / republishBinDir).value, - packArchiveName := (dist / packArchiveName).value + "-x86_64-pc-linux", - republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", + republishLibexecDir := (dist / republishLibexecDir).value, + republishLibexecOverrides += (dist / baseDirectory).value / "libexec-native-overrides", republishFetchCoursier := (dist / republishFetchCoursier).value, republishLaunchers += ("scala-cli" -> s"gz+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersion/scala-cli-x86_64-pc-linux.gz") ) lazy val `dist-linux-aarch64` = project.in(file("dist/linux-aarch64")).asDist(Bootstrapped) + .settings(packageName := (dist / packageName).value + "-aarch64-pc-linux") .settings( - republishBinDir := (dist / republishBinDir).value, - packArchiveName := (dist / packArchiveName).value + "-aarch64-pc-linux", - republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", + republishLibexecDir := (dist / republishLibexecDir).value, + republishLibexecOverrides += (dist / baseDirectory).value / "libexec-native-overrides", republishFetchCoursier := (dist / republishFetchCoursier).value, republishLaunchers += ("scala-cli" -> s"gz+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersion/scala-cli-aarch64-pc-linux.gz") @@ -2216,9 +2340,9 @@ object Build { // FIXME: we do not aggregate `bin` because its tests delete jars, thus breaking other tests def asDottyRoot(implicit mode: Mode): Project = project.withCommonSettings. - aggregate(`scala3-interfaces`, dottyLibrary, dottyCompiler, tastyCore, `scala3-sbt-bridge`, scala3PresentationCompiler). + aggregate(`scala3-interfaces`, dottyLibrary, dottyCompiler, tastyCore, `scala3-sbt-bridge`). bootstrappedAggregate(`scala2-library-tasty`, `scala2-library-cc-tasty`, `scala3-language-server`, `scala3-staging`, - `scala3-tasty-inspector`, `scala3-library-bootstrappedJS`, scaladoc). + `scala3-tasty-inspector`, `scala3-library-bootstrappedJS`, scaladoc, `scala3-presentation-compiler`). dependsOn(tastyCore). dependsOn(dottyCompiler). dependsOn(dottyLibrary). @@ -2236,6 +2360,11 @@ object Build { // default. addCommandAlias("publishLocal", "scala3-bootstrapped/publishLocal"), repl := (`scala3-compiler-bootstrapped` / repl).value, + buildQuick := { + val _ = (`scala3-compiler` / Compile / compile).value + val cp = (`scala3-compiler` / Compile / fullClasspath).value.map(_.data.getAbsolutePath).mkString(File.pathSeparator) + IO.write(baseDirectory.value / "bin" / ".cp", cp) + }, (Compile / console) := (Compile / console).dependsOn(Def.task { import _root_.scala.io.AnsiColor._ val msg = "`console` uses the reference Scala version. Use `repl` instead." @@ -2323,13 +2452,8 @@ object Build { settings(commonBenchmarkSettings). 
enablePlugins(JmhPlugin) - def asScala3PresentationCompiler(implicit mode: Mode): Project = project.withCommonSettings. - dependsOn(dottyCompiler, dottyLibrary). - settings(presentationCompilerSettings). - settings(scala3PresentationCompilerBuildInfo) - def asDist(implicit mode: Mode): Project = project. - enablePlugins(PackPlugin, RepublishPlugin). + enablePlugins(UniversalPlugin, RepublishPlugin). withCommonSettings. settings(commonDistSettings). dependsOn( @@ -2425,7 +2549,7 @@ object ScaladocConfigs { } lazy val DefaultGenerationConfig = Def.task { - def distLocation = (dist / Compile / pack).value + def distLocation = (dist / Universal / stage).value DefaultGenerationSettings.value } diff --git a/project/DocumentationWebsite.scala b/project/DocumentationWebsite.scala index 5f8e499af62f..5b05168b7f27 100644 --- a/project/DocumentationWebsite.scala +++ b/project/DocumentationWebsite.scala @@ -43,7 +43,7 @@ object DocumentationWebsite { import _root_.scala.concurrent._ import _root_.scala.concurrent.duration.Duration import ExecutionContext.Implicits.global - val inkuireVersion = "v1.0.0-M7" + val inkuireVersion = "v1.0.0-M9" val inkuireLink = s"https://github.com/VirtusLab/Inkuire/releases/download/$inkuireVersion/inkuire.js" val inkuireDestinationFile = baseDest / "dotty_res" / "scripts" / "inkuire.js" sbt.IO.touch(inkuireDestinationFile) diff --git a/project/GithubEnv.scala b/project/GithubEnv.scala new file mode 100644 index 000000000000..7e629d53f3a7 --- /dev/null +++ b/project/GithubEnv.scala @@ -0,0 +1,30 @@ +import scala.util.Properties +import sbt.url +import java.net.URL + + +// https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/variables#default-environment-variables +object GithubEnv { + lazy val repositoryVar: Option[(String, String)] = envVar("GITHUB_REPOSITORY") + lazy val runIdVar: Option[(String, String)] = envVar("GITHUB_RUN_ID") + lazy val shaVar: Option[(String, String)] = envVar("GITHUB_SHA") + lazy val workflowVar: Option[(String, String)] = envVar("GITHUB_WORKFLOW") + + lazy val runUrl: Option[(String, URL)] = + for { + (_, repository) <- repositoryVar + (_, runId) <- runIdVar + } yield "GitHub Run" -> url(s"https://github.com/$repository/actions/runs/$runId") + lazy val treeUrl: Option[(String, URL)] = + for { + (_, repository) <- repositoryVar + (_, sha) <- shaVar + } yield "GitHub Commit" -> url(s"https://github.com/$repository/tree/$sha") + + + def develocityValues: Seq[(String, String)] = repositoryVar.toSeq ++ shaVar ++ workflowVar + def develocityLinks: Seq[(String, URL)] = runUrl.toSeq ++ treeUrl + + private def envVar(key: String): Option[(String, String)] = + Properties.envOrNone(key).map(key -> _) +} diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index 18d2e985f844..00e7153bcb83 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -8,24 +8,19 @@ object MiMaFilters { val ForwardsBreakingChanges: Map[String, Seq[ProblemFilter]] = Map( // Additions that require a new minor version of the library Build.mimaPreviousDottyVersion -> Seq( - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.annotation.experimental.this"), - ProblemFilters.exclude[FinalClassProblem]("scala.annotation.experimental"), - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.Tuple.fromArray"), - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.Tuple.fromIArray"), - ProblemFilters.exclude[MissingFieldProblem]("scala.Tuple.helpers"), - 
ProblemFilters.exclude[MissingClassProblem]("scala.Tuple$helpers$"), - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.runtime.Tuples.fromArray"), - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.runtime.Tuples.fromIArray"), - ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.namedTuples"), - ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$namedTuples$"), - ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.modularity"), - ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$modularity$"), - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.compiletime.package#package.deferred"), - ProblemFilters.exclude[MissingClassProblem]("scala.annotation.internal.WitnessNames"), + ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.betterFors"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$betterFors$"), + ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.quotedPatternsWithPolymorphicFunctions"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$quotedPatternsWithPolymorphicFunctions$"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.runtime.Patterns.higherOrderHoleWithTypes"), ), // Additions since last LTS Build.mimaPreviousLTSDottyVersion -> Seq( + ProblemFilters.exclude[MissingClassProblem]("scala.NamedTuple"), + ProblemFilters.exclude[MissingClassProblem]("scala.NamedTuple$"), + ProblemFilters.exclude[MissingClassProblem]("scala.NamedTupleDecomposition"), + ProblemFilters.exclude[MissingClassProblem]("scala.NamedTupleDecomposition$"), ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.Quotes#reflectModule.ValOrDefDefMethods"), ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.Quotes#reflectModule.ValOrDefDefTypeTest"), ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.Quotes#reflectModule#defnModule.FunctionClass"), @@ -53,6 +48,34 @@ object MiMaFilters { ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language.3.5"), ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.clauseInterleaving"), ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.relaxedExtensionImports"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$3$u002E6$minusmigration$"), + ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language.3.6-migration"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$3$u002E6$"), + ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language.3.6"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.annotation.experimental.this"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.compiletime.package#package.deferred"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.Quotes#reflectModule.MethodTypeKind"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.Quotes#reflectModule.FlexibleTypeTypeTest"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.Quotes#reflectModule.FlexibleType"), + 
ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.Quotes#reflectModule.FlexibleTypeMethods"), + ProblemFilters.exclude[MissingClassProblem]("scala.quoted.Quotes$reflectModule$FlexibleTypeMethods"), + ProblemFilters.exclude[MissingClassProblem]("scala.quoted.Quotes$reflectModule$FlexibleTypeModule"), + ProblemFilters.exclude[MissingClassProblem]("scala.quoted.Quotes$reflectModule$MethodTypeKind"), + ProblemFilters.exclude[MissingClassProblem]("scala.quoted.Quotes$reflectModule$MethodTypeKind$"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.Quotes#reflectModule#MethodTypeMethods.isContextual"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.Quotes#reflectModule#MethodTypeMethods.methodTypeKind"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.Quotes#reflectModule#MethodTypeModule.apply"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.Quotes#reflectModule#SymbolMethods.isSuperAccessor"), + ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.namedTuples"), + ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.modularity"), + ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.betterMatchTypeExtractors"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$betterMatchTypeExtractors$"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$modularity$"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$namedTuples$"), + ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language.3.7-migration"), + ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language.3.7"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$3$u002E7$"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$3$u002E7$minusmigration$"), ), ) diff --git a/project/NoBloopExport.scala b/project/NoBloopExport.scala deleted file mode 100644 index 7a088a405781..000000000000 --- a/project/NoBloopExport.scala +++ /dev/null @@ -1,31 +0,0 @@ -import sbt._ -import Keys._ - -/* With <3 from scala-js */ -object NoBloopExport { - private lazy val bloopGenerateKey: Option[TaskKey[Result[Option[File]]]] = { - val optBloopKeysClass: Option[Class[_]] = try { - Some(Class.forName("bloop.integrations.sbt.BloopKeys")) - } catch { - case _: ClassNotFoundException => None - } - - optBloopKeysClass.map { bloopKeysClass => - val bloopGenerateGetter = bloopKeysClass.getMethod("bloopGenerate") - bloopGenerateGetter.invoke(null).asInstanceOf[TaskKey[Result[Option[File]]]] - } - } - - /** Settings to prevent the project from being exported to IDEs. 
*/ - lazy val settings: Seq[Setting[_]] = { - bloopGenerateKey match { - case None => - Nil - case Some(key) => - Seq( - Compile / key := Value(None), - Test / key := Value(None), - ) - } - } -} diff --git a/project/RepublishPlugin.scala b/project/RepublishPlugin.scala index 5611af798b33..f1faeca3d9cd 100644 --- a/project/RepublishPlugin.scala +++ b/project/RepublishPlugin.scala @@ -1,22 +1,18 @@ package dotty.tools.sbtplugin -import sbt._ -import xerial.sbt.pack.PackPlugin -import xerial.sbt.pack.PackPlugin.autoImport.{packResourceDir, packDir} -import sbt.Keys._ +import com.typesafe.sbt.packager.universal.UniversalPlugin +import sbt.* +import sbt.Keys.* import sbt.AutoPlugin import sbt.PublishBinPlugin -import sbt.PublishBinPlugin.autoImport._ +import sbt.PublishBinPlugin.autoImport.* import sbt.io.Using -import sbt.util.CacheImplicits._ +import sbt.util.CacheImplicits.* -import scala.collection.mutable import java.nio.file.Files - import java.nio.file.attribute.PosixFilePermission -import java.nio.file.{Files, Path} - -import scala.jdk.CollectionConverters._ +import java.nio.file.Path +import scala.jdk.CollectionConverters.* /** This local plugin provides ways of publishing a project classpath and library dependencies to * .a local repository */ @@ -53,7 +49,7 @@ object RepublishPlugin extends AutoPlugin { } override def trigger = allRequirements - override def requires = super.requires && PublishBinPlugin && PackPlugin + override def requires = super.requires && PublishBinPlugin && UniversalPlugin object autoImport { val republishProjectRefs = taskKey[Seq[ProjectRef]]("fetch the classpath deps from the project.") @@ -64,9 +60,9 @@ object RepublishPlugin extends AutoPlugin { val republishFetchCoursier = taskKey[File]("cache the coursier.jar for resolving the local maven repo.") val republishPrepareBin = taskKey[File]("prepare the bin directory, including launchers and scripts.") val republishWriteExtraProps = taskKey[Option[File]]("write extra properties for the launchers.") - val republishBinDir = settingKey[File]("where to find static files for the bin dir.") + val republishLibexecDir = settingKey[File]("where to find static files for the `libexec` dir.") val republishCoursierDir = settingKey[File]("where to download the coursier launcher jar.") - val republishBinOverrides = settingKey[Seq[File]]("files to override those in bin-dir.") + val republishLibexecOverrides = settingKey[Seq[File]]("files to override those in libexec-dir.") val republishCommandLibs = settingKey[Seq[(String, List[String])]]("libraries needed for each command.") val republish = taskKey[File]("cache the dependencies and download launchers for the distribution") val republishPack = taskKey[File]("do the pack command") @@ -346,11 +342,70 @@ object RepublishPlugin extends AutoPlugin { allLaunchers.toSet } + private def generateVersionFile() = Def.task[Unit] { + import scala.util.Try + import java.time.format.DateTimeFormatterBuilder + import java.time.format.SignStyle + import java.time.temporal.ChronoField.* + import java.time.ZoneId + import java.time.Instant + import java.time.ZonedDateTime + import java.time.ZonedDateTime + import java.util.Locale + import java.util.Date + + val base: File = new File(".") // Using the working directory as base for readability + val s = streams.value + val log = s.log + val progVersion = version.value + val distDir = republishRepo.value + + def write(path: String, content: String) { + val p = distDir / path + IO.write(p, content) + } + + val humanReadableTimestampFormatter = new 
DateTimeFormatterBuilder() + .parseCaseInsensitive() + .appendValue(YEAR, 4, 10, SignStyle.EXCEEDS_PAD) + .appendLiteral('-') + .appendValue(MONTH_OF_YEAR, 2) + .appendLiteral('-') + .appendValue(DAY_OF_MONTH, 2) + .appendLiteral(' ') + .appendValue(HOUR_OF_DAY, 2) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 2) + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 2) + .appendOffset("+HHMM", "Z") + .toFormatter(Locale.US) + + // Retrieve build time + val systemZone = ZoneId.systemDefault().normalized() + val timestamp = ZonedDateTime.ofInstant(Instant.ofEpochMilli(new Date().getTime), systemZone) + val buildTime = humanReadableTimestampFormatter.format(timestamp) + + // Check the current Git revision + val gitRevision: String = Try { + if ((base / ".git").exists()) { + log.info("[republish] Checking the git revision of the current project") + sys.process.Process("git rev-parse HEAD").!! + } else { + "unknown" + } + }.getOrElse("unknown").trim + + + // Output the version number and Git revision + write("VERSION", s"version:=${progVersion}\nrevision:=${gitRevision}\nbuildTime:=${buildTime}\n") + } + override val projectSettings: Seq[Def.Setting[_]] = Def.settings( republishCoursierDir := republishRepo.value / "coursier", republishLaunchers := Seq.empty, republishCoursier := Seq.empty, - republishBinOverrides := Seq.empty, + republishLibexecOverrides := Seq.empty, republishExtraProps := Seq.empty, republishCommandLibs := Seq.empty, republishLocalResolved / republishProjectRefs := { @@ -434,16 +489,14 @@ object RepublishPlugin extends AutoPlugin { }, republishPrepareBin := { val baseDir = baseDirectory.value - val srcBin = republishBinDir.value - val overrides = republishBinOverrides.value + val srcLibexec = republishLibexecDir.value + val overrides = republishLibexecOverrides.value val repoDir = republishRepo.value - val targetBin = repoDir / "bin" - IO.copyDirectory(srcBin, targetBin) - overrides.foreach { dir => - IO.copyDirectory(dir, targetBin, overwrite = true) - } - targetBin + val targetLibexec = repoDir / "libexec" + IO.copyDirectory(srcLibexec, targetLibexec) + overrides.foreach(IO.copyDirectory(_, targetLibexec, overwrite = true)) + targetLibexec }, republishWriteExtraProps := { val s = streams.value @@ -470,88 +523,8 @@ object RepublishPlugin extends AutoPlugin { val artifacts = republishClasspath.value val launchers = republishFetchLaunchers.value val extraProps = republishWriteExtraProps.value + val versionFile = generateVersionFile().value cacheDir }, - republishPack := { - val cacheDir = republish.value - val s = streams.value - val log = s.log - val distDir = target.value / packDir.value - val progVersion = version.value - - IO.createDirectory(distDir) - for ((path, dir) <- packResourceDir.value) { - val target = distDir / dir - IO.copyDirectory(path, target) - } - - locally { - // everything in this block is copied from sbt-pack plugin - import scala.util.Try - import java.time.format.DateTimeFormatterBuilder - import java.time.format.SignStyle - import java.time.temporal.ChronoField.* - import java.time.ZoneId - import java.time.Instant - import java.time.ZonedDateTime - import java.time.ZonedDateTime - import java.util.Locale - import java.util.Date - val base: File = new File(".") // Using the working directory as base for readability - - // Copy explicitly added dependencies - val mapped: Seq[(File, String)] = mappings.value - log.info("[republish] Copying explicit dependencies:") - val explicitDepsJars = for ((file, path) <- mapped) yield { - 
log.info(file.getPath) - val dest = distDir / path - IO.copyFile(file, dest, true) - dest - } - - def write(path: String, content: String) { - val p = distDir / path - IO.write(p, content) - } - - val humanReadableTimestampFormatter = new DateTimeFormatterBuilder() - .parseCaseInsensitive() - .appendValue(YEAR, 4, 10, SignStyle.EXCEEDS_PAD) - .appendLiteral('-') - .appendValue(MONTH_OF_YEAR, 2) - .appendLiteral('-') - .appendValue(DAY_OF_MONTH, 2) - .appendLiteral(' ') - .appendValue(HOUR_OF_DAY, 2) - .appendLiteral(':') - .appendValue(MINUTE_OF_HOUR, 2) - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 2) - .appendOffset("+HHMM", "Z") - .toFormatter(Locale.US) - - // Retrieve build time - val systemZone = ZoneId.systemDefault().normalized() - val timestamp = ZonedDateTime.ofInstant(Instant.ofEpochMilli(new Date().getTime), systemZone) - val buildTime = humanReadableTimestampFormatter.format(timestamp) - - // Check the current Git revision - val gitRevision: String = Try { - if ((base / ".git").exists()) { - log.info("[republish] Checking the git revision of the current project") - sys.process.Process("git rev-parse HEAD").!! - } else { - "unknown" - } - }.getOrElse("unknown").trim - - - // Output the version number and Git revision - write("VERSION", s"version:=${progVersion}\nrevision:=${gitRevision}\nbuildTime:=${buildTime}\n") - } - - - distDir - } ) } diff --git a/project/plugins.sbt b/project/plugins.sbt index 59e58007a4a0..21d8826b6b24 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -12,8 +12,6 @@ addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.21") addSbtPlugin("com.github.sbt" % "sbt-pgp" % "2.2.1") -addSbtPlugin("org.xerial.sbt" % "sbt-pack" % "0.17") - addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.5") addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.11.0") @@ -21,3 +19,7 @@ addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.11.0") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.2") addSbtPlugin("ch.epfl.scala" % "sbt-tasty-mima" % "1.0.0") + +addSbtPlugin("com.github.sbt" % "sbt-native-packager" % "1.10.0") + +addSbtPlugin("com.gradle" % "sbt-develocity" % "1.1.1") diff --git a/project/resources/referenceReplacements/sidebar.yml b/project/resources/referenceReplacements/sidebar.yml index 240085b681f2..2e84b0b5e433 100644 --- a/project/resources/referenceReplacements/sidebar.yml +++ b/project/resources/referenceReplacements/sidebar.yml @@ -28,6 +28,9 @@ subsection: directory: contextual subsection: - page: reference/contextual/givens.md + - page: reference/contextual/deferred-givens.md + - page: reference/contextual/more-givens.md + - page: reference/contextual/previous-givens.md - page: reference/contextual/using-clauses.md - page: reference/contextual/context-bounds.md - page: reference/contextual/given-imports.md diff --git a/project/scripts/addToBackportingProject.scala b/project/scripts/addToBackportingProject.scala index 2c1929972791..0ef2ea553a74 100644 --- a/project/scripts/addToBackportingProject.scala +++ b/project/scripts/addToBackportingProject.scala @@ -1,6 +1,6 @@ -//> using scala 3.3.1 -//> using toolkit 0.2.1 -//> using lib pro.kordyjan::pytanie:0.1.7 +//> using scala 3.lts +//> using toolkit 0.4.0 +//> using lib pro.kordyjan::pytanie:0.1.9 import pytanie.* import sttp.client4.* @@ -10,8 +10,29 @@ lazy val apiToken = case class ID(value: String) derives WrapperVariable -val PROJECT_ID = ID("PVT_kwDOACj3ec4AWSoi") -val FIELD_ID = ID("PVTF_lADOACj3ec4AWSoizgO7uJ4") +// Obtained with: +// query { +// organization(login: 
"scala") { +// projectV2(number: 2) { +// id +// } +// } +// } +val PROJECT_ID = ID("PVT_kwDN3uPOAHewkg") + +// Obtained with: +// query { +// organization(login: "scala") { +// projectV2(number: 2) { +// field(name: "Merged at") { +// ... on ProjectV2FieldCommon { +// id +// } +// } +// } +// } +// } +val FIELD_ID = ID("PVTF_lADN3uPOAHewks4E3B1I") @main def run(commitSha: String) = val (id, date) = getPrData(commitSha) diff --git a/project/scripts/bootstrappedOnlyCmdTests b/project/scripts/bootstrappedOnlyCmdTests index 11c35a7028cc..6f5c75ceb922 100755 --- a/project/scripts/bootstrappedOnlyCmdTests +++ b/project/scripts/bootstrappedOnlyCmdTests @@ -15,13 +15,13 @@ echo "testing scala.quoted.Expr.run from sbt scala" grep -qe "val a: scala.Int = 3" "$tmp" # setup for `scalac`/`scala` script tests -"$SBT" "$DIST_PROJECT/pack" +"$SBT" "$DIST_PROJECT/Universal/stage" -echo "capturing scala version from $DIST_DIR/target/pack/VERSION" -IFS=':=' read -ra versionProps < "$ROOT/$DIST_DIR/target/pack/VERSION" # temporarily set IFS to ':=' to split versionProps +echo "capturing scala version from $DIST_DIR/target/universal/stage/VERSION" +IFS=':=' read -ra versionProps < "$ROOT/$DIST_DIR/target/universal/stage/VERSION" # temporarily set IFS to ':=' to split versionProps [ ${#versionProps[@]} -eq 3 ] && \ [ ${versionProps[0]} = "version" ] && \ - [ -n ${versionProps[2]} ] || die "Expected non-empty 'version' property in $ROOT/$DIST_DIR/target/pack/VERSION" + [ -n ${versionProps[2]} ] || die "Expected non-empty 'version' property in $ROOT/$DIST_DIR/target/universal/stage/VERSION" scala_version=${versionProps[2]} # check that `scalac` compiles and `scala` runs it @@ -77,7 +77,7 @@ echo "testing sbt scalac with suspension" clear_out "$OUT" "$SBT" "scala3-compiler-bootstrapped/scalac -d $OUT tests/pos-macros/macros-in-same-project-1/Bar.scala tests/pos-macros/macros-in-same-project-1/Foo.scala" > "$tmp" -# echo ":quit" | ./$DIST_DIR/target/pack/bin/scala # not supported by CI +# echo ":quit" | ./$DIST_DIR/target/universal/stage/bin/scala # not supported by CI echo "testing ./bin/scaladoc" clear_out "$OUT1" diff --git a/project/scripts/buildScalaBinary b/project/scripts/buildScalaBinary index 7fc5275e5d8d..9451dbdd2a07 100755 --- a/project/scripts/buildScalaBinary +++ b/project/scripts/buildScalaBinary @@ -9,4 +9,4 @@ SBT="$ROOT/project/scripts/sbt" # if run on CI source "$ROOT/bin/common-platform" # build the scala/scalac/scaladoc binary, where scala is native for the current platform. 
-"$SBT" "$DIST_PROJECT/pack" +"$SBT" "$DIST_PROJECT/Universal/stage" diff --git a/project/scripts/check-cla.sh b/project/scripts/check-cla.sh index e4e489830f11..dbb148d3c652 100755 --- a/project/scripts/check-cla.sh +++ b/project/scripts/check-cla.sh @@ -5,16 +5,16 @@ echo "Pull request submitted by $AUTHOR"; if [[ "$AUTHOR" == "github-actions[bot]" || "$AUTHOR" == "dependabot[bot]" ]] ; then echo "CLA check for $AUTHOR successful"; else - signed=$(curl -s "https://www.lightbend.com/contribute/cla/scala/check/$AUTHOR" | jq -r ".signed"); + signed=$(curl -L -s "https://contribute.akka.io/contribute/cla/scala/check/$AUTHOR" | jq -r ".signed"); if [ "$signed" = "true" ] ; then echo "CLA check for $AUTHOR successful"; else echo "CLA check for $AUTHOR failed"; echo "Please sign the Scala CLA to contribute to the Scala compiler."; - echo "Go to https://www.lightbend.com/contribute/cla/scala and then"; + echo "Go to https://contribute.akka.io/contribute/cla/scala and then"; echo "comment on the pull request to ask for a new check."; echo ""; - echo "Check if CLA is signed: https://www.lightbend.com/contribute/cla/scala/check/$AUTHOR"; + echo "Check if CLA is signed: https://contribute.akka.io/contribute/cla/scala/check/$AUTHOR"; exit 1; fi; fi; diff --git a/project/scripts/cmdScaladocTests b/project/scripts/cmdScaladocTests index 06353af693f1..b54789032ad2 100755 --- a/project/scripts/cmdScaladocTests +++ b/project/scripts/cmdScaladocTests @@ -20,7 +20,7 @@ SOURCE_LINKS_REPOSITORY="scala/scala3" SOURCE_LINKS_VERSION="${GITHUB_SHA:-$DOTTY_BOOTSTRAPPED_VERSION}" "$SBT" "scaladoc/generateTestcasesDocumentation" > "$tmp" 2>&1 || echo "generated testcases project with sbt" -dist/target/pack/bin/scaladoc \ +dist/target/universal/stage/bin/scaladoc \ -d "$OUT1" \ -project "scaladoc testcases" \ -source-links:out/bootstrap/scala2-library-bootstrapped/scala-"${DOTTY_NONBOOTSTRAPPED_VERSION}"/src_managed/main/scala-library-src=github://scala/scala/v"${STDLIB_VERSION}"#src/library \ diff --git a/project/scripts/cmdTests b/project/scripts/cmdTests index 453590084b00..1fdf96d53fdd 100755 --- a/project/scripts/cmdTests +++ b/project/scripts/cmdTests @@ -55,7 +55,9 @@ cp tests/neg-macros/i6371/A_1.scala $OUT/A.scala cp tests/neg-macros/i6371/B_2.scala $OUT/B.scala "$SBT" "scalac $OUT/A.scala -d $OUT1" rm $OUT/A.scala -"$SBT" "scalac -classpath $OUT1 -d $OUT1 $OUT/B.scala" > "$tmp" 2>&1 || echo "ok" +# this command is expected to fail +# setting -Dscan=false disables publishing scans to develocity.scala-lang.org +"$SBT" "scalac -classpath $OUT1 -d $OUT1 $OUT/B.scala" -Dscan=false > "$tmp" 2>&1 || echo "ok" # cat "$tmp" # for debugging grep -qe "B.scala:2:7" "$tmp" grep -qe "This location contains code that was inlined from A.scala:3" "$tmp" diff --git a/project/scripts/expected-links/reference-expected-links.txt b/project/scripts/expected-links/reference-expected-links.txt index 59add1da0153..8be7dba8d4d0 100644 --- a/project/scripts/expected-links/reference-expected-links.txt +++ b/project/scripts/expected-links/reference-expected-links.txt @@ -27,13 +27,16 @@ ./contextual/context-functions-spec.html ./contextual/context-functions.html ./contextual/conversions.html +./contextual/deferred-givens.html ./contextual/derivation-macro.html ./contextual/derivation.html ./contextual/extension-methods.html ./contextual/given-imports.html ./contextual/givens.html ./contextual/index.html +./contextual/more-givens.html ./contextual/multiversal-equality.html +./contextual/previous-givens.html 
./contextual/relationship-implicits.html ./contextual/right-associative-extension-methods.html ./contextual/type-classes.html diff --git a/project/scripts/genDocs b/project/scripts/genDocs index aa061d59b613..9849dac91722 100755 --- a/project/scripts/genDocs +++ b/project/scripts/genDocs @@ -5,7 +5,7 @@ shopt -s extglob # needed for rm everything but x echo "Working directory: $PWD" GENDOC_EXTRA_ARGS=$@ -GIT_HEAD=$(git rev-parse HEAD) # save current head for commit message in gh-pages +GIT_HEAD=$(git rev-parse HEAD) # save current head for commit message in scala/dotty.epfl.ch PREVIOUS_SNAPSHOTS_DIR="$PWD/../prev_snapshots" SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >& /dev/null && pwd)" SITE_OUT_DIR="$PWD/docs/_site" @@ -16,9 +16,9 @@ if [ -d "$PREVIOUS_SNAPSHOTS_DIR" ]; then fi mkdir -pv "$PREVIOUS_SNAPSHOTS_DIR" -git remote add doc-remote "https://github.com/lampepfl/dotty-website.git" -git fetch doc-remote gh-pages -git checkout gh-pages +git remote add doc-remote "https://github.com/scala/dotty.epfl.ch.git" +git fetch doc-remote main +git checkout doc-remote/main (cp -vr [03].*/ "$PREVIOUS_SNAPSHOTS_DIR"; true) # Don't fail if no `3.*` found to copy git checkout "$GIT_HEAD" diff --git a/project/scripts/native-integration/bashTests b/project/scripts/native-integration/bashTests index 5fb77355238c..c71e81ac183b 100755 --- a/project/scripts/native-integration/bashTests +++ b/project/scripts/native-integration/bashTests @@ -19,7 +19,7 @@ die () { exit 1 } -PROG_HOME="$DIST_DIR/target/pack" +PROG_HOME="$DIST_DIR/target/universal/stage" SOURCE="$ROOT/tests/pos/HelloWorld.scala" SOURCE_VERSION="$ROOT/project/scripts/native-integration/reportScalaVersion.scala" @@ -42,7 +42,7 @@ clear_cli_dotfiles() # *---------------*/ # build the distribution -"$SBT" "$DIST_PROJECT/pack" +"$SBT" "$DIST_PROJECT/Universal/stage" SCALA_VERSION="" # iterate through lines in VERSION_SRC diff --git a/project/scripts/native-integration/winTests.bat b/project/scripts/native-integration/winTests.bat index a85b2c8c2531..18e406423ebd 100755 --- a/project/scripts/native-integration/winTests.bat +++ b/project/scripts/native-integration/winTests.bat @@ -2,7 +2,7 @@ setlocal @rem paths are relative to the root project directory -set "_PREFIX=dist\win-x86_64\target\pack" +set "_PREFIX=dist\win-x86_64\target\universal\stage" set "_SOURCE=tests\pos\HelloWorld.scala" set "_OUT_DIR=out" diff --git a/project/scripts/winCmdTests b/project/scripts/winCmdTests index fe6a43c7f68f..dbdaed218558 100644 --- a/project/scripts/winCmdTests +++ b/project/scripts/winCmdTests @@ -1,7 +1,7 @@ #!/usr/bin/env bash set -e -PREFIX="dist/win-x86_64/target/pack" +PREFIX="dist/win-x86_64/target/universal/stage" SOURCE="tests/pos/HelloWorld.scala" $PREFIX/bin/scalac @project/scripts/options "$SOURCE" $PREFIX/bin/scalac -d out "$SOURCE" diff --git a/project/scripts/winCmdTests.bat b/project/scripts/winCmdTests.bat index 903f74d7ab98..097c05839205 100644 --- a/project/scripts/winCmdTests.bat +++ b/project/scripts/winCmdTests.bat @@ -2,7 +2,7 @@ setlocal @rem paths are relative to the root project directory -set "_PREFIX=dist\win-x86_64\target\pack" +set "_PREFIX=dist\win-x86_64\target\universal\stage" set "_SOURCE=tests\pos\HelloWorld.scala" set "_OUT_DIR=out" set "_SITE_DIR=_site" diff --git a/sbt-bridge/test/xsbt/CompileProgressSpecification.scala b/sbt-bridge/test/xsbt/CompileProgressSpecification.scala index bcdac0547e75..dc3956ada0db 100644 --- a/sbt-bridge/test/xsbt/CompileProgressSpecification.scala +++ 
b/sbt-bridge/test/xsbt/CompileProgressSpecification.scala @@ -66,7 +66,6 @@ class CompileProgressSpecification { "MegaPhase{pruneErasedDefs,...,arrayConstructors}", "erasure", "constructors", - "genSJSIR", "genBCode" ) val missingExpectedPhases = someExpectedPhases -- allPhases.toSet diff --git a/sbt-bridge/test/xsbt/ProductsSpecification.scala b/sbt-bridge/test/xsbt/ProductsSpecification.scala index adee351b5289..f268818f2d8b 100644 --- a/sbt-bridge/test/xsbt/ProductsSpecification.scala +++ b/sbt-bridge/test/xsbt/ProductsSpecification.scala @@ -10,6 +10,26 @@ import java.nio.file.Paths class ProductsSpecification { + @Test + def extractProductsFromJar = { + val src = + """package example + | + |class A { + | class B + | def foo = + | class C + |}""".stripMargin + val output = compiler.compileSrcsToJar(src) + val srcFile = output.srcFiles.head + val products = output.analysis.productClassesToSources.filter(_._2 == srcFile).keys.toSet + + def toPathInJar(className: String): Path = + Paths.get(s"${output.classesOutput}!${className.replace('.', File.separatorChar)}.class") + val expected = Set("example.A", "example.A$B", "example.A$C$1").map(toPathInJar) + assertEquals(products, expected) + } + @Test def extractNonLocalClassesNoInc = { val src = diff --git a/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala b/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala index a5a969ee48b9..400bcd369e27 100644 --- a/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala +++ b/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala @@ -1,26 +1,20 @@ /** Adapted from https://github.com/sbt/sbt/blob/0.13/compile/interface/src/test/scala/xsbt/ScalaCompilerForUnitTesting.scala */ package xsbt -import xsbti.compile.{CompileProgress, SingleOutput} +import dotty.tools.xsbt.CompilerBridge +import sbt.io.IO +import xsbti.* +import xsbti.api.ClassLike +import xsbti.api.DependencyContext.* +import xsbti.compile.SingleOutput + import java.io.File import java.nio.file.Path -import xsbti._ -import sbt.io.IO -import xsbti.api.{ ClassLike, Def, DependencyContext } -import DependencyContext._ -import xsbt.api.SameAPI -import sbt.internal.util.ConsoleLogger -import dotty.tools.io.PlainFile.toPlainFile -import dotty.tools.xsbt.CompilerBridge import TestCallback.ExtractedClassDependencies -import ScalaCompilerForUnitTesting.Callbacks case class CompileOutput(srcFiles: Seq[VirtualFileRef], classesOutput: Path, analysis: TestCallback, progress: TestCompileProgress) -object ScalaCompilerForUnitTesting: - case class Callbacks(analysis: TestCallback, progress: TestCompileProgress) - /** * Provides common functionality needed for unit tests that require compiling * source code using Scala compiler. 
diff --git a/sbt-test/sbt-dotty/scaladoc-regressions/build.sbt b/sbt-test/sbt-dotty/scaladoc-regressions/build.sbt new file mode 100644 index 000000000000..bfdadb5ee038 --- /dev/null +++ b/sbt-test/sbt-dotty/scaladoc-regressions/build.sbt @@ -0,0 +1,9 @@ +ThisBuild / scalaVersion := sys.props("plugin.scalaVersion") + +lazy val i20476 = project + .in(file("i20476")) + .enablePlugins(ScalaJSPlugin) + +lazy val i18231 = project + .in(file("i18231")) + .settings(scalacOptions += "-release:8") diff --git a/sbt-test/sbt-dotty/scaladoc-regressions/i18231/src/main/scala/main.scala b/sbt-test/sbt-dotty/scaladoc-regressions/i18231/src/main/scala/main.scala new file mode 100644 index 000000000000..82788aa829f0 --- /dev/null +++ b/sbt-test/sbt-dotty/scaladoc-regressions/i18231/src/main/scala/main.scala @@ -0,0 +1,4 @@ +object Foo { + @Deprecated + def foo(): Unit = ??? +} diff --git a/sbt-test/sbt-dotty/scaladoc-regressions/i20476/src/main/scala/main.scala b/sbt-test/sbt-dotty/scaladoc-regressions/i20476/src/main/scala/main.scala new file mode 100644 index 000000000000..31eb78c816cd --- /dev/null +++ b/sbt-test/sbt-dotty/scaladoc-regressions/i20476/src/main/scala/main.scala @@ -0,0 +1,5 @@ +package demo + +import scala.scalajs.js + +def bar: js.Promise[Int] = js.Promise.resolve(()).`then`(_ => 1) diff --git a/sbt-test/sbt-dotty/scaladoc-regressions/project/plugins.sbt b/sbt-test/sbt-dotty/scaladoc-regressions/project/plugins.sbt new file mode 100644 index 000000000000..b9ebfd07bf1f --- /dev/null +++ b/sbt-test/sbt-dotty/scaladoc-regressions/project/plugins.sbt @@ -0,0 +1 @@ +addSbtPlugin("org.scala-js" % "sbt-scalajs" % sys.props("plugin.scalaJSVersion")) diff --git a/sbt-test/sbt-dotty/scaladoc-regressions/test b/sbt-test/sbt-dotty/scaladoc-regressions/test new file mode 100644 index 000000000000..816c0be96141 --- /dev/null +++ b/sbt-test/sbt-dotty/scaladoc-regressions/test @@ -0,0 +1,2 @@ +> i18231/doc +> i20476/doc diff --git a/sbt-test/scala2-compat/i19675/UnrelatedDeprecationWarning.scala b/sbt-test/scala2-compat/i19675/UnrelatedDeprecationWarning.scala new file mode 100644 index 000000000000..da7585a5dab7 --- /dev/null +++ b/sbt-test/scala2-compat/i19675/UnrelatedDeprecationWarning.scala @@ -0,0 +1,22 @@ +import com.twitter.finagle.Thrift +import com.twitter.finagle.thrift.ThriftService +import scala.reflect.ClassTag + +class Minim { + trait Foo[A] + + object Foo { + inline def make[A]: Foo[A] = ??? 
+ } + + final class Unrelated() + + object Unrelated { + val foo = Foo.make[Unrelated] + } + + object Main { + def foo[S <: ThriftService](using ClassTag[S]) = + Thrift.client.build[S]("asd") + } +} diff --git a/sbt-test/scala2-compat/i19675/build.sbt b/sbt-test/scala2-compat/i19675/build.sbt new file mode 100644 index 000000000000..819be2d87d58 --- /dev/null +++ b/sbt-test/scala2-compat/i19675/build.sbt @@ -0,0 +1,6 @@ +scalaVersion := sys.props("plugin.scalaVersion") + +scalacOptions ++= Seq("-Wunused:imports", "-deprecation", "-Werror") +libraryDependencies ++= Seq( + "com.twitter" %% "finagle-thrift" % "24.2.0" +).map(_.cross(CrossVersion.for3Use2_13)) diff --git a/sbt-test/scala2-compat/i19675/test b/sbt-test/scala2-compat/i19675/test new file mode 100644 index 000000000000..73a68203f3f1 --- /dev/null +++ b/sbt-test/scala2-compat/i19675/test @@ -0,0 +1 @@ +> compile \ No newline at end of file diff --git a/sbt-test/scala3-compat/java-annotations-3.4/app/Main.scala b/sbt-test/scala3-compat/java-annotations-3.4/app/Main.scala new file mode 100644 index 000000000000..41ca1fadf011 --- /dev/null +++ b/sbt-test/scala3-compat/java-annotations-3.4/app/Main.scala @@ -0,0 +1,21 @@ +object Test: + def main(args: Array[String]): Unit = + val actual = listAnnots("ScalaUser") + val expected = List( + "new JavaAnnot(a = 5, b = _, c = _)", + "new JavaAnnot(a = 5, b = _, c = _)", + "new JavaAnnot(a = 5, b = \"foo\", c = _)", + "new JavaAnnot(a = 5, b = \"foo\", c = 3)", + "new JavaAnnot(a = 5, b = _, c = 3)", + "new JavaAnnot(a = 5, b = \"foo\", c = 3)", + "new JavaAnnot(a = 5, b = \"foo\", c = 3)", + "new JavaAnnot(a = 5, b = \"foo\", c = _)", + ) + if actual != expected then + println("Expected:") + expected.foreach(println(_)) + println("Actual:") + actual.foreach(println(_)) + throw new AssertionError("test failed") + end main +end Test diff --git a/sbt-test/scala3-compat/java-annotations-3.4/build.sbt b/sbt-test/scala3-compat/java-annotations-3.4/build.sbt new file mode 100644 index 000000000000..67b61a3e9edd --- /dev/null +++ b/sbt-test/scala3-compat/java-annotations-3.4/build.sbt @@ -0,0 +1,7 @@ +lazy val lib = project.in(file("lib")) + .settings( + scalaVersion := "3.4.2" + ) + +lazy val app = project.in(file("app")) + .dependsOn(lib) diff --git a/sbt-test/scala3-compat/java-annotations-3.4/lib/AnnotMacro.scala b/sbt-test/scala3-compat/java-annotations-3.4/lib/AnnotMacro.scala new file mode 100644 index 000000000000..4bf3a238f9c9 --- /dev/null +++ b/sbt-test/scala3-compat/java-annotations-3.4/lib/AnnotMacro.scala @@ -0,0 +1,7 @@ +import scala.quoted.* + +inline def listAnnots(inline c: String): List[String] = ${ listAnnotsImpl('c) } + +def listAnnotsImpl(c: Expr[String])(using Quotes): Expr[List[String]] = + import quotes.reflect.* + Expr(Symbol.requiredClass(c.valueOrError).declaredMethods.flatMap(_.annotations.map(_.show))) diff --git a/sbt-test/scala3-compat/java-annotations-3.4/lib/JavaAnnot.java b/sbt-test/scala3-compat/java-annotations-3.4/lib/JavaAnnot.java new file mode 100644 index 000000000000..9aa3537d4266 --- /dev/null +++ b/sbt-test/scala3-compat/java-annotations-3.4/lib/JavaAnnot.java @@ -0,0 +1,10 @@ + +import java.lang.annotation.*; + +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.METHOD) +@interface JavaAnnot { + int a(); + String b() default "empty"; + int c() default 5; +} diff --git a/sbt-test/scala3-compat/java-annotations-3.4/lib/ScalaUser.scala b/sbt-test/scala3-compat/java-annotations-3.4/lib/ScalaUser.scala new file mode 100644 index 
000000000000..a14a69eae21b --- /dev/null +++ b/sbt-test/scala3-compat/java-annotations-3.4/lib/ScalaUser.scala @@ -0,0 +1,25 @@ +class ScalaUser { + @JavaAnnot(5) + def f1(): Int = 1 + + @JavaAnnot(a = 5) + def f2(): Int = 1 + + @JavaAnnot(5, "foo") + def f3(): Int = 1 + + @JavaAnnot(5, "foo", 3) + def f4(): Int = 1 + + @JavaAnnot(5, c = 3) + def f5(): Int = 1 + + @JavaAnnot(5, c = 3, b = "foo") + def f6(): Int = 1 + + @JavaAnnot(b = "foo", c = 3, a = 5) + def f7(): Int = 1 + + @JavaAnnot(b = "foo", a = 5) + def f8(): Int = 1 +} diff --git a/sbt-test/scala3-compat/java-annotations-3.4/project/DottyInjectedPlugin.scala b/sbt-test/scala3-compat/java-annotations-3.4/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..fb946c4b8c61 --- /dev/null +++ b/sbt-test/scala3-compat/java-annotations-3.4/project/DottyInjectedPlugin.scala @@ -0,0 +1,11 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion") + ) +} diff --git a/sbt-test/scala3-compat/java-annotations-3.4/test b/sbt-test/scala3-compat/java-annotations-3.4/test new file mode 100644 index 000000000000..63092ffa4a03 --- /dev/null +++ b/sbt-test/scala3-compat/java-annotations-3.4/test @@ -0,0 +1 @@ +> app/run diff --git a/scala2-library-bootstrapped/src/scala/collection/Iterable.scala b/scala2-library-bootstrapped/src/scala/collection/Iterable.scala index 8f9142583b29..4a7a0129a9ce 100644 --- a/scala2-library-bootstrapped/src/scala/collection/Iterable.scala +++ b/scala2-library-bootstrapped/src/scala/collection/Iterable.scala @@ -756,7 +756,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @param that the iterable providing the second half of each result pair * @param thisElem the element to be used to fill up the result if this $coll is shorter than `that`. * @param thatElem the element to be used to fill up the result if `that` is shorter than this $coll. - * @return a new collection of type `That` containing pairs consisting of + * @return a new collection of the type of this $coll containing pairs consisting of * corresponding elements of this $coll and `that`. The length * of the returned collection is the maximum of the lengths of this $coll and `that`. * If this $coll is shorter than `that`, `thisElem` values are used to pad the result. 
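Aside on the scaladoc wording adjusted just above: the `thisElem`/`thatElem` parameters describe `zipAll`-style padding, where the shorter of the two collections is padded so the result has the length of the longer one. A quick illustration of that documented behaviour using the standard library:

val xs = List(1, 2, 3)
val ys = List("a")
// `ys` is shorter, so it is padded with "?" up to the length of `xs`.
val zipped = xs.zipAll(ys, 0, "?") // List((1,"a"), (2,"?"), (3,"?"))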
diff --git a/scala2-library-cc/src/scala/collection/IndexedSeqView.scala b/scala2-library-cc/src/scala/collection/IndexedSeqView.scala index 0b6f1bc8e64e..78f8abb8e327 100644 --- a/scala2-library-cc/src/scala/collection/IndexedSeqView.scala +++ b/scala2-library-cc/src/scala/collection/IndexedSeqView.scala @@ -16,13 +16,10 @@ package collection import scala.annotation.nowarn import language.experimental.captureChecking -trait IndexedSeqViewOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { - self: IndexedSeqViewOps[A, CC, C]^ => -} +trait IndexedSeqViewOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] /** View defined in terms of indexing a range */ trait IndexedSeqView[+A] extends IndexedSeqViewOps[A, View, View[A]] with SeqView[A] { - self: IndexedSeqView[A]^ => override def view: IndexedSeqView[A]^{this} = this diff --git a/scala2-library-cc/src/scala/collection/Iterator.scala b/scala2-library-cc/src/scala/collection/Iterator.scala index 58ef4beb930d..4d1b0ed4ff95 100644 --- a/scala2-library-cc/src/scala/collection/Iterator.scala +++ b/scala2-library-cc/src/scala/collection/Iterator.scala @@ -1008,7 +1008,7 @@ object Iterator extends IterableFactory[Iterator] { def newBuilder[A]: Builder[A, Iterator[A]] = new ImmutableBuilder[A, Iterator[A]](empty[A]) { override def addOne(elem: A): this.type = { elems = elems ++ single(elem); this } - } + }.asInstanceOf // !!! CC unsafe op /** Creates iterator that produces the results of some element computation a number of times. * @@ -1160,7 +1160,7 @@ object Iterator extends IterableFactory[Iterator] { @tailrec def merge(): Unit = if (current.isInstanceOf[ConcatIterator[_]]) { val c = current.asInstanceOf[ConcatIterator[A]] - current = c.current + current = c.current.asInstanceOf // !!! CC unsafe op currentHasNextChecked = c.currentHasNextChecked if (c.tail != null) { if (last == null) last = c.last diff --git a/scala2-library-cc/src/scala/collection/SeqView.scala b/scala2-library-cc/src/scala/collection/SeqView.scala index 34405e06eedb..292dc61ddaa8 100644 --- a/scala2-library-cc/src/scala/collection/SeqView.scala +++ b/scala2-library-cc/src/scala/collection/SeqView.scala @@ -25,7 +25,6 @@ import scala.annotation.unchecked.uncheckedCaptures * mapping a SeqView with an impure function gives an impure view). 
*/ trait SeqViewOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { - self: SeqViewOps[A, CC, C]^ => def length: Int def apply(x: Int): A @@ -75,7 +74,6 @@ trait SeqViewOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { } trait SeqView[+A] extends SeqViewOps[A, View, View[A]] with View[A] { - self: SeqView[A]^ => override def view: SeqView[A]^{this} = this @@ -186,12 +184,14 @@ object SeqView { } @SerialVersionUID(3L) - class Sorted[A, B >: A] private (private[this] var underlying: SomeSeqOps[A]^, + class Sorted[A, B >: A] private (underlying: SomeSeqOps[A]^, private[this] val len: Int, ord: Ordering[B]) extends SeqView[A] { outer: Sorted[A, B]^ => + private var myUnderlying: SomeSeqOps[A]^{underlying} = underlying + // force evaluation immediately by calling `length` so infinite collections // hang on `sorted`/`sortWith`/`sortBy` rather than on arbitrary method calls def this(underlying: SomeSeqOps[A]^, ord: Ordering[B]) = this(underlying, underlying.length, ord) @@ -221,10 +221,10 @@ object SeqView { val res = { val len = this.len if (len == 0) Nil - else if (len == 1) List(underlying.head) + else if (len == 1) List(myUnderlying.head) else { val arr = new Array[Any](len) // Array[Any] =:= Array[AnyRef] - underlying.copyToArray(arr) + myUnderlying.copyToArray(arr) java.util.Arrays.sort(arr.asInstanceOf[Array[AnyRef]], ord.asInstanceOf[Ordering[AnyRef]]) // casting the Array[AnyRef] to Array[A] and creating an ArraySeq from it // is safe because: @@ -238,12 +238,12 @@ object SeqView { } } evaluated = true - underlying = null + myUnderlying = null res } private[this] def elems: SomeSeqOps[A]^{this} = { - val orig = underlying + val orig = myUnderlying if (evaluated) _sorted else orig } diff --git a/scala2-library-cc/src/scala/collection/View.scala b/scala2-library-cc/src/scala/collection/View.scala index 31c544a46beb..132934dbe3bd 100644 --- a/scala2-library-cc/src/scala/collection/View.scala +++ b/scala2-library-cc/src/scala/collection/View.scala @@ -150,7 +150,10 @@ object View extends IterableFactory[View] { object Filter { def apply[A](underlying: Iterable[A]^, p: A => Boolean, isFlipped: Boolean): Filter[A]^{underlying, p} = underlying match { - case filter: Filter[A] if filter.isFlipped == isFlipped => new Filter(filter.underlying, a => filter.p(a) && p(a), isFlipped) + case filter: Filter[A] if filter.isFlipped == isFlipped => + new Filter(filter.underlying, a => filter.p(a) && p(a), isFlipped) + .asInstanceOf[Filter[A]^{underlying, p}] + // !!! asInstanceOf needed once paths were added, see path-patmat-should-be-pos.scala for minimization case _ => new Filter(underlying, p, isFlipped) } } diff --git a/scala2-library-cc/src/scala/collection/immutable/LazyListIterable.scala b/scala2-library-cc/src/scala/collection/immutable/LazyListIterable.scala index ac24995e6892..28ce8da104aa 100644 --- a/scala2-library-cc/src/scala/collection/immutable/LazyListIterable.scala +++ b/scala2-library-cc/src/scala/collection/immutable/LazyListIterable.scala @@ -24,6 +24,7 @@ import scala.language.implicitConversions import scala.runtime.Statics import language.experimental.captureChecking import annotation.unchecked.uncheckedCaptures +import caps.untrackedCaptures /** This class implements an immutable linked list. We call it "lazy" * because it computes its elements only when they are needed. @@ -245,7 +246,7 @@ import annotation.unchecked.uncheckedCaptures * @define evaluatesAllElements This method evaluates all elements of the collection. 
*/ @SerialVersionUID(3L) -final class LazyListIterable[+A] private(private[this] var lazyState: () => LazyListIterable.State[A]^) +final class LazyListIterable[+A] private(@untrackedCaptures lazyState: () => LazyListIterable.State[A]^) extends AbstractIterable[A] with Iterable[A] with IterableOps[A, LazyListIterable, LazyListIterable[A]] @@ -253,6 +254,8 @@ final class LazyListIterable[+A] private(private[this] var lazyState: () => Lazy with Serializable { import LazyListIterable._ + private var myLazyState = lazyState + @volatile private[this] var stateEvaluated: Boolean = false @inline private def stateDefined: Boolean = stateEvaluated private[this] var midEvaluation = false @@ -264,11 +267,11 @@ final class LazyListIterable[+A] private(private[this] var lazyState: () => Lazy throw new RuntimeException("self-referential LazyListIterable or a derivation thereof has no more elements") } midEvaluation = true - val res = try lazyState() finally midEvaluation = false + val res = try myLazyState() finally midEvaluation = false // if we set it to `true` before evaluating, we may infinite loop // if something expects `state` to already be evaluated stateEvaluated = true - lazyState = null // allow GC + myLazyState = null // allow GC res } @@ -755,7 +758,7 @@ final class LazyListIterable[+A] private(private[this] var lazyState: () => Lazy * The iterator returned by this method mostly preserves laziness; * a single element ahead of the iterator is evaluated. */ - override def grouped(size: Int): Iterator[LazyListIterable[A]] = { + override def grouped(size: Int): Iterator[LazyListIterable[A]]^{this} = { require(size > 0, "size must be positive, but was " + size) slidingImpl(size = size, step = size) } @@ -765,12 +768,12 @@ final class LazyListIterable[+A] private(private[this] var lazyState: () => Lazy * The iterator returned by this method mostly preserves laziness; * `size - step max 1` elements ahead of the iterator are evaluated. 
*/ - override def sliding(size: Int, step: Int): Iterator[LazyListIterable[A]] = { + override def sliding(size: Int, step: Int): Iterator[LazyListIterable[A]]^{this} = { require(size > 0 && step > 0, s"size=$size and step=$step, but both must be positive") slidingImpl(size = size, step = step) } - @inline private def slidingImpl(size: Int, step: Int): Iterator[LazyListIterable[A]] = + @inline private def slidingImpl(size: Int, step: Int): Iterator[LazyListIterable[A]]^{this} = if (knownIsEmpty) Iterator.empty else new SlidingIterator[A](this, size = size, step = step) @@ -996,7 +999,7 @@ object LazyListIterable extends IterableFactory[LazyListIterable] { private def filterImpl[A](ll: LazyListIterable[A]^, p: A => Boolean, isFlipped: Boolean): LazyListIterable[A]^{ll, p} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef: LazyListIterable[A]^{ll*} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric + var restRef: LazyListIterable[A]^{ll} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric newLL { var elem: A = null.asInstanceOf[A] var found = false @@ -1013,7 +1016,7 @@ object LazyListIterable extends IterableFactory[LazyListIterable] { private def collectImpl[A, B](ll: LazyListIterable[A]^, pf: PartialFunction[A, B]^): LazyListIterable[B]^{ll, pf} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef: LazyListIterable[A]^{ll*} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric + var restRef: LazyListIterable[A]^{ll} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric newLL { val marker = Statics.pfMarker val toMarker = anyToMarker.asInstanceOf[A => B] // safe because Function1 is erased @@ -1032,7 +1035,7 @@ object LazyListIterable extends IterableFactory[LazyListIterable] { private def flatMapImpl[A, B](ll: LazyListIterable[A]^, f: A => IterableOnce[B]^): LazyListIterable[B]^{ll, f} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef: LazyListIterable[A]^{ll*} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric + var restRef: LazyListIterable[A]^{ll} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric newLL { var it: Iterator[B]^{ll, f} = null var itHasNext = false @@ -1056,7 +1059,7 @@ object LazyListIterable extends IterableFactory[LazyListIterable] { private def dropImpl[A](ll: LazyListIterable[A]^, n: Int): LazyListIterable[A]^{ll} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef: LazyListIterable[A]^{ll*} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric + var restRef: LazyListIterable[A]^{ll} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric var iRef = n // val iRef = new IntRef(n) newLL { var rest = restRef // var rest = restRef.elem @@ -1073,7 +1076,7 @@ object LazyListIterable extends IterableFactory[LazyListIterable] { private def dropWhileImpl[A](ll: LazyListIterable[A]^, p: A => Boolean): LazyListIterable[A]^{ll, p} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef: LazyListIterable[A]^{ll*} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric + var restRef: LazyListIterable[A]^{ll} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric newLL { 
var rest = restRef // var rest = restRef.elem while (!rest.isEmpty && p(rest.head)) { @@ -1086,8 +1089,8 @@ object LazyListIterable extends IterableFactory[LazyListIterable] { private def takeRightImpl[A](ll: LazyListIterable[A]^, n: Int): LazyListIterable[A]^{ll} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef: LazyListIterable[A]^{ll*} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric - var scoutRef: LazyListIterable[A]^{ll*} = ll // same situation + var restRef: LazyListIterable[A]^{ll} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric + var scoutRef: LazyListIterable[A]^{ll} = ll // same situation var remainingRef = n // val remainingRef = new IntRef(n) newLL { var scout = scoutRef // var scout = scoutRef.elem @@ -1236,33 +1239,35 @@ object LazyListIterable extends IterableFactory[LazyListIterable] { */ def newBuilder[A]: Builder[A, LazyListIterable[A]] = new LazyBuilder[A] - private class LazyIterator[+A](private[this] var lazyList: LazyListIterable[A]^) extends AbstractIterator[A] { - override def hasNext: Boolean = !lazyList.isEmpty + private class LazyIterator[+A](lazyList: LazyListIterable[A]^) extends AbstractIterator[A] { + private var myLazyList = lazyList + override def hasNext: Boolean = !myLazyList.isEmpty override def next(): A = - if (lazyList.isEmpty) Iterator.empty.next() + if (myLazyList.isEmpty) Iterator.empty.next() else { - val res = lazyList.head - lazyList = lazyList.tail + val res = myLazyList.head + myLazyList = myLazyList.tail res } } - private class SlidingIterator[A](private[this] var lazyList: LazyListIterable[A]^, size: Int, step: Int) + private class SlidingIterator[A](lazyList: LazyListIterable[A]^, size: Int, step: Int) extends AbstractIterator[LazyListIterable[A]] { + private var myLazyList = lazyList private val minLen = size - step max 0 private var first = true def hasNext: Boolean = - if (first) !lazyList.isEmpty - else lazyList.lengthGt(minLen) + if (first) !myLazyList.isEmpty + else myLazyList.lengthGt(minLen) def next(): LazyListIterable[A] = { if (!hasNext) Iterator.empty.next() else { first = false - val list = lazyList - lazyList = list.drop(step) + val list = myLazyList + myLazyList = list.drop(step) list.take(size) } } @@ -1281,7 +1286,7 @@ object LazyListIterable extends IterableFactory[LazyListIterable] { import LazyBuilder._ private[this] var next: DeferredState[A] = _ - private[this] var list: LazyListIterable[A] = _ + @uncheckedCaptures private[this] var list: LazyListIterable[A]^ = _ clear() @@ -1361,7 +1366,9 @@ object LazyListIterable extends IterableFactory[LazyListIterable] { case SerializeEnd => initRead = true case a => init += a.asInstanceOf[A] } - val tail = in.readObject().asInstanceOf[LazyListIterable[A]] + val tail: LazyListIterable[A] = in.readObject().asInstanceOf[LazyListIterable[A]] + // Explicit type annotation needed so that tail.state below is dropped from capture set. + // Before paths were added, it was tail that was added, and the `asSeenFrom` to a pure type made it work. 
// scala/scala#10118: caution that no code path can evaluate `tail.state` // before the resulting LazyListIterable is returned val it = init.toList.iterator diff --git a/scala2-library-cc/src/scala/collection/mutable/ArrayBuffer.scala b/scala2-library-cc/src/scala/collection/mutable/ArrayBuffer.scala index 85a045c34423..b47b25f9529f 100644 --- a/scala2-library-cc/src/scala/collection/mutable/ArrayBuffer.scala +++ b/scala2-library-cc/src/scala/collection/mutable/ArrayBuffer.scala @@ -197,7 +197,7 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) // the previous line // - `copyElemsToArray` will call `System.arraycopy` // - `System.arraycopy` will effectively "read" all the values before - // overwriting any of them when two arrays are the the same reference + // overwriting any of them when two arrays are the same reference val actual = IterableOnce.copyElemsToArray(elems, array.asInstanceOf[Array[Any]], index, elemsLength) if (actual != elemsLength) throw new IllegalStateException(s"Copied $actual of $elemsLength") size0 = len + elemsLength // update size AFTER the copy, in case we're inserting a proxy diff --git a/scala2-library-cc/src/scala/collection/mutable/Buffer.scala b/scala2-library-cc/src/scala/collection/mutable/Buffer.scala index f9aa9cf28c72..27e5a8997d48 100644 --- a/scala2-library-cc/src/scala/collection/mutable/Buffer.scala +++ b/scala2-library-cc/src/scala/collection/mutable/Buffer.scala @@ -16,7 +16,6 @@ package mutable import scala.annotation.nowarn import language.experimental.captureChecking - /** A `Buffer` is a growable and shrinkable `Seq`. */ trait Buffer[A] extends Seq[A] @@ -184,7 +183,7 @@ trait IndexedBuffer[A] extends IndexedSeq[A] // There's scope for a better implementation which copies elements in place. var i = 0 val s = size - val newElems = new Array[(IterableOnce[A]^{f*})](s) + val newElems = new Array[(IterableOnce[A]^{f})](s) while (i < s) { newElems(i) = f(this(i)); i += 1 } clear() i = 0 diff --git a/scala2-library-cc/src/scala/collection/mutable/Builder.scala b/scala2-library-cc/src/scala/collection/mutable/Builder.scala index dd57cb75da91..2d5f84c32e92 100644 --- a/scala2-library-cc/src/scala/collection/mutable/Builder.scala +++ b/scala2-library-cc/src/scala/collection/mutable/Builder.scala @@ -80,7 +80,7 @@ trait Builder[-A, +To] extends Growable[A] { } } - /** A builder resulting from this builder my mapping the result using `f`. */ + /** A builder resulting from this builder by mapping the result using `f`. */ def mapResult[NewTo](f: To => NewTo): Builder[A, NewTo]^{this, f} = new Builder[A, NewTo] { def addOne(x: A): this.type = { self += x; this } def clear(): Unit = self.clear() diff --git a/scaladoc-testcases/docs/_docs/index.md b/scaladoc-testcases/docs/_docs/index.md index 42cb5f62dae8..9acac71a63b3 100644 --- a/scaladoc-testcases/docs/_docs/index.md +++ b/scaladoc-testcases/docs/_docs/index.md @@ -13,5 +13,12 @@ class Renderer(using RenderingContext) val renderer: Renderer = Renderer() ``` +```scala + trait Ord: + type Self + trait SemiGroup: + type Self + extension (x: Self) def combine(y: Self): Self +``` diff --git a/scaladoc-testcases/src/tests/extensionParams.scala b/scaladoc-testcases/src/tests/extensionParams.scala index 0e2225d8aa3c..12850778c793 100644 --- a/scaladoc-testcases/src/tests/extensionParams.scala +++ b/scaladoc-testcases/src/tests/extensionParams.scala @@ -61,8 +61,6 @@ extension (using Unit)(a: Int) def f14(): Any = ??? 
-import scala.language.experimental.clauseInterleaving - extension (using String)(using Int)(a: Animal)(using Unit)(using Number) def f16(b: Any)[T](c: T): T = ??? diff --git a/scaladoc-testcases/src/tests/methodsAndConstructors.scala b/scaladoc-testcases/src/tests/methodsAndConstructors.scala index cddd0f56e9fe..b4c354d174c4 100644 --- a/scaladoc-testcases/src/tests/methodsAndConstructors.scala +++ b/scaladoc-testcases/src/tests/methodsAndConstructors.scala @@ -1,7 +1,5 @@ package tests.methodsAndConstructors -import scala.language.experimental.clauseInterleaving - class A class B extends A class C diff --git a/scaladoc-testcases/src/tests/opaqueTypes.scala b/scaladoc-testcases/src/tests/opaqueTypes.scala index 33cc7ab9ff91..c248632092bd 100644 --- a/scaladoc-testcases/src/tests/opaqueTypes.scala +++ b/scaladoc-testcases/src/tests/opaqueTypes.scala @@ -6,4 +6,8 @@ opaque type Permissions = Int opaque type PermissionChoice = Int -//opaque type Permission <: Permissions & PermissionChoice = Int TODO: #112 \ No newline at end of file +//opaque type Permission <: Permissions & PermissionChoice = Int TODO: #112 + +object Foo: + opaque type Bar + = Int \ No newline at end of file diff --git a/scaladoc-testcases/src/tests/rightAssocExtension.scala b/scaladoc-testcases/src/tests/rightAssocExtension.scala new file mode 100644 index 000000000000..a065ee765caf --- /dev/null +++ b/scaladoc-testcases/src/tests/rightAssocExtension.scala @@ -0,0 +1,7 @@ +package tests.rightAssocExtension + +case class Wrap[+T](x: T) + +extension [T](a: T) + def *:[U <: Tuple](b: Wrap[U]): Wrap[T *: U] + = Wrap(a *: b.x) diff --git a/scaladoc/resources/dotty_res/scripts/ux.js b/scaladoc/resources/dotty_res/scripts/ux.js index 7b875fbcef8e..97f9bf14939d 100644 --- a/scaladoc/resources/dotty_res/scripts/ux.js +++ b/scaladoc/resources/dotty_res/scripts/ux.js @@ -10,7 +10,7 @@ const attrsToCopy = [ /** * @typedef {Object} SavedPageState - * @property {Strign} mainDiv + * @property {String} mainDiv * @property {String} leftColumn * @property {String} title * @property {Record} attrs @@ -322,7 +322,7 @@ function attachAllListeners() { if (location.hash) { var target = location.hash.substring(1); - // setting the 'expand' class on the top-level container causes undesireable styles + // setting the 'expand' class on the top-level container causes undesirable styles // to apply to the top-level docs, so we avoid this logic for that element. 
if (target != "container") { var selected = document.getElementById(location.hash.substring(1)); @@ -568,7 +568,7 @@ function showGraph() { .attr("offset", "30%"); radialGradient .append("stop") - .attr("stop-color", "var(--background-default)") + .attr("stop-color", "var(--background-main)") .attr("offset", "100%"); var inner = svg.append("g"); diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/Renderer.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/Renderer.scala index 1a43ea8648a8..0f7082fd6f49 100644 --- a/scaladoc/src/dotty/tools/scaladoc/renderers/Renderer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/renderers/Renderer.scala @@ -30,71 +30,72 @@ abstract class Renderer(rootPackage: Member, val members: Map[DRI, Member], prot val rootApiPage: Option[Page] = Some(memberPage(rootPackage)).filter(_.children.nonEmpty).map(_.withTitle(ctx.args.name)) - val rootDocsPage: Option[Page] = staticSite match - case None => None - case Some(siteContext) => - val rootTemplate = siteContext.staticSiteRoot.rootTemplate - - // Below code is for walking in order the tree and modifing its nodes basing on its neighbours - - // We add dummy guards - val notHidden: Seq[Option[LoadedTemplate]] = None +: siteContext.allTemplates.filterNot(_.hidden).map(Some(_)) :+ None - - // Let's gather the list of maps for each template with its in-order neighbours - val newSettings: List[Map[String, Object]] = notHidden.sliding(size = 3, step = 1).map { - case None :: None :: Nil => - Map.empty - case prev :: mid :: next :: Nil => - def link(sibling: Option[LoadedTemplate]): Option[String] = - def realPath(path: Path) = if Files.isDirectory(path) then Paths.get(path.toString, "index.html") else path - sibling.map { n => - val realMidPath = realPath(mid.get.file.toPath) - val realSiblingPath = realPath(n.file.toPath) - realMidPath.relativize(realSiblingPath).toString.stripPrefix("../") - } - List( - for { - link <- link(prev) - p <- prev - } yield ( - "previous" -> Map( - "title" -> p.templateFile.title.name, - "url" -> link - ) - ), - for { - link <- link(next) - n <- next - } yield ( - "next" -> Map( - "title" -> n.templateFile.title.name, - "url" -> link - ) - ), - ).flatten.toMap - }.toList - - def updateSettings(templates: Seq[LoadedTemplate], additionalSettings: ListBuffer[Map[String, Object]]): List[LoadedTemplate] = - val updatedTemplates = List.newBuilder[LoadedTemplate] - for template <- templates do - val head: Map[String, Object] = - if template.hidden then Map.empty - else additionalSettings.remove(0) - val current: Map[String, Object] = template.templateFile.settings.getOrElse("page", Map.empty).asInstanceOf[Map[String, Object]] - val updatedTemplateFile = template.templateFile.copy(settings = template.templateFile.settings.updated("page", head ++ current)) - updatedTemplates += template.copy( - templateFile = updatedTemplateFile, - children = updateSettings(template.children, additionalSettings) - ) - updatedTemplates.result() - - val newTemplates = updateSettings(Seq(rootTemplate), newSettings.to(ListBuffer)) - val templatePages = newTemplates.map(templateToPage(_, siteContext)) - - val newRoot = newTemplates.head - - Some(newRoot).filter(r => r.children.nonEmpty || r.templateFile.rawCode.nonEmpty) - .map(templateToPage(_, siteContext)) + val rootDocsPage: Option[Page] = staticSite match { + case None => None + case Some(siteContext) => + val rootTemplate = siteContext.staticSiteRoot.rootTemplate + + // Below code is for walking in order the tree and modifing its nodes basing on its neighbours + 
+ // We add dummy guards + val notHidden: Seq[Option[LoadedTemplate]] = None +: siteContext.allTemplates.filterNot(_.hidden).map(Some(_)) :+ None + + // Let's gather the list of maps for each template with its in-order neighbours + val newSettings: List[Map[String, Object]] = notHidden.sliding(size = 3, step = 1).map { + case None :: None :: Nil => + Map.empty + case prev :: mid :: next :: Nil => + def link(sibling: Option[LoadedTemplate]): Option[String] = + def realPath(path: Path) = if Files.isDirectory(path) then Paths.get(path.toString, "index.html") else path + sibling.map { n => + val realMidPath = realPath(mid.get.file.toPath) + val realSiblingPath = realPath(n.file.toPath) + realMidPath.relativize(realSiblingPath).toString.stripPrefix("../") + } + List( + for { + link <- link(prev) + p <- prev + } yield ( + "previous" -> Map( + "title" -> p.templateFile.title.name, + "url" -> link + ) + ), + for { + link <- link(next) + n <- next + } yield ( + "next" -> Map( + "title" -> n.templateFile.title.name, + "url" -> link + ) + ), + ).flatten.toMap + }.toList + + def updateSettings(templates: Seq[LoadedTemplate], additionalSettings: ListBuffer[Map[String, Object]]): List[LoadedTemplate] = + val updatedTemplates = List.newBuilder[LoadedTemplate] + for template <- templates do + val head: Map[String, Object] = + if template.hidden then Map.empty + else additionalSettings.remove(0) + val current: Map[String, Object] = template.templateFile.settings.getOrElse("page", Map.empty).asInstanceOf[Map[String, Object]] + val updatedTemplateFile = template.templateFile.copy(settings = template.templateFile.settings.updated("page", head ++ current)) + updatedTemplates += template.copy( + templateFile = updatedTemplateFile, + children = updateSettings(template.children, additionalSettings) + ) + updatedTemplates.result() + + val newTemplates = updateSettings(Seq(rootTemplate), newSettings.to(ListBuffer)) + val templatePages = newTemplates.map(templateToPage(_, siteContext)) + + val newRoot = newTemplates.head + + Some(newRoot).filter(r => r.children.nonEmpty || r.templateFile.rawCode.nonEmpty) + .map(templateToPage(_, siteContext)) + } val redirectPages: Seq[Page] = staticSite.fold(Seq.empty)(siteContext => siteContext.redirectTemplates.map { case (template, driFrom, driTo) => diff --git a/scaladoc/src/dotty/tools/scaladoc/snippets/FlexmarkSnippetProcessor.scala b/scaladoc/src/dotty/tools/scaladoc/snippets/FlexmarkSnippetProcessor.scala index 33f0e089053a..c92853816d16 100644 --- a/scaladoc/src/dotty/tools/scaladoc/snippets/FlexmarkSnippetProcessor.scala +++ b/scaladoc/src/dotty/tools/scaladoc/snippets/FlexmarkSnippetProcessor.scala @@ -65,7 +65,7 @@ object FlexmarkSnippetProcessor: content.add(s, 0) node.setContent(content) - val fullSnippet = Seq(snippetImports, snippet).mkString("\n").trim + val fullSnippet = Seq(snippetImports, snippet).mkString("\n").stripPrefix("\n") val snippetCompilationResult = cf(fullSnippet, lineOffset, argOverride) match { case Some(result @ SnippetCompilationResult(wrapped, _, _, messages)) => node.setContentString(fullSnippet) diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/BasicSupport.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/BasicSupport.scala index 471d338522f0..a5e32c7332bd 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/BasicSupport.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/BasicSupport.scala @@ -48,7 +48,7 @@ trait BasicSupport: "scala.transient", "scala.volatile", "scala.annotation.experimental", - "scala.annotation.contructorOnly", + 
"scala.annotation.constructorOnly", "scala.annotation.static", "scala.annotation.targetName", "scala.annotation.threadUnsafe", diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala index 88d57cdb9853..d3c93aaba8c7 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala @@ -314,7 +314,7 @@ trait ClassLikeSupport: def parseObject(classDef: ClassDef, signatureOnly: Boolean = false): Member = mkClass(classDef)( // All objects are final so we do not need final modifier! - modifiers = classDef.symbol.getExtraModifiers().filter(_ != Modifier.Final), + modifiers = classDef.symbol.getExtraModifiers().filter(mod => mod != Modifier.Final && mod != Modifier.Opaque), signatureOnly = signatureOnly ) @@ -359,7 +359,9 @@ trait ClassLikeSupport: if methodSymbol.isExtensionMethod && methodSymbol.isRightAssoc then // Taken from RefinedPrinter.scala // If you change the names of the clauses below, also change them in right-associative-extension-methods.md - val (leftTyParams, rest1) = memberInfo.paramLists.span(_.isType) + val (leftTyParams, rest1) = memberInfo.paramLists match + case fst :: tail if fst.isType => (List(fst), tail) + case other => (List(), other) val (leadingUsing, rest2) = rest1.span(_.isUsing) val (rightTyParams, rest3) = rest2.span(_.isType) val (rightParam, rest4) = rest3.splitAt(1) @@ -586,7 +588,8 @@ trait ClassLikeSupport: // `def foo[A: ClassTag] = 1`. // Scala spec states that `$` should not be used in names and behaviour may be undefiend in such case. // Documenting method slightly different then its definition is withing the 'undefiend behaviour'. - symbol.paramSymss.flatten.find(_.name == name).exists(_.flags.is(Flags.Implicit)) + symbol.paramSymss.flatten.find(_.name == name).exists(p => + p.flags.is(Flags.Given) || p.flags.is(Flags.Implicit)) def handlePolyType(memberInfo: MemberInfo, polyType: PolyType): MemberInfo = val typeParamList = MemberInfo.TypeParameterList(polyType.paramNames.zip(polyType.paramBounds).toMap) diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/InkuireSupport.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/InkuireSupport.scala index 8a703cfb5d24..d5eebd1ab798 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/InkuireSupport.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/InkuireSupport.scala @@ -184,10 +184,10 @@ trait InkuireSupport(using DocContext) extends Resources: else ownerNameChain(sym.owner) :+ sym.normalizedName private def viableSymbol(s: Symbol): Boolean = - !s.flags.is(Flags.Private) && - !s.flags.is(Flags.Protected) && - !s.flags.is(Flags.Override) && - !s.flags.is(Flags.Synthetic) + !s.flags.is(Flags.Private) && + !s.flags.is(Flags.Protected) && + !s.flags.is(Flags.Override) && + !s.flags.is(Flags.Synthetic) private def varName(t: Inkuire.TypeLike): Option[String] = t match { case tpe: Inkuire.Type => Some(tpe.name.name) diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/TastyParser.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/TastyParser.scala index f55451fdc636..1a8337e0c6b7 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/TastyParser.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/TastyParser.scala @@ -5,7 +5,7 @@ package tasty import java.util.regex.Pattern import scala.util.{Try, Success, Failure} -import scala.tasty.inspector.{TastyInspector, Inspector, Tasty} +import scala.tasty.inspector.{ScaladocInternalTastyInspector, Inspector, Tasty} 
import scala.quoted._ import dotty.tools.dotc @@ -160,7 +160,7 @@ object ScaladocTastyInspector: report.error("File extension is not `tasty` or `jar`: " + invalidPath) if tastyPaths.nonEmpty then - TastyInspector.inspectAllTastyFiles(tastyPaths, jarPaths, classpath)(inspector) + ScaladocInternalTastyInspector.inspectAllTastyFilesInContext(tastyPaths, jarPaths, classpath)(inspector)(using ctx.compilerContext) val all = inspector.topLevels.result() all.groupBy(_._1).map { case (pckName, members) => diff --git a/scaladoc/src/scala/tasty/inspector/TastyInspector.scala b/scaladoc/src/scala/tasty/inspector/TastyInspector.scala index 906578c9d405..190be6a588a1 100644 --- a/scaladoc/src/scala/tasty/inspector/TastyInspector.scala +++ b/scaladoc/src/scala/tasty/inspector/TastyInspector.scala @@ -1,5 +1,7 @@ -// Copy of tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala +// Renamed copy of tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala // FIXME remove this copy of the file +// Since copying, an inspectAllTastyFilesInContext method was added for scaladoc only +// to fix regressions introduced by the switch from old to a new TastyInspector package scala.tasty.inspector @@ -21,7 +23,7 @@ import dotty.tools.dotc.report import java.io.File.pathSeparator -object TastyInspector: +object ScaladocInternalTastyInspector: /** Load and process TASTy files using TASTy reflect * @@ -41,6 +43,32 @@ object TastyInspector: def inspectTastyFilesInJar(jar: String)(inspector: Inspector): Boolean = inspectAllTastyFiles(Nil, List(jar), Nil)(inspector) + private def checkFiles(tastyFiles: List[String], jars: List[String]): Unit = + def checkFile(fileName: String, ext: String): Unit = + val file = dotty.tools.io.Path(fileName) + if !file.ext.toLowerCase.equalsIgnoreCase(ext) then + throw new IllegalArgumentException(s"File extension is not `.$ext`: $file") + else if !file.exists then + throw new IllegalArgumentException(s"File not found: ${file.toAbsolute}") + tastyFiles.foreach(checkFile(_, "tasty")) + jars.foreach(checkFile(_, "jar")) + + /** + * Added for Scaladoc-only. + * Meant to fix regressions introduces by the switch from old to new TastyInspector: + * https://github.com/scala/scala3/issues/18231 + * https://github.com/scala/scala3/issues/20476 + * Stable TastyInspector API does not support passing compiler context. 
+ */ + def inspectAllTastyFilesInContext(tastyFiles: List[String], jars: List[String], dependenciesClasspath: List[String])(inspector: Inspector)(using Context): Boolean = + checkFiles(tastyFiles, jars) + val classes = tastyFiles ::: jars + classes match + case Nil => true + case _ => + val reporter = inspectorDriver(inspector).process(inspectorArgs(dependenciesClasspath, classes), summon[Context]) + !reporter.hasErrors + /** Load and process TASTy files using TASTy reflect * * @param tastyFiles List of paths of `.tasty` files @@ -50,14 +78,7 @@ object TastyInspector: * @return boolean value indicating whether the process succeeded */ def inspectAllTastyFiles(tastyFiles: List[String], jars: List[String], dependenciesClasspath: List[String])(inspector: Inspector): Boolean = - def checkFile(fileName: String, ext: String): Unit = - val file = dotty.tools.io.Path(fileName) - if !file.ext.toLowerCase.equalsIgnoreCase(ext) then - throw new IllegalArgumentException(s"File extension is not `.$ext`: $file") - else if !file.exists then - throw new IllegalArgumentException(s"File not found: ${file.toAbsolute}") - tastyFiles.foreach(checkFile(_, "tasty")) - jars.foreach(checkFile(_, "jar")) + checkFiles(tastyFiles, jars) val files = tastyFiles ::: jars inspectFiles(dependenciesClasspath, files)(inspector) @@ -124,4 +145,4 @@ object TastyInspector: end inspectFiles -end TastyInspector +end ScaladocInternalTastyInspector diff --git a/scaladoc/test/dotty/tools/scaladoc/ExternalLocationProviderIntegrationTest.scala b/scaladoc/test/dotty/tools/scaladoc/ExternalLocationProviderIntegrationTest.scala index a63f699c4c2f..ec39fb5ce16b 100644 --- a/scaladoc/test/dotty/tools/scaladoc/ExternalLocationProviderIntegrationTest.scala +++ b/scaladoc/test/dotty/tools/scaladoc/ExternalLocationProviderIntegrationTest.scala @@ -57,8 +57,9 @@ class Scaladoc3ExternalLocationProviderIntegrationTest extends ExternalLocationP def getScalaLibraryPath: String = { val classpath: List[String] = System.getProperty("java.class.path").split(java.io.File.pathSeparatorChar).toList - val stdlib = classpath.find(_.contains("scala-library-2")).getOrElse("foobarbazz") // If we don't find the scala 2 library, the test will fail - new java.io.File(stdlib).getCanonicalPath() // canonicalize for case-insensitive file systems + // For an unclear reason, depending on if we pass the compiler context onto the tasty inspector + // the scala-2-library path needs to have its characters case fixed with new java.io.File(stdlib).getCanonicalPath() + classpath.find(_.contains("scala-library-2")).getOrElse("foobarbazz") // If we don't find the scala 2 library, the test will fail } class Scaladoc2LegacyExternalLocationProviderIntegrationTest extends LegacyExternalLocationProviderIntegrationTest( diff --git a/scaladoc/test/dotty/tools/scaladoc/no-link-warnings/LinkWarningTest.scala b/scaladoc/test/dotty/tools/scaladoc/no-link-warnings/LinkWarningTest.scala index 1d140315cc10..bcaee696b65c 100644 --- a/scaladoc/test/dotty/tools/scaladoc/no-link-warnings/LinkWarningTest.scala +++ b/scaladoc/test/dotty/tools/scaladoc/no-link-warnings/LinkWarningTest.scala @@ -14,6 +14,7 @@ class LinkWarningsTest extends ScaladocTest("noLinkWarnings"): override def runTest = afterRendering { val diagnostics = summon[DocContext].compilerContext.reportedDiagnostics - assertEquals("There should be exactly one warning", 1, diagnostics.warningMsgs.size) + val filteredWarnings = diagnostics.warningMsgs.filter(_ != "1 warning found") + assertEquals("There should be exactly one 
warning", 1, filteredWarnings.size) assertNoErrors(diagnostics) } diff --git a/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala b/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala index d60a4d82ff44..bfa2a372827a 100644 --- a/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala +++ b/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala @@ -122,3 +122,5 @@ class InfixTypes extends SignatureTest("infixTypes", SignatureTest.all) class ExtendsCall extends SignatureTest("extendsCall", SignatureTest.all) class RefinedFunctionTypes extends SignatureTest("refinedFunctionTypes", SignatureTest.all) + +class RightAssocExtension extends SignatureTest("rightAssocExtension", SignatureTest.all) diff --git a/staging/src/scala/quoted/staging/Compiler.scala b/staging/src/scala/quoted/staging/Compiler.scala index b37e8d4f70f2..2cc3aa6555c1 100644 --- a/staging/src/scala/quoted/staging/Compiler.scala +++ b/staging/src/scala/quoted/staging/Compiler.scala @@ -11,7 +11,7 @@ trait Compiler: object Compiler: - /** Create a new instance of the compiler using the the classloader of the application. + /** Create a new instance of the compiler using the classloader of the application. * * Usage: * ``` diff --git a/staging/test-resources/repl-staging/i6263 b/staging/test-resources/repl-staging/i6263 index 8d967c1c58ac..0df9a9893ae1 100644 --- a/staging/test-resources/repl-staging/i6263 +++ b/staging/test-resources/repl-staging/i6263 @@ -3,7 +3,7 @@ scala> import quoted.staging.{Compiler => StagingCompiler, _} scala> implicit def compiler: StagingCompiler = StagingCompiler.make(getClass.getClassLoader) def compiler: scala.quoted.staging.Compiler scala> def fn[T : Type](v : T) = println("ok") -def fn[T](v: T)(implicit evidence$1: scala.quoted.Type[T]): Unit +def fn[T](v: T)(using evidence$1: scala.quoted.Type[T]): Unit scala> withQuotes { fn("foo") } ok scala> withQuotes { fn((1,2)) } diff --git a/tasty/src/dotty/tools/tasty/TastyFormat.scala b/tasty/src/dotty/tools/tasty/TastyFormat.scala index 1e075efcf857..8da8879185f5 100644 --- a/tasty/src/dotty/tools/tasty/TastyFormat.scala +++ b/tasty/src/dotty/tools/tasty/TastyFormat.scala @@ -324,7 +324,7 @@ object TastyFormat { * compatibility, but remains backwards compatible, with all * preceding `MinorVersion`. */ - final val MinorVersion: Int = 5 + final val MinorVersion: Int = 6 /** Natural Number. 
The `ExperimentalVersion` allows for * experimentation with changes to TASTy without committing diff --git a/tests/disabled/pos/lazylist.scala b/tests/disabled/pos/lazylist.scala index c24f8677b91f..e56eb484894c 100644 --- a/tests/disabled/pos/lazylist.scala +++ b/tests/disabled/pos/lazylist.scala @@ -34,7 +34,7 @@ object LazyNil extends LazyList[Nothing]: def map[A, B](xs: {*} LazyList[A], f: {*} A => B): {f, xs} LazyList[B] = xs.map(f) -@annotation.capability class Cap +class Cap extends caps.Capability def test(cap1: Cap, cap2: Cap, cap3: Cap) = def f[T](x: LazyList[T]): LazyList[T] = if cap1 == cap1 then x else LazyNil diff --git a/tests/explicit-nulls/neg/i21380.scala b/tests/explicit-nulls/neg/i21380.scala new file mode 100644 index 000000000000..685aa09ef818 --- /dev/null +++ b/tests/explicit-nulls/neg/i21380.scala @@ -0,0 +1,19 @@ +@main def test() = { + var x: String | Null = null + if (false) { + x = "" + + } else { + x = "" + } + try { + x = "" + throw new Exception() + } + catch { + case e: Exception => { + x = null + } + } + x.replace("", "") // error +} diff --git a/tests/explicit-nulls/neg/i21380b.scala b/tests/explicit-nulls/neg/i21380b.scala new file mode 100644 index 000000000000..83e23053547c --- /dev/null +++ b/tests/explicit-nulls/neg/i21380b.scala @@ -0,0 +1,21 @@ +def test1 = + var x: String | Null = null + x = "" + 1 match + case 1 => x = null + case _ => x = x.trim() // ok + x.replace("", "") // error + +def test2(i: Int) = + var x: String | Null = null + i match + case 1 => x = "1" + case _ => x = " " + x.replace("", "") // ok + +def test3(i: Int) = + var x: String | Null = null + i match + case 1 if x != null => () + case _ => x = " " + x.trim() // ok \ No newline at end of file diff --git a/tests/explicit-nulls/neg/i21380c.scala b/tests/explicit-nulls/neg/i21380c.scala new file mode 100644 index 000000000000..f86a5638e4c8 --- /dev/null +++ b/tests/explicit-nulls/neg/i21380c.scala @@ -0,0 +1,45 @@ +def test1(i: Int): Int = + var x: String | Null = null + if i == 0 then x = "" + else x = "" + try + x = x.replace(" ", "") // ok + throw new Exception() + catch + case e: Exception => + x = x.replaceAll(" ", "") // error + x = null + x.length // error + +def test2: Int = + var x: String | Null = null + try throw new Exception() + finally x = "" + x.length // ok + +def test3 = + var x: String | Null = "" + try throw new Exception() + catch case e: Exception => + x = (??? : String | Null) + finally + val l = x.length // error + +def test4: Int = + var x: String | Null = null + try throw new Exception() + catch + case npe: NullPointerException => x = "" + case _ => x = "" + x.length // error + // Although the catch block here is exhaustive, + // it is possible that the exception is thrown and not caught. + // Therefore, the code after the try block can only rely on the retracted info. 
+ +def test5: Int = + var x: String | Null = null + try + x = "" + throw new Exception() + catch + case npe: NullPointerException => val i: Int = x.length // error \ No newline at end of file diff --git a/tests/explicit-nulls/pos/i21392.scala b/tests/explicit-nulls/pos/i21392.scala new file mode 100644 index 000000000000..0266199b7831 --- /dev/null +++ b/tests/explicit-nulls/pos/i21392.scala @@ -0,0 +1,16 @@ +//> using options -language:strictEquality + +import scala.collection.LinearSeq + +def foo[T](a: LinearSeq[T]) = a match + case Nil => -1 + case head +: tail => head + +enum Foo derives CanEqual: + case Bar + case Baz(x: String) + + +def foo(a: Foo) = a match + case Foo.Bar => -1 + case _ => 0 \ No newline at end of file diff --git a/tests/explicit-nulls/pos/interop-constructor.scala b/tests/explicit-nulls/pos/interop-constructor.scala index f222d24b0919..4ebfaa752b3a 100644 --- a/tests/explicit-nulls/pos/interop-constructor.scala +++ b/tests/explicit-nulls/pos/interop-constructor.scala @@ -1,4 +1,4 @@ -// Test that constructors have a non-nullab.e return type. +// Test that constructors have a non-nullable return type. class Foo { val x: java.lang.String = new java.lang.String() diff --git a/tests/explicit-nulls/warn/i21577.check b/tests/explicit-nulls/warn/i21577.check new file mode 100644 index 000000000000..b548a5bedc30 --- /dev/null +++ b/tests/explicit-nulls/warn/i21577.check @@ -0,0 +1,32 @@ +-- [E121] Pattern Match Warning: tests/explicit-nulls/warn/i21577.scala:5:9 -------------------------------------------- +5 | case _ => // warn: null only + | ^ + | Unreachable case except for null (if this is intentional, consider writing case null => instead). +-- [E121] Pattern Match Warning: tests/explicit-nulls/warn/i21577.scala:12:9 ------------------------------------------- +12 | case _ => // warn: null only + | ^ + | Unreachable case except for null (if this is intentional, consider writing case null => instead). +-- [E121] Pattern Match Warning: tests/explicit-nulls/warn/i21577.scala:16:7 ------------------------------------------- +16 | case _ => // warn: null only + | ^ + | Unreachable case except for null (if this is intentional, consider writing case null => instead). +-- [E030] Match case Unreachable Warning: tests/explicit-nulls/warn/i21577.scala:20:7 ---------------------------------- +20 | case _ => // warn: unreachable + | ^ + | Unreachable case +-- [E029] Pattern Match Exhaustivity Warning: tests/explicit-nulls/warn/i21577.scala:29:27 ----------------------------- +29 |def f7(s: String | Null) = s match // warn: not exhuastive + | ^ + | match may not be exhaustive. + | + | It would fail on pattern case: _: Null + | + | longer explanation available when compiling with `-explain` +-- [E029] Pattern Match Exhaustivity Warning: tests/explicit-nulls/warn/i21577.scala:36:33 ----------------------------- +36 |def f9(s: String | Int | Null) = s match // warn: not exhuastive + | ^ + | match may not be exhaustive. 
+ | + | It would fail on pattern case: _: Int + | + | longer explanation available when compiling with `-explain` diff --git a/tests/explicit-nulls/warn/i21577.scala b/tests/explicit-nulls/warn/i21577.scala new file mode 100644 index 000000000000..1bba8f4da01f --- /dev/null +++ b/tests/explicit-nulls/warn/i21577.scala @@ -0,0 +1,38 @@ +def f(s: String) = + val s2 = s.trim() + s2 match + case s3: String => + case _ => // warn: null only + + +def f2(s: String | Null) = + val s2 = s.nn.trim() + s2 match + case s3: String => + case _ => // warn: null only + +def f3(s: String | Null) = s match + case s2: String => + case _ => // warn: null only + +def f5(s: String) = s match + case _: String => + case _ => // warn: unreachable + +def f6(s: String) = s.trim() match + case _: String => + case null => + +def f61(s: String) = s.trim() match + case _: String => + +def f7(s: String | Null) = s match // warn: not exhuastive + case _: String => + +def f8(s: String | Null) = s match + case _: String => + case null => + +def f9(s: String | Int | Null) = s match // warn: not exhuastive + case _: String => + case null => \ No newline at end of file diff --git a/tests/explicit-nulls/warn/interop.check b/tests/explicit-nulls/warn/interop.check new file mode 100644 index 000000000000..0afc1dc0a3cb --- /dev/null +++ b/tests/explicit-nulls/warn/interop.check @@ -0,0 +1,8 @@ +-- [E121] Pattern Match Warning: tests/explicit-nulls/warn/interop/S.scala:8:11 ---------------------------------------- +8 | case _ => // warn + | ^ + | Unreachable case except for null (if this is intentional, consider writing case null => instead). +-- [E121] Pattern Match Warning: tests/explicit-nulls/warn/interop/S.scala:9:9 ----------------------------------------- +9 | case _ => println(2) // warn + | ^ + | Unreachable case except for null (if this is intentional, consider writing case null => instead). diff --git a/tests/explicit-nulls/warn/interop/J.java b/tests/explicit-nulls/warn/interop/J.java new file mode 100644 index 000000000000..f81cf685b9a9 --- /dev/null +++ b/tests/explicit-nulls/warn/interop/J.java @@ -0,0 +1,6 @@ +import java.util.ArrayList; + +class J { + ArrayList> foo(String x) { return null; } + static String fooStatic(String x) { return null; } +} diff --git a/tests/explicit-nulls/warn/interop/S.scala b/tests/explicit-nulls/warn/interop/S.scala new file mode 100644 index 000000000000..57beebe4eb76 --- /dev/null +++ b/tests/explicit-nulls/warn/interop/S.scala @@ -0,0 +1,10 @@ +import java.util.ArrayList +def f() = + val j = new J() + val s2 = j.foo(null) + s2 match + case s3: ArrayList[ArrayList[String]] => s3.get(0) match + case _: ArrayList[_] => + case _ => // warn + case _ => println(2) // warn + diff --git a/tests/init-global/neg/TypeCast.scala b/tests/init-global/neg/TypeCast.scala deleted file mode 100644 index 55447e9df4e2..000000000000 --- a/tests/init-global/neg/TypeCast.scala +++ /dev/null @@ -1,18 +0,0 @@ -object A { - val f: Int = 10 - def m() = f -} -object B { - val f: Int = g() - def g(): Int = f // error -} -object C { - val a: A.type | B.type = if ??? 
then A else B - def cast[T](a: Any): T = a.asInstanceOf[T] - val c: A.type = cast[A.type](a) // abstraction for c is {A, B} - val d = c.f // treat as c.asInstanceOf[owner of f].f - val e = c.m() // treat as c.asInstanceOf[owner of f].m() - val c2: B.type = cast[B.type](a) - val g = c2.f // no error here -} - diff --git a/tests/init-global/pos/cache-constructor.scala b/tests/init-global/pos/cache-constructor.scala new file mode 100644 index 000000000000..87769fd5d78a --- /dev/null +++ b/tests/init-global/pos/cache-constructor.scala @@ -0,0 +1,8 @@ +class Bar: + var f: Int = 0 + +object A: + val b1 = new Bar() + val b2 = new Bar() + val b3 = new Bar() + b3.f = 1 diff --git a/tests/init-global/pos/i18629.scala b/tests/init-global/pos/i18629.scala index f97c21ee918d..03f1f5d5cda4 100644 --- a/tests/init-global/pos/i18629.scala +++ b/tests/init-global/pos/i18629.scala @@ -1,6 +1,6 @@ object Foo { val bar = List() match { case List() => ??? - case _ => ??? + case null => ??? } } diff --git a/tests/init-global/pos/match-complete.scala b/tests/init-global/pos/match-complete.scala new file mode 100644 index 000000000000..eb0e4481f59e --- /dev/null +++ b/tests/init-global/pos/match-complete.scala @@ -0,0 +1,118 @@ +object Matcher { + // Chained Match + val chained_match_xs: List[Any] = List(1, 2, 3) + val chained_match_x = chained_match_xs match { + case Nil => "empty" + case _ => "nonempty" + } match { + case "empty" => 0 + case "nonempty" => 1 + } + println(chained_match_x) + + // Vararg Splices + val vararg_arr = Array(0, 1, 2, 3) + val vararg_lst = List(vararg_arr*) // vararg splice argument + // Throws an exception? + val vararg_splice = vararg_lst match + case List(0, 1, xs*) => 1 // binds xs to Seq(2, 3) + case List(1, _*) => 0 // wildcard pattern + case _ => 2 + println(vararg_splice) + println(vararg_lst) + + // Pattern Definitions + val patter_def_xs: List[Any] = List(1, 2, 3) + val (patter_def_x: Any) :: _ = patter_def_xs : @unchecked + println(patter_def_x) + + val patter_def_pair = (1, true) + val (patter_def_a, patter_def_b) = patter_def_pair + println(patter_def_a) + + val elems: List[(Int, Int)] = List((1, 2), (3, 4), (5, 6)) + + for ((x,y) <- elems) do println(x) + + def main(args: Array[String]) = { + // println(chained_match_x) + println(vararg_splice) + // println(patter_def_x) + // println( + } +} + + +// Patter Matching Using Extractors + +// Option Extractors +case class Person(name: String, age: Int) +object Person { + def unapply(person: Person): Option[(String, Int)] = Some((person.name, person.age)) +} + +object OptionMatcher { + val person = Person("Alice", 25) + + val result = person match { + case Person(name, age) => s"Name: $name, Age: $age" + case _ => "Not a person" + } + println(result) +} + + + +// Boolean Extractors +object Adult { + def unapply(person: Person): Boolean = person.age >= 18 +} + +object BooleanMatcher { + val person = Person("Charlie", 17) + + val adultResult = person match { + case Adult() => s"${person.name} is an adult" + case _ => s"${person.name} is not an adult" + } + + println(adultResult) +} + + + +// Variadic Extractors +// Add cases for exceptions +// +// Adding some warning test cases +// - + +object VariadicExtractor { + // Define an unapply method that takes a List and returns an Option of Seq + def unapplySeq[A](list: List[A]): Option[Seq[A]] = Some(list) +} + +object PatternMatchExample extends App { + def describeList(list: List[Int]): String = list match { + case VariadicExtractor(1, 2, rest @ _*) => + s"Starts with 1, 2 followed 
by: ${rest.mkString(", ")}" + case VariadicExtractor(1, rest @ _*) => + s"Starts with 1 followed by: ${rest.mkString(", ")}" + case VariadicExtractor(first, second, rest @ _*) => + s"Starts with $first, $second followed by: ${rest.mkString(", ")}" + case VariadicExtractor(single) => + s"Only one element: $single" + case VariadicExtractor() => + "Empty list" + case _ => + "Unknown pattern" + } + + // Test cases + println(describeList(List(1, 2, 3, 4, 5))) // Output: Starts with 1, 2 followed by: 3, 4, 5 + println(describeList(List(1, 3, 4, 5))) // Output: Starts with 1 followed by: 3, 4, 5 + println(describeList(List(2, 3, 4, 5))) // Output: Starts with 2, 3 followed by: 4, 5 + println(describeList(List(1))) // Output: Only one element: 1 + println(describeList(List())) // Output: Empty list +} + diff --git a/tests/init-global/pos/scodec-bits.scala b/tests/init-global/pos/scodec-bits.scala new file mode 100644 index 000000000000..97a4a793a4a6 --- /dev/null +++ b/tests/init-global/pos/scodec-bits.scala @@ -0,0 +1,17 @@ +abstract class A { + def a: Long +} + +object O { + case class B() extends A { + def a = 5L + } + case class C(a2: A) extends A { + var c: Long = a2.a + def a = c + } + def f(a: A): A = C(f(a)) + def g(): A = f(B()) + + val x = g() +} \ No newline at end of file diff --git a/tests/init-global/warn/Color.scala b/tests/init-global/warn/Color.scala new file mode 100644 index 000000000000..59554c905cd0 --- /dev/null +++ b/tests/init-global/warn/Color.scala @@ -0,0 +1,28 @@ +enum Color: + case None, White, Black + +enum Player: + case Black, White + + // Explanation: See the desugaring below + val color: Color = + if this == Player.Black // warn + then Color.Black + else Color.White + +// From the desugaring of Player, we can see the field `Player.Black` is not yet +// initialized during evaluation of the first `new Player`: +// +// class Player: +// val color: Color = +// if this == Player.Black ... +// +// object Player: +// val Black: Player = new Player // <--- problem +// val White: Player = new Player +// +// +// The complex desugaring makes it difficult to see the initialization +// semantics and it is prone to make such hard-to-spot mistakes. +// +// Note: The desugaring above is simplified for presentation. diff --git a/tests/init-global/warn/ScalaCheck.check b/tests/init-global/warn/ScalaCheck.check new file mode 100644 index 000000000000..32fad69cfc57 --- /dev/null +++ b/tests/init-global/warn/ScalaCheck.check @@ -0,0 +1,10 @@ +-- Warning: tests/init-global/warn/ScalaCheck.scala:16:9 --------------------------------------------------------------- +16 | object OptMinSuccess extends IntOpt: // warn + | ^ + | Cyclic initialization: object OptMinSuccess -> object FirstParser -> object OptMinSuccess. 
Calling trace: + | ├── object OptMinSuccess extends IntOpt: // warn [ ScalaCheck.scala:16 ] + | │ ^ + | ├── object FirstParser extends CmdLineParser: [ ScalaCheck.scala:15 ] + | │ ^ + | └── val opts = Some(OptMinSuccess) [ ScalaCheck.scala:21 ] + | ^^^^^^^^^^^^^ diff --git a/tests/init-global/warn/ScalaCheck.scala b/tests/init-global/warn/ScalaCheck.scala new file mode 100644 index 000000000000..34b248bcfd68 --- /dev/null +++ b/tests/init-global/warn/ScalaCheck.scala @@ -0,0 +1,22 @@ +trait CmdLineParser: + outer => + + val a: String + + trait Opt[+T]: + val default: T + val names: Set[String] + val help: String + + trait IntOpt extends Opt[Int]: + println("outer = " + outer) + println("outer.a = " + outer.a) + +object FirstParser extends CmdLineParser: + object OptMinSuccess extends IntOpt: // warn + val default = 100 + val names = Set("bla") + val help = "bla" + + val opts = Some(OptMinSuccess) + val a = "FirstParser" diff --git a/tests/init-global/warn/cyclic-object.scala b/tests/init-global/warn/cyclic-object.scala new file mode 100644 index 000000000000..e997d3259877 --- /dev/null +++ b/tests/init-global/warn/cyclic-object.scala @@ -0,0 +1,9 @@ +package cyclicObject + +object O1 { // warn + val o = cyclicObject.O2 +} + +object O2 { + val o = cyclicObject.O1 +} diff --git a/tests/init/crash/i6914.scala b/tests/init/crash/i6914.scala index 723b2ef94e0b..b5f848626772 100644 --- a/tests/init/crash/i6914.scala +++ b/tests/init/crash/i6914.scala @@ -5,7 +5,7 @@ object test1 { class ToExpr[T](using Liftable[T]) extends Conversion[T, Expr[T]] { def apply(x: T): Expr[T] = ??? } - given toExprFun[T](using Liftable[T]): ToExpr[T] with {} + given toExprFun: [T] => Liftable[T] => ToExpr[T]() given Liftable[Int] = ??? given Liftable[String] = ??? @@ -16,14 +16,12 @@ object test1 { def a: Expr[String] = "abc" } -object test2 { +object test2: - given autoToExpr[T](using Liftable[T]): Conversion[T, Expr[T]] with { + given autoToExpr: [T] => Liftable[T] => Conversion[T, Expr[T]]: def apply(x: T): Expr[T] = ??? - } given Liftable[Int] = ??? given Liftable[String] = ??? 
def a: Expr[String] = "abc" -} \ No newline at end of file diff --git a/tests/init/crash/i7821.scala b/tests/init/crash/i7821.scala index 1574801826bc..f99037573c75 100644 --- a/tests/init/crash/i7821.scala +++ b/tests/init/crash/i7821.scala @@ -3,9 +3,8 @@ object XObject { def anX: X = 5 - given ops: Object with { + given ops: Object: extension (x: X) def + (y: X): X = x + y - } } object MyXObject { @@ -13,9 +12,8 @@ object MyXObject { def anX: MyX = XObject.anX - given ops: Object with { + given ops: Object: extension (x: MyX) def + (y: MyX): MyX = x + y // error: warring: Infinite recursive call - } } object Main extends App { diff --git a/tests/init/warn/type-filter.scala b/tests/init/warn/type-filter.scala new file mode 100644 index 000000000000..1d25454992fe --- /dev/null +++ b/tests/init/warn/type-filter.scala @@ -0,0 +1,15 @@ +class A(o: O): + var a = 20 + +class B(o: O): + var b = 20 + +class O: + val o: A | B = new A(this) + if o.isInstanceOf[A] then + o.asInstanceOf[A].a += 1 + else + o.asInstanceOf[B].b += 1 // o.asInstanceOf[B] is treated as bottom + + // prevent early promotion + val x = 10 diff --git a/tests/init/warn/type-filter2.scala b/tests/init/warn/type-filter2.scala new file mode 100644 index 000000000000..cc9a8f8b00d0 --- /dev/null +++ b/tests/init/warn/type-filter2.scala @@ -0,0 +1,19 @@ +class A(c: C): + val f: Int = 10 + def m() = f + +class B(c: C): + val f: Int = g() // warn + def g(): Int = f + +class C(x: Int): + val a: A | B = if x > 0 then new A(this) else new B(this) + + def cast[T](a: Any): T = a.asInstanceOf[T] + + val c: A = a.asInstanceOf[A] // abstraction for c is {A, B} + val d = c.f // treat as c.asInstanceOf[owner of f].f + val e = c.m() // treat as c.asInstanceOf[owner of m].m() + val c2: B = a.asInstanceOf[B] + val g = c2.f // no error here + diff --git a/tests/neg-custom-args/captures/box-adapt-cases.scala b/tests/neg-custom-args/captures/box-adapt-cases.scala index 3dac26a98318..681d699842ed 100644 --- a/tests/neg-custom-args/captures/box-adapt-cases.scala +++ b/tests/neg-custom-args/captures/box-adapt-cases.scala @@ -4,7 +4,7 @@ def test1(): Unit = { type Id[X] = [T] -> (op: X => T) -> T val x: Id[Cap^] = ??? 
- x(cap => cap.use()) // was error, now OK + x(cap => cap.use()) // error, OK under sealed } def test2(io: Cap^): Unit = { diff --git a/tests/neg-custom-args/captures/box-unsoundness.scala b/tests/neg-custom-args/captures/box-unsoundness.scala index d1331f16df1f..8c1c22bc7fa6 100644 --- a/tests/neg-custom-args/captures/box-unsoundness.scala +++ b/tests/neg-custom-args/captures/box-unsoundness.scala @@ -1,4 +1,3 @@ -//@annotation.capability class CanIO { def use(): Unit = () } def use[X](x: X): (op: X -> Unit) -> Unit = op => op(x) def test(io: CanIO^): Unit = diff --git a/tests/neg-custom-args/captures/byname.check b/tests/neg-custom-args/captures/byname.check index e06a3a1f8268..c9530f6aad50 100644 --- a/tests/neg-custom-args/captures/byname.check +++ b/tests/neg-custom-args/captures/byname.check @@ -3,16 +3,21 @@ | ^^^ | reference (cap2 : Cap^) is not included in the allowed capture set {cap1} | of an enclosing function literal with expected type () ?->{cap1} I +-- Error: tests/neg-custom-args/captures/byname.scala:22:12 ------------------------------------------------------------ +22 | h2(() => g())() // error + | ^^^ + | reference (cap2 : Cap^) is not included in the allowed capture set {cap1} + | of an enclosing function literal with expected type () ->{cap1} I -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/byname.scala:4:2 ----------------------------------------- 4 | def f() = if cap1 == cap1 then g else g // error | ^ - | Found: (x$0: Int) ->{cap2} Int - | Required: (x$0: Int) -> Int + | Found: ((x$0: Int) ->{cap2} Int)^{} + | Required: Int -> Int | | Note that the expected type Int ->{} Int | is the previously inferred result type of method test | which is also the type seen in separately compiled sources. - | The new inferred type (x$0: Int) ->{cap2} Int + | The new inferred type ((x$0: Int) ->{cap2} Int)^{} | must conform to this type. 
5 | def g(x: Int) = if cap2 == cap2 then 1 else x 6 | def g2(x: Int) = if cap1 == cap1 then 1 else x diff --git a/tests/neg-custom-args/captures/byname.scala b/tests/neg-custom-args/captures/byname.scala index 279122f54735..75ad527dbd2d 100644 --- a/tests/neg-custom-args/captures/byname.scala +++ b/tests/neg-custom-args/captures/byname.scala @@ -1,4 +1,4 @@ -@annotation.capability class Cap +class Cap extends caps.Capability def test(cap1: Cap, cap2: Cap) = def f() = if cap1 == cap1 then g else g // error @@ -17,6 +17,9 @@ def test2(cap1: Cap, cap2: Cap): I^{cap1} = def h(x: ->{cap1} I) = x // ok h(f()) // OK h(g()) // error + def h2(x: () ->{cap1} I) = x // ok + h2(() => f()) // OK + h2(() => g())() // error diff --git a/tests/neg-custom-args/captures/capt-box-env.scala b/tests/neg-custom-args/captures/capt-box-env.scala index 605b446d5262..bfe1874d073b 100644 --- a/tests/neg-custom-args/captures/capt-box-env.scala +++ b/tests/neg-custom-args/captures/capt-box-env.scala @@ -1,4 +1,4 @@ -@annotation.capability class Cap +class Cap extends caps.Capability class Pair[+A, +B](x: A, y: B): def fst: A = x diff --git a/tests/neg-custom-args/captures/capt-box.scala b/tests/neg-custom-args/captures/capt-box.scala index 634470704fc5..291882bed36d 100644 --- a/tests/neg-custom-args/captures/capt-box.scala +++ b/tests/neg-custom-args/captures/capt-box.scala @@ -1,4 +1,4 @@ -@annotation.capability class Cap +class Cap extends caps.Capability def test(x: Cap) = diff --git a/tests/neg-custom-args/captures/capt-test.scala b/tests/neg-custom-args/captures/capt-test.scala index 80ee1aba84e1..b202a14d0940 100644 --- a/tests/neg-custom-args/captures/capt-test.scala +++ b/tests/neg-custom-args/captures/capt-test.scala @@ -20,8 +20,8 @@ def handle[E <: Exception, R <: Top](op: (CT[E] @retains(caps.cap)) => R)(handl catch case ex: E => handler(ex) def test: Unit = - val b = handle[Exception, () => Nothing] { // error + val b = handle[Exception, () => Nothing] { (x: CanThrow[Exception]) => () => raise(new Exception)(using x) - } { + } { // error (ex: Exception) => ??? } diff --git a/tests/neg-custom-args/captures/capt-wf2.scala b/tests/neg-custom-args/captures/capt-wf2.scala index 6c65e0dc77f7..8bb04a230fdd 100644 --- a/tests/neg-custom-args/captures/capt-wf2.scala +++ b/tests/neg-custom-args/captures/capt-wf2.scala @@ -1,4 +1,4 @@ -@annotation.capability class C +class C extends caps.Capability def test(c: C) = var x: Any^{c} = ??? 
diff --git a/tests/neg-custom-args/captures/capt1.check b/tests/neg-custom-args/captures/capt1.check index 74b9db728983..3d0ed538b2e5 100644 --- a/tests/neg-custom-args/captures/capt1.check +++ b/tests/neg-custom-args/captures/capt1.check @@ -1,54 +1,54 @@ --- Error: tests/neg-custom-args/captures/capt1.scala:4:11 -------------------------------------------------------------- -4 | () => if x == null then y else y // error +-- Error: tests/neg-custom-args/captures/capt1.scala:6:11 -------------------------------------------------------------- +6 | () => if x == null then y else y // error | ^ | (x : C^) cannot be referenced here; it is not included in the allowed capture set {} | of an enclosing function literal with expected type () -> C --- Error: tests/neg-custom-args/captures/capt1.scala:7:11 -------------------------------------------------------------- -7 | () => if x == null then y else y // error +-- Error: tests/neg-custom-args/captures/capt1.scala:9:11 -------------------------------------------------------------- +9 | () => if x == null then y else y // error | ^ | (x : C^) cannot be referenced here; it is not included in the allowed capture set {} | of an enclosing function literal with expected type Matchable --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:14:2 ----------------------------------------- -14 | def f(y: Int) = if x == null then y else y // error +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:16:2 ----------------------------------------- +16 | def f(y: Int) = if x == null then y else y // error | ^ | Found: (y: Int) ->{x} Int | Required: Matchable -15 | f +17 | f | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:21:2 ----------------------------------------- -21 | class F(y: Int) extends A: // error +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:23:2 ----------------------------------------- +23 | class F(y: Int) extends A: // error | ^ | Found: A^{x} | Required: A -22 | def m() = if x == null then y else y -23 | F(22) +24 | def m() = if x == null then y else y +25 | F(22) | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:26:2 ----------------------------------------- -26 | new A: // error +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/capt1.scala:28:2 ----------------------------------------- +28 | new A: // error | ^ | Found: A^{x} | Required: A -27 | def m() = if x == null then y else y +29 | def m() = if x == null then y else y | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/capt1.scala:32:12 ------------------------------------------------------------- -32 | val z2 = h[() -> Cap](() => x) // error // error +-- Error: tests/neg-custom-args/captures/capt1.scala:34:12 ------------------------------------------------------------- +34 | val z2 = h[() -> Cap](() => x) // error // error | ^^^^^^^^^^^^ | Sealed type variable X cannot be instantiated to () -> box C^ since | the part box C^ of that type captures the root capability `cap`. | This is often caused by a local capability in an argument of method h | leaking as part of its result. 
--- Error: tests/neg-custom-args/captures/capt1.scala:32:30 ------------------------------------------------------------- -32 | val z2 = h[() -> Cap](() => x) // error // error +-- Error: tests/neg-custom-args/captures/capt1.scala:34:30 ------------------------------------------------------------- +34 | val z2 = h[() -> Cap](() => x) // error // error | ^ | (x : C^) cannot be referenced here; it is not included in the allowed capture set {} | of an enclosing function literal with expected type () -> box C^ --- Error: tests/neg-custom-args/captures/capt1.scala:34:12 ------------------------------------------------------------- -34 | val z3 = h[(() -> Cap) @retains(x)](() => x)(() => C()) // error +-- Error: tests/neg-custom-args/captures/capt1.scala:36:12 ------------------------------------------------------------- +36 | val z3 = h[(() -> Cap) @retains(x)](() => x)(() => C()) // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^ | Sealed type variable X cannot be instantiated to box () ->{x} Cap since - | the part C^ of that type captures the root capability `cap`. + | the part Cap of that type captures the root capability `cap`. | This is often caused by a local capability in an argument of method h | leaking as part of its result. diff --git a/tests/neg-custom-args/captures/capt1.scala b/tests/neg-custom-args/captures/capt1.scala index 48c4d889bf8d..cad0bad4ba56 100644 --- a/tests/neg-custom-args/captures/capt1.scala +++ b/tests/neg-custom-args/captures/capt1.scala @@ -1,3 +1,5 @@ +//> using options -source 3.4 +// (to make sure we use the sealed policy) import annotation.retains class C def f(x: C @retains(caps.cap), y: C): () -> C = diff --git a/tests/neg-custom-args/captures/caseclass/Test_2.scala b/tests/neg-custom-args/captures/caseclass/Test_2.scala index bffc0a295bdc..e54ab1774202 100644 --- a/tests/neg-custom-args/captures/caseclass/Test_2.scala +++ b/tests/neg-custom-args/captures/caseclass/Test_2.scala @@ -1,4 +1,4 @@ -@annotation.capability class C +class C extends caps.Capability def test(c: C) = val pure: () -> Unit = () => () val impure: () => Unit = pure @@ -22,4 +22,4 @@ def test(c: C) = val y4 = y3 match case Ref(xx) => xx - val y4c: () ->{x3} Unit = y4 + val y4c: () ->{y3} Unit = y4 diff --git a/tests/neg-custom-args/captures/cc-this.scala b/tests/neg-custom-args/captures/cc-this.scala index 4c05be702c51..e4336ed457af 100644 --- a/tests/neg-custom-args/captures/cc-this.scala +++ b/tests/neg-custom-args/captures/cc-this.scala @@ -1,4 +1,4 @@ -@annotation.capability class Cap +class Cap extends caps.Capability def eff(using Cap): Unit = () diff --git a/tests/neg-custom-args/captures/cc-this2.check b/tests/neg-custom-args/captures/cc-this2.check index bd9a1085d262..6cb3010d6174 100644 --- a/tests/neg-custom-args/captures/cc-this2.check +++ b/tests/neg-custom-args/captures/cc-this2.check @@ -2,7 +2,7 @@ -- Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:3:8 -------------------------------------------------------- 3 | this: D^ => // error | ^^ - |reference (caps.cap : caps.Cap) captured by this self type is not included in the allowed capture set {} of pure base class class C + |reference (caps.cap : caps.Capability) captured by this self type is not included in the allowed capture set {} of pure base class class C -- [E058] Type Mismatch Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:2:6 ----------------------------------- 2 |class D extends C: // error | ^ diff --git a/tests/neg-custom-args/captures/cc-this3.scala b/tests/neg-custom-args/captures/cc-this3.scala index 
25af19dd6c4a..0a36cde8173b 100644 --- a/tests/neg-custom-args/captures/cc-this3.scala +++ b/tests/neg-custom-args/captures/cc-this3.scala @@ -1,4 +1,4 @@ -@annotation.capability class Cap +class Cap extends caps.Capability def eff(using Cap): Unit = () diff --git a/tests/neg-custom-args/captures/cc-this5.scala b/tests/neg-custom-args/captures/cc-this5.scala index e84c2a41f55c..4c9a8a706670 100644 --- a/tests/neg-custom-args/captures/cc-this5.scala +++ b/tests/neg-custom-args/captures/cc-this5.scala @@ -1,7 +1,7 @@ class C: val x: C = this -@annotation.capability class Cap +class Cap extends caps.Capability def foo(c: Cap) = object D extends C: // error @@ -17,5 +17,5 @@ def test(c: Cap) = def test2(c: Cap) = class A: - def f = println(c) + def f = println(c) val x: A = this // error diff --git a/tests/neg-custom-args/captures/class-constr.scala b/tests/neg-custom-args/captures/class-constr.scala index 9afb6972ccfa..619fa9fa0341 100644 --- a/tests/neg-custom-args/captures/class-constr.scala +++ b/tests/neg-custom-args/captures/class-constr.scala @@ -1,6 +1,6 @@ import annotation.{capability, constructorOnly} -@capability class Cap +class Cap extends caps.Capability class C(x: Cap, @constructorOnly y: Cap) diff --git a/tests/neg-custom-args/captures/class-contra.check b/tests/neg-custom-args/captures/class-contra.check index 6d4c89f872ad..808118bd1795 100644 --- a/tests/neg-custom-args/captures/class-contra.check +++ b/tests/neg-custom-args/captures/class-contra.check @@ -1,7 +1,7 @@ --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/class-contra.scala:12:39 --------------------------------- -12 | def fun(x: K{val f: T^{a}}) = x.setf(a) // error - | ^ - | Found: (a : T^{x, y}) - | Required: T +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/class-contra.scala:12:40 --------------------------------- +12 | def fun1(k: K{val f: T^{a}}) = k.setf(a) // error + | ^ + | Found: (a : T^{x, y}) + | Required: T^{k.f} | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/class-contra.scala b/tests/neg-custom-args/captures/class-contra.scala index 210fd4e331f1..8ef8e7485a18 100644 --- a/tests/neg-custom-args/captures/class-contra.scala +++ b/tests/neg-custom-args/captures/class-contra.scala @@ -9,5 +9,6 @@ class T def test(x: Cap, y: Cap) = val a: T^{x, y} = ??? 
- def fun(x: K{val f: T^{a}}) = x.setf(a) // error + def fun1(k: K{val f: T^{a}}) = k.setf(a) // error + def fun2(k: K{val f: a.type}) = k.setf(a) () \ No newline at end of file diff --git a/tests/neg-custom-args/captures/effect-swaps-explicit.check b/tests/neg-custom-args/captures/effect-swaps-explicit.check new file mode 100644 index 000000000000..264dfa663d39 --- /dev/null +++ b/tests/neg-custom-args/captures/effect-swaps-explicit.check @@ -0,0 +1,29 @@ +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/effect-swaps-explicit.scala:64:8 ------------------------- +63 | Result: +64 | Future: // error, type mismatch + | ^ + | Found: Result.Ok[box Future[box T^?]^{fr, contextual$1}] + | Required: Result[Future[T], Nothing] +65 | fr.await.ok + |-------------------------------------------------------------------------------------------------------------------- + |Inline stack trace + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from effect-swaps-explicit.scala:41 +41 | boundary(Ok(body)) + | ^^^^^^^^ + -------------------------------------------------------------------------------------------------------------------- + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/effect-swaps-explicit.scala:74:10 ------------------------ +74 | Future: fut ?=> // error: type mismatch + | ^ + | Found: Future[box T^?]^{fr, lbl} + | Required: Future[box T^?]^? +75 | fr.await.ok + | + | longer explanation available when compiling with `-explain` +-- Error: tests/neg-custom-args/captures/effect-swaps-explicit.scala:68:15 --------------------------------------------- +68 | Result.make: //lbl ?=> // error, escaping label from Result + | ^^^^^^^^^^^ + |local reference contextual$9 from (using contextual$9: boundary.Label[Result[box Future[box T^?]^{fr, contextual$9, contextual$9}, box E^?]]^): + | box Future[box T^?]^{fr, contextual$9, contextual$9} leaks into outer capture set of type parameter T of method make in object Result diff --git a/tests/neg-custom-args/captures/effect-swaps-explicit.scala b/tests/neg-custom-args/captures/effect-swaps-explicit.scala new file mode 100644 index 000000000000..7474e1711b34 --- /dev/null +++ b/tests/neg-custom-args/captures/effect-swaps-explicit.scala @@ -0,0 +1,76 @@ +//> using options -source 3.4 +// (to make sure we use the sealed policy) +object boundary: + + final class Label[-T] // extends caps.Capability + + /** Abort current computation and instead return `value` as the value of + * the enclosing `boundary` call that created `label`. + */ + def break[T](value: T)(using label: Label[T]^): Nothing = ??? + + def apply[T](body: Label[T]^ ?=> T): T = ??? +end boundary + +import boundary.{Label, break} + +trait Async extends caps.Capability +object Async: + def blocking[T](body: Async ?=> T): T = ??? + +class Future[+T]: + this: Future[T]^ => + def await(using Async): T = ??? +object Future: + def apply[T](op: Async ?=> T)(using Async): Future[T]^{op} = ??? 
+ +enum Result[+T, +E]: + case Ok[+T](value: T) extends Result[T, Nothing] + case Err[+E](error: E) extends Result[Nothing, E] + + +object Result: + extension [T, E](r: Result[T, E]^)(using Label[Err[E]]^) + + /** `_.ok` propagates Err to current Label */ + def ok: T = r match + case Ok(value) => value + case Err(value) => break[Err[E]](Err(value)) + + transparent inline def apply[T, E](inline body: Label[Result[T, E]]^ ?=> T): Result[T, E] = + boundary(Ok(body)) + + // same as apply, but not an inline method + def make[T, E](body: Label[Result[T, E]]^ ?=> T): Result[T, E] = + boundary(Ok(body)) + +end Result + +def test[T, E](using Async) = + import Result.* + Async.blocking: async ?=> + val good1: List[Future[Result[T, E]]] => Future[Result[List[T], E]] = frs => + Future: + Result: + frs.map(_.await.ok) // OK + + val good2: Result[Future[T], E] => Future[Result[T, E]] = rf => + Future: + Result: + rf.ok.await // OK, Future argument has type Result[T] + + def fail3(fr: Future[Result[T, E]]^) = + Result: + Future: // error, type mismatch + fr.await.ok + + def fail4[T, E](fr: Future[Result[T, E]]^) = + Result.make: //lbl ?=> // error, escaping label from Result + Future: fut ?=> + fr.await.ok + + def fail5[T, E](fr: Future[Result[T, E]]^) = + Result.make[Future[T], E]: lbl ?=> + Future: fut ?=> // error: type mismatch + fr.await.ok + diff --git a/tests/neg-custom-args/captures/effect-swaps.check b/tests/neg-custom-args/captures/effect-swaps.check index bda3509645d1..ef5a95d333bf 100644 --- a/tests/neg-custom-args/captures/effect-swaps.check +++ b/tests/neg-custom-args/captures/effect-swaps.check @@ -1,21 +1,24 @@ --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/effect-swaps.scala:64:8 ---------------------------------- -63 | Result: -64 | Future: // error, escaping label from Result +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/effect-swaps.scala:62:8 ---------------------------------- +61 | Result: +62 | Future: // error, type mismatch | ^ | Found: Result.Ok[box Future[box T^?]^{fr, contextual$1}] | Required: Result[Future[T], Nothing] -65 | fr.await.ok +63 | fr.await.ok |-------------------------------------------------------------------------------------------------------------------- |Inline stack trace |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from effect-swaps.scala:41 -41 | boundary(Ok(body)) + |This location contains code that was inlined from effect-swaps.scala:39 +39 | boundary(Ok(body)) | ^^^^^^^^ -------------------------------------------------------------------------------------------------------------------- | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/effect-swaps.scala:68:15 ------------------------------------------------------ -68 | Result.make: //lbl ?=> // error, escaping label from Result - | ^^^^^^^^^^^ - |local reference contextual$9 from (using contextual$9: boundary.Label[Result[box Future[box T^?]^{fr, contextual$9}, box E^?]]^): - | box Future[box T^?]^{fr, contextual$9} leaks into outer capture set of type parameter T of method make in object Result +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/effect-swaps.scala:72:10 --------------------------------- +72 | Future: fut ?=> // error: type mismatch + | ^ + | Found: Future[box T^?]^{fr, lbl} + | Required: Future[box T^?]^? 
+73 | fr.await.ok + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/effect-swaps.scala b/tests/neg-custom-args/captures/effect-swaps.scala index 1d72077bb8da..4bafd6421af3 100644 --- a/tests/neg-custom-args/captures/effect-swaps.scala +++ b/tests/neg-custom-args/captures/effect-swaps.scala @@ -1,8 +1,6 @@ -import annotation.capability - object boundary: - @capability final class Label[-T] + final class Label[-T] extends caps.Capability /** Abort current computation and instead return `value` as the value of * the enclosing `boundary` call that created `label`. @@ -14,7 +12,7 @@ end boundary import boundary.{Label, break} -@capability trait Async +trait Async extends caps.Capability object Async: def blocking[T](body: Async ?=> T): T = ??? @@ -61,10 +59,16 @@ def test[T, E](using Async) = def fail3(fr: Future[Result[T, E]]^) = Result: - Future: // error, escaping label from Result + Future: // error, type mismatch fr.await.ok def fail4[T, E](fr: Future[Result[T, E]]^) = - Result.make: //lbl ?=> // error, escaping label from Result + Result.make: // should be errorm but inders Result[Any, Any] Future: fut ?=> fr.await.ok + + def fail5[T, E](fr: Future[Result[T, E]]^) = + Result.make[Future[T], E]: lbl ?=> + Future: fut ?=> // error: type mismatch + fr.await.ok + diff --git a/tests/neg-custom-args/captures/eta.check b/tests/neg-custom-args/captures/eta.check index 91dfdf06d3cd..9850e54a7fdf 100644 --- a/tests/neg-custom-args/captures/eta.check +++ b/tests/neg-custom-args/captures/eta.check @@ -1,7 +1,7 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/eta.scala:4:9 -------------------------------------------- 4 | g // error | ^ - | Found: () ->? A + | Found: (g : () -> A) | Required: () -> Proc^{f} | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/exception-definitions.check b/tests/neg-custom-args/captures/exception-definitions.check index 72b88f252e59..7f915ebd9833 100644 --- a/tests/neg-custom-args/captures/exception-definitions.check +++ b/tests/neg-custom-args/captures/exception-definitions.check @@ -1,7 +1,7 @@ -- Error: tests/neg-custom-args/captures/exception-definitions.scala:3:8 ----------------------------------------------- 3 | self: Err^ => // error | ^^^^ - |reference (caps.cap : caps.Cap) captured by this self type is not included in the allowed capture set {} of pure base class class Throwable + |reference (caps.cap : caps.Capability) captured by this self type is not included in the allowed capture set {} of pure base class class Throwable -- Error: tests/neg-custom-args/captures/exception-definitions.scala:7:12 ---------------------------------------------- 7 | val x = c // error | ^ diff --git a/tests/neg-custom-args/captures/explain-under-approx.check b/tests/neg-custom-args/captures/explain-under-approx.check new file mode 100644 index 000000000000..c186fc6adb11 --- /dev/null +++ b/tests/neg-custom-args/captures/explain-under-approx.check @@ -0,0 +1,14 @@ +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/explain-under-approx.scala:12:10 ------------------------- +12 | col.add(Future(() => 25)) // error + | ^^^^^^^^^^^^^^^^ + | Found: Future[Int]{val a: (async : Async^)}^{async} + | Required: Future[Int]^{col.futs*} + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/explain-under-approx.scala:15:11 ------------------------- +15 | col1.add(Future(() 
=> 25)) // error + | ^^^^^^^^^^^^^^^^ + | Found: Future[Int]{val a: (async : Async^)}^{async} + | Required: Future[Int]^{col1.futs*} + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/explain-under-approx.scala b/tests/neg-custom-args/captures/explain-under-approx.scala new file mode 100644 index 000000000000..816465e4af34 --- /dev/null +++ b/tests/neg-custom-args/captures/explain-under-approx.scala @@ -0,0 +1,17 @@ +trait Async extends caps.Capability + +class Future[+T](x: () => T)(using val a: Async) + +class Collector[T](val futs: Seq[Future[T]^]): + def add(fut: Future[T]^{futs*}) = ??? + +def main() = + given async: Async = ??? + val futs = (1 to 20).map(x => Future(() => x)) + val col = Collector(futs) + col.add(Future(() => 25)) // error + val col1: Collector[Int] { val futs: Seq[Future[Int]^{async}] } + = Collector(futs) + col1.add(Future(() => 25)) // error + + diff --git a/tests/neg-custom-args/captures/extending-cap-classes.check b/tests/neg-custom-args/captures/extending-cap-classes.check new file mode 100644 index 000000000000..0936f48576e5 --- /dev/null +++ b/tests/neg-custom-args/captures/extending-cap-classes.check @@ -0,0 +1,21 @@ +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/extending-cap-classes.scala:7:15 ------------------------- +7 | val x2: C1 = new C2 // error + | ^^^^^^ + | Found: C2^ + | Required: C1 + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/extending-cap-classes.scala:8:15 ------------------------- +8 | val x3: C1 = new C3 // error + | ^^^^^^ + | Found: C3^ + | Required: C1 + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/extending-cap-classes.scala:13:15 ------------------------ +13 | val z2: C1 = y2 // error + | ^^ + | Found: (y2 : C2^) + | Required: C1 + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/extending-cap-classes.scala b/tests/neg-custom-args/captures/extending-cap-classes.scala index 17497e415a1e..6f5a8f48c30a 100644 --- a/tests/neg-custom-args/captures/extending-cap-classes.scala +++ b/tests/neg-custom-args/captures/extending-cap-classes.scala @@ -1,7 +1,5 @@ -import annotation.capability - class C1 -@capability class C2 extends C1 +class C2 extends C1, caps.Capability class C3 extends C2 def test = @@ -9,7 +7,8 @@ def test = val x2: C1 = new C2 // error val x3: C1 = new C3 // error - val y1: C2 = new C2 - val y2: C2 = new C3 - - val z1: C3 = new C3 \ No newline at end of file + val y2: C2 = new C2 + val y3: C3 = new C3 + + val z2: C1 = y2 // error + diff --git a/tests/neg-custom-args/captures/filevar-multi-ios.scala b/tests/neg-custom-args/captures/filevar-multi-ios.scala new file mode 100644 index 000000000000..8ffc8d8e299c --- /dev/null +++ b/tests/neg-custom-args/captures/filevar-multi-ios.scala @@ -0,0 +1,41 @@ +import language.experimental.modularity +import compiletime.uninitialized + +class IO extends caps.Capability + +class File: + def write(x: String): Unit = ??? 
+ +object test1: + + class Service(val io: IO, val io2: IO): + var file: File^{io} = uninitialized + var file2: File^{io2} = uninitialized + def log = file.write("log") + + def withFile[T](io: IO)(op: File^{io} => T): T = + op(new File) + + def test(io3: IO, io4: IO) = + withFile(io3): f => + val o = Service(io3, io4) + o.file = f // error + o.file2 = f // error + o.log + +object test2: + + class Service(tracked val io: IO, tracked val io2: IO): + var file: File^{io} = uninitialized + var file2: File^{io2} = uninitialized + def log = file.write("log") + + def withFile[T](io: IO)(op: File^{io} => T): T = + op(new File) + + def test(io3: IO, io4: IO) = + withFile(io3): f => + val o = Service(io3, io4) + o.file = f + o.file2 = f // error + o.log diff --git a/tests/neg-custom-args/captures/filevar.scala b/tests/neg-custom-args/captures/filevar.scala index 59b8415d6e0f..e54f161ef124 100644 --- a/tests/neg-custom-args/captures/filevar.scala +++ b/tests/neg-custom-args/captures/filevar.scala @@ -5,11 +5,11 @@ class File: def write(x: String): Unit = ??? class Service: - var file: File^ = uninitialized // error - def log = file.write("log") + var file: File^ = uninitialized // OK, was error under sealed + def log = file.write("log") // error, was OK under sealed -def withFile[T](op: (l: caps.Cap) ?-> (f: File^{l}) => T): T = - op(new File) +def withFile[T](op: (l: caps.Capability) ?-> (f: File^{l}) => T): T = + op(using caps.cap)(new File) def test = withFile: f => diff --git a/tests/neg-custom-args/captures/heal-tparam-cs.scala b/tests/neg-custom-args/captures/heal-tparam-cs.scala index 498292166297..fde4b93e196c 100644 --- a/tests/neg-custom-args/captures/heal-tparam-cs.scala +++ b/tests/neg-custom-args/captures/heal-tparam-cs.scala @@ -11,12 +11,12 @@ def main(io: Capp^, net: Capp^): Unit = { } val test2: (c: Capp^) -> () => Unit = - localCap { c => // should work + localCap { c => // error (c1: Capp^) => () => { c1.use() } } val test3: (c: Capp^{io}) -> () ->{io} Unit = - localCap { c => // should work + localCap { c => // error (c1: Capp^{io}) => () => { c1.use() } } diff --git a/tests/neg-custom-args/captures/i15116.check b/tests/neg-custom-args/captures/i15116.check index df05324866e1..0a16af9f6704 100644 --- a/tests/neg-custom-args/captures/i15116.check +++ b/tests/neg-custom-args/captures/i15116.check @@ -18,13 +18,17 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15116.scala:5:13 ---------------------------------------- 5 | val x = Foo(m) // error | ^^^^^^ - | Found: Foo{val m: String^{Baz.this}}^{Baz.this} + | Found: Foo{val m²: (Baz.this.m : String^)}^{Baz.this.m} | Required: Foo | + | where: m is a value in trait Baz + | m² is a value in class Foo + | + | | Note that the expected type Foo | is the previously inferred type of value x | which is also the type seen in separately compiled sources. - | The new inferred type Foo{val m: String^{Baz.this}}^{Baz.this} + | The new inferred type Foo{val m: (Baz.this.m : String^)}^{Baz.this.m} | must conform to this type. 
| | longer explanation available when compiling with `-explain` @@ -48,13 +52,17 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15116.scala:9:13 ---------------------------------------- 9 | val x = Foo(m) // error | ^^^^^^ - | Found: Foo{val m: String^{Baz2.this}}^{Baz2.this} + | Found: Foo{val m²: (Baz2.this.m : String^)}^{Baz2.this.m} | Required: Foo | + | where: m is a value in trait Baz2 + | m² is a value in class Foo + | + | | Note that the expected type Foo | is the previously inferred type of value x | which is also the type seen in separately compiled sources. - | The new inferred type Foo{val m: String^{Baz2.this}}^{Baz2.this} + | The new inferred type Foo{val m: (Baz2.this.m : String^)}^{Baz2.this.m} | must conform to this type. | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/i15749.scala b/tests/neg-custom-args/captures/i15749.scala new file mode 100644 index 000000000000..c5b59042085a --- /dev/null +++ b/tests/neg-custom-args/captures/i15749.scala @@ -0,0 +1,15 @@ +class Unit +object unit extends Unit + +type Top = Any^ + +type LazyVal[T] = Unit => T + +class Foo[T](val x: T) + +// Foo[□ Unit => T] +type BoxedLazyVal[T] = Foo[LazyVal[T]] + +def force[A](v: BoxedLazyVal[A]): A = + // Γ ⊢ v.x : □ {cap} Unit -> A + v.x(unit) // error: (unbox v.x)(unit), was ok under the sealed policy \ No newline at end of file diff --git a/tests/neg-custom-args/captures/i15749a.scala b/tests/neg-custom-args/captures/i15749a.scala index 0158928f4e39..57fca27fae66 100644 --- a/tests/neg-custom-args/captures/i15749a.scala +++ b/tests/neg-custom-args/captures/i15749a.scala @@ -1,4 +1,6 @@ import caps.cap +import caps.unbox + class Unit object u extends Unit @@ -16,7 +18,7 @@ def test = def force[A](thunk: Unit ->{cap} A): A = thunk(u) - def forceWrapper[A](mx: Wrapper[Unit ->{cap} A]): Wrapper[A] = + def forceWrapper[A](@unbox mx: Wrapper[Unit ->{cap} A]): Wrapper[A] = // Γ ⊢ mx: Wrapper[□ {cap} Unit => A] // `force` should be typed as ∀(□ {cap} Unit -> A) A, but it can not strictMap[Unit ->{mx*} A, A](mx)(t => force[A](t)) // error // should work diff --git a/tests/neg-custom-args/captures/i15772.check b/tests/neg-custom-args/captures/i15772.check index cce58da1b93b..58582423b101 100644 --- a/tests/neg-custom-args/captures/i15772.check +++ b/tests/neg-custom-args/captures/i15772.check @@ -25,17 +25,17 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:33:34 --------------------------------------- 33 | val boxed2 : Observe[C]^ = box2(c) // error | ^ - | Found: box C^ - | Required: box C{val arg: C^?}^? + | Found: C^ + | Required: box C{val arg: C^?}^ | - | Note that the universal capability `cap` - | cannot be included in capture set ? 
+ | Note that C^ cannot be box-converted to box C{val arg: C^?}^ + | since at least one of their capture sets contains the root capability `cap` | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i15772.scala:44:2 ---------------------------------------- 44 | x: (() -> Unit) // error | ^ - | Found: () ->{x} Unit + | Found: (x : () ->{filesList, sayHello} Unit) | Required: () -> Unit | | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/i15922.scala b/tests/neg-custom-args/captures/i15922.scala index 974870cd769c..89bf91493fcd 100644 --- a/tests/neg-custom-args/captures/i15922.scala +++ b/tests/neg-custom-args/captures/i15922.scala @@ -1,3 +1,5 @@ +//> using options -source 3.4 +// (to force sealed encapsulation checking) trait Cap { def use(): Int } type Id[X] = [T] -> (op: X => T) -> T def mkId[X](x: X): Id[X] = [T] => (op: X => T) => op(x) diff --git a/tests/neg-custom-args/captures/i15923-cases.scala b/tests/neg-custom-args/captures/i15923-cases.scala new file mode 100644 index 000000000000..83cfa554e8b9 --- /dev/null +++ b/tests/neg-custom-args/captures/i15923-cases.scala @@ -0,0 +1,7 @@ +trait Cap { def use(): Int } +type Id[X] = [T] -> (op: X => T) -> T +def mkId[X](x: X): Id[X] = [T] => (op: X => T) => op(x) + +def foo(x: Id[Cap^]) = { + x(_.use()) // error, was OK under sealed policy +} diff --git a/tests/neg-custom-args/captures/i15923.scala b/tests/neg-custom-args/captures/i15923.scala index 754fd0687037..e71f01996938 100644 --- a/tests/neg-custom-args/captures/i15923.scala +++ b/tests/neg-custom-args/captures/i15923.scala @@ -3,9 +3,9 @@ type Id[X] = [T] -> (op: X => T) -> T def mkId[X](x: X): Id[X] = [T] => (op: X => T) => op(x) def bar() = { - def withCap[X](op: (lcap: caps.Cap) ?-> Cap^{lcap} => X): X = { + def withCap[X](op: (lcap: caps.Capability) ?-> Cap^{lcap} => X): X = { val cap: Cap = new Cap { def use() = { println("cap is used"); 0 } } - val result = op(cap) + val result = op(using caps.cap)(cap) result } diff --git a/tests/neg-custom-args/captures/i16114.scala b/tests/neg-custom-args/captures/i16114.scala index d363bb665dc3..ec04fe9c9827 100644 --- a/tests/neg-custom-args/captures/i16114.scala +++ b/tests/neg-custom-args/captures/i16114.scala @@ -1,3 +1,5 @@ +//> using options -source 3.4 +// (to make sure we use the sealed policy) trait Cap { def use(): Int; def close(): Unit } def mkCap(): Cap^ = ??? diff --git a/tests/neg-custom-args/captures/i16725.scala b/tests/neg-custom-args/captures/i16725.scala index ff06b3be78a7..1accf197c626 100644 --- a/tests/neg-custom-args/captures/i16725.scala +++ b/tests/neg-custom-args/captures/i16725.scala @@ -1,6 +1,5 @@ import language.experimental.captureChecking -@annotation.capability -class IO: +class IO extends caps.Capability: def brewCoffee(): Unit = ??? def usingIO[T](op: IO => T): T = ??? 
@@ -8,7 +7,7 @@ type Wrapper[T] = [R] -> (f: T => R) -> R def mk[T](x: T): Wrapper[T] = [R] => f => f(x) def useWrappedIO(wrapper: Wrapper[IO]): () -> Unit = () => - wrapper: io => // error + wrapper: io => // error io.brewCoffee() def main(): Unit = val escaped = usingIO(io => useWrappedIO(mk(io))) diff --git a/tests/neg-custom-args/captures/i19330-alt2.scala b/tests/neg-custom-args/captures/i19330-alt2.scala index b49dce4b71ef..86634b45dbe3 100644 --- a/tests/neg-custom-args/captures/i19330-alt2.scala +++ b/tests/neg-custom-args/captures/i19330-alt2.scala @@ -1,3 +1,5 @@ +//> using options -source 3.4 +// (to make sure we use the sealed policy) import language.experimental.captureChecking trait Logger diff --git a/tests/neg-custom-args/captures/i19330.scala b/tests/neg-custom-args/captures/i19330.scala index 8acb0dd8f66b..5fbdc00db311 100644 --- a/tests/neg-custom-args/captures/i19330.scala +++ b/tests/neg-custom-args/captures/i19330.scala @@ -1,3 +1,5 @@ +//> using options -source 3.4 +// (to force sealed encapsulation checking) import language.experimental.captureChecking trait Logger diff --git a/tests/neg-custom-args/captures/i21313.check b/tests/neg-custom-args/captures/i21313.check new file mode 100644 index 000000000000..37b944a97d68 --- /dev/null +++ b/tests/neg-custom-args/captures/i21313.check @@ -0,0 +1,11 @@ +-- Error: tests/neg-custom-args/captures/i21313.scala:6:27 ------------------------------------------------------------- +6 |def foo(x: Async) = x.await(???) // error + | ^ + | (x : Async) is not a tracked capability +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i21313.scala:15:12 --------------------------------------- +15 | ac1.await(src2) // error + | ^^^^ + | Found: (src2 : Source[Int, caps.CapSet^{ac2}]^?) + | Required: Source[Int, caps.CapSet^{ac1}]^ + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/i21313.scala b/tests/neg-custom-args/captures/i21313.scala new file mode 100644 index 000000000000..01bedb10aefd --- /dev/null +++ b/tests/neg-custom-args/captures/i21313.scala @@ -0,0 +1,15 @@ +import caps.CapSet + +trait Async: + def await[T, Cap^](using caps.Contains[Cap, this.type])(src: Source[T, Cap]^): T + +def foo(x: Async) = x.await(???) // error + +trait Source[+T, Cap^]: + final def await(using ac: Async^{Cap^}) = ac.await[T, Cap](this) // Contains[Cap, ac] is assured because {ac} <: Cap. 
+ +def test(using ac1: Async^, ac2: Async^, x: String) = + val src1 = new Source[Int, CapSet^{ac1}] {} + ac1.await(src1) // ok + val src2 = new Source[Int, CapSet^{ac2}] {} + ac1.await(src2) // error diff --git a/tests/neg-custom-args/captures/i21347.check b/tests/neg-custom-args/captures/i21347.check new file mode 100644 index 000000000000..c680a54d3efc --- /dev/null +++ b/tests/neg-custom-args/captures/i21347.check @@ -0,0 +1,15 @@ +-- Error: tests/neg-custom-args/captures/i21347.scala:4:15 ------------------------------------------------------------- +4 | ops.foreach: op => // error + | ^ + | Local reach capability C leaks into capture scope of method runOps +5 | op() +-- Error: tests/neg-custom-args/captures/i21347.scala:8:14 ------------------------------------------------------------- +8 | () => runOps(f :: Nil) // error + | ^^^^^^^^^^^^^^^^ + | reference (caps.cap : caps.Capability) is not included in the allowed capture set {} + | of an enclosing function literal with expected type () -> Unit +-- Error: tests/neg-custom-args/captures/i21347.scala:11:15 ------------------------------------------------------------ +11 | ops.foreach: op => // error + | ^ + | Local reach capability ops* leaks into capture scope of method runOpsAlt +12 | op() diff --git a/tests/neg-custom-args/captures/i21347.scala b/tests/neg-custom-args/captures/i21347.scala new file mode 100644 index 000000000000..41887be6a78a --- /dev/null +++ b/tests/neg-custom-args/captures/i21347.scala @@ -0,0 +1,12 @@ +import language.experimental.captureChecking + +def runOps[C^](ops: List[() ->{C^} Unit]): Unit = + ops.foreach: op => // error + op() + +def boom(f: () => Unit): () -> Unit = + () => runOps(f :: Nil) // error + +def runOpsAlt(ops: List[() => Unit]): Unit = + ops.foreach: op => // error + op() \ No newline at end of file diff --git a/tests/neg-custom-args/captures/i21401.check b/tests/neg-custom-args/captures/i21401.check new file mode 100644 index 000000000000..e204540358ce --- /dev/null +++ b/tests/neg-custom-args/captures/i21401.check @@ -0,0 +1,14 @@ +-- Error: tests/neg-custom-args/captures/i21401.scala:15:22 ------------------------------------------------------------ +15 | val a = usingIO[IO^](x => x) // error: The expression's type IO^ is not allowed to capture the root capability `cap` + | ^^^^^^^^^^^^^^^^^^^^ + | The expression's type box IO^ is not allowed to capture the root capability `cap`. + | This usually means that a capability persists longer than its allowed lifetime. +-- Error: tests/neg-custom-args/captures/i21401.scala:16:70 ------------------------------------------------------------ +16 | val leaked: [R, X <: Boxed[IO^] -> R] -> (op: X) -> R = usingIO[Res](mkRes) // error: The expression's type Res is not allowed to capture the root capability `cap` in its part box IO^ + | ^^^^^^^^^^^^^^^^^^^ + | The expression's type Res is not allowed to capture the root capability `cap` in its part box IO^. + | This usually means that a capability persists longer than its allowed lifetime. 
+-- Error: tests/neg-custom-args/captures/i21401.scala:18:21 ------------------------------------------------------------ +18 | val y: IO^{x*} = x.unbox // error + | ^^^^^^^ + | Local reach capability x* leaks into capture scope of method test2 diff --git a/tests/neg-custom-args/captures/i21401.scala b/tests/neg-custom-args/captures/i21401.scala new file mode 100644 index 000000000000..8284c601cd5f --- /dev/null +++ b/tests/neg-custom-args/captures/i21401.scala @@ -0,0 +1,19 @@ +import language.experimental.captureChecking + +trait IO: + def println(s: String): Unit +def usingIO[R](op: IO^ => R): R = ??? + +case class Boxed[+T](unbox: T) + +type Res = [R, X <: Boxed[IO^] -> R] -> (op: X) -> R +def mkRes(x: IO^): Res = + [R, X <: Boxed[IO^] -> R] => (op: X) => + val op1: Boxed[IO^] -> R = op + op1(Boxed[IO^](x)) +def test2() = + val a = usingIO[IO^](x => x) // error: The expression's type IO^ is not allowed to capture the root capability `cap` + val leaked: [R, X <: Boxed[IO^] -> R] -> (op: X) -> R = usingIO[Res](mkRes) // error: The expression's type Res is not allowed to capture the root capability `cap` in its part box IO^ + val x: Boxed[IO^] = leaked[Boxed[IO^], Boxed[IO^] -> Boxed[IO^]](x => x) + val y: IO^{x*} = x.unbox // error + y.println("boom") diff --git a/tests/neg-custom-args/captures/i21442.check b/tests/neg-custom-args/captures/i21442.check new file mode 100644 index 000000000000..a3bbf65c5988 --- /dev/null +++ b/tests/neg-custom-args/captures/i21442.check @@ -0,0 +1,8 @@ +-- Error: tests/neg-custom-args/captures/i21442.scala:9:13 ------------------------------------------------------------- +9 | val io = x.unbox // error: local reach capability {x*} leaks + | ^^^^^^^ + | Local reach capability x* leaks into capture scope of method foo +-- Error: tests/neg-custom-args/captures/i21442.scala:17:14 ------------------------------------------------------------ +17 | val io = x1.unbox // error + | ^^^^^^^^ + | Local reach capability x1* leaks into capture scope of method bar diff --git a/tests/neg-custom-args/captures/i21442.scala b/tests/neg-custom-args/captures/i21442.scala new file mode 100644 index 000000000000..c9fa7d152fae --- /dev/null +++ b/tests/neg-custom-args/captures/i21442.scala @@ -0,0 +1,18 @@ +import language.experimental.captureChecking +trait IO: + def use(): Unit +case class Boxed[+T](unbox: T) + +// `foo` is a function that unboxes its parameter +// and uses the capability boxed inside the parameter. +def foo(x: Boxed[IO^]): Unit = + val io = x.unbox // error: local reach capability {x*} leaks + io.use() + +// `bar` is a function that does the same thing in a +// slightly different way. +// But, no type error reported. +def bar(x: Boxed[IO^]): Unit = + val x1: Boxed[IO^] = x + val io = x1.unbox // error + io.use() diff --git a/tests/neg-custom-args/captures/i21614.check b/tests/neg-custom-args/captures/i21614.check new file mode 100644 index 000000000000..14b468db4c8e --- /dev/null +++ b/tests/neg-custom-args/captures/i21614.check @@ -0,0 +1,17 @@ +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i21614.scala:9:33 ---------------------------------------- +9 | files.map((f: F) => new Logger(f)) // error, Q: can we make this pass (see #19076)? + | ^ + | Found: (f : F^) + | Required: File^ + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i21614.scala:12:12 --------------------------------------- +12 | files.map(new Logger(_)) // error, Q: can we improve the error message? 
+ | ^^^^^^^^^^^^^ + | Found: Logger{val f: (_$1 : File^{files*})}^ + | Required: Logger{val f: File^?}^? + | + | Note that the universal capability `cap` + | cannot be included in capture set ? + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/i21614.scala b/tests/neg-custom-args/captures/i21614.scala new file mode 100644 index 000000000000..a5ed25d818a5 --- /dev/null +++ b/tests/neg-custom-args/captures/i21614.scala @@ -0,0 +1,12 @@ +import language.experimental.captureChecking +import caps.Capability +import caps.unbox + +trait File extends Capability +class Logger(f: File^) extends Capability // <- will work if we remove the extends clause + +def mkLoggers1[F <: File^](@unbox files: List[F]): List[Logger^] = + files.map((f: F) => new Logger(f)) // error, Q: can we make this pass (see #19076)? + +def mkLoggers2(@unbox files: List[File^]): List[Logger^] = + files.map(new Logger(_)) // error, Q: can we improve the error message? diff --git a/tests/neg-custom-args/captures/i21620.check b/tests/neg-custom-args/captures/i21620.check new file mode 100644 index 000000000000..3a09ba978574 --- /dev/null +++ b/tests/neg-custom-args/captures/i21620.check @@ -0,0 +1,13 @@ +-- [E129] Potential Issue Warning: tests/neg-custom-args/captures/i21620.scala:5:6 ------------------------------------- +5 | x + | ^ + | A pure expression does nothing in statement position + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/i21620.scala:9:31 ---------------------------------------- +9 | val _: () -> () ->{x} Unit = f // error + | ^ + | Found: () ->{f} () ->{x} Unit + | Required: () -> () ->{x} Unit + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/i21620.scala b/tests/neg-custom-args/captures/i21620.scala new file mode 100644 index 000000000000..a21a41a10863 --- /dev/null +++ b/tests/neg-custom-args/captures/i21620.scala @@ -0,0 +1,10 @@ +class C +def test(x: C^) = + val f = () => + def foo() = + x + () + println(s"hey: $x") + () => foo() + val _: () -> () ->{x} Unit = f // error + () diff --git a/tests/neg-custom-args/captures/inner-classes.scala b/tests/neg-custom-args/captures/inner-classes.scala index 181b830e4996..fd500e607970 100644 --- a/tests/neg-custom-args/captures/inner-classes.scala +++ b/tests/neg-custom-args/captures/inner-classes.scala @@ -1,6 +1,6 @@ object test: - @annotation.capability class FileSystem + class FileSystem extends caps.Capability def foo(fs: FileSystem) = diff --git a/tests/neg-custom-args/captures/lazylist.check b/tests/neg-custom-args/captures/lazylist.check index 09352ec648ce..f0fbd1a025b5 100644 --- a/tests/neg-custom-args/captures/lazylist.check +++ b/tests/neg-custom-args/captures/lazylist.check @@ -8,8 +8,8 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylist.scala:35:29 ------------------------------------- 35 | val ref1c: LazyList[Int] = ref1 // error | ^^^^ - | Found: (ref1 : lazylists.LazyCons[Int]{val xs: () ->{cap1} lazylists.LazyList[Int]^?}^{cap1}) - | Required: lazylists.LazyList[Int] + | Found: lazylists.LazyCons[Int]{val xs: () ->{cap1} lazylists.LazyList[Int]^?}^{ref1} + | Required: lazylists.LazyList[Int] | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylist.scala:37:36 ------------------------------------- @@ -26,11 +26,11 @@ | Required: 
lazylists.LazyList[Int]^{cap2} | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylist.scala:41:48 ------------------------------------- -41 | val ref4c: LazyList[Int]^{cap1, ref3, cap3} = ref4 // error - | ^^^^ - | Found: (ref4 : lazylists.LazyList[Int]^{cap3, cap2, ref1, cap1}) - | Required: lazylists.LazyList[Int]^{cap1, ref3, cap3} +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lazylist.scala:41:42 ------------------------------------- +41 | val ref4c: LazyList[Int]^{cap1, ref3} = ref4 // error + | ^^^^ + | Found: (ref4 : lazylists.LazyList[Int]^{cap3, ref2, ref1}) + | Required: lazylists.LazyList[Int]^{cap1, ref3} | | longer explanation available when compiling with `-explain` -- [E164] Declaration Error: tests/neg-custom-args/captures/lazylist.scala:22:6 ---------------------------------------- diff --git a/tests/neg-custom-args/captures/lazylist.scala b/tests/neg-custom-args/captures/lazylist.scala index e6e4d003f7ae..f3cd0fd31e7a 100644 --- a/tests/neg-custom-args/captures/lazylist.scala +++ b/tests/neg-custom-args/captures/lazylist.scala @@ -38,4 +38,4 @@ def test(cap1: Cap, cap2: Cap, cap3: Cap) = val ref3 = ref1.map(g) val ref3c: LazyList[Int]^{cap2} = ref3 // error val ref4 = (if cap1 == cap2 then ref1 else ref2).map(h) - val ref4c: LazyList[Int]^{cap1, ref3, cap3} = ref4 // error + val ref4c: LazyList[Int]^{cap1, ref3} = ref4 // error diff --git a/tests/neg-custom-args/captures/lazylists-exceptions.check b/tests/neg-custom-args/captures/lazylists-exceptions.check index 3095c1f2f4f9..4a8738118609 100644 --- a/tests/neg-custom-args/captures/lazylists-exceptions.check +++ b/tests/neg-custom-args/captures/lazylists-exceptions.check @@ -1,9 +1,8 @@ -- Error: tests/neg-custom-args/captures/lazylists-exceptions.scala:36:2 ----------------------------------------------- 36 | try // error | ^ - | result of `try` cannot have type LazyList[Int]^ since - | that type captures the root capability `cap`. - | This is often caused by a locally generated exception capability leaking as part of its result. + | The expression's type LazyList[Int]^ is not allowed to capture the root capability `cap`. + | This usually means that a capability persists longer than its allowed lifetime. 37 | tabulate(10) { i => 38 | if i > 9 then throw Ex1() 39 | i * i diff --git a/tests/neg-custom-args/captures/leak-problem-2.check b/tests/neg-custom-args/captures/leak-problem-2.check new file mode 100644 index 000000000000..42282ff7f9f4 --- /dev/null +++ b/tests/neg-custom-args/captures/leak-problem-2.check @@ -0,0 +1,7 @@ +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/leak-problem-2.scala:8:8 --------------------------------- +8 | = race(Seq(src1, src2)) // error + | ^^^^^^^^^^^^^^^^^^^^^ + | Found: Source[box T^?]^{src1, src2} + | Required: Source[T] + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/leak-problem-2.scala b/tests/neg-custom-args/captures/leak-problem-2.scala new file mode 100644 index 000000000000..08a3a6c2d9ca --- /dev/null +++ b/tests/neg-custom-args/captures/leak-problem-2.scala @@ -0,0 +1,9 @@ +import language.experimental.captureChecking + +trait Source[+T] + +def race[T](@caps.unbox sources: Seq[Source[T]^]): Source[T]^{sources*} = ??? + +def raceTwo[T](src1: Source[T]^, src2: Source[T]^): Source[T]^{} + = race(Seq(src1, src2)) // error + // this compiled and returned a Source that does not capture src1 and src2. 
\ No newline at end of file diff --git a/tests/neg-custom-args/captures/levels.check b/tests/neg-custom-args/captures/levels.check index a5f8d73ccf7a..ddfa7c051211 100644 --- a/tests/neg-custom-args/captures/levels.check +++ b/tests/neg-custom-args/captures/levels.check @@ -1,17 +1,14 @@ --- Error: tests/neg-custom-args/captures/levels.scala:17:13 ------------------------------------------------------------ -17 | val _ = Ref[String => String]((x: String) => x) // error +-- Error: tests/neg-custom-args/captures/levels.scala:19:13 ------------------------------------------------------------ +19 | val _ = Ref[String => String]((x: String) => x) // error | ^^^^^^^^^^^^^^^^^^^^^ | Sealed type variable T cannot be instantiated to box String => String since | that type captures the root capability `cap`. | This is often caused by a local capability in an argument of constructor Ref | leaking as part of its result. --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/levels.scala:22:11 --------------------------------------- -22 | r.setV(g) // error +-- Error: tests/neg-custom-args/captures/levels.scala:24:11 ------------------------------------------------------------ +24 | r.setV(g) // error | ^ - | Found: box (x: String) ->{cap3} String - | Required: box (x$0: String) ->? String + | reference (cap3 : CC^) is not included in the allowed capture set ? of value r | | Note that reference (cap3 : CC^), defined in method scope - | cannot be included in outer capture set ? of value r which is associated with method test2 - | - | longer explanation available when compiling with `-explain` + | cannot be included in outer capture set ? of value r diff --git a/tests/neg-custom-args/captures/levels.scala b/tests/neg-custom-args/captures/levels.scala index b28e87f03ef7..4709fd80d9b8 100644 --- a/tests/neg-custom-args/captures/levels.scala +++ b/tests/neg-custom-args/captures/levels.scala @@ -1,3 +1,5 @@ +//> using options -source 3.4 +// (to make sure we use the sealed policy) class CC def test1(cap1: CC^) = diff --git a/tests/neg-custom-args/captures/lubs.check b/tests/neg-custom-args/captures/lubs.check new file mode 100644 index 000000000000..b2eaf6ae6f4e --- /dev/null +++ b/tests/neg-custom-args/captures/lubs.check @@ -0,0 +1,21 @@ +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lubs.scala:17:13 ----------------------------------------- +17 | val _: D = x1 // error + | ^^ + | Found: (x1 : D^{d1}) + | Required: D + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lubs.scala:18:13 ----------------------------------------- +18 | val _: D = x2 // error + | ^^ + | Found: (x2 : D^{d1}) + | Required: D + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/lubs.scala:19:13 ----------------------------------------- +19 | val _: D = x3 // error + | ^^ + | Found: (x3 : D^{d1, d2}) + | Required: D + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/lubs.scala b/tests/neg-custom-args/captures/lubs.scala new file mode 100644 index 000000000000..3a2eb59b48b5 --- /dev/null +++ b/tests/neg-custom-args/captures/lubs.scala @@ -0,0 +1,20 @@ +import java.sql.Date + +class C extends caps.Capability +class D + +def Test(c1: C, c2: C) = + val d: D = ??? + val d1: D^{c1} = ??? + val d2: D^{c2} = ??? + val x1 = if ??? then d else d1 + val _: D^{c1} = x1 + val x2 = if ??? 
then d1 else d + val _: D^{c1} = x2 + val x3 = if ??? then d1 else d2 + val _: D^{c1, c2} = x3 + + val _: D = x1 // error + val _: D = x2 // error + val _: D = x3 // error + diff --git a/tests/neg-custom-args/captures/outer-var.check b/tests/neg-custom-args/captures/outer-var.check index c250280961d9..32351a179eab 100644 --- a/tests/neg-custom-args/captures/outer-var.check +++ b/tests/neg-custom-args/captures/outer-var.check @@ -1,8 +1,8 @@ -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/outer-var.scala:11:8 ------------------------------------- 11 | x = q // error | ^ - | Found: () ->{q} Unit - | Required: () ->{p, q²} Unit + | Found: box () ->{q} Unit + | Required: box () ->{p, q²} Unit | | where: q is a parameter in method inner | q² is a parameter in method test @@ -12,33 +12,19 @@ 12 | x = (q: Proc) // error | ^^^^^^^ | Found: Proc - | Required: () ->{p, q} Unit + | Required: box () ->{p, q} Unit + | + | Note that () => Unit cannot be box-converted to box () ->{p, q} Unit + | since at least one of their capture sets contains the root capability `cap` | | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/outer-var.scala:13:9 ------------------------------------- 13 | y = (q: Proc) // error | ^^^^^^^ | Found: Proc - | Required: () ->{p} Unit - | - | Note that the universal capability `cap` - | cannot be included in capture set {p} of variable y - | - | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/outer-var.scala:14:8 ------------------------------------- -14 | y = q // error - | ^ - | Found: () ->{q} Unit - | Required: () ->{p} Unit + | Required: box () => Unit | - | Note that reference (q : Proc), defined in method inner - | cannot be included in outer capture set {p} of variable y which is associated with method test + | Note that () => Unit cannot be box-converted to box () => Unit + | since at least one of their capture sets contains the root capability `cap` | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/outer-var.scala:16:53 --------------------------------------------------------- -16 | var finalizeActions = collection.mutable.ListBuffer[() => Unit]() // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Sealed type variable A cannot be instantiated to box () => Unit since - | that type captures the root capability `cap`. - | This is often caused by a local capability in an argument of method apply - | leaking as part of its result. 
diff --git a/tests/neg-custom-args/captures/outer-var.scala b/tests/neg-custom-args/captures/outer-var.scala index 39c3a6da4ca3..e26cd631602a 100644 --- a/tests/neg-custom-args/captures/outer-var.scala +++ b/tests/neg-custom-args/captures/outer-var.scala @@ -11,8 +11,8 @@ def test(p: Proc, q: () => Unit) = x = q // error x = (q: Proc) // error y = (q: Proc) // error - y = q // error + y = q // OK, was error under sealed - var finalizeActions = collection.mutable.ListBuffer[() => Unit]() // error + var finalizeActions = collection.mutable.ListBuffer[() => Unit]() // OK, was error under sealed diff --git a/tests/neg-custom-args/captures/outerRefsUses.scala b/tests/neg-custom-args/captures/outerRefsUses.scala new file mode 100644 index 000000000000..cd03c8c41efd --- /dev/null +++ b/tests/neg-custom-args/captures/outerRefsUses.scala @@ -0,0 +1,10 @@ +class IO +def test(io: IO^) = + class C: + def foo() = () => + val x: IO^{this} = io + () + val c = new C + val _: C^{io} = c // ok + val _: C = c // error + () diff --git a/tests/neg-custom-args/captures/path-box.scala b/tests/neg-custom-args/captures/path-box.scala new file mode 100644 index 000000000000..3213c236aaf5 --- /dev/null +++ b/tests/neg-custom-args/captures/path-box.scala @@ -0,0 +1,20 @@ +class A: + val m: A^ = ??? + val self: this.type = this + +case class Box[+T](value: T) + +def testBox1(a: A^): Box[A^{a}] = + Box(a.m) + +def testBox2(a: A^): Box[A^{a.m}] = + Box(a.m) + +def testBox3(a: A^): Box[A^{a.m}] = + Box(a) // error + +def testBox4(a: A^): Box[A^{a.m}] = + Box(a.m.m.m) + +def testBox5(a: A^): Box[A^{a.m}] = + Box(a.m.m.self) \ No newline at end of file diff --git a/tests/neg-custom-args/captures/path-connection.scala b/tests/neg-custom-args/captures/path-connection.scala new file mode 100644 index 000000000000..c65aa75b1ed2 --- /dev/null +++ b/tests/neg-custom-args/captures/path-connection.scala @@ -0,0 +1,48 @@ +import language.experimental.modularity + +trait Reader: + def read(): String + +trait Sender: + def send(msg: String): Unit + +class Connection extends Reader, Sender: + def read() = "hello" + def send(msg: String) = () + + val readOnly: Reader^ = new Reader: + def read() = Connection.this.read() + +class ReaderProxy(tracked val r: Reader^) extends Reader: + def read() = "(Proxy)" + r.read() + +class SenderProxy(tracked val s: Sender^) extends Sender: + def send(msg: String) = s.send("(Proxy) " + msg) + +// TODO: We have to put `c` in the different argument list to make it work. +// See the comments in `integrateRT`. +def testConnection(c: Connection^)( + handle1: Reader^{c.readOnly} => String, + handle2: Sender^{c} => Unit, + handle3: Reader^{c} => String, + ) = + val m1 = c.read() + c.send("hello") + + val m2 = c.readOnly.read() + + val m3a = handle1(c.readOnly) + val m3b = handle3(c.readOnly) + + val m4a = handle1(c) // error + val m4b = handle3(c) + + val m5a = handle1(new ReaderProxy(c.readOnly)) + val m5b = handle3(new ReaderProxy(c.readOnly)) + + val m6a = handle1(new ReaderProxy(c)) // error + val m6b = handle3(new ReaderProxy(c)) + + handle2(c) + + handle2(new SenderProxy(c)) \ No newline at end of file diff --git a/tests/neg-custom-args/captures/path-illigal.scala b/tests/neg-custom-args/captures/path-illigal.scala new file mode 100644 index 000000000000..f09db0087ef7 --- /dev/null +++ b/tests/neg-custom-args/captures/path-illigal.scala @@ -0,0 +1,7 @@ +class A: + val m: A^ = ??? + var n: A^ = ??? 
+ +def test1(a: A^) = + val c1: A^{a.m} = a.m + val f1: A^{a.n} = a.n // error \ No newline at end of file diff --git a/tests/neg-custom-args/captures/path-patmat-should-be-pos.scala b/tests/neg-custom-args/captures/path-patmat-should-be-pos.scala new file mode 100644 index 000000000000..aca6102204a3 --- /dev/null +++ b/tests/neg-custom-args/captures/path-patmat-should-be-pos.scala @@ -0,0 +1,26 @@ +class It[A] + +class Filter[A](val underlying: It[A]^, val p: A => Boolean) extends It[A] +object Filter: + def apply[A](underlying: It[A]^, p: A => Boolean): Filter[A]^{underlying, p} = + underlying match + case filter: Filter[A]^ => + val x = new Filter(filter.underlying, a => filter.p(a) && p(a)) + x: Filter[A]^{underlying, p} // error + // !!! should work, it seems to be the case that the system does not recognize that + // underlying and filter are aliases. + + // On the other hand, the following works: + locally: + val filter: underlying.type & Filter[A] = ??? + val a: It[A]^{filter.underlying} = ??? + val b: It[A]^{underlying} = a + val x = new Filter(filter.underlying, a => filter.p(a) && p(a)) + x: Filter[A]^{underlying, p} + + locally: + val filter: underlying.type & Filter[A]^ = ??? + val a: It[A]^{filter.underlying} = ??? + val b: It[A]^{underlying} = a + val x = new Filter(filter.underlying, a => filter.p(a) && p(a)) + x: Filter[A]^{underlying, p} diff --git a/tests/neg-custom-args/captures/path-prefix.scala b/tests/neg-custom-args/captures/path-prefix.scala new file mode 100644 index 000000000000..af5817636d0b --- /dev/null +++ b/tests/neg-custom-args/captures/path-prefix.scala @@ -0,0 +1,44 @@ +import language.experimental.modularity +import language.experimental.captureChecking +import caps.Capability + +class F: + val f: AnyRef^ = ??? + +case class B(tracked val a: A) extends F, Capability + +class A extends F, Capability: + val b: B { val a: A.this.type } = B(this) + +def test(a: A) = + val x: a.b.type = a.b + val y: x.a.type = x.a + // x and y are two distinct singleton types with following properties: + // x =:= a.b + // y =:= x.a =:= a.b.a =:= a + + val cx: AnyRef^{x} = ??? + val cy: AnyRef^{y} = ??? + val caf: AnyRef^{a.f} = ??? + val cabf: AnyRef^{a.b.f} = ??? + val cxf: AnyRef^{x.f} = ??? + val cyf: AnyRef^{y.f} = ??? + + // x and y subsume to each other: + // * {x} <:< {y}: the underlying singleton of y is x.a, + // and the underlying singleton of x.a is a, + // which is a prefix for the underlying type of x (a.b), + // hence {x} <:< {y}; + // * {y} <:< {x}: by underlying singleton of y is x.a, whose prefix is x. + // Hence, {x} =:= {y}. + val x2y: AnyRef^{y} = cx + val y2x: AnyRef^{x} = cy + + val yf2af: AnyRef^{a.f} = cyf + val af2yf: AnyRef^{y.f} = caf + val xf2abf: AnyRef^{a.b.f} = cxf + val abf2xf: AnyRef^{x.f} = cabf + + // Since `x !=:= y`, {x.f} !=:= {y.f} + val yf2xf: AnyRef^{x.f} = cyf // error + val xf2yf: AnyRef^{y.f} = cxf // error diff --git a/tests/neg-custom-args/captures/path-simple.scala b/tests/neg-custom-args/captures/path-simple.scala new file mode 100644 index 000000000000..93b6dacebe74 --- /dev/null +++ b/tests/neg-custom-args/captures/path-simple.scala @@ -0,0 +1,27 @@ + +class A: + val m: A^ = ??? 
+ val self: this.type = this + +case class C(ca: A^) + +def test1(a: A^, b: A^) = + val c1: A^{a} = a.m + val c2: A^{a.m} = a.m + val c3: A^{b} = a.m // error + + val d1: A^{a} = a.self + val d2: A^{a.self} = a.self + val d3: A^{a.self} = a + + val e1: A^{a.m} = a.self.m + val e2: A^{a.self.m} = a.self.m + val e3: A^{a.self.m} = a.m + +def test2(a: A^) = + val b: a.type = a + val c1: C^{a} = new C(a) + val c2: C^{a} = new C(a.m) + val c3: C^{a.m} = new C(a.m) + val c4: C^{b} = new C(a) + val c5: C^{a} = new C(b) \ No newline at end of file diff --git a/tests/neg-custom-args/captures/polyCaptures.check b/tests/neg-custom-args/captures/polyCaptures.check new file mode 100644 index 000000000000..8173828b7bc8 --- /dev/null +++ b/tests/neg-custom-args/captures/polyCaptures.check @@ -0,0 +1,8 @@ +-- Error: tests/neg-custom-args/captures/polyCaptures.scala:4:22 ------------------------------------------------------- +4 |val runOpsCheck: [C^] -> (ops: List[() ->{C^} Unit]) ->{C^} Unit = runOps // error + | ^ + | Implementation restriction: polymorphic function types cannot wrap function types that have capture sets +-- Error: tests/neg-custom-args/captures/polyCaptures.scala:5:23 ------------------------------------------------------- +5 |val runOpsCheck2: [C^] => (ops: List[() ->{C^} Unit]) ->{C^} Unit = runOps // error + | ^ + | Implementation restriction: polymorphic function types cannot wrap function types that have capture sets diff --git a/tests/neg-custom-args/captures/polyCaptures.scala b/tests/neg-custom-args/captures/polyCaptures.scala new file mode 100644 index 000000000000..776af95e5dcf --- /dev/null +++ b/tests/neg-custom-args/captures/polyCaptures.scala @@ -0,0 +1,7 @@ +class Box[X](val elem: X) + +val runOps = [C^] => (b: Box[() ->{C^} Unit]) => b.elem() +val runOpsCheck: [C^] -> (ops: List[() ->{C^} Unit]) ->{C^} Unit = runOps // error +val runOpsCheck2: [C^] => (ops: List[() ->{C^} Unit]) ->{C^} Unit = runOps // error + + diff --git a/tests/neg-custom-args/captures/reaches.check b/tests/neg-custom-args/captures/reaches.check index a1c5a56369e9..f00fea09ed8c 100644 --- a/tests/neg-custom-args/captures/reaches.check +++ b/tests/neg-custom-args/captures/reaches.check @@ -1,48 +1,52 @@ --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:21:11 -------------------------------------- -21 | cur = (() => f.write()) :: Nil // error since {f*} !<: {xs*} +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:22:11 -------------------------------------- +22 | cur = (() => f.write()) :: Nil // error | ^^^^^^^^^^^^^^^^^^^^^^^ | Found: List[box () ->{f} Unit] | Required: List[box () ->{xs*} Unit] | | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:32:7 --------------------------------------- -32 | (() => f.write()) :: Nil // error since {f*} !<: {xs*} +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:33:7 --------------------------------------- +33 | (() => f.write()) :: Nil // error | ^^^^^^^^^^^^^^^^^^^^^^^ | Found: List[box () ->{f} Unit] | Required: box List[box () ->{xs*} Unit]^? 
| | Note that reference (f : File^), defined in method $anonfun - | cannot be included in outer capture set {xs*} of value cur which is associated with method runAll1 + | cannot be included in outer capture set {xs*} of value cur | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/reaches.scala:35:6 ------------------------------------------------------------ -35 | var cur: List[Proc] = xs // error: Illegal type for var - | ^ - | Mutable variable cur cannot have type List[box () => Unit] since - | the part box () => Unit of that type captures the root capability `cap`. --- Error: tests/neg-custom-args/captures/reaches.scala:42:15 ----------------------------------------------------------- -42 | val cur = Ref[List[Proc]](xs) // error: illegal type for type argument to Ref - | ^^^^^^^^^^^^^^^ - | Sealed type variable T cannot be instantiated to List[box () => Unit] since - | the part box () => Unit of that type captures the root capability `cap`. - | This is often caused by a local capability in an argument of constructor Ref - | leaking as part of its result. --- Error: tests/neg-custom-args/captures/reaches.scala:52:31 ----------------------------------------------------------- -52 | val id: Id[Proc, Proc] = new Id[Proc, () -> Unit] // error - | ^^^^^^^^^^^^^^^^^^^^ - | Sealed type variable A cannot be instantiated to box () => Unit since - | that type captures the root capability `cap`. - | This is often caused by a local capability in an argument of constructor Id - | leaking as part of its result. --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:60:27 -------------------------------------- -60 | val f1: File^{id*} = id(f) // error +-- Error: tests/neg-custom-args/captures/reaches.scala:38:31 ----------------------------------------------------------- +38 | val next: () => Unit = cur.head // error + | ^^^^^^^^ + | The expression's type box () => Unit is not allowed to capture the root capability `cap`. + | This usually means that a capability persists longer than its allowed lifetime. +-- Error: tests/neg-custom-args/captures/reaches.scala:45:35 ----------------------------------------------------------- +45 | val next: () => Unit = cur.get.head // error + | ^^^^^^^^^^^^ + | The expression's type box () => Unit is not allowed to capture the root capability `cap`. + | This usually means that a capability persists longer than its allowed lifetime. 
+-- Error: tests/neg-custom-args/captures/reaches.scala:55:6 ------------------------------------------------------------ +55 | id(() => f.write()) // error + | ^^^^^^^^^^^^^^^^^^^ + | Local reach capability id* leaks into capture scope of method test +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:62:27 -------------------------------------- +62 | val f1: File^{id*} = id(f) // error, since now id(f): File^ | ^^^^^ - | Found: File^{id, f} + | Found: File^{f} | Required: File^{id*} | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/reaches.scala:77:5 ------------------------------------------------------------ -77 | ps.map((x, y) => compose1(x, y)) // error: cannot mix cap and * - | ^^^^^^ - | Reach capability cap and universal capability cap cannot both - | appear in the type [B](f: ((box A ->{ps*} A, box A ->{ps*} A)) => B): List[B] of this expression +-- Error: tests/neg-custom-args/captures/reaches.scala:79:10 ----------------------------------------------------------- +79 | ps.map((x, y) => compose1(x, y)) // error // error + | ^ + | Local reach capability ps* leaks into capture scope of method mapCompose +-- Error: tests/neg-custom-args/captures/reaches.scala:79:13 ----------------------------------------------------------- +79 | ps.map((x, y) => compose1(x, y)) // error // error + | ^ + | Local reach capability ps* leaks into capture scope of method mapCompose +-- [E057] Type Mismatch Error: tests/neg-custom-args/captures/reaches.scala:53:51 -------------------------------------- +53 | val id: Id[Proc, Proc] = new Id[Proc, () -> Unit] // error + | ^ + | Type argument () -> Unit does not conform to lower bound () => Unit + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/reaches.scala b/tests/neg-custom-args/captures/reaches.scala index de5e4362cdf2..c33ba80a668b 100644 --- a/tests/neg-custom-args/captures/reaches.scala +++ b/tests/neg-custom-args/captures/reaches.scala @@ -1,3 +1,4 @@ +import caps.unbox class File: def write(): Unit = ??? 
@@ -10,17 +11,17 @@ class Ref[T](init: T): def get: T = x def set(y: T) = { x = y } -def runAll0(xs: List[Proc]): Unit = - var cur: List[() ->{xs*} Unit] = xs // OK, by revised VAR +def runAll0(@unbox xs: List[Proc]): Unit = + var cur: List[() ->{xs*} Unit] = xs while cur.nonEmpty do val next: () ->{xs*} Unit = cur.head next() cur = cur.tail: List[() ->{xs*} Unit] usingFile: f => - cur = (() => f.write()) :: Nil // error since {f*} !<: {xs*} + cur = (() => f.write()) :: Nil // error -def runAll1(xs: List[Proc]): Unit = +def runAll1(@unbox xs: List[Proc]): Unit = val cur = Ref[List[() ->{xs*} Unit]](xs) // OK, by revised VAR while cur.get.nonEmpty do val next: () ->{xs*} Unit = cur.get.head @@ -29,19 +30,19 @@ def runAll1(xs: List[Proc]): Unit = usingFile: f => cur.set: - (() => f.write()) :: Nil // error since {f*} !<: {xs*} + (() => f.write()) :: Nil // error def runAll2(xs: List[Proc]): Unit = - var cur: List[Proc] = xs // error: Illegal type for var + var cur: List[Proc] = xs while cur.nonEmpty do - val next: () => Unit = cur.head + val next: () => Unit = cur.head // error next() cur = cur.tail def runAll3(xs: List[Proc]): Unit = - val cur = Ref[List[Proc]](xs) // error: illegal type for type argument to Ref + val cur = Ref[List[Proc]](xs) while cur.get.nonEmpty do - val next: () => Unit = cur.get.head + val next: () => Unit = cur.get.head // error next() cur.set(cur.get.tail: List[Proc]) @@ -51,13 +52,14 @@ class Id[-A, +B >: A](): def test = val id: Id[Proc, Proc] = new Id[Proc, () -> Unit] // error usingFile: f => - id(() => f.write()) // escape, if it was not for the error above + id(() => f.write()) // error def attack2 = val id: File^ -> File^ = x => x + // val id: File^ -> EX C.File^C val leaked = usingFile[File^{id*}]: f => - val f1: File^{id*} = id(f) // error + val f1: File^{id*} = id(f) // error, since now id(f): File^ f1 class List[+A]: @@ -74,6 +76,7 @@ def compose1[A, B, C](f: A => B, g: B => C): A ->{f, g} C = z => g(f(z)) def mapCompose[A](ps: List[(A => A, A => A)]): List[A ->{ps*} A] = - ps.map((x, y) => compose1(x, y)) // error: cannot mix cap and * - + ps.map((x, y) => compose1(x, y)) // error // error +def mapCompose2[A](@unbox ps: List[(A => A, A => A)]): List[A ->{ps*} A] = + ps.map((x, y) => compose1(x, y)) diff --git a/tests/neg-custom-args/captures/reaches2.check b/tests/neg-custom-args/captures/reaches2.check new file mode 100644 index 000000000000..03860ee4a01b --- /dev/null +++ b/tests/neg-custom-args/captures/reaches2.check @@ -0,0 +1,10 @@ +-- Error: tests/neg-custom-args/captures/reaches2.scala:8:10 ----------------------------------------------------------- +8 | ps.map((x, y) => compose1(x, y)) // error // error + | ^ + |reference ps* is not included in the allowed capture set {} + |of an enclosing function literal with expected type ((box A ->{ps*} A, box A ->{ps*} A)) -> box (x$0: A^?) ->? (ex$15: caps.Exists) -> A^? +-- Error: tests/neg-custom-args/captures/reaches2.scala:8:13 ----------------------------------------------------------- +8 | ps.map((x, y) => compose1(x, y)) // error // error + | ^ + |reference ps* is not included in the allowed capture set {} + |of an enclosing function literal with expected type ((box A ->{ps*} A, box A ->{ps*} A)) -> box (x$0: A^?) ->? (ex$15: caps.Exists) -> A^? 
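The reaches.scala changes above drop the old blanket errors on `var cur: List[Proc]` and on the `Ref` type argument in favour of the `@unbox` parameter annotation and per-use checks: with `@unbox xs`, the elements of `xs` may be retyped against the reach capability `xs*`, and only captures not covered by `xs*`, such as a locally opened file, are still rejected. A standalone sketch mirroring `runAll0` (not part of the patch; it assumes the experimental captureChecking feature and the `caps.unbox` annotation used in the diff):

import language.experimental.captureChecking
import caps.unbox

class File:
  def write(): Unit = ()

type Proc = () => Unit

def usingFile[R](op: File^ => R): R = op(new File)

def runAll(@unbox xs: List[Proc]): Unit =
  var cur: List[() ->{xs*} Unit] = xs          // ok: element captures are charged to the reach capability xs*
  while cur.nonEmpty do
    cur.head()
    cur = cur.tail: List[() ->{xs*} Unit]
  usingFile: f =>
    cur = (() => f.write()) :: Nil             // rejected: {f} is not included in {xs*}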
diff --git a/tests/neg-custom-args/captures/reaches2.scala b/tests/neg-custom-args/captures/reaches2.scala new file mode 100644 index 000000000000..f2447b8c8795 --- /dev/null +++ b/tests/neg-custom-args/captures/reaches2.scala @@ -0,0 +1,9 @@ +class List[+A]: + def map[B](f: A -> B): List[B] = ??? + +def compose1[A, B, C](f: A => B, g: B => C): A ->{f, g} C = + z => g(f(z)) + +def mapCompose[A](ps: List[(A => A, A => A)]): List[A ->{ps*} A] = + ps.map((x, y) => compose1(x, y)) // error // error + diff --git a/tests/neg-custom-args/captures/real-try.check b/tests/neg-custom-args/captures/real-try.check index 50dcc16f5f54..7f8ab50bc222 100644 --- a/tests/neg-custom-args/captures/real-try.check +++ b/tests/neg-custom-args/captures/real-try.check @@ -1,46 +1,46 @@ --- [E190] Potential Issue Warning: tests/neg-custom-args/captures/real-try.scala:36:4 ---------------------------------- -36 | b.x +-- [E190] Potential Issue Warning: tests/neg-custom-args/captures/real-try.scala:38:4 ---------------------------------- +38 | b.x | ^^^ | Discarded non-Unit value of type () -> Unit. You may want to use `()`. | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/real-try.scala:12:2 ----------------------------------------------------------- -12 | try // error +-- Error: tests/neg-custom-args/captures/real-try.scala:14:2 ----------------------------------------------------------- +14 | try // error | ^ | result of `try` cannot have type () => Unit since | that type captures the root capability `cap`. | This is often caused by a locally generated exception capability leaking as part of its result. -13 | () => foo(1) -14 | catch -15 | case _: Ex1 => ??? -16 | case _: Ex2 => ??? --- Error: tests/neg-custom-args/captures/real-try.scala:18:10 ---------------------------------------------------------- -18 | val x = try // error +15 | () => foo(1) +16 | catch +17 | case _: Ex1 => ??? +18 | case _: Ex2 => ??? +-- Error: tests/neg-custom-args/captures/real-try.scala:20:10 ---------------------------------------------------------- +20 | val x = try // error | ^ | result of `try` cannot have type () => Unit since | that type captures the root capability `cap`. | This is often caused by a locally generated exception capability leaking as part of its result. -19 | () => foo(1) -20 | catch -21 | case _: Ex1 => ??? -22 | case _: Ex2 => ??? --- Error: tests/neg-custom-args/captures/real-try.scala:24:10 ---------------------------------------------------------- -24 | val y = try // error +21 | () => foo(1) +22 | catch +23 | case _: Ex1 => ??? +24 | case _: Ex2 => ??? +-- Error: tests/neg-custom-args/captures/real-try.scala:26:10 ---------------------------------------------------------- +26 | val y = try // error | ^ | result of `try` cannot have type () => Cell[Unit]^? since | that type captures the root capability `cap`. | This is often caused by a locally generated exception capability leaking as part of its result. -25 | () => Cell(foo(1)) -26 | catch -27 | case _: Ex1 => ??? -28 | case _: Ex2 => ??? --- Error: tests/neg-custom-args/captures/real-try.scala:30:10 ---------------------------------------------------------- -30 | val b = try // error +27 | () => Cell(foo(1)) +28 | catch +29 | case _: Ex1 => ??? +30 | case _: Ex2 => ??? +-- Error: tests/neg-custom-args/captures/real-try.scala:32:10 ---------------------------------------------------------- +32 | val b = try // error | ^ | result of `try` cannot have type Cell[box () => Unit]^? 
since | the part box () => Unit of that type captures the root capability `cap`. | This is often caused by a locally generated exception capability leaking as part of its result. -31 | Cell(() => foo(1)) -32 | catch -33 | case _: Ex1 => ??? -34 | case _: Ex2 => ??? +33 | Cell(() => foo(1)) +34 | catch +35 | case _: Ex1 => ??? +36 | case _: Ex2 => ??? diff --git a/tests/neg-custom-args/captures/real-try.scala b/tests/neg-custom-args/captures/real-try.scala index 23961e884ea3..51f1a0fdea5a 100644 --- a/tests/neg-custom-args/captures/real-try.scala +++ b/tests/neg-custom-args/captures/real-try.scala @@ -1,3 +1,5 @@ +//> using options -source 3.4 +// (to make sure we use the sealed policy) import language.experimental.saferExceptions class Ex1 extends Exception("Ex1") diff --git a/tests/neg-custom-args/captures/refine-reach-shallow.scala b/tests/neg-custom-args/captures/refine-reach-shallow.scala index 9f4b28ce52e3..525d33fdb7c5 100644 --- a/tests/neg-custom-args/captures/refine-reach-shallow.scala +++ b/tests/neg-custom-args/captures/refine-reach-shallow.scala @@ -14,5 +14,5 @@ def test4(): Unit = val ys: List[IO^{xs*}] = xs // ok def test5(): Unit = val f: [R] -> (IO^ -> R) -> IO^ = ??? - val g: [R] -> (IO^ -> R) -> IO^{f*} = f // ok + val g: [R] -> (IO^ -> R) -> IO^{f*} = f // error val h: [R] -> (IO^{f*} -> R) -> IO^ = f // error diff --git a/tests/neg-custom-args/captures/singletons.scala b/tests/neg-custom-args/captures/singletons.scala index 194e6e850dcd..be0ee67ab1bc 100644 --- a/tests/neg-custom-args/captures/singletons.scala +++ b/tests/neg-custom-args/captures/singletons.scala @@ -1,6 +1,6 @@ val x = () => () -val y1: x.type = x // ok -val y2: x.type^{} = x // error: singleton type cannot have capture set -val y3: x.type^{x} = x // error: singleton type cannot have capture set // error -val y4: x.type^ = x // error: singleton type cannot have capture set +val y1: x.type = x +val y2: x.type^{} = x +val y3: x.type^{x} = x // error +val y4: x.type^ = x diff --git a/tests/neg-custom-args/captures/spread-problem.check b/tests/neg-custom-args/captures/spread-problem.check new file mode 100644 index 000000000000..31cf38a51727 --- /dev/null +++ b/tests/neg-custom-args/captures/spread-problem.check @@ -0,0 +1,14 @@ +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/spread-problem.scala:8:6 --------------------------------- +8 | race(Seq(src1, src2)*) // error + | ^^^^^^^^^^^^^^^^^^^^^^ + | Found: Source[box T^?]^{src1, src2} + | Required: Source[T] + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/spread-problem.scala:11:6 -------------------------------- +11 | race(src1, src2) // error + | ^^^^^^^^^^^^^^^^ + | Found: Source[box T^?]^{src1, src2} + | Required: Source[T] + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/spread-problem.scala b/tests/neg-custom-args/captures/spread-problem.scala new file mode 100644 index 000000000000..579c7817b9c1 --- /dev/null +++ b/tests/neg-custom-args/captures/spread-problem.scala @@ -0,0 +1,11 @@ +import language.experimental.captureChecking + +trait Source[+T] + +def race[T](@caps.unbox sources: (Source[T]^)*): Source[T]^{sources*} = ??? 
+ +def raceTwo[T](src1: Source[T]^, src2: Source[T]^): Source[T]^{} = + race(Seq(src1, src2)*) // error + +def raceThree[T](src1: Source[T]^, src2: Source[T]^): Source[T]^{} = + race(src1, src2) // error \ No newline at end of file diff --git a/tests/neg-custom-args/captures/stack-alloc.scala b/tests/neg-custom-args/captures/stack-alloc.scala index befafbf13003..80e7e4169720 100644 --- a/tests/neg-custom-args/captures/stack-alloc.scala +++ b/tests/neg-custom-args/captures/stack-alloc.scala @@ -5,11 +5,11 @@ class Pooled val stack = mutable.ArrayBuffer[Pooled]() var nextFree = 0 -def withFreshPooled[T](op: (lcap: caps.Cap) ?-> Pooled^{lcap} => T): T = +def withFreshPooled[T](op: (lcap: caps.Capability) ?-> Pooled^{lcap} => T): T = if nextFree >= stack.size then stack.append(new Pooled) val pooled = stack(nextFree) nextFree = nextFree + 1 - val ret = op(pooled) + val ret = op(using caps.cap)(pooled) nextFree = nextFree - 1 ret diff --git a/tests/neg-custom-args/captures/try.check b/tests/neg-custom-args/captures/try.check index 3b96927de738..77a5fc06e05a 100644 --- a/tests/neg-custom-args/captures/try.check +++ b/tests/neg-custom-args/captures/try.check @@ -1,10 +1,12 @@ --- Error: tests/neg-custom-args/captures/try.scala:23:16 --------------------------------------------------------------- -23 | val a = handle[Exception, CanThrow[Exception]] { // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Sealed type variable R cannot be instantiated to box CT[Exception]^ since - | that type captures the root capability `cap`. - | This is often caused by a local capability in an argument of method handle - | leaking as part of its result. +-- Error: tests/neg-custom-args/captures/try.scala:25:3 ---------------------------------------------------------------- +23 | val a = handle[Exception, CanThrow[Exception]] { +24 | (x: CanThrow[Exception]) => x +25 | }{ // error (but could be better) + | ^ + | The expression's type box CT[Exception]^ is not allowed to capture the root capability `cap`. + | This usually means that a capability persists longer than its allowed lifetime. +26 | (ex: Exception) => ??? +27 | } -- Error: tests/neg-custom-args/captures/try.scala:30:65 --------------------------------------------------------------- 30 | (x: CanThrow[Exception]) => () => raise(new Exception)(using x) // error | ^ diff --git a/tests/neg-custom-args/captures/try.scala b/tests/neg-custom-args/captures/try.scala index 3d25dff4cd2c..45a1b346a512 100644 --- a/tests/neg-custom-args/captures/try.scala +++ b/tests/neg-custom-args/captures/try.scala @@ -20,9 +20,9 @@ def handle[E <: Exception, R <: Top](op: CT[E]^ => R)(handler: E => R): R = catch case ex: E => handler(ex) def test = - val a = handle[Exception, CanThrow[Exception]] { // error + val a = handle[Exception, CanThrow[Exception]] { (x: CanThrow[Exception]) => x - }{ + }{ // error (but could be better) (ex: Exception) => ??? } diff --git a/tests/neg-custom-args/captures/try3.scala b/tests/neg-custom-args/captures/try3.scala index 004cda6a399c..880d20ef16a0 100644 --- a/tests/neg-custom-args/captures/try3.scala +++ b/tests/neg-custom-args/captures/try3.scala @@ -4,7 +4,7 @@ class CT[E] type CanThrow[E] = CT[E]^ type Top = Any^ -def handle[E <: Exception, T <: Top](op: (lcap: caps.Cap) ?-> CT[E]^{lcap} ?=> T)(handler: E => T): T = +def handle[E <: Exception, T <: Top](op: (lcap: caps.Capability) ?-> CT[E]^{lcap} ?=> T)(handler: E => T): T = val x: CT[E] = ??? 
try op(using caps.cap)(using x) catch case ex: E => handler(ex) diff --git a/tests/neg-custom-args/captures/unbox-overrides.check b/tests/neg-custom-args/captures/unbox-overrides.check new file mode 100644 index 000000000000..b9a3be7bffbc --- /dev/null +++ b/tests/neg-custom-args/captures/unbox-overrides.check @@ -0,0 +1,21 @@ +-- [E164] Declaration Error: tests/neg-custom-args/captures/unbox-overrides.scala:8:6 ---------------------------------- +8 | def foo(x: C): C // error + | ^ + |error overriding method foo in trait A of type (x: C): C; + | method foo of type (x: C): C has a parameter x with different @unbox status than the corresponding parameter in the overridden definition + | + | longer explanation available when compiling with `-explain` +-- [E164] Declaration Error: tests/neg-custom-args/captures/unbox-overrides.scala:9:6 ---------------------------------- +9 | def bar(@unbox x: C): C // error + | ^ + |error overriding method bar in trait A of type (x: C): C; + | method bar of type (x: C): C has a parameter x with different @unbox status than the corresponding parameter in the overridden definition + | + | longer explanation available when compiling with `-explain` +-- [E164] Declaration Error: tests/neg-custom-args/captures/unbox-overrides.scala:15:15 -------------------------------- +15 |abstract class C extends A[C], B2 // error + | ^ + |error overriding method foo in trait A of type (x: C): C; + | method foo in trait B2 of type (x: C): C has a parameter x with different @unbox status than the corresponding parameter in the overridden definition + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/unbox-overrides.scala b/tests/neg-custom-args/captures/unbox-overrides.scala new file mode 100644 index 000000000000..5abb5013bfbe --- /dev/null +++ b/tests/neg-custom-args/captures/unbox-overrides.scala @@ -0,0 +1,15 @@ +import caps.unbox + +trait A[X]: + def foo(@unbox x: X): X + def bar(x: X): X + +trait B extends A[C]: + def foo(x: C): C // error + def bar(@unbox x: C): C // error + +trait B2: + def foo(x: C): C + def bar(@unbox x: C): C + +abstract class C extends A[C], B2 // error diff --git a/tests/neg/unsound-reach-2.scala b/tests/neg-custom-args/captures/unsound-reach-2.scala similarity index 80% rename from tests/neg/unsound-reach-2.scala rename to tests/neg-custom-args/captures/unsound-reach-2.scala index 27742d72557b..5bea18bdccba 100644 --- a/tests/neg/unsound-reach-2.scala +++ b/tests/neg-custom-args/captures/unsound-reach-2.scala @@ -1,3 +1,5 @@ +//> using options -source 3.4 +// (to make sure we use the sealed policy) import language.experimental.captureChecking trait Consumer[-T]: def apply(x: T): Unit @@ -18,8 +20,8 @@ def bad(): Unit = var escaped: File^{backdoor*} = null withFile("hello.txt"): f => - boom.use(f): // error - new Consumer[File^{backdoor*}]: + boom.use(f): + new Consumer[File^{backdoor*}]: // error def apply(f1: File^{backdoor*}) = escaped = f1 diff --git a/tests/neg/unsound-reach-3.scala b/tests/neg-custom-args/captures/unsound-reach-3.scala similarity index 79% rename from tests/neg/unsound-reach-3.scala rename to tests/neg-custom-args/captures/unsound-reach-3.scala index 71c27fe5007d..0063216e957e 100644 --- a/tests/neg/unsound-reach-3.scala +++ b/tests/neg-custom-args/captures/unsound-reach-3.scala @@ -1,3 +1,5 @@ +//> using options -source 3.4 +// (to make sure we use the sealed policy) import language.experimental.captureChecking trait File: def close(): Unit @@ -14,8 +16,8 @@ def 
bad(): Unit = val boom: Foo[File^{backdoor*}] = backdoor var escaped: File^{backdoor*} = null - withFile("hello.txt"): f => - escaped = boom.use(f) // error + withFile("hello.txt"): f => // error + escaped = boom.use(f) // boom.use: (x: File^) -> File^{backdoor*}, it is a selection so reach capabilities are allowed // f: File^, so there is no reach capabilities diff --git a/tests/neg-custom-args/captures/unsound-reach-4.check b/tests/neg-custom-args/captures/unsound-reach-4.check new file mode 100644 index 000000000000..d359b298555e --- /dev/null +++ b/tests/neg-custom-args/captures/unsound-reach-4.check @@ -0,0 +1,6 @@ +-- Error: tests/neg-custom-args/captures/unsound-reach-4.scala:21:25 --------------------------------------------------- +21 | withFile("hello.txt"): f => // error + | ^ + | Reach capability backdoor* and universal capability cap cannot both + | appear in the type (f: File^) ->{backdoor*} Unit of this expression +22 | escaped = boom.use(f) diff --git a/tests/neg-custom-args/captures/unsound-reach-4.scala b/tests/neg-custom-args/captures/unsound-reach-4.scala new file mode 100644 index 000000000000..bc66085614f2 --- /dev/null +++ b/tests/neg-custom-args/captures/unsound-reach-4.scala @@ -0,0 +1,22 @@ +//> using options -source 3.4 +// (to make sure we use the sealed policy) +import language.experimental.captureChecking +trait File: + def close(): Unit + +def withFile[R](path: String)(op: File^ => R): R = ??? + +type F = File^ + +trait Foo[+X]: + def use(x: F): X +class Bar extends Foo[File^]: + def use(x: F): File^ = x + +def bad(): Unit = + val backdoor: Foo[File^] = new Bar + val boom: Foo[File^{backdoor*}] = backdoor + + var escaped: File^{backdoor*} = null + withFile("hello.txt"): f => // error + escaped = boom.use(f) diff --git a/tests/neg-custom-args/captures/unsound-reach.check b/tests/neg-custom-args/captures/unsound-reach.check new file mode 100644 index 000000000000..4a6793d204c5 --- /dev/null +++ b/tests/neg-custom-args/captures/unsound-reach.check @@ -0,0 +1,12 @@ +-- Error: tests/neg-custom-args/captures/unsound-reach.scala:18:21 ----------------------------------------------------- +18 | boom.use(f): (f1: File^{backdoor*}) => // error + | ^ + | Local reach capability backdoor* leaks into capture scope of method bad +19 | escaped = f1 +-- [E164] Declaration Error: tests/neg-custom-args/captures/unsound-reach.scala:10:8 ----------------------------------- +10 | def use(x: File^)(op: File^ => Unit): Unit = op(x) // error, was OK using sealed checking + | ^ + | error overriding method use in trait Foo of type (x: File^)(op: box File^ => Unit): Unit; + | method use of type (x: File^)(op: File^ => Unit): Unit has incompatible type + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/unsound-reach.scala b/tests/neg-custom-args/captures/unsound-reach.scala new file mode 100644 index 000000000000..c3c31a7f32ff --- /dev/null +++ b/tests/neg-custom-args/captures/unsound-reach.scala @@ -0,0 +1,20 @@ +import language.experimental.captureChecking +trait File: + def close(): Unit + +def withFile[R](path: String)(op: File^ => R): R = ??? 
+ +trait Foo[+X]: + def use(x: File^)(op: X => Unit): Unit +class Bar extends Foo[File^]: + def use(x: File^)(op: File^ => Unit): Unit = op(x) // error, was OK using sealed checking + +def bad(): Unit = + val backdoor: Foo[File^] = new Bar + val boom: Foo[File^{backdoor*}] = backdoor + + var escaped: File^{backdoor*} = null + withFile("hello.txt"): f => + boom.use(f): (f1: File^{backdoor*}) => // error + escaped = f1 + diff --git a/tests/neg-custom-args/captures/usingLogFile.check b/tests/neg-custom-args/captures/usingLogFile.check index bf5c1dc4f83a..068d8be78c70 100644 --- a/tests/neg-custom-args/captures/usingLogFile.check +++ b/tests/neg-custom-args/captures/usingLogFile.check @@ -1,12 +1,12 @@ --- Error: tests/neg-custom-args/captures/usingLogFile.scala:23:14 ------------------------------------------------------ -23 | val later = usingLogFile { f => () => f.write(0) } // error +-- Error: tests/neg-custom-args/captures/usingLogFile.scala:22:14 ------------------------------------------------------ +22 | val later = usingLogFile { f => () => f.write(0) } // error | ^^^^^^^^^^^^ | local reference f leaks into outer capture set of type parameter T of method usingLogFile in object Test2 --- Error: tests/neg-custom-args/captures/usingLogFile.scala:28:23 ------------------------------------------------------ -28 | private val later2 = usingLogFile { f => Cell(() => f.write(0)) } // error +-- Error: tests/neg-custom-args/captures/usingLogFile.scala:27:23 ------------------------------------------------------ +27 | private val later2 = usingLogFile { f => Cell(() => f.write(0)) } // error | ^^^^^^^^^^^^ | local reference f leaks into outer capture set of type parameter T of method usingLogFile in object Test2 --- Error: tests/neg-custom-args/captures/usingLogFile.scala:44:16 ------------------------------------------------------ -44 | val later = usingFile("out", f => (y: Int) => xs.foreach(x => f.write(x + y))) // error +-- Error: tests/neg-custom-args/captures/usingLogFile.scala:43:16 ------------------------------------------------------ +43 | val later = usingFile("out", f => (y: Int) => xs.foreach(x => f.write(x + y))) // error | ^^^^^^^^^ | local reference f leaks into outer capture set of type parameter T of method usingFile in object Test3 diff --git a/tests/neg-custom-args/captures/usingLogFile.scala b/tests/neg-custom-args/captures/usingLogFile.scala index 67e6f841e7ce..2b46a5401f46 100644 --- a/tests/neg-custom-args/captures/usingLogFile.scala +++ b/tests/neg-custom-args/captures/usingLogFile.scala @@ -1,11 +1,10 @@ import java.io.* -import annotation.capability object Test1: - def usingLogFile[T](op: (local: caps.Cap) ?-> FileOutputStream => T): T = + def usingLogFile[T](op: (local: caps.Capability) ?-> FileOutputStream => T): T = val logFile = FileOutputStream("log") - val result = op(logFile) + val result = op(using caps.cap)(logFile) logFile.close() result diff --git a/tests/neg-custom-args/captures/vars-simple.check b/tests/neg-custom-args/captures/vars-simple.check index 2bc014e9a4e7..e9671f775c22 100644 --- a/tests/neg-custom-args/captures/vars-simple.check +++ b/tests/neg-custom-args/captures/vars-simple.check @@ -2,16 +2,17 @@ 15 | a = (g: String => String) // error | ^^^^^^^^^^^^^^^^^^^ | Found: String => String - | Required: String ->{cap1, cap2} String + | Required: box String ->{cap1, cap2} String + | + | Note that String => String cannot be box-converted to box String ->{cap1, cap2} String + | since at least one of their capture sets contains the root capability `cap` 
| | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/vars-simple.scala:16:8 ----------------------------------- +-- Error: tests/neg-custom-args/captures/vars-simple.scala:16:8 -------------------------------------------------------- 16 | a = g // error | ^ - | Found: (x: String) ->{cap3} String - | Required: (x: String) ->{cap1, cap2} String - | - | longer explanation available when compiling with `-explain` + | reference (cap3 : Cap) is not included in the allowed capture set {cap1, cap2} + | of an enclosing function literal with expected type box String ->{cap1, cap2} String -- [E007] Type Mismatch Error: tests/neg-custom-args/captures/vars-simple.scala:17:12 ---------------------------------- 17 | b = List(g) // error | ^^^^^^^ diff --git a/tests/neg-custom-args/captures/vars.check b/tests/neg-custom-args/captures/vars.check index 22d13d8e26e7..0d3c2e0f2e11 100644 --- a/tests/neg-custom-args/captures/vars.check +++ b/tests/neg-custom-args/captures/vars.check @@ -1,28 +1,25 @@ --- Error: tests/neg-custom-args/captures/vars.scala:22:14 -------------------------------------------------------------- -22 | a = x => g(x) // error +-- Error: tests/neg-custom-args/captures/vars.scala:24:14 -------------------------------------------------------------- +24 | a = x => g(x) // error | ^^^^ | reference (cap3 : Cap) is not included in the allowed capture set {cap1} of variable a | | Note that reference (cap3 : Cap), defined in method scope - | cannot be included in outer capture set {cap1} of variable a which is associated with method test --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/vars.scala:23:8 ------------------------------------------ -23 | a = g // error + | cannot be included in outer capture set {cap1} of variable a +-- Error: tests/neg-custom-args/captures/vars.scala:25:8 --------------------------------------------------------------- +25 | a = g // error | ^ - | Found: (x: String) ->{cap3} String - | Required: (x$0: String) ->{cap1} String + | reference (cap3 : Cap) is not included in the allowed capture set {cap1} of variable a | | Note that reference (cap3 : Cap), defined in method scope - | cannot be included in outer capture set {cap1} of variable a which is associated with method test - | - | longer explanation available when compiling with `-explain` --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/vars.scala:25:12 ----------------------------------------- -25 | b = List(g) // error + | cannot be included in outer capture set {cap1} of variable a +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/vars.scala:27:12 ----------------------------------------- +27 | b = List(g) // error | ^^^^^^^ | Found: List[box (x$0: String) ->{cap3} String] | Required: List[box String ->{cap1, cap2} String] | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/vars.scala:34:2 --------------------------------------------------------------- -34 | local { cap3 => // error +-- Error: tests/neg-custom-args/captures/vars.scala:36:2 --------------------------------------------------------------- +36 | local { cap3 => // error | ^^^^^ | local reference cap3 leaks into outer capture set of type parameter T of method local diff --git a/tests/neg-custom-args/captures/vars.scala b/tests/neg-custom-args/captures/vars.scala index ab5a2f43acc7..5eb1e3fedda9 100644 --- a/tests/neg-custom-args/captures/vars.scala +++ 
b/tests/neg-custom-args/captures/vars.scala @@ -1,3 +1,5 @@ +//> using options -source 3.4 +// (to make sure we use the sealed policy) class CC type Cap = CC^ diff --git a/tests/neg-custom-args/captures/widen-reach.check b/tests/neg-custom-args/captures/widen-reach.check new file mode 100644 index 000000000000..06d21ff445d8 --- /dev/null +++ b/tests/neg-custom-args/captures/widen-reach.check @@ -0,0 +1,15 @@ +-- Error: tests/neg-custom-args/captures/widen-reach.scala:13:26 ------------------------------------------------------- +13 | val y2: IO^ -> IO^ = y1.foo // error + | ^^^^^^ + | Local reach capability x* leaks into capture scope of method test +-- Error: tests/neg-custom-args/captures/widen-reach.scala:14:30 ------------------------------------------------------- +14 | val y3: IO^ -> IO^{x*} = y1.foo // error + | ^^^^^^ + | Local reach capability x* leaks into capture scope of method test +-- [E164] Declaration Error: tests/neg-custom-args/captures/widen-reach.scala:9:6 -------------------------------------- +9 | val foo: IO^ -> IO^ = x => x // error + | ^ + | error overriding value foo in trait Foo of type IO^ -> box IO^; + | value foo of type IO^ -> (ex$3: caps.Exists) -> IO^{ex$3} has incompatible type + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/widen-reach.scala b/tests/neg-custom-args/captures/widen-reach.scala new file mode 100644 index 000000000000..fa5eee1232df --- /dev/null +++ b/tests/neg-custom-args/captures/widen-reach.scala @@ -0,0 +1,14 @@ +import language.experimental.captureChecking + +trait IO + +trait Foo[+T]: + val foo: IO^ -> T + +trait Bar extends Foo[IO^]: + val foo: IO^ -> IO^ = x => x // error + +def test(x: Foo[IO^]): Unit = + val y1: Foo[IO^{x*}] = x + val y2: IO^ -> IO^ = y1.foo // error + val y3: IO^ -> IO^{x*} = y1.foo // error \ No newline at end of file diff --git a/tests/neg-macros/BigFloat/BigFloat_1.scala b/tests/neg-macros/BigFloat/BigFloat_1.scala index 5bb5b49587bd..246e3dcd442d 100644 --- a/tests/neg-macros/BigFloat/BigFloat_1.scala +++ b/tests/neg-macros/BigFloat/BigFloat_1.scala @@ -35,7 +35,7 @@ object BigFloat extends App { def fromDigits(digits: String) = apply(digits) } - given BigFloatFromDigits with { + given BigFloatFromDigits { override inline def fromDigits(digits: String) = ${ BigFloatFromDigitsImpl('digits) } @@ -43,7 +43,7 @@ object BigFloat extends App { // Should be in StdLib: - given ToExpr[BigInt] with { + given ToExpr[BigInt] { def apply(x: BigInt)(using Quotes) = '{BigInt(${Expr(x.toString)})} } diff --git a/tests/neg-macros/GenericNumLits/Even_1.scala b/tests/neg-macros/GenericNumLits/Even_1.scala index 24bcf32cc7c0..5772d98d3808 100644 --- a/tests/neg-macros/GenericNumLits/Even_1.scala +++ b/tests/neg-macros/GenericNumLits/Even_1.scala @@ -16,7 +16,7 @@ object Even { def fromDigits(digits: String) = evenFromDigits(digits) } - given EvenFromDigits with { + given EvenFromDigits { override transparent inline def fromDigits(digits: String) = ${ EvenFromDigitsImpl('digits) } diff --git a/tests/neg-macros/annot-crash.check b/tests/neg-macros/annot-crash.check index 16eb0f68bc44..0a5d573d2c0d 100644 --- a/tests/neg-macros/annot-crash.check +++ b/tests/neg-macros/annot-crash.check @@ -2,7 +2,7 @@ -- Error: tests/neg-macros/annot-crash/Test_2.scala:1:0 ---------------------------------------------------------------- 1 |@crash // error |^^^^^^ - |Failed to evaluate macro. + |Failed to evaluate macro annotation '@crash'. 
| Caused by class scala.NotImplementedError: an implementation is missing | scala.Predef$.$qmark$qmark$qmark(Predef.scala:344) | crash.transform(Macro_1.scala:7) diff --git a/tests/neg-macros/i11483/Test_2.scala b/tests/neg-macros/i11483/Test_2.scala index 6fe975168684..e61716615d22 100644 --- a/tests/neg-macros/i11483/Test_2.scala +++ b/tests/neg-macros/i11483/Test_2.scala @@ -3,7 +3,7 @@ package x import scala.language.implicitConversions import scala.concurrent.Future -given FutureAsyncMonad: CpsMonad[Future] with +given FutureAsyncMonad: CpsMonad[Future]: def pure[T](t:T): Future[T] = ??? def impure[T](t:Future[T]): T = ??? def map[A,B](x:Future[A])(f: A=>B): Future[B] = ??? diff --git a/tests/neg-macros/i17152/DFBits.scala b/tests/neg-macros/i17152/DFBits.scala index dd0e8b88a962..a88485b56dc6 100644 --- a/tests/neg-macros/i17152/DFBits.scala +++ b/tests/neg-macros/i17152/DFBits.scala @@ -16,7 +16,7 @@ trait Baz trait Width[T]: type Out <: Int object Width: - given fromDFBoolOrBit[T <: DFBoolOrBit]: Width[T] with + given fromDFBoolOrBit: [T <: DFBoolOrBit] => Width[T]: type Out = 1 transparent inline given [T]: Width[T] = ${ getWidthMacro[T] } def getWidthMacro[T](using Quotes, Type[T]): Expr[Width[T]] = @@ -38,7 +38,7 @@ private object CompanionsDFBits: type OutW <: Int def apply(value: R): DFValOf[DFBits[OutW]] object Candidate: - given fromDFUInt[W <: Int, R <: DFValOf[DFDecimal]]: Candidate[R] with + given fromDFUInt: [W <: Int, R <: DFValOf[DFDecimal]] => Candidate[R]: type OutW = W def apply(value: R): DFValOf[DFBits[W]] = import DFVal.Ops.bits diff --git a/tests/neg-macros/i19601/Macro.scala b/tests/neg-macros/i19601/Macro.scala index 8d6d22005017..06260ab8c981 100644 --- a/tests/neg-macros/i19601/Macro.scala +++ b/tests/neg-macros/i19601/Macro.scala @@ -10,7 +10,7 @@ object Macros { '{ () } } - given [A](using Type[A]): FromExpr[Assertion[A]] with { + given [A] => Type[A] => FromExpr[Assertion[A]] { def unapply(assertion: Expr[Assertion[A]])(using Quotes): Option[Assertion[A]] = { import quotes.reflect.* diff --git a/tests/neg-macros/i7919.scala b/tests/neg-macros/i7919.scala index e68965fc614f..74863282e09a 100644 --- a/tests/neg-macros/i7919.scala +++ b/tests/neg-macros/i7919.scala @@ -3,16 +3,16 @@ import scala.quoted.* object Test { def staged[T](using Quotes) = { import quotes.reflect.* - given typeT: Type[T] with {} // error + given typeT: Type[T] {} // error val tt = TypeRepr.of[T] '{ "in staged" } } - given Expr[Int] with {} // error + given Expr[Int] {} // error new Expr[Int] // error class Expr2 extends Expr[Int] // error - given Type[Int] with {} // error + given Type[Int] {} // error new Type[Int] // error class Type2 extends Type[Int] // error diff --git a/tests/neg-macros/quote-sym-newtype/Macro_1.scala b/tests/neg-macros/quote-sym-newtype/Macro_1.scala new file mode 100644 index 000000000000..953be0d5497b --- /dev/null +++ b/tests/neg-macros/quote-sym-newtype/Macro_1.scala @@ -0,0 +1,47 @@ +//> using options -experimental -Yno-experimental +import scala.quoted.* + +inline def testConflictingBounds = ${ testConflictingBoundsImpl } +inline def testConflictingBoundsWithTypeLambda = ${ testConflictingBoundsWithTypeLambdaImpl } + +transparent inline def transparentTestConflictingBounds = ${ testConflictingBoundsImpl } +transparent inline def transparentTestConflictingBoundsWithTypeLambda = ${ testConflictingBoundsWithTypeLambdaImpl } + + +def testConflictingBoundsImpl(using Quotes): Expr[Object] = { + import quotes.reflect.* + + def makeType(owner: Symbol): Symbol = 
+ // type Foo >: Int <: String + Symbol.newBoundedType( + owner, + "Foo", + Flags.EmptyFlags, + TypeBounds(TypeRepr.of[Int], TypeRepr.of[String]), + Symbol.noSymbol + ) + makeClass(makeType) +} + +def testConflictingBoundsWithTypeLambdaImpl(using Quotes): Expr[Object] = { + import quotes.reflect.* + def makeType(owner: Symbol): Symbol = + // type Foo >: [X] =>> Int <: Any + Symbol.newBoundedType( + owner, + "Foo", + Flags.EmptyFlags, + TypeBounds(TypeLambda.apply(List("X"), _ => List(TypeBounds.empty), _ => TypeRepr.of[Int]), TypeRepr.of[Any]), + Symbol.noSymbol + ) + makeClass(makeType) +} + +def makeClass(using quotes: Quotes)(typeCons: quotes.reflect.Symbol => quotes.reflect.Symbol) = { + import quotes.reflect.* + val clsSymbol = Symbol.newClass(Symbol.spliceOwner, "CLS", List(TypeRepr.of[Object]), sym => List(typeCons(sym)), None) + val classDef: ClassDef = ClassDef(clsSymbol, List(TypeTree.of[Object]), List(TypeDef(clsSymbol.typeMember("Foo")))) + + Block(List(classDef), Apply(Select(New(TypeIdent(clsSymbol)), clsSymbol.primaryConstructor), List.empty)).asExprOf[Object] +} + diff --git a/tests/neg-macros/quote-sym-newtype/Test_2.scala b/tests/neg-macros/quote-sym-newtype/Test_2.scala new file mode 100644 index 000000000000..60fef3cb7322 --- /dev/null +++ b/tests/neg-macros/quote-sym-newtype/Test_2.scala @@ -0,0 +1,6 @@ +//> using options -experimental -Yno-experimental +def test = + transparentTestConflictingBounds // error + transparentTestConflictingBoundsWithTypeLambda // error + // testConflictingBounds // should throw an error here also, to be implemented before stabilisation + // testConflictingBoundsWithTypeLambda // should throw an error here also, to be implemented before stabilisation diff --git a/tests/neg-macros/quoted-pattern-with-bounded-type-params-regression.check b/tests/neg-macros/quoted-pattern-with-bounded-type-params-regression.check new file mode 100644 index 000000000000..860482f2e552 --- /dev/null +++ b/tests/neg-macros/quoted-pattern-with-bounded-type-params-regression.check @@ -0,0 +1,6 @@ +-- Error: tests/neg-macros/quoted-pattern-with-bounded-type-params-regression.scala:11:48 ------------------------------ +11 | case '{ [A <: Int, B] => (x : A, y : A) => $b[A](x, y) : A } => ??? // error + | ^ + | Type must be fully defined. + | Consider annotating the splice using a type ascription: + | (${b}: XYZ). diff --git a/tests/neg-macros/quoted-pattern-with-bounded-type-params-regression.scala b/tests/neg-macros/quoted-pattern-with-bounded-type-params-regression.scala new file mode 100644 index 000000000000..6797ae926367 --- /dev/null +++ b/tests/neg-macros/quoted-pattern-with-bounded-type-params-regression.scala @@ -0,0 +1,12 @@ +/** + * Supporting hoas quote pattern with bounded type variable + * is future todo. + * Refer to: quoted-pattern-with-bounded-type-params.scala + */ + +import scala.quoted.* + +def test(body: Expr[Any])(using Quotes): Expr[String] = + body match + case '{ [A <: Int, B] => (x : A, y : A) => $b[A](x, y) : A } => ??? // error + case _ => Expr("not matched") diff --git a/tests/neg-macros/quoted-pattern-with-bounded-type-params.check b/tests/neg-macros/quoted-pattern-with-bounded-type-params.check new file mode 100644 index 000000000000..0e787377bfc5 --- /dev/null +++ b/tests/neg-macros/quoted-pattern-with-bounded-type-params.check @@ -0,0 +1,4 @@ +-- Error: tests/neg-macros/quoted-pattern-with-bounded-type-params.scala:11:50 ----------------------------------------- +11 | case '{ [A <: Int, B] => (x : A, y : A) => $b[A](x, y) : A } => ??? 
// error + | ^ + | Implementation restriction: Type arguments to Open pattern are expected to have no bounds diff --git a/tests/neg-macros/quoted-pattern-with-bounded-type-params.scala b/tests/neg-macros/quoted-pattern-with-bounded-type-params.scala new file mode 100644 index 000000000000..567efa9ee35d --- /dev/null +++ b/tests/neg-macros/quoted-pattern-with-bounded-type-params.scala @@ -0,0 +1,12 @@ +/* + * Supporting hoas quote pattern with bounded type variable + * is future todo. + */ + +import scala.quoted.* +import scala.language.experimental.quotedPatternsWithPolymorphicFunctions + +def test(body: Expr[Any])(using Quotes): Expr[String] = + body match + case '{ [A <: Int, B] => (x : A, y : A) => $b[A](x, y) : A } => ??? // error + case _ => Expr("not matched") diff --git a/tests/neg-macros/quoted-pattern-with-type-params-regression.check b/tests/neg-macros/quoted-pattern-with-type-params-regression.check new file mode 100644 index 000000000000..543c119b3d33 --- /dev/null +++ b/tests/neg-macros/quoted-pattern-with-type-params-regression.check @@ -0,0 +1,16 @@ +-- Error: tests/neg-macros/quoted-pattern-with-type-params-regression.scala:8:31 --------------------------------------- +8 | case '{ [A] => (x : A) => $b[A] : (A => A) } => ??? // error + | ^ + | Type must be fully defined. + | Consider annotating the splice using a type ascription: + | (${b}: XYZ). +-- Error: tests/neg-macros/quoted-pattern-with-type-params-regression.scala:9:33 --------------------------------------- +9 | case '{ [A] => (x : A) => $b(x) : (A => A) } => ??? // error + | ^ + | Type variables that this argument depends on are not captured in this hoas pattern +-- Error: tests/neg-macros/quoted-pattern-with-type-params-regression.scala:10:24 -------------------------------------- +10 | case '{ (a:Int) => $b[Int](a) : String } => ??? // error + | ^ + | Type must be fully defined. + | Consider annotating the splice using a type ascription: + | (${b}: XYZ). diff --git a/tests/neg-macros/quoted-pattern-with-type-params-regression.scala b/tests/neg-macros/quoted-pattern-with-type-params-regression.scala new file mode 100644 index 000000000000..aa2489bc440b --- /dev/null +++ b/tests/neg-macros/quoted-pattern-with-type-params-regression.scala @@ -0,0 +1,11 @@ +/** + * Refer to: quoted-pattern-with-type-params.scala + */ +import scala.quoted.* + +def test(body: Expr[Any])(using Quotes): Expr[String] = + body match + case '{ [A] => (x : A) => $b[A] : (A => A) } => ??? // error + case '{ [A] => (x : A) => $b(x) : (A => A) } => ??? // error + case '{ (a:Int) => $b[Int](a) : String } => ??? // error + case _ => Expr("not matched") diff --git a/tests/neg-macros/quoted-pattern-with-type-params.check b/tests/neg-macros/quoted-pattern-with-type-params.check new file mode 100644 index 000000000000..37e8f611d5a9 --- /dev/null +++ b/tests/neg-macros/quoted-pattern-with-type-params.check @@ -0,0 +1,12 @@ +-- Error: tests/neg-macros/quoted-pattern-with-type-params.scala:6:32 -------------------------------------------------- +6 | case '{ [A] => (x : A) => $b[A] : (A => A) } => ??? // error + | ^^^^^ + | Implementation restriction: A higher-order pattern must carry value arguments +-- Error: tests/neg-macros/quoted-pattern-with-type-params.scala:7:33 -------------------------------------------------- +7 | case '{ [A] => (x : A) => $b(x) : (A => A) } => ??? 
// error + | ^ + | Type variables that this argument depends on are not captured in this hoas pattern +-- Error: tests/neg-macros/quoted-pattern-with-type-params.scala:8:26 -------------------------------------------------- +8 | case '{ (a:Int) => $b[Int](a) : String } => ??? // error + | ^^^ + | Type arguments of a hoas pattern needs to be defined inside the quoted pattern diff --git a/tests/neg-macros/quoted-pattern-with-type-params.scala b/tests/neg-macros/quoted-pattern-with-type-params.scala new file mode 100644 index 000000000000..2e4a059ee23a --- /dev/null +++ b/tests/neg-macros/quoted-pattern-with-type-params.scala @@ -0,0 +1,9 @@ +import scala.quoted.* +import scala.language.experimental.quotedPatternsWithPolymorphicFunctions + +def test(body: Expr[Any])(using Quotes): Expr[String] = + body match + case '{ [A] => (x : A) => $b[A] : (A => A) } => ??? // error + case '{ [A] => (x : A) => $b(x) : (A => A) } => ??? // error + case '{ (a:Int) => $b[Int](a) : String } => ??? // error + case _ => Expr("not matched") diff --git a/tests/neg-macros/tasty-macro-error/quoted_1.scala b/tests/neg-macros/tasty-macro-error/quoted_1.scala index b395ec4c240b..8a4c45e46c89 100644 --- a/tests/neg-macros/tasty-macro-error/quoted_1.scala +++ b/tests/neg-macros/tasty-macro-error/quoted_1.scala @@ -6,7 +6,7 @@ object Macros { def impl(x: Expr[Any])(using Quotes) : Expr[Unit] = { import quotes.reflect.* - report.error("here is the the argument is " + x.asTerm.underlyingArgument.show, x.asTerm.underlyingArgument.pos) + report.error("here is the argument is " + x.asTerm.underlyingArgument.show, x.asTerm.underlyingArgument.pos) '{} } diff --git a/tests/neg-macros/tasty-macro-positions/quoted_1.scala b/tests/neg-macros/tasty-macro-positions/quoted_1.scala index b77373baa21c..a64e575a8d4d 100644 --- a/tests/neg-macros/tasty-macro-positions/quoted_1.scala +++ b/tests/neg-macros/tasty-macro-positions/quoted_1.scala @@ -7,8 +7,8 @@ object Macros { def impl(x: Expr[Any])(using Quotes) : Expr[Unit] = { import quotes.reflect.* val pos = x.asTerm.underlyingArgument.pos - report.error("here is the the argument is " + x.asTerm.underlyingArgument.show, pos) - report.error("here (+5) is the the argument is " + x.asTerm.underlyingArgument.show, Position(pos.sourceFile, pos.start + 5, pos.end + 5)) + report.error("here is the argument is " + x.asTerm.underlyingArgument.show, pos) + report.error("here (+5) is the argument is " + x.asTerm.underlyingArgument.show, Position(pos.sourceFile, pos.start + 5, pos.end + 5)) '{} } diff --git a/tests/neg-with-compiler/GenericNumLits/Even_1.scala b/tests/neg-with-compiler/GenericNumLits/Even_1.scala index 0867150dd944..7f5824b30957 100644 --- a/tests/neg-with-compiler/GenericNumLits/Even_1.scala +++ b/tests/neg-with-compiler/GenericNumLits/Even_1.scala @@ -16,7 +16,7 @@ object Even { def fromDigits(digits: String) = evenFromDigits(digits) } - given EvenFromDigits with { + given EvenFromDigits { override inline def fromDigits(digits: String) = ${ EvenFromDigitsImpl('digits) } diff --git a/tests/neg/17579.check b/tests/neg/17579.check new file mode 100644 index 000000000000..24b7d354dcb6 --- /dev/null +++ b/tests/neg/17579.check @@ -0,0 +1,30 @@ +-- [E200] Syntax Error: tests/neg/17579.scala:5:10 --------------------------------------------------------------------- +5 | final val v1 = 42 // error: final modifier is not allowed on local definitions + | ^^^ + | The final modifier is not allowed on local definitions +-- [E200] Syntax Error: tests/neg/17579.scala:6:15 
--------------------------------------------------------------------- +6 | final lazy val v2 = 42 // error: final modifier is not allowed on local definitions + | ^^^ + | The final modifier is not allowed on local definitions +-- [E200] Syntax Error: tests/neg/17579.scala:7:10 --------------------------------------------------------------------- +7 | final def v4 = 42 // error: final modifier is not allowed on local definitions + | ^^^ + | The final modifier is not allowed on local definitions +-- [E200] Syntax Error: tests/neg/17579.scala:8:10 --------------------------------------------------------------------- +8 | final var v5 = 42 // error: final modifier is not allowed on local definitions + | ^^^ + | The final modifier is not allowed on local definitions +-- [E200] Syntax Error: tests/neg/17579.scala:9:10 --------------------------------------------------------------------- +9 | final type Foo = String // error: final modifier is not allowed on local definitions + | ^^^^ + | The final modifier is not allowed on local definitions +-- [E088] Syntax Error: tests/neg/17579.scala:14:10 -------------------------------------------------------------------- +14 | final private val v3 = 42 // error: expected start of definition + | ^^^^^^^ + | Expected start of definition + | + | longer explanation available when compiling with `-explain` +-- [E147] Syntax Warning: tests/neg/17579.scala:19:6 ------------------------------------------------------------------- +19 | final given Object() // warning: modifier `final` is redundant for this definition + | ^^^^^ + | Modifier final is redundant for this definition diff --git a/tests/neg/17579.scala b/tests/neg/17579.scala new file mode 100644 index 000000000000..0ffd20d4b267 --- /dev/null +++ b/tests/neg/17579.scala @@ -0,0 +1,26 @@ +class C: + final var v = 42 // ok + + def f = + final val v1 = 42 // error: final modifier is not allowed on local definitions + final lazy val v2 = 42 // error: final modifier is not allowed on local definitions + final def v4 = 42 // error: final modifier is not allowed on local definitions + final var v5 = 42 // error: final modifier is not allowed on local definitions + final type Foo = String // error: final modifier is not allowed on local definitions + + // We get a different error message here because `private` is also not a + // local modifier token. In the future, we could always parse all tokens and + // then flag those that are not legal at this point. + final private val v3 = 42 // error: expected start of definition + + { + // No error in this case, because the `given` is translated to a class + // definition, for which `final` is redundant but not illegal. + final given Object() // warning: modifier `final` is redundant for this definition + } + + { + // Also no error in this case, because we can't easily distinguish it from + // the previous case. + final given Object = new Object {} + } diff --git a/tests/neg/19414-desugared.check b/tests/neg/19414-desugared.check index c21806e16c2c..cc51ee471553 100644 --- a/tests/neg/19414-desugared.check +++ b/tests/neg/19414-desugared.check @@ -8,7 +8,6 @@ | writer = | /* ambiguous: both given instance given_Writer_JsValue and given instance given_Writer_JsObject match type Writer[B] */ | summon[Writer[B]] - | , - | this.given_BodySerializer_B$default$2[B]) + | ) | |But both given instance given_Writer_JsValue and given instance given_Writer_JsObject match type Writer[B]. 
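In the two 19414 check files (above and just below), the printed summoning in the ambiguity error no longer passes the synthesized `given_BodySerializer_B$default$2[B]` argument; the accompanying source change (below) rewrites the given from a `using` clause with a defaulted `Printer` parameter to the conditional-given syntax. A before/after sketch of just that declaration (not part of the patch; the surrounding types are stubs):

trait Writer[T]
trait BodySerializer[B]
class Printer

// previous form: context parameters in a `using` clause, one of them defaulted
// given [B: Writer](using printer: Printer = new Printer): BodySerializer[B] = ???

// conditional-given form used by the updated test
given [B: Writer] => (printer: Printer = new Printer) => BodySerializer[B] = ???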
diff --git a/tests/neg/19414.check b/tests/neg/19414.check index 6804546df037..016e3942c825 100644 --- a/tests/neg/19414.check +++ b/tests/neg/19414.check @@ -8,7 +8,6 @@ | evidence$1 = | /* ambiguous: both given instance given_Writer_JsValue and given instance given_Writer_JsObject match type Writer[B] */ | summon[Writer[B]] - | , - | this.given_BodySerializer_B$default$2[B]) + | ) | |But both given instance given_Writer_JsValue and given instance given_Writer_JsObject match type Writer[B]. diff --git a/tests/neg/19414.scala b/tests/neg/19414.scala index bb275ad943b7..8843441e81f2 100644 --- a/tests/neg/19414.scala +++ b/tests/neg/19414.scala @@ -9,7 +9,7 @@ class Printer given Writer[JsValue] = ??? given Writer[JsObject] = ??? -given [B: Writer](using printer: Printer = new Printer): BodySerializer[B] = ??? +given [B: Writer] => (printer: Printer = new Printer) => BodySerializer[B] = ??? def f: Unit = summon[BodySerializer[JsObject]] // error: Ambiguous given instances diff --git a/tests/neg/21538.check b/tests/neg/21538.check new file mode 100644 index 000000000000..e0bcb43f9356 --- /dev/null +++ b/tests/neg/21538.check @@ -0,0 +1,11 @@ +-- [E083] Type Error: tests/neg/21538.scala:3:45 ----------------------------------------------------------------------- +3 |inline def foo[V](inline value: V)(using Bar[value.type]) : Unit = {} // error + | ^^^^^^^^^^ + | (value : V) is not a valid singleton type, since it is not an immutable path + | Inline parameters are not considered immutable paths and cannot be used as + | singleton types. + | + | Hint: Removing the `inline` qualifier from the `value` parameter + | may help resolve this issue. + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/21538.scala b/tests/neg/21538.scala new file mode 100644 index 000000000000..66500277159e --- /dev/null +++ b/tests/neg/21538.scala @@ -0,0 +1,3 @@ +trait Bar[T] +given [T] => Bar[T]() +inline def foo[V](inline value: V)(using Bar[value.type]) : Unit = {} // error \ No newline at end of file diff --git a/tests/neg/abstract-givens.check b/tests/neg/abstract-givens.check index 022c454c31f1..1430c5b6e950 100644 --- a/tests/neg/abstract-givens.check +++ b/tests/neg/abstract-givens.check @@ -1,5 +1,5 @@ -- Error: tests/neg/abstract-givens.scala:11:8 ------------------------------------------------------------------------- -11 | given s[T](using T): Seq[T] with // error +11 | given s: [T] => T => Seq[T]: // error | ^ |instance cannot be created, since def iterator: Iterator[A] in trait IterableOnce in package scala.collection is not defined -- [E164] Declaration Error: tests/neg/abstract-givens.scala:8:8 ------------------------------------------------------- diff --git a/tests/neg/abstract-givens.scala b/tests/neg/abstract-givens.scala index 5aa5bdee88e3..dbd4a7a85927 100644 --- a/tests/neg/abstract-givens.scala +++ b/tests/neg/abstract-givens.scala @@ -8,7 +8,7 @@ object Test extends T: given y(using Int): String = summon[Int].toString * 22 // error given z[T](using T): Seq[T] = List(summon[T]) // error - given s[T](using T): Seq[T] with // error + given s: [T] => T => Seq[T]: // error def apply(x: Int) = ??? override def length = ??? 
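Several of the hunks above migrate tests from the old `given ... with` syntax to the new given syntax, in which type parameters and conditions are written with `=>` and the instance members follow a trailing `:` (for example `given s: [T] => T => Seq[T]:` in abstract-givens.scala and `given [B: Writer] => (printer: Printer = new Printer) => BodySerializer[B] = ???` in 19414.scala). A minimal standalone sketch of the conditional form these tests now use; Show and listShow are illustrative names and are not part of this patch:

  trait Show[T]:
    def show(x: T): String

  // Conditional given in the new syntax: for any T that has a Show[T],
  // provide a Show[List[T]]. The condition is written with `=>`, and the
  // members follow the trailing `:` instead of the old `with`.
  given listShow: [T] => Show[T] => Show[List[T]]:
    def show(xs: List[T]): String =
      xs.map(summon[Show[T]].show).mkString("[", ", ", "]")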
diff --git a/tests/neg/cb-companion-leaks.scala b/tests/neg/cb-companion-leaks.scala index 07155edb05dc..d9d8f3d6d19b 100644 --- a/tests/neg/cb-companion-leaks.scala +++ b/tests/neg/cb-companion-leaks.scala @@ -1,4 +1,4 @@ -//> using options -language:experimental.modularity -source future -explain +//> using options -language:experimental.modularity -explain class C[Self] diff --git a/tests/neg/cc-ex-conformance.scala b/tests/neg/cc-ex-conformance.scala new file mode 100644 index 000000000000..a953466daa9a --- /dev/null +++ b/tests/neg/cc-ex-conformance.scala @@ -0,0 +1,25 @@ +import language.experimental.captureChecking +import caps.{Exists, Capability} + +class C + +type EX1 = () => (c: Exists) => (C^{c}, C^{c}) + +type EX2 = () => (c1: Exists) => (c2: Exists) => (C^{c1}, C^{c2}) + +type EX3 = () => (c: Exists) => (x: Object^) => C^{c} + +type EX4 = () => (x: Object^) => (c: Exists) => C^{c} + +def Test = + val ex1: EX1 = ??? + val ex2: EX2 = ??? + val _: EX1 = ex1 + val _: EX2 = ex1 // ok + val _: EX1 = ex2 // ok + + val ex3: EX3 = ??? + val ex4: EX4 = ??? + val _: EX4 = ex3 // ok + val _: EX4 = ex4 + val _: EX3 = ex4 // error diff --git a/tests/neg/cc-poly-1.check b/tests/neg/cc-poly-1.check new file mode 100644 index 000000000000..abb507078bf4 --- /dev/null +++ b/tests/neg/cc-poly-1.check @@ -0,0 +1,12 @@ +-- [E057] Type Mismatch Error: tests/neg/cc-poly-1.scala:12:6 ---------------------------------------------------------- +12 | f[Any](D()) // error + | ^ + | Type argument Any does not conform to upper bound caps.CapSet^ + | + | longer explanation available when compiling with `-explain` +-- [E057] Type Mismatch Error: tests/neg/cc-poly-1.scala:13:6 ---------------------------------------------------------- +13 | f[String](D()) // error + | ^ + | Type argument String does not conform to upper bound caps.CapSet^ + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/cc-poly-1.scala b/tests/neg/cc-poly-1.scala new file mode 100644 index 000000000000..580b124bc8f3 --- /dev/null +++ b/tests/neg/cc-poly-1.scala @@ -0,0 +1,13 @@ +import language.experimental.captureChecking +import caps.{CapSet, Capability} + +object Test: + + class C extends Capability + class D + + def f[X^](x: D^{X^}): D^{X^} = x + + def test(c1: C, c2: C) = + f[Any](D()) // error + f[String](D()) // error diff --git a/tests/neg/cc-poly-2.check b/tests/neg/cc-poly-2.check new file mode 100644 index 000000000000..0615ce19b5ea --- /dev/null +++ b/tests/neg/cc-poly-2.check @@ -0,0 +1,21 @@ +-- [E007] Type Mismatch Error: tests/neg/cc-poly-2.scala:13:15 --------------------------------------------------------- +13 | f[Nothing](d) // error + | ^ + | Found: (d : Test.D^) + | Required: Test.D + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/cc-poly-2.scala:14:19 --------------------------------------------------------- +14 | f[CapSet^{c1}](d) // error + | ^ + | Found: (d : Test.D^) + | Required: Test.D^{c1} + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/cc-poly-2.scala:16:20 --------------------------------------------------------- +16 | val _: D^{c1} = x // error + | ^ + | Found: (x : Test.D^{d}) + | Required: Test.D^{c1} + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/cc-poly-2.scala b/tests/neg/cc-poly-2.scala new file mode 100644 index 000000000000..c5e5df6540da --- /dev/null +++ b/tests/neg/cc-poly-2.scala @@ -0,0 +1,16 
@@ +import language.experimental.captureChecking +import caps.{CapSet, Capability} + +object Test: + + class C extends Capability + class D + + def f[X^](x: D^{X^}): D^{X^} = x + + def test(c1: C, c2: C) = + val d: D^ = D() + f[Nothing](d) // error + f[CapSet^{c1}](d) // error + val x = f(d) + val _: D^{c1} = x // error diff --git a/tests/neg/context-bounds-migration-future.check b/tests/neg/context-bounds-migration-future.check index f56da5d6b28d..f517a1e335c9 100644 --- a/tests/neg/context-bounds-migration-future.check +++ b/tests/neg/context-bounds-migration-future.check @@ -4,3 +4,7 @@ | method foo does not take more parameters | | longer explanation available when compiling with `-explain` +-- Warning: tests/neg/context-bounds-migration-future.scala:6:6 -------------------------------------------------------- +6 |given [T]: C[T] = C[T]() + | ^ + | This old given syntax is no longer supported; use `=>` instead of `:` diff --git a/tests/neg/ctx-bounds-priority-migration.scala b/tests/neg/ctx-bounds-priority-migration.scala new file mode 100644 index 000000000000..8fc819c1e089 --- /dev/null +++ b/tests/neg/ctx-bounds-priority-migration.scala @@ -0,0 +1,13 @@ +//> using options -source 3.5 +trait Eq[A] +trait Order[A] extends Eq[A]: + def toOrdering: Ordering[A] + +def f[Element: Eq: Order] = summon[Eq[Element]].toOrdering // ok + +def Test() = + val eq: Eq[Int] = ??? + val ord: Order[Int] = ??? + f(eq, ord) // error + f(using eq, ord) // ok + diff --git a/tests/neg/ctx-bounds-priority.scala b/tests/neg/ctx-bounds-priority.scala new file mode 100644 index 000000000000..023a3273d586 --- /dev/null +++ b/tests/neg/ctx-bounds-priority.scala @@ -0,0 +1,6 @@ +//> using options -source 3.7 +trait Eq[A] +trait Order[A] extends Eq[A]: + def toOrdering: Ordering[A] + +def Test[Element: Eq: Order] = summon[Eq[Element]].toOrdering // error diff --git a/tests/neg/deferred-givens-2.scala b/tests/neg/deferred-givens-2.scala index 4e75ceb08728..9a95271a4f46 100644 --- a/tests/neg/deferred-givens-2.scala +++ b/tests/neg/deferred-givens-2.scala @@ -12,7 +12,7 @@ object Scoped: class SortedIntCorrect2 extends Sorted: type Element = Int - override given (Int is Ord)() as given_Ord_Element + override given given_Ord_Element: (Int is Ord)() class SortedIntWrong1 extends Sorted: // error type Element = Int diff --git a/tests/neg/deferred-givens.scala b/tests/neg/deferred-givens.scala index 7ff67d784714..4de79120a1c7 100644 --- a/tests/neg/deferred-givens.scala +++ b/tests/neg/deferred-givens.scala @@ -1,11 +1,11 @@ -//> using options -language:experimental.modularity -source future + import compiletime.deferred class Ctx class Ctx2 trait A: - given Ctx as ctx = deferred + given ctx: Ctx = deferred given Ctx2 = deferred class B extends A // error @@ -13,7 +13,7 @@ class B extends A // error abstract class C extends A // error class D extends A: - given Ctx as ctx = Ctx() // ok, was implemented + given ctx: Ctx = Ctx() // ok, was implemented given Ctx2 = Ctx2() // ok class Ctx3[T] diff --git a/tests/neg/deferredSummon.scala b/tests/neg/deferredSummon.scala index cddde82535fb..39a775cf78bf 100644 --- a/tests/neg/deferredSummon.scala +++ b/tests/neg/deferredSummon.scala @@ -1,4 +1,4 @@ -//> using options -language:experimental.modularity + object Test: given Int = compiletime.deferred // error diff --git a/tests/neg/empty-given.scala b/tests/neg/empty-given.scala index 10daf5ac009a..cf7566724cc2 100644 --- a/tests/neg/empty-given.scala +++ b/tests/neg/empty-given.scala @@ -1,3 +1,3 @@ -given { // error +given { def 
foo = 1 // error -} // error \ No newline at end of file +} \ No newline at end of file diff --git a/tests/neg/eql.scala b/tests/neg/eql.scala index 58378800bbc5..40ec1fb5d9ed 100644 --- a/tests/neg/eql.scala +++ b/tests/neg/eql.scala @@ -1,7 +1,7 @@ object lst: opaque type Lst[+T] = Any object Lst: - given lstCanEqual[T, U]: CanEqual[Lst[T], Lst[U]] = CanEqual.derived + given lstCanEqual: [T, U] => CanEqual[Lst[T], Lst[U]] = CanEqual.derived val Empty: Lst[Nothing] = ??? end lst diff --git a/tests/neg/existential-mapping.check b/tests/neg/existential-mapping.check new file mode 100644 index 000000000000..edfce67f6eef --- /dev/null +++ b/tests/neg/existential-mapping.check @@ -0,0 +1,88 @@ +-- Error: tests/neg/existential-mapping.scala:44:13 -------------------------------------------------------------------- +44 | val z1: A^ => Array[C^] = ??? // error + | ^^^^^^^^^^^^^^^ + | Array[box C^] captures the root capability `cap` in invariant position +-- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:9:25 ------------------------------------------------ +9 | val _: (x: C^) -> C = x1 // error + | ^^ + | Found: (x1 : (x: C^) -> (ex$3: caps.Exists) -> C^{ex$3}) + | Required: (x: C^) -> C + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:12:20 ----------------------------------------------- +12 | val _: C^ -> C = x2 // error + | ^^ + | Found: (x2 : C^ -> (ex$7: caps.Exists) -> C^{ex$7}) + | Required: C^ -> C + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:15:30 ----------------------------------------------- +15 | val _: A^ -> (x: C^) -> C = x3 // error + | ^^ + | Found: (x3 : A^ -> (x: C^) -> (ex$11: caps.Exists) -> C^{ex$11}) + | Required: A^ -> (x: C^) -> C + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:18:25 ----------------------------------------------- +18 | val _: A^ -> C^ -> C = x4 // error + | ^^ + | Found: (x4 : A^ -> C^ -> (ex$19: caps.Exists) -> C^{ex$19}) + | Required: A^ -> C^ -> C + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:21:30 ----------------------------------------------- +21 | val _: A^ -> (x: C^) -> C = x5 // error + | ^^ + | Found: (x5 : A^ -> (ex$27: caps.Exists) -> Fun[C^{ex$27}]) + | Required: A^ -> (x: C^) -> C + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:24:30 ----------------------------------------------- +24 | val _: A^ -> (x: C^) => C = x6 // error + | ^^ + | Found: (x6 : A^ -> (ex$33: caps.Exists) -> IFun[C^{ex$33}]) + | Required: A^ -> (ex$36: caps.Exists) -> (x: C^) ->{ex$36} C + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:27:25 ----------------------------------------------- +27 | val _: (x: C^) => C = y1 // error + | ^^ + | Found: (y1 : (x: C^) => (ex$38: caps.Exists) -> C^{ex$38}) + | Required: (x: C^) => C + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:30:20 ----------------------------------------------- +30 | val _: C^ => C = y2 // error + | ^^ + | Found: (y2 : C^ => (ex$42: caps.Exists) -> C^{ex$42}) + | Required: C^ => C + | 
+ | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:33:30 ----------------------------------------------- +33 | val _: A^ => (x: C^) => C = y3 // error + | ^^ + | Found: (y3 : A^ => (ex$47: caps.Exists) -> (x: C^) ->{ex$47} (ex$46: caps.Exists) -> C^{ex$46}) + | Required: A^ => (ex$50: caps.Exists) -> (x: C^) ->{ex$50} C + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:36:25 ----------------------------------------------- +36 | val _: A^ => C^ => C = y4 // error + | ^^ + | Found: (y4 : A^ => (ex$53: caps.Exists) -> C^ ->{ex$53} (ex$52: caps.Exists) -> C^{ex$52}) + | Required: A^ => (ex$56: caps.Exists) -> C^ ->{ex$56} C + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:39:30 ----------------------------------------------- +39 | val _: A^ => (x: C^) -> C = y5 // error + | ^^ + | Found: (y5 : A^ => (ex$58: caps.Exists) -> Fun[C^{ex$58}]) + | Required: A^ => (x: C^) -> C + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/existential-mapping.scala:42:30 ----------------------------------------------- +42 | val _: A^ => (x: C^) => C = y6 // error + | ^^ + | Found: (y6 : A^ => (ex$64: caps.Exists) -> IFun[C^{ex$64}]) + | Required: A^ => (ex$67: caps.Exists) -> (x: C^) ->{ex$67} C + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/existential-mapping.scala b/tests/neg/existential-mapping.scala new file mode 100644 index 000000000000..290f7dc767a6 --- /dev/null +++ b/tests/neg/existential-mapping.scala @@ -0,0 +1,46 @@ +import language.experimental.captureChecking + +class A +class C +type Fun[X] = (x: C^) -> X +type IFun[X] = (x: C^) => X +def Test = + val x1: (x: C^) -> C^ = ??? + val _: (x: C^) -> C = x1 // error + + val x2: C^ -> C^ = ??? + val _: C^ -> C = x2 // error + + val x3: A^ -> (x: C^) -> C^ = ??? + val _: A^ -> (x: C^) -> C = x3 // error + + val x4: A^ -> C^ -> C^ = ??? + val _: A^ -> C^ -> C = x4 // error + + val x5: A^ -> Fun[C^] = ??? + val _: A^ -> (x: C^) -> C = x5 // error + + val x6: A^ -> IFun[C^] = ??? + val _: A^ -> (x: C^) => C = x6 // error + + val y1: (x: C^) => C^ = ??? + val _: (x: C^) => C = y1 // error + + val y2: C^ => C^ = ??? + val _: C^ => C = y2 // error + + val y3: A^ => (x: C^) => C^ = ??? + val _: A^ => (x: C^) => C = y3 // error + + val y4: A^ => C^ => C^ = ??? + val _: A^ => C^ => C = y4 // error + + val y5: A^ => Fun[C^] = ??? + val _: A^ => (x: C^) -> C = y5 // error + + val y6: A^ => IFun[C^] = ??? + val _: A^ => (x: C^) => C = y6 // error + + val z1: A^ => Array[C^] = ??? // error + + diff --git a/tests/neg/exports.scala b/tests/neg/exports.scala index c187582c940d..459a56e88c4f 100644 --- a/tests/neg/exports.scala +++ b/tests/neg/exports.scala @@ -5,7 +5,7 @@ type PrinterType def print(bits: BitMap): Unit = ??? def status: List[String] = ??? 
- given bitmap: BitMap with {} + given bitmap: BitMap() } class Scanner { diff --git a/tests/neg/extmethod-overload.scala b/tests/neg/extmethod-overload.scala index 8fa7c05222a1..ef927c0be6ce 100644 --- a/tests/neg/extmethod-overload.scala +++ b/tests/neg/extmethod-overload.scala @@ -1,16 +1,15 @@ -object Test { - given a: AnyRef with - extension (x: Int) { +object Test: + + given a: AnyRef: + extension (x: Int) def |+| (y: Int) = x + y - } - given b: AnyRef with - extension (x: Int) { + + given b: AnyRef: + extension (x: Int) def |+| (y: String) = x + y.length - } + assert((1 |+| 2) == 3) // error ambiguous - locally { + locally: import b.|+| assert((1 |+| "2") == 2) // OK - } -} \ No newline at end of file diff --git a/tests/neg/gadt-approximation-interaction.scala b/tests/neg/gadt-approximation-interaction.scala index 5f010e4b784d..a6bfe0d44007 100644 --- a/tests/neg/gadt-approximation-interaction.scala +++ b/tests/neg/gadt-approximation-interaction.scala @@ -28,7 +28,7 @@ object GivenLookup { class Tag[T] - given ti: Tag[Int] with {} + given ti: Tag[Int]() def foo[T](t: T, ev: T SUB Int) = ev match { case SUB.Refl() => diff --git a/tests/neg/genericNumbers.scala b/tests/neg/genericNumbers.scala index 0c5769f7ba12..7c08caeb4e9d 100644 --- a/tests/neg/genericNumbers.scala +++ b/tests/neg/genericNumbers.scala @@ -7,13 +7,11 @@ object Test extends App { case class Even(n: Int) - given FromDigits[Even] with { - def fromDigits(digits: String): Even = { + given FromDigits[Even]: + def fromDigits(digits: String): Even = val intValue = digits.toInt if (intValue % 2 == 0) Even(intValue) else throw FromDigits.MalformedNumber() - } - } val e: Even = 1234 // error diff --git a/tests/neg/given-ambiguous-default-2.check b/tests/neg/given-ambiguous-default-2.check index cbe8b972a389..4d473a301340 100644 --- a/tests/neg/given-ambiguous-default-2.check +++ b/tests/neg/given-ambiguous-default-2.check @@ -1,9 +1,9 @@ -- [E172] Type Error: tests/neg/given-ambiguous-default-2.scala:18:23 -------------------------------------------------- 18 |def f: Unit = summon[C] // error: Ambiguous given instances | ^ - |No best given instance of type C was found for parameter x of method summon in object Predef. - |I found: + | No best given instance of type C was found for parameter x of method summon in object Predef. + | I found: | - | given_C(a = /* ambiguous: both given instance a1 and given instance a2 match type A */summon[A], this.given_C$default$2) + | given_C(a = /* ambiguous: both given instance a1 and given instance a2 match type A */summon[A]) | - |But both given instance a1 and given instance a2 match type A. + | But both given instance a1 and given instance a2 match type A. diff --git a/tests/neg/given-loop-prevention.check b/tests/neg/given-loop-prevention.check index 460adf03be49..cbaeec2474f4 100644 --- a/tests/neg/given-loop-prevention.check +++ b/tests/neg/given-loop-prevention.check @@ -1,14 +1,4 @@ --- Error: tests/neg/given-loop-prevention.scala:10:36 ------------------------------------------------------------------ +-- [E172] Type Error: tests/neg/given-loop-prevention.scala:10:36 ------------------------------------------------------ 10 | given List[Foo] = List(summon[Foo]) // error | ^ - | Result of implicit search for Foo will change. - | Current result Baz.given_Foo will be no longer eligible - | because it is not defined before the search position. - | Result with new rules: No Matching Implicit. 
- | To opt into the new rules, compile with `-source future` or use - | the `scala.language.future` language import. - | - | To fix the problem without the language import, you could try one of the following: - | - use a `given ... with` clause as the enclosing given, - | - rearrange definitions so that Baz.given_Foo comes earlier, - | - use an explicit argument. + | No given instance of type Foo was found for parameter x of method summon in object Predef diff --git a/tests/neg/given-loop-prevention.scala b/tests/neg/given-loop-prevention.scala index 9d404b8c6d8e..9ad2163a4bf8 100644 --- a/tests/neg/given-loop-prevention.scala +++ b/tests/neg/given-loop-prevention.scala @@ -2,11 +2,11 @@ class Foo object Bar { - given Foo with {} + given Foo() given List[Foo] = List(summon[Foo]) // ok } object Baz { given List[Foo] = List(summon[Foo]) // error - given Foo with {} + given Foo() } diff --git a/tests/neg/given-triangle.check b/tests/neg/given-triangle.check index f366c18e78f0..8a05ed4b3129 100644 --- a/tests/neg/given-triangle.check +++ b/tests/neg/given-triangle.check @@ -7,6 +7,6 @@ | (given_B : B) |and | (given_A : A) - |will change. - |Current choice : the first alternative - |New choice from Scala 3.7: the second alternative + |will change in the future release. + |Current choice : the first alternative + |Choice from Scala 3.7 : the second alternative diff --git a/tests/neg/i10901.check b/tests/neg/i10901.check index 4a8fa5db28bf..325cdccc6aab 100644 --- a/tests/neg/i10901.check +++ b/tests/neg/i10901.check @@ -1,23 +1,23 @@ -- [E008] Not Found Error: tests/neg/i10901.scala:45:38 ---------------------------------------------------------------- 45 | val pos1: Point2D[Int,Double] = x º y // error | ^^^ - | value º is not a member of object BugExp4Point2D.IntT. - | An extension method was tried, but could not be fully constructed: - | - | º(x) - | - | failed with: - | - | Ambiguous overload. The overloaded alternatives of method º in object dsl with types - | [T1, T2] - | (x: BugExp4Point2D.ColumnType[T1]) - | (y: BugExp4Point2D.ColumnType[T2]) - | (implicit evidence$1: Numeric[T1], evidence$2: Numeric[T2]): BugExp4Point2D.Point2D[T1, T2] - | [T1, T2] - | (x: T1) - | (y: BugExp4Point2D.ColumnType[T2]) - | (implicit evidence$1: Numeric[T1], evidence$2: Numeric[T2]): BugExp4Point2D.Point2D[T1, T2] - | both match arguments ((x : BugExp4Point2D.IntT.type))((y : BugExp4Point2D.DoubleT.type)) + | value º is not a member of object BugExp4Point2D.IntT. + | An extension method was tried, but could not be fully constructed: + | + | º(x) + | + | failed with: + | + | Ambiguous overload. The overloaded alternatives of method º in object dsl with types + | [T1, T2] + | (x: BugExp4Point2D.ColumnType[T1]) + | (y: BugExp4Point2D.ColumnType[T2]) + | (using evidence$1: Numeric[T1], evidence$2: Numeric[T2]): BugExp4Point2D.Point2D[T1, T2] + | [T1, T2] + | (x: T1) + | (y: BugExp4Point2D.ColumnType[T2]) + | (using evidence$1: Numeric[T1], evidence$2: Numeric[T2]): BugExp4Point2D.Point2D[T1, T2] + | both match arguments ((x : BugExp4Point2D.IntT.type))((y : BugExp4Point2D.DoubleT.type)) -- [E008] Not Found Error: tests/neg/i10901.scala:48:38 ---------------------------------------------------------------- 48 | val pos4: Point2D[Int,Double] = x º 201.1 // error | ^^^ @@ -31,8 +31,8 @@ | Ambiguous overload. 
The overloaded alternatives of method º in object dsl with types | [T1, T2] | (x: BugExp4Point2D.ColumnType[T1]) - | (y: T2)(implicit evidence$1: Numeric[T1], evidence$2: Numeric[T2]): BugExp4Point2D.Point2D[T1, T2] - | [T1, T2](x: T1)(y: T2)(implicit evidence$1: Numeric[T1], evidence$2: Numeric[T2]): BugExp4Point2D.Point2D[T1, T2] + | (y: T2)(using evidence$1: Numeric[T1], evidence$2: Numeric[T2]): BugExp4Point2D.Point2D[T1, T2] + | [T1, T2](x: T1)(y: T2)(using evidence$1: Numeric[T1], evidence$2: Numeric[T2]): BugExp4Point2D.Point2D[T1, T2] | both match arguments ((x : BugExp4Point2D.IntT.type))((201.1d : Double)) -- [E008] Not Found Error: tests/neg/i10901.scala:62:16 ---------------------------------------------------------------- 62 | val y = "abc".foo // error diff --git a/tests/neg/i10901.scala b/tests/neg/i10901.scala index dc1ea6e6eef6..996a0753c2e7 100644 --- a/tests/neg/i10901.scala +++ b/tests/neg/i10901.scala @@ -53,7 +53,7 @@ object BugExp4Point2D { class C object Container: - given C with {} + given C() object Test: extension (x: String)(using C) diff --git a/tests/neg/i11226.check b/tests/neg/i11226.check new file mode 100644 index 000000000000..571f54326808 --- /dev/null +++ b/tests/neg/i11226.check @@ -0,0 +1,6 @@ +-- Error: tests/neg/i11226.scala:13:36 --------------------------------------------------------------------------------- +13 | def test(a: ActorRef): Unit = bus.unsubscribe(a) // error + | ^ + | Cannot resolve reference to type (Unsubscriber.this.bus : ManagedActorClassification).Subscriber. + | Subscriber exists as a member of the self type ActorEventBus of trait ManagedActorClassification + | but it cannot be called on a receiver whose type does not extend trait ManagedActorClassification. diff --git a/tests/neg/i11226.scala b/tests/neg/i11226.scala new file mode 100644 index 000000000000..34c6eb78fd2d --- /dev/null +++ b/tests/neg/i11226.scala @@ -0,0 +1,14 @@ +trait ActorRef + +trait ActorEventBus { + type Subscriber = ActorRef +} + +trait ManagedActorClassification { this: ActorEventBus => + def unsubscribe(subscriber: Subscriber, from: Any): Unit + def unsubscribe(subscriber: Subscriber): Unit +} + +class Unsubscriber(bus: ManagedActorClassification) { + def test(a: ActorRef): Unit = bus.unsubscribe(a) // error +} \ No newline at end of file diff --git a/tests/neg/i11226a.check b/tests/neg/i11226a.check new file mode 100644 index 000000000000..ecb0760dd01c --- /dev/null +++ b/tests/neg/i11226a.check @@ -0,0 +1,12 @@ +-- [E007] Type Mismatch Error: tests/neg/i11226a.scala:12:48 ----------------------------------------------------------- +12 | def test(a: ActorRef): Unit = bus.unsubscribe(a) // error + | ^ + | Found: (a : ActorRef) + | Required: Unsubscriber.this.bus.Subscriber + | + | Note that I could not resolve reference Unsubscriber.this.bus.Subscriber. 
+ | Subscriber exists as a member of the self type ActorEventBus of trait ManagedActorClassification + | but it cannot be called on a receiver whose type does not extend trait ManagedActorClassification + | + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i11226a.scala b/tests/neg/i11226a.scala new file mode 100644 index 000000000000..f30530c5a58e --- /dev/null +++ b/tests/neg/i11226a.scala @@ -0,0 +1,13 @@ +trait ActorRef + +trait ActorEventBus { + type Subscriber = ActorRef +} + +trait ManagedActorClassification { this: ActorEventBus => + def unsubscribe(subscriber: Subscriber): Unit +} + +class Unsubscriber(bus: ManagedActorClassification) { + def test(a: ActorRef): Unit = bus.unsubscribe(a) // error +} \ No newline at end of file diff --git a/tests/neg/i11985.scala b/tests/neg/i11985.scala index fee056594974..52313ab09c99 100644 --- a/tests/neg/i11985.scala +++ b/tests/neg/i11985.scala @@ -11,10 +11,8 @@ object Test { def get(t: TT): C } - given [T <: Tuple, C, EV <: TupleTypeIndex[T, C]]: TupleExtractor[T, C] with { + given [T <: Tuple, C, EV <: TupleTypeIndex[T, C]] => TupleExtractor[T, C]: def get(t: T): C = t.toArray.apply(toIntC[TupleTypeIndex[T, C]]).asInstanceOf[C] // error - } - transparent inline def toIntC[N <: Int]: Int = inline constValue[N] match diff --git a/tests/neg/i12049d.check b/tests/neg/i12049d.check new file mode 100644 index 000000000000..fdb13aae4e43 --- /dev/null +++ b/tests/neg/i12049d.check @@ -0,0 +1,14 @@ +-- [E007] Type Mismatch Error: tests/neg/i12049d.scala:14:52 ----------------------------------------------------------- +14 |val x: M[NotRelevant[Nothing], Relevant[Nothing]] = 2 // error + | ^ + | Found: (2 : Int) + | Required: M[NotRelevant[Nothing], Relevant[Nothing]] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce M[NotRelevant[Nothing], Relevant[Nothing]] + | trying to reduce Relevant[Nothing] + | failed since selector Nothing + | is uninhabited (there are no values of that type). 
+ | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i12049d.scala b/tests/neg/i12049d.scala new file mode 100644 index 000000000000..0011ec1f00b1 --- /dev/null +++ b/tests/neg/i12049d.scala @@ -0,0 +1,14 @@ + +trait A +trait B + +type M[X, Y] = Y match + case A => Int + case B => String + +type Relevant[Z] = Z match + case A => B +type NotRelevant[Z] = Z match + case B => A + +val x: M[NotRelevant[Nothing], Relevant[Nothing]] = 2 // error diff --git a/tests/neg/i12348.check b/tests/neg/i12348.check index 55806fa5ca1b..8d0a24a60308 100644 --- a/tests/neg/i12348.check +++ b/tests/neg/i12348.check @@ -1,4 +1,4 @@ --- [E040] Syntax Error: tests/neg/i12348.scala:2:15 -------------------------------------------------------------------- +-- [E040] Syntax Error: tests/neg/i12348.scala:2:16 -------------------------------------------------------------------- 2 | given inline x: Int = 0 // error - | ^ - | 'with' expected, but identifier found + | ^ + | an identifier expected, but ':' found diff --git a/tests/neg/i13580.check b/tests/neg/i13580.check new file mode 100644 index 000000000000..8f91bcf9bde5 --- /dev/null +++ b/tests/neg/i13580.check @@ -0,0 +1,4 @@ +-- Error: tests/neg/i13580.scala:9:7 ----------------------------------------------------------------------------------- +9 |given (using tracked val w: IntWidth) => IntCandidate: // error + | ^^^^^ + | `using` is already implied here, should not be given explicitly diff --git a/tests/neg/i13580.scala b/tests/neg/i13580.scala new file mode 100644 index 000000000000..7388ee532526 --- /dev/null +++ b/tests/neg/i13580.scala @@ -0,0 +1,13 @@ +//> using options -language:experimental.modularity -source future +trait IntWidth: + type Out +given IntWidth: + type Out = 155 + +trait IntCandidate: + type Out +given (using tracked val w: IntWidth) => IntCandidate: // error + type Out = w.Out + +val x = summon[IntCandidate] +val xx = summon[x.Out =:= 155] diff --git a/tests/neg/i14177a.scala b/tests/neg/i14177a.scala index 237eaacb3b66..91a63bdd6345 100644 --- a/tests/neg/i14177a.scala +++ b/tests/neg/i14177a.scala @@ -2,5 +2,5 @@ import scala.compiletime.* trait C[A] -inline given [Tup <: Tuple]: C[Tup] with +inline given [Tup <: Tuple] => C[Tup]: val cs = summonAll[Tuple.Map[Tup, C]] // error: Tuple element types must be known at compile time diff --git a/tests/neg/i15177.FakeEnum.min.alt1.scala b/tests/neg/i15177.FakeEnum.min.alt1.scala new file mode 100644 index 000000000000..a3e4f7819244 --- /dev/null +++ b/tests/neg/i15177.FakeEnum.min.alt1.scala @@ -0,0 +1,7 @@ +// Like tests/neg/i15177.FakeEnum.min.scala +// But with an actual upper-bound requirement +// Which shouldn't be ignored as a part of overcoming the the cycle +trait Foo +trait X[T <: Foo] { trait Id } +object A extends X[B] // error: Type argument B does not conform to upper bound Foo +class B extends A.Id diff --git a/tests/neg/i15177.constr-dep.scala b/tests/neg/i15177.constr-dep.scala new file mode 100644 index 000000000000..14afb7bb9057 --- /dev/null +++ b/tests/neg/i15177.constr-dep.scala @@ -0,0 +1,9 @@ +// An example of how constructor _type_ parameters +// Which can _not_ be passed to the extends part +// That makes it part of the parent type, +// which has been found to be unsound. 
+class Foo[A] +class Foo1(val x: Int) + extends Foo[ // error: The type of a class parent cannot refer to constructor parameters, but Foo[(Foo1.this.x : Int)] refers to x + x.type + ] diff --git a/tests/neg/i15177.ub.scala b/tests/neg/i15177.ub.scala new file mode 100644 index 000000000000..d504528572ed --- /dev/null +++ b/tests/neg/i15177.ub.scala @@ -0,0 +1,13 @@ +// like tests/pos/i15177.scala +// but with T having an upper bound +// that B doesn't conform to +// just to be sure that not forcing B +// doesn't backdoor an illegal X[B] +class X[T <: C] { + type Id +} +object A + extends X[ // error + B] // error +class B(id: A.Id) +class C diff --git a/tests/neg/i15474.check b/tests/neg/i15474.check deleted file mode 100644 index 9fa8fa6c722a..000000000000 --- a/tests/neg/i15474.check +++ /dev/null @@ -1,29 +0,0 @@ --- Error: tests/neg/i15474.scala:6:39 ---------------------------------------------------------------------------------- -6 | given c: Conversion[ String, Int ] = _.toInt // error - | ^ - | Result of implicit search for ?{ toInt: ? } will change. - | Current result Test2.c will be no longer eligible - | because it is not defined before the search position. - | Result with new rules: augmentString. - | To opt into the new rules, compile with `-source future` or use - | the `scala.language.future` language import. - | - | To fix the problem without the language import, you could try one of the following: - | - use a `given ... with` clause as the enclosing given, - | - rearrange definitions so that Test2.c comes earlier, - | - use an explicit conversion, - | - use an import to get extension method into scope. --- Error: tests/neg/i15474.scala:12:56 --------------------------------------------------------------------------------- -12 | given Ordering[Price] = summon[Ordering[BigDecimal]] // error - | ^ - | Result of implicit search for Ordering[BigDecimal] will change. - | Current result Prices.Price.given_Ordering_Price will be no longer eligible - | because it is not defined before the search position. - | Result with new rules: scala.math.Ordering.BigDecimal. - | To opt into the new rules, compile with `-source future` or use - | the `scala.language.future` language import. - | - | To fix the problem without the language import, you could try one of the following: - | - use a `given ... with` clause as the enclosing given, - | - rearrange definitions so that Prices.Price.given_Ordering_Price comes earlier, - | - use an explicit argument. 
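The i15474 check file deleted above captured the old migration diagnostic ("Result of implicit search ... will change"); together with the given-loop-prevention.check update earlier in this patch, its removal suggests the new resolution rules are now the default, so a given defined after the search position is simply not eligible. A small sketch of the arrangement that still resolves cleanly under those rules, mirroring the `// ok` case in given-loop-prevention.scala; Foo2 and Fixed are illustrative names, not part of this patch:

  class Foo2
  object Fixed:
    // The Foo2 instance is declared before it is summoned, so the
    // List[Foo2] alias given resolves without any warning.
    given Foo2()
    given List[Foo2] = List(summon[Foo2])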
diff --git a/tests/neg/i15474.scala b/tests/neg/i15474.scala deleted file mode 100644 index b196d1b400ef..000000000000 --- a/tests/neg/i15474.scala +++ /dev/null @@ -1,16 +0,0 @@ -//> using options -Xfatal-warnings - -import scala.language.implicitConversions - -object Test2: - given c: Conversion[ String, Int ] = _.toInt // error - -object Prices { - opaque type Price = BigDecimal - - object Price{ - given Ordering[Price] = summon[Ordering[BigDecimal]] // error - } -} - - diff --git a/tests/neg/i15474b.scala b/tests/neg/i15474b.scala index 0c04b9880e1c..0a99056f8a6e 100644 --- a/tests/neg/i15474b.scala +++ b/tests/neg/i15474b.scala @@ -3,6 +3,6 @@ import scala.language.implicitConversions object Test1: - given c: Conversion[ String, Int ] with + given c: Conversion[ String, Int ]: def apply(from: String): Int = from.toInt // warn: infinite loop in function body // nopos-error: No warnings can be incurred under -Werror (or -Xfatal-warnings) diff --git a/tests/neg/i15855.scala b/tests/neg/i15855.scala new file mode 100644 index 000000000000..c1d316ccae81 --- /dev/null +++ b/tests/neg/i15855.scala @@ -0,0 +1,15 @@ +class MyFunction(args: String) + +trait MyFunction0[+R] extends MyFunction { + def apply(): R +} + +def fromFunction0[R](f: Function0[R]): MyFunction0[R] = () => f() // error + +class MyFunctionWithImplicit(implicit args: String) + +trait MyFunction0WithImplicit[+R] extends MyFunctionWithImplicit { + def apply(): R +} + +def fromFunction1[R](f: Function0[R]): MyFunction0WithImplicit[R] = () => f() // error diff --git a/tests/neg/i15987.check b/tests/neg/i15987.check new file mode 100644 index 000000000000..b62c8cac160b --- /dev/null +++ b/tests/neg/i15987.check @@ -0,0 +1,5 @@ +-- Error: tests/neg/i15987.scala:26:40 --------------------------------------------------------------------------------- +26 |case class Person(name: String) derives ShowWithExplicit, // error + | ^ + | derived instance ShowWithExplicit[Person] failed to generate: + | method `derived` from object ShowWithExplicit takes explicit term parameters diff --git a/tests/neg/i15987.scala b/tests/neg/i15987.scala new file mode 100644 index 000000000000..743f5f7ccda0 --- /dev/null +++ b/tests/neg/i15987.scala @@ -0,0 +1,30 @@ +trait ShowWithExplicit[A] + +object ShowWithExplicit: + def derived[A, B](explicit: String)(using DummyImplicit)(implicit dummy: DummyImplicit): ShowWithExplicit[A] = ??? + +trait ShowUsingAndImplicit[A] + +object ShowUsingAndImplicit: + def derived[A, B](using DummyImplicit)(implicit dummy: DummyImplicit): ShowUsingAndImplicit[A] = ??? + +trait ShowUsing[A] + +object ShowUsing: + def derived[A](using DummyImplicit): ShowUsing[A] = ??? + +trait ShowImplicit[A] + +object ShowImplicit: + def derived[A](implicit ev: DummyImplicit): ShowImplicit[A] = ??? + +trait ShowContra[-A] + +object ShowContra: + val derived: ShowContra[Any] = ??? 
+ +case class Person(name: String) derives ShowWithExplicit, // error + ShowUsingAndImplicit, + ShowUsing, + ShowImplicit, + ShowContra diff --git a/tests/neg/i16407.check b/tests/neg/i16407.check index 5c6bd19ca8c1..481d70e83ce3 100644 --- a/tests/neg/i16407.check +++ b/tests/neg/i16407.check @@ -1,12 +1,12 @@ -- Error: tests/neg/i16407.scala:2:2 ----------------------------------------------------------------------------------- 2 | f(g()) // error // error | ^ - | cannot resolve reference to type (X.this : Y & X).A - | the classfile defining the type might be missing from the classpath - | or the self type of (X.this : Y & X) might not contain all transitive dependencies + | Cannot resolve reference to type (X.this : Y & X).A. + | The classfile defining the type might be missing from the classpath + | or the self type of (X.this : Y & X) might not contain all transitive dependencies. -- Error: tests/neg/i16407.scala:2:4 ----------------------------------------------------------------------------------- 2 | f(g()) // error // error | ^ - | cannot resolve reference to type (X.this : Y & X).A - | the classfile defining the type might be missing from the classpath - | or the self type of (X.this : Y & X) might not contain all transitive dependencies + | Cannot resolve reference to type (X.this : Y & X).A. + | The classfile defining the type might be missing from the classpath + | or the self type of (X.this : Y & X) might not contain all transitive dependencies. diff --git a/tests/neg/i16453.scala b/tests/neg/i16453.scala index 00495c39e21a..b5767ac35417 100644 --- a/tests/neg/i16453.scala +++ b/tests/neg/i16453.scala @@ -12,7 +12,7 @@ def testScala3() = { given Conversion[Char, String] = ??? given Conversion[Char, Option[Int]] = ??? - given foo: Foo with + given foo: Foo: type T = Int given bar3: Int = 0 given baz3: Char = 'a' diff --git a/tests/neg/i16815.check b/tests/neg/i16815.check new file mode 100644 index 000000000000..8f2f5c57d405 --- /dev/null +++ b/tests/neg/i16815.check @@ -0,0 +1,28 @@ +-- Error: tests/neg/i16815.scala:3:37 ---------------------------------------------------------------------------------- +3 |extension [C1 >: Chain <: Chain](c2: c1.Tail) // error + | ^^ + | right-associative extension method cannot have a forward reference to c1 +-- Error: tests/neg/i16815.scala:6:24 ---------------------------------------------------------------------------------- +6 |extension [C1](c2: (C1, C2)) // error + | ^^ + | right-associative extension method cannot have a forward reference to C2 +-- Error: tests/neg/i16815.scala:9:19 ---------------------------------------------------------------------------------- +9 |extension [C1](c2: C2) // error + | ^^ + | right-associative extension method cannot have a forward reference to C2 +-- Error: tests/neg/i16815.scala:12:24 --------------------------------------------------------------------------------- +12 |extension [C1](c2: (C1, C2, C3)) // error // error + | ^^ + | right-associative extension method cannot have a forward reference to C2 +-- Error: tests/neg/i16815.scala:12:28 --------------------------------------------------------------------------------- +12 |extension [C1](c2: (C1, C2, C3)) // error // error + | ^^ + | right-associative extension method cannot have a forward reference to C3 +-- Error: tests/neg/i16815.scala:15:48 --------------------------------------------------------------------------------- +15 |extension [C1](str: String)(using z: (str.type, C2)) // error + | ^^ + | right-associative extension method cannot 
have a forward reference to C2 +-- Error: tests/neg/i16815.scala:19:31 --------------------------------------------------------------------------------- +19 |extension [D1 <: Int](D2: (D1, D2)) // error + | ^^ + | right-associative extension method cannot have a forward reference to D2 diff --git a/tests/neg/i16815.scala b/tests/neg/i16815.scala new file mode 100644 index 000000000000..595f75e40df4 --- /dev/null +++ b/tests/neg/i16815.scala @@ -0,0 +1,20 @@ +trait Chain { type Tail <: Chain } + +extension [C1 >: Chain <: Chain](c2: c1.Tail) // error + def ra1_:[C2 <: C1](c1: C1): C2 = ??? + +extension [C1](c2: (C1, C2)) // error + def ra2_:[C2 <: C1](c1: (C1, C2)): C2 = ??? + +extension [C1](c2: C2) // error + def ra3_:[C2 <: C1](c1: C1): C2 = ??? + +extension [C1](c2: (C1, C2, C3)) // error // error + def ra4_:[C2 <: C1, C3 <: C1](c1: (C1, C2)): C2 = ??? + +extension [C1](str: String)(using z: (str.type, C2)) // error + def ra5_:[C2 <: Int](c1: C1): C2 = ??? + +type D2 = String +extension [D1 <: Int](D2: (D1, D2)) // error + def sa2_:[D2 <: D1](D1: (D1, D2)): D2 = ??? diff --git a/tests/neg/i16842.check b/tests/neg/i16842.check index 936b08f95dbb..8cad4bc7656f 100644 --- a/tests/neg/i16842.check +++ b/tests/neg/i16842.check @@ -1,4 +1,16 @@ --- Error: tests/neg/i16842.scala:24:7 ---------------------------------------------------------------------------------- -24 | Liter(SemanticArray[SemanticInt.type], x) // error - | ^ - | invalid new prefix (dim: Int): SemanticArray[SemanticInt.type] cannot replace ty.type in type ty.T +-- [E007] Type Mismatch Error: tests/neg/i16842.scala:24:8 ------------------------------------------------------------- +24 | Liter(SemanticArray[SemanticInt.type], x) // error // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Found: Int => SemanticArray[SemanticInt.type] + | Required: SemanticArray[SemanticType] + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/i16842.scala:24:41 ------------------------------------------------------------ +24 | Liter(SemanticArray[SemanticInt.type], x) // error // error + | ^ + | Found: (x : List[Expr2[SemanticInt.type]]) + | Required: ty.T + | Note that implicit conversions were not tried because the result of an implicit conversion + | must be more specific than ty.T + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i16842.scala b/tests/neg/i16842.scala index e9935b46c01d..1e7e5cc14339 100644 --- a/tests/neg/i16842.scala +++ b/tests/neg/i16842.scala @@ -21,5 +21,5 @@ def typecheckArrayLiter( a: ArrayLiter ): Liter[SemanticArray[SemanticType]] = { val x: List[Expr2[SemanticInt.type]] = List() - Liter(SemanticArray[SemanticInt.type], x) // error + Liter(SemanticArray[SemanticInt.type], x) // error // error } diff --git a/tests/neg/i16872.check b/tests/neg/i16872.check new file mode 100644 index 000000000000..2e0f9cf81eda --- /dev/null +++ b/tests/neg/i16872.check @@ -0,0 +1,36 @@ +-- [E006] Not Found Error: tests/neg/i16872.scala:8:6 ------------------------------------------------------------------ +8 | aa, // error + | ^^ + | Not found: aa + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg/i16872.scala:9:6 ------------------------------------------------------------------ +9 | bb, // error + | ^^ + | Not found: bb + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg/i16872.scala:10:6 
----------------------------------------------------------------- +10 | cc, // error + | ^^ + | Not found: cc + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg/i16872.scala:16:6 ----------------------------------------------------------------- +16 | dd, // error + | ^^ + | Not found: dd + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg/i16872.scala:17:6 ----------------------------------------------------------------- +17 | ee, // error + | ^^ + | Not found: ee - did you mean eq? or perhaps ne? + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg/i16872.scala:18:6 ----------------------------------------------------------------- +18 | ff, // error + | ^^ + | Not found: ff + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i16872.scala b/tests/neg/i16872.scala new file mode 100644 index 000000000000..931ea57e1bec --- /dev/null +++ b/tests/neg/i16872.scala @@ -0,0 +1,19 @@ +// Using a checkfile to verify where the carets point to. +// Originally they were pointing to "cc," and "ff," +// including the trailing comma + +class Test: + def t1 = + ( + aa, // error + bb, // error + cc, // error + ) + + def meth(a: Int, b: Int, c: Int) = a + b + c + def t2 = + meth( + dd, // error + ee, // error + ff, // error + ) diff --git a/tests/neg/i17121.check b/tests/neg/i17121.check index 59895dd2474a..4a7dd332d8dc 100644 --- a/tests/neg/i17121.check +++ b/tests/neg/i17121.check @@ -3,22 +3,26 @@ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | The match type contains an illegal case: | case Consumer[List[t]] => t + | The pattern contains an unaccounted type parameter `t`. | (this error can be ignored for now with `-source:3.3`) -- [E191] Type Error: tests/neg/i17121.scala:15:17 --------------------------------------------------------------------- 15 | type G2[X] = X match { case Consumer[Consumer[t]] => t } // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | The match type contains an illegal case: | case Consumer[Consumer[t]] => t + | The pattern contains an unaccounted type parameter `t`. | (this error can be ignored for now with `-source:3.3`) -- [E191] Type Error: tests/neg/i17121.scala:17:17 --------------------------------------------------------------------- 17 | type G3[X] = X match { case Consumer[Consumer[Consumer[t]]] => t } // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | The match type contains an illegal case: | case Consumer[Consumer[Consumer[t]]] => t + | The pattern contains an unaccounted type parameter `t`. | (this error can be ignored for now with `-source:3.3`) -- [E191] Type Error: tests/neg/i17121.scala:19:17 --------------------------------------------------------------------- 19 | type G4[X] = X match { case Consumer[List[Consumer[t]]] => t } // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | The match type contains an illegal case: | case Consumer[List[Consumer[t]]] => t + | The pattern contains an unaccounted type parameter `t`. 
| (this error can be ignored for now with `-source:3.3`) diff --git a/tests/neg/i18123.check b/tests/neg/i18123.check new file mode 100644 index 000000000000..d784c4d12673 --- /dev/null +++ b/tests/neg/i18123.check @@ -0,0 +1,12 @@ +-- [E172] Type Error: tests/neg/i18123.scala:25:33 --------------------------------------------------------------------- +25 | (charClassIntersection.rep() | classItem.rep()) // error + | ^^^^^^^^^^^^^^^ + |No given instance of type pkg.Implicits.Repeater[pkg.RegexTree, V] was found. + |I found: + | + | pkg.Implicits.Repeater.GenericRepeaterImplicit[T] + | + |But method GenericRepeaterImplicit in object Repeater does not match type pkg.Implicits.Repeater[pkg.RegexTree, V] + | + |where: V is a type variable with constraint <: Seq[pkg.CharClassIntersection] + |. diff --git a/tests/neg/i18123.scala b/tests/neg/i18123.scala new file mode 100644 index 000000000000..bb220dc78e93 --- /dev/null +++ b/tests/neg/i18123.scala @@ -0,0 +1,25 @@ +// may not compile anymore in Scala 3.4+ +package pkg + +trait P[+T] + +extension [T](inline parse0: P[T]) + inline def | [V >: T](inline other: P[V]): P[V] = ??? + +extension [T](inline parse0: => P[T]) + inline def rep[V](inline min: Int = 0)(using repeater: Implicits.Repeater[T, V]): P[V] = ??? + +object Implicits: + trait Repeater[-T, R] + object Repeater: + implicit def GenericRepeaterImplicit[T]: Repeater[T, Seq[T]] = ??? + +sealed trait RegexTree +abstract class Node extends RegexTree +class CharClassIntersection() extends Node + +def classItem: P[RegexTree] = ??? +def charClassIntersection: P[CharClassIntersection] = ??? + +def x = + (charClassIntersection.rep() | classItem.rep()) // error diff --git a/tests/neg/i19248/Foo.scala b/tests/neg/i19248/Foo.scala new file mode 100644 index 000000000000..f24651234eb9 --- /dev/null +++ b/tests/neg/i19248/Foo.scala @@ -0,0 +1,7 @@ +trait Foo { // error + class Bar + + type T = Foo.this.Bar + + inline def f: Int = ??? 
+} diff --git a/tests/neg/i19248/Main.scala b/tests/neg/i19248/Main.scala new file mode 100644 index 000000000000..bf4e3a48b279 --- /dev/null +++ b/tests/neg/i19248/Main.scala @@ -0,0 +1,3 @@ +@main +def Main(args: String*): Unit = + () diff --git a/tests/neg/i19248/Scope.scala b/tests/neg/i19248/Scope.scala new file mode 100644 index 000000000000..a3135d93084f --- /dev/null +++ b/tests/neg/i19248/Scope.scala @@ -0,0 +1,4 @@ +object Scope { +} +object Foo { +} diff --git a/tests/neg/i19248/empty.scala b/tests/neg/i19248/empty.scala new file mode 100644 index 000000000000..8b137891791f --- /dev/null +++ b/tests/neg/i19248/empty.scala @@ -0,0 +1 @@ + diff --git a/tests/neg/i19328conversion.scala b/tests/neg/i19328conversion.scala index 46dd1058b579..458bd5abb5b1 100644 --- a/tests/neg/i19328conversion.scala +++ b/tests/neg/i19328conversion.scala @@ -5,7 +5,7 @@ object i19328conversion: type Id[A] = A - given wrapId[A]: Conversion[A, Id[A]] with + given wrapId: [A] => Conversion[A, Id[A]]: def apply(x: A): Id[A] = x def bar(using bool: Boolean): Unit = () diff --git a/tests/neg/i20105.check b/tests/neg/i20105.check new file mode 100644 index 000000000000..5fb33283387b --- /dev/null +++ b/tests/neg/i20105.check @@ -0,0 +1,10 @@ +-- [E199] Syntax Warning: tests/neg/i20105.scala:6:9 ------------------------------------------------------------------- +6 | foo() + | ^^^^^ + | The tail recursive def foo contains a recursive call inside the non-inlined inner def bar + | + | longer explanation available when compiling with `-explain` +-- [E097] Syntax Error: tests/neg/i20105.scala:3:4 --------------------------------------------------------------------- +3 |def foo(): Unit = // error + | ^ + | TailRec optimisation not applicable, method foo contains no recursive calls diff --git a/tests/neg/i20105.scala b/tests/neg/i20105.scala new file mode 100644 index 000000000000..08d54e895ec1 --- /dev/null +++ b/tests/neg/i20105.scala @@ -0,0 +1,9 @@ +import scala.annotation.tailrec +@tailrec +def foo(): Unit = // error + def bar(): Unit = + if (???) 
+ foo() + else + bar() + bar() \ No newline at end of file diff --git a/tests/neg/i20317a.scala b/tests/neg/i20317a.scala new file mode 100644 index 000000000000..d7b8b66eb80e --- /dev/null +++ b/tests/neg/i20317a.scala @@ -0,0 +1,5 @@ +type SemigroupStructural[A] = + A & { def combine(a: A): A } +def combineAll[A <: SemigroupStructural[A]]( + i: A, l: List[A] +): A = l.foldLeft(i)(_.combine(_)) // error diff --git a/tests/neg/i20338a.check b/tests/neg/i20338a.check new file mode 100644 index 000000000000..a329492bd990 --- /dev/null +++ b/tests/neg/i20338a.check @@ -0,0 +1,7 @@ +-- [E007] Type Mismatch Error: tests/neg/i20338a.scala:10:15 ----------------------------------------------------------- +10 | test.field = "hello" // error + | ^^^^^^^ + | Found: ("hello" : String) + | Required: Int + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i20338a.scala b/tests/neg/i20338a.scala new file mode 100644 index 000000000000..b91982297d78 --- /dev/null +++ b/tests/neg/i20338a.scala @@ -0,0 +1,10 @@ +object types: + opaque type Struct = Int + val test: Struct = 25 + extension (s: Struct) + def field: Int = s + def field_=(other: Int) = () + +@main def hello = + import types.* + test.field = "hello" // error \ No newline at end of file diff --git a/tests/neg/i20338b.check b/tests/neg/i20338b.check new file mode 100644 index 000000000000..382d68a0911c --- /dev/null +++ b/tests/neg/i20338b.check @@ -0,0 +1,7 @@ +-- [E007] Type Mismatch Error: tests/neg/i20338b.scala:10:8 ------------------------------------------------------------ +10 | f.x = 42 // error + | ^^ + | Found: (42 : Int) + | Required: String + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i20338b.scala b/tests/neg/i20338b.scala new file mode 100644 index 000000000000..b8a3463862e0 --- /dev/null +++ b/tests/neg/i20338b.scala @@ -0,0 +1,10 @@ +class Foo(_x: Int) + +extension (s: Foo) + def x_=(x: String): Unit = () + def x: Int = ??? 
+ +@main +def Test = + val f = Foo(42) + f.x = 42 // error diff --git a/tests/neg/i20338c.check b/tests/neg/i20338c.check new file mode 100644 index 000000000000..1d19ec0b3042 --- /dev/null +++ b/tests/neg/i20338c.check @@ -0,0 +1,6 @@ +-- [E052] Type Error: tests/neg/i20338c.scala:9:6 ---------------------------------------------------------------------- +9 | f.x = 42 // error + | ^^^^^^^^ + | Reassignment to val x + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i20338c.scala b/tests/neg/i20338c.scala new file mode 100644 index 000000000000..cfdf38e73b11 --- /dev/null +++ b/tests/neg/i20338c.scala @@ -0,0 +1,9 @@ +class Foo(val x: Int) + +extension (s: Foo) + def x: Int = 43 + +@main +def Test = + val f = Foo(42) + f.x = 42 // error \ No newline at end of file diff --git a/tests/neg/i20474.scala b/tests/neg/i20474.scala new file mode 100644 index 000000000000..4623ec11dbf3 --- /dev/null +++ b/tests/neg/i20474.scala @@ -0,0 +1,13 @@ +class A +class B extends A + +def f(a: A, c: A) = + val b1: a.type = a + val b2: a.type & B = a.asInstanceOf[a.type & B] + val b3: c.type & A = c + val b4: a.type | c.type = c + + val d1: b1.type = a + val d2: b2.type = a // ok + val d3: b3.type = a // error + val d4: b4.type = a // error \ No newline at end of file diff --git a/tests/neg/i20503.scala b/tests/neg/i20503.scala new file mode 100644 index 000000000000..3fb0573f6c2f --- /dev/null +++ b/tests/neg/i20503.scala @@ -0,0 +1,18 @@ +import language.experimental.captureChecking +import caps.unbox + +class List[+A]: + def head: A = ??? + def tail: List[A] = ??? + def map[B](f: A => B): List[B] = ??? + def foreach[U](f: A => U): Unit = ??? + def nonEmpty: Boolean = ??? + +def runOps(@unbox ops: List[() => Unit]): Unit = + // See i20156, due to limitation in expressiveness of current system, + // we could map over the list of impure elements. OK with existentials. 
+ ops.foreach(op => op()) + +def main(): Unit = + val f: List[() => Unit] -> Unit = (ops: List[() => Unit]) => runOps(ops) // error + val _: List[() => Unit] -> Unit = runOps // error diff --git a/tests/neg/i20511-1.check b/tests/neg/i20511-1.check new file mode 100644 index 000000000000..3f64940bb4fe --- /dev/null +++ b/tests/neg/i20511-1.check @@ -0,0 +1,32 @@ +-- [E083] Type Error: tests/neg/i20511-1.scala:7:7 --------------------------------------------------------------------- +7 |export toppingPrice.apply, crustPrice.apply, crustPrice.unlift // error // error // error // error // error + | ^^^^^^^^^^^^ + | Int => Double is not a valid export prefix, since it is not an immutable path + | + | longer explanation available when compiling with `-explain` +-- [E083] Type Error: tests/neg/i20511-1.scala:7:27 -------------------------------------------------------------------- +7 |export toppingPrice.apply, crustPrice.apply, crustPrice.unlift // error // error // error // error // error + | ^^^^^^^^^^ + | Any is not a valid export prefix, since it is not an immutable path + | + | longer explanation available when compiling with `-explain` +-- Error: tests/neg/i20511-1.scala:7:38 -------------------------------------------------------------------------------- +7 |export toppingPrice.apply, crustPrice.apply, crustPrice.unlift // error // error // error // error // error + | ^^^^^ + | no eligible member apply at { + | def $anonfun(crustType: Double): Double = pakiet.crustPrice(crustType) + | closure(pakiet.$anonfun:Any) + | } +-- [E083] Type Error: tests/neg/i20511-1.scala:7:45 -------------------------------------------------------------------- +7 |export toppingPrice.apply, crustPrice.apply, crustPrice.unlift // error // error // error // error // error + | ^^^^^^^^^^ + | Any is not a valid export prefix, since it is not an immutable path + | + | longer explanation available when compiling with `-explain` +-- Error: tests/neg/i20511-1.scala:7:56 -------------------------------------------------------------------------------- +7 |export toppingPrice.apply, crustPrice.apply, crustPrice.unlift // error // error // error // error // error + | ^^^^^^ + | no eligible member unlift at { + | def $anonfun(crustType: Double): Double = pakiet.crustPrice(crustType) + | closure(pakiet.$anonfun:Any) + | } diff --git a/tests/neg/i20511-1.scala b/tests/neg/i20511-1.scala new file mode 100644 index 000000000000..882520b55c07 --- /dev/null +++ b/tests/neg/i20511-1.scala @@ -0,0 +1,7 @@ +package pakiet + +def toppingPrice(size: Int): Double = ??? + +def crustPrice(crustType: Double): Double = ??? + +export toppingPrice.apply, crustPrice.apply, crustPrice.unlift // error // error // error // error // error diff --git a/tests/neg/i20511.check b/tests/neg/i20511.check new file mode 100644 index 000000000000..fefff9f42a6f --- /dev/null +++ b/tests/neg/i20511.check @@ -0,0 +1,14 @@ +-- [E040] Syntax Error: tests/neg/i20511.scala:7:19 -------------------------------------------------------------------- +7 |export toppingPrice, crustPrice // error // error + | ^ + | '.' expected, but ',' found +-- [E040] Syntax Error: tests/neg/i20511.scala:8:0 --------------------------------------------------------------------- +8 |val i = 1 // error + |^^^ + |'.' 
expected, but 'end of statement' found +-- [E083] Type Error: tests/neg/i20511.scala:7:21 ---------------------------------------------------------------------- +7 |export toppingPrice, crustPrice // error // error + | ^^^^^^^^^^ + | Any is not a valid export prefix, since it is not an immutable path + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i20511.scala b/tests/neg/i20511.scala new file mode 100644 index 000000000000..657609536bf0 --- /dev/null +++ b/tests/neg/i20511.scala @@ -0,0 +1,8 @@ +package pakiet + +def toppingPrice(size: Int): Double = ??? + +def crustPrice(crustType: Double): Double = ??? + +export toppingPrice, crustPrice // error // error +val i = 1 // error diff --git a/tests/neg/i20517.check b/tests/neg/i20517.check new file mode 100644 index 000000000000..55aeff46572b --- /dev/null +++ b/tests/neg/i20517.check @@ -0,0 +1,7 @@ +-- [E007] Type Mismatch Error: tests/neg/i20517.scala:10:43 ------------------------------------------------------------ +10 | def dep(foo: Foo[Any]): From[foo.type] = (elem = "") // error + | ^^^^^^^^^^^ + | Found: (elem : String) + | Required: NamedTuple.From[(foo : Foo[Any])] + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i20517.scala b/tests/neg/i20517.scala new file mode 100644 index 000000000000..11c4432434dd --- /dev/null +++ b/tests/neg/i20517.scala @@ -0,0 +1,17 @@ +import scala.language.experimental.namedTuples +import NamedTuple.From + +case class Foo[+T](elem: T) + +trait Base[M[_]]: + def dep(foo: Foo[Any]): M[foo.type] + +class SubAny extends Base[From]: + def dep(foo: Foo[Any]): From[foo.type] = (elem = "") // error + +object Test: + @main def run = + val f: Foo[Int] = Foo(elem = 1) + val b: Base[From] = SubAny() + val nt: (elem: Int) = b.dep(f) + val x: Int = nt.elem // was ClassCastException \ No newline at end of file diff --git a/tests/neg/i20533.check b/tests/neg/i20533.check new file mode 100644 index 000000000000..45dfbd7f4b92 --- /dev/null +++ b/tests/neg/i20533.check @@ -0,0 +1,5 @@ +-- Error: tests/neg/i20533.scala:5:8 ----------------------------------------------------------------------------------- +5 | [X] => (x, y) => Map(x -> y) // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Provided polymorphic function value doesn't match the expected type [X, Y] => (x$1: X, x$2: Y) => Map[X, Y]. + | Expected type should be a polymorphic function with the same number of type and value parameters. diff --git a/tests/neg/i20533.scala b/tests/neg/i20533.scala new file mode 100644 index 000000000000..20059bd795c6 --- /dev/null +++ b/tests/neg/i20533.scala @@ -0,0 +1,6 @@ +def mapF(h: [X, Y] => (X, Y) => Map[X, Y]): Unit = ??? 
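As an illustrative contrast to the i20533 arity error above (a sketch, not part of the test), a lambda that declares both type parameters matches the expected polymorphic function type:

def mapOk(h: [X, Y] => (X, Y) => Map[X, Y]): Unit = ???
def testOk = mapOk([X, Y] => (x: X, y: Y) => Map(x -> y)) // same number of type and value parameters: accepted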
+ +def test = + mapF( + [X] => (x, y) => Map(x -> y) // error + ) diff --git a/tests/neg/i20546.scala b/tests/neg/i20546.scala new file mode 100644 index 000000000000..63bd3706d12e --- /dev/null +++ b/tests/neg/i20546.scala @@ -0,0 +1,22 @@ +import NamedTuple.{NamedTuple, AnyNamedTuple} + +type And[X <: Boolean, Y <: Boolean] <: Boolean = (X, Y) match + case (true, true) => true + case _ => false +type AndLambda = [X <: Boolean, Y <: Boolean] =>> And[X, Y] + +trait Expr2[Result, Scalar <: Boolean]: + type StripScalar[E] = E match + case Expr2[_, s] => s + + type AllScalar[A <: AnyNamedTuple] = Tuple.Fold[Tuple.Map[NamedTuple.DropNames[A], StripScalar], true, AndLambda] // error: cyclic + + +object Minimization: + type And[X <: Boolean, Y <: Boolean] = (X, Y) match + case (true, true) => true + case _ => false + + type AndLambda = [X <: Boolean, Y <: Boolean] =>> And[X, Y] + + type All[A <: Tuple] = Tuple.Fold[A, true, AndLambda] // error: cyclic diff --git a/tests/neg/i20554-a.check b/tests/neg/i20554-a.check new file mode 100644 index 000000000000..ac0890ba133a --- /dev/null +++ b/tests/neg/i20554-a.check @@ -0,0 +1,44 @@ +-- [E201] Syntax Error: tests/neg/i20554-a/Test.scala:3:12 ------------------------------------------------------------- +3 |@Annotation(3, 4) // error // error : Java defined annotation should be called with named arguments + | ^ + | Named arguments are required for Java defined annotations + | This can be rewritten automatically under -rewrite -source 3.6-migration. + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Starting from Scala 3.6.0, named arguments are required for Java defined annotations. + | Java defined annotations don't have an exact constructor representation + | and we previously relied on the order of the fields to create one. + | One possible issue with this representation is the reordering of the fields. + | Lets take the following example: + | + | public @interface Annotation { + | int a() default 41; + | int b() default 42; + | } + | + | Reordering the fields is binary-compatible but it might affect the meaning of @Annotation(1) + | + --------------------------------------------------------------------------------------------------------------------- +-- [E201] Syntax Error: tests/neg/i20554-a/Test.scala:3:15 ------------------------------------------------------------- +3 |@Annotation(3, 4) // error // error : Java defined annotation should be called with named arguments + | ^ + | Named arguments are required for Java defined annotations + | This can be rewritten automatically under -rewrite -source 3.6-migration. + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Starting from Scala 3.6.0, named arguments are required for Java defined annotations. + | Java defined annotations don't have an exact constructor representation + | and we previously relied on the order of the fields to create one. + | One possible issue with this representation is the reordering of the fields. 
+ | Lets take the following example: + | + | public @interface Annotation { + | int a() default 41; + | int b() default 42; + | } + | + | Reordering the fields is binary-compatible but it might affect the meaning of @Annotation(1) + | + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/i20554-a/Annotation.java b/tests/neg/i20554-a/Annotation.java new file mode 100644 index 000000000000..728bbded7a06 --- /dev/null +++ b/tests/neg/i20554-a/Annotation.java @@ -0,0 +1,4 @@ +public @interface Annotation { + int a() default 41; + int b() default 42; +} diff --git a/tests/neg/i20554-a/Test.scala b/tests/neg/i20554-a/Test.scala new file mode 100644 index 000000000000..f0b3ea40b87a --- /dev/null +++ b/tests/neg/i20554-a/Test.scala @@ -0,0 +1,4 @@ +//> using options -explain + +@Annotation(3, 4) // error // error : Java defined annotation should be called with named arguments +class Test \ No newline at end of file diff --git a/tests/neg/i20554-b.check b/tests/neg/i20554-b.check new file mode 100644 index 000000000000..637b48ee93ef --- /dev/null +++ b/tests/neg/i20554-b.check @@ -0,0 +1,22 @@ +-- [E201] Syntax Error: tests/neg/i20554-b/Test.scala:3:18 ------------------------------------------------------------- +3 |@SimpleAnnotation(1) // error: the parameters is not named 'value' + | ^ + | Named arguments are required for Java defined annotations + | This can be rewritten automatically under -rewrite -source 3.6-migration. + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Starting from Scala 3.6.0, named arguments are required for Java defined annotations. + | Java defined annotations don't have an exact constructor representation + | and we previously relied on the order of the fields to create one. + | One possible issue with this representation is the reordering of the fields. 
+ | Lets take the following example: + | + | public @interface Annotation { + | int a() default 41; + | int b() default 42; + | } + | + | Reordering the fields is binary-compatible but it might affect the meaning of @Annotation(1) + | + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/i20554-b/SimpleAnnotation.java b/tests/neg/i20554-b/SimpleAnnotation.java new file mode 100644 index 000000000000..65b37a7508d2 --- /dev/null +++ b/tests/neg/i20554-b/SimpleAnnotation.java @@ -0,0 +1,4 @@ + +public @interface SimpleAnnotation { + int a() default 1; +} diff --git a/tests/neg/i20554-b/Test.scala b/tests/neg/i20554-b/Test.scala new file mode 100644 index 000000000000..c6586409aa62 --- /dev/null +++ b/tests/neg/i20554-b/Test.scala @@ -0,0 +1,4 @@ +//> using options -explain + +@SimpleAnnotation(1) // error: the parameters is not named 'value' +class Test \ No newline at end of file diff --git a/tests/neg/i20946/Macro_1.scala b/tests/neg/i20946/Macro_1.scala new file mode 100644 index 000000000000..f598f5d278ce --- /dev/null +++ b/tests/neg/i20946/Macro_1.scala @@ -0,0 +1,14 @@ +import scala.quoted.* + +def macroWithAssertFailingImpl[T: Type](t: Expr[T])(using Quotes): Expr[Unit] = { + import quotes.reflect.* + + try + Ref(TypeRepr.of[T].typeSymbol) + catch + case ex: Throwable => + if ex.getMessage().contains("expected a term symbol, but received ") then + throw ex + + '{()} +} diff --git a/tests/neg/i20946/Test_2.scala b/tests/neg/i20946/Test_2.scala new file mode 100644 index 000000000000..80ae0a95fa4b --- /dev/null +++ b/tests/neg/i20946/Test_2.scala @@ -0,0 +1,5 @@ +inline def macroWithAssertFailing[T](t: T): Unit = ${ macroWithAssertFailingImpl[T]('t) } + +@main +def run = + macroWithAssertFailing[Int](123) // error diff --git a/tests/neg/i20946a/Macro_1.scala b/tests/neg/i20946a/Macro_1.scala new file mode 100644 index 000000000000..b3603fe91b10 --- /dev/null +++ b/tests/neg/i20946a/Macro_1.scala @@ -0,0 +1,14 @@ +import scala.quoted.* + +def macroWithAssertFailingImpl[T: Type](t: Expr[T])(using Quotes): Expr[Unit] = { + import quotes.reflect.* + + try + TypeIdent(t.asTerm.symbol) + catch + case ex: Throwable => + if ex.getMessage().contains("Expected a type symbol, but got ") then + throw ex + + '{()} +} diff --git a/tests/neg/i20946a/Test_2.scala b/tests/neg/i20946a/Test_2.scala new file mode 100644 index 000000000000..80ae0a95fa4b --- /dev/null +++ b/tests/neg/i20946a/Test_2.scala @@ -0,0 +1,5 @@ +inline def macroWithAssertFailing[T](t: T): Unit = ${ macroWithAssertFailingImpl[T]('t) } + +@main +def run = + macroWithAssertFailing[Int](123) // error diff --git a/tests/neg/i21071.check b/tests/neg/i21071.check new file mode 100644 index 000000000000..b2a3233a31c0 --- /dev/null +++ b/tests/neg/i21071.check @@ -0,0 +1,9 @@ +-- [E051] Reference Error: tests/neg/i21071.scala:9:2 ------------------------------------------------------------------ +9 | foo { // error + | ^^^ + | Ambiguous overload. The overloaded alternatives of method foo in object MySuite with types + | (a: String): Nothing + | (a: List[String]): Nothing + | both match arguments ((??? : => Nothing)) + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i21071.scala b/tests/neg/i21071.scala new file mode 100644 index 000000000000..ac222cad7936 --- /dev/null +++ b/tests/neg/i21071.scala @@ -0,0 +1,21 @@ +trait Service { + def method: String +} + +object MySuite { + def foo(a: List[String]) = ??? 
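A sketch of the fix suggested by the i20554 diagnostics above, reusing the Annotation and SimpleAnnotation interfaces defined in those tests: naming every argument satisfies the Scala 3.6 requirement.

@Annotation(a = 3, b = 4)
class UsesAnnotation

@SimpleAnnotation(a = 1)
class UsesSimpleAnnotation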
+ def foo(a: String) = ??? + + foo { // error + + new Service { + private val underlying: Service = ??? + private val s = "foo" + + export underlying.* + export s.toLowerCase + } + + ??? + } +} diff --git a/tests/neg/i21212.check b/tests/neg/i21212.check index 06740af36d77..5d9fe7728cbc 100644 --- a/tests/neg/i21212.check +++ b/tests/neg/i21212.check @@ -1,4 +1,4 @@ --- [E172] Type Error: tests/neg/i21212.scala:9:52 ---------------------------------------------------------------------- -9 | def test2(using a2: A)(implicit b2: B) = summon[A] // error: ambiguous +-- [E172] Type Error: tests/neg/i21212.scala:8:52 ---------------------------------------------------------------------- +8 | def test2(using a2: A)(implicit b2: B) = summon[A] // error: ambiguous | ^ |Ambiguous given instances: both parameter b2 and parameter a2 match type Minimization.A of parameter x of method summon in object Predef diff --git a/tests/neg/i21212.scala b/tests/neg/i21212.scala index 3b030cefcdc7..99e4c44f9489 100644 --- a/tests/neg/i21212.scala +++ b/tests/neg/i21212.scala @@ -1,5 +1,4 @@ //> using options -source 3.7 - object Minimization: trait A diff --git a/tests/neg/i21239.check b/tests/neg/i21239.check new file mode 100644 index 000000000000..5b6f2f8bcef5 --- /dev/null +++ b/tests/neg/i21239.check @@ -0,0 +1,7 @@ +-- [E007] Type Mismatch Error: tests/neg/i21239.scala:14:18 ------------------------------------------------------------ +14 | def get2: V = get // error + | ^^^ + | Found: AnyRef + | Required: V + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i21239.orig.check b/tests/neg/i21239.orig.check new file mode 100644 index 000000000000..26895bd50ed3 --- /dev/null +++ b/tests/neg/i21239.orig.check @@ -0,0 +1,7 @@ +-- [E007] Type Mismatch Error: tests/neg/i21239.orig.scala:32:8 -------------------------------------------------------- +32 | get // error + | ^^^ + | Found: AnyRef + | Required: V + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i21239.orig.scala b/tests/neg/i21239.orig.scala new file mode 100644 index 000000000000..3fb39d93446b --- /dev/null +++ b/tests/neg/i21239.orig.scala @@ -0,0 +1,33 @@ +// 1 +// A re-minimisated reproduction of the original issue in kse3 +// The one in the issue removes the usage of the package +// in the second extension bundle, which is crucial to +// why my change broke this code +package kse.flow + +import java.util.concurrent.atomic.AtomicReference + +opaque type Worm[V] = AtomicReference[AnyRef] +object Worm: + val notSetSentinel: AnyRef = new AnyRef {} + + extension [V](worm: Worm[V]) + inline def wormAsAtomic: AtomicReference[AnyRef] = worm + + extension [V](worm: kse.flow.Worm[V]) + + inline def setIfEmpty(v: => V): Boolean = + var old = worm.wormAsAtomic.get() + if old eq Worm.notSetSentinel then + worm.wormAsAtomic.compareAndSet(old, v.asInstanceOf[AnyRef]) + else false + + inline def get: V = worm.wormAsAtomic.get() match + case x if x eq Worm.notSetSentinel => throw new java.lang.IllegalStateException("Retrieved value before being set") + case x => x.asInstanceOf[V] + + inline def getOrSet(v: => V): V = worm.wormAsAtomic.get() match + case x if x eq Worm.notSetSentinel => + setIfEmpty(v) + get // error + case x => x.asInstanceOf[V] diff --git a/tests/neg/i21239.scala b/tests/neg/i21239.scala new file mode 100644 index 000000000000..4eb4d5808857 --- /dev/null +++ b/tests/neg/i21239.scala @@ -0,0 +1,14 @@ +// 2 +// A more minimised reproduction +package lib + +import 
java.util.concurrent.atomic.AtomicReference + +opaque type Worm[V] = AtomicReference[AnyRef] +object Worm: + extension [V](worm: Worm[V]) + inline def wormAsAtomic: AtomicReference[AnyRef] = worm + + extension [V](worm: lib.Worm[V]) + def get: V = worm.wormAsAtomic.get().asInstanceOf[V] + def get2: V = get // error diff --git a/tests/neg/i21335.check b/tests/neg/i21335.check new file mode 100644 index 000000000000..a7ee092eec0e --- /dev/null +++ b/tests/neg/i21335.check @@ -0,0 +1,8 @@ +-- Error: tests/neg/i21335.scala:7:6 ----------------------------------------------------------------------------------- +7 |class Z1 extends Bar1 // error + | ^ + | class Z1 needs to be abstract, since override def bar(): Bar1 in trait Bar1 is not defined +-- Error: tests/neg/i21335.scala:12:6 ---------------------------------------------------------------------------------- +12 |class Z2 extends Bar2 // error + | ^ + | class Z2 needs to be abstract, since def bar(): Bar2 in trait Bar2 is not defined diff --git a/tests/neg/i21335.scala b/tests/neg/i21335.scala new file mode 100644 index 000000000000..270765c80535 --- /dev/null +++ b/tests/neg/i21335.scala @@ -0,0 +1,12 @@ +trait Foo: + def bar(): Foo + +trait Bar1 extends Foo: + override def bar(): Bar1 + +class Z1 extends Bar1 // error + +trait Bar2 extends Foo: + def bar(): Bar2 + +class Z2 extends Bar2 // error diff --git a/tests/neg/i21359.scala b/tests/neg/i21359.scala new file mode 100644 index 000000000000..9d588335c0e6 --- /dev/null +++ b/tests/neg/i21359.scala @@ -0,0 +1,6 @@ +import scala.compiletime.constValueTuple +import scala.deriving.Mirror + +case class Hello(a: Int) +val mirror = summon[Mirror.Of[Hello]] +val test = constValueTuple[mirror.MirroredElemTypes] // error diff --git a/tests/neg/i21535.check b/tests/neg/i21535.check new file mode 100644 index 000000000000..7a24f2196ec8 --- /dev/null +++ b/tests/neg/i21535.check @@ -0,0 +1,11 @@ +-- [E007] Type Mismatch Error: tests/neg/i21535.scala:7:4 -------------------------------------------------------------- +3 | (if (true) then +4 | new A(66) +5 | else +6 | m1() +7 | ).m2(p1 = p); // error + | ^ + | Found: (Int | Short) @uncheckedVariance + | Required: Int & Short + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i21535.scala b/tests/neg/i21535.scala new file mode 100644 index 000000000000..f9573f823160 --- /dev/null +++ b/tests/neg/i21535.scala @@ -0,0 +1,16 @@ +def test() = { + val p = 10.toShort + (if (true) then + new A(66) + else + m1() + ).m2(p1 = p); // error + +} + +def m1(): A[Short] = new A(10) + +class A[D](var f: D) { + + def m2(p1: D = f, p2: D = f): Unit = {} +} \ No newline at end of file diff --git a/tests/neg/i21543.check b/tests/neg/i21543.check new file mode 100644 index 000000000000..9fa9a7779d7a --- /dev/null +++ b/tests/neg/i21543.check @@ -0,0 +1,22 @@ +-- [E007] Type Mismatch Error: tests/neg/i21543.scala:10:15 ------------------------------------------------------------ +10 | Cmd(List("1", "2")) // error // error + | ^^^ + | Found: ("1" : String) + | Required: Event + | + | Note that I could not resolve reference Event. + | Event is a private member in a base class + | + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/i21543.scala:10:20 ------------------------------------------------------------ +10 | Cmd(List("1", "2")) // error // error + | ^^^ + | Found: ("2" : String) + | Required: Event + | + | Note that I could not resolve reference Event. 
+ | Event is a private member in a base class + | + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i21543.scala b/tests/neg/i21543.scala new file mode 100644 index 000000000000..aaadce6d22b4 --- /dev/null +++ b/tests/neg/i21543.scala @@ -0,0 +1,13 @@ +object CompilerCrash { + trait Scope { + private type Event = String + + case class Cmd(events: List[Event]) + } + + new Scope { + val commands = List( + Cmd(List("1", "2")) // error // error + ) + } +} \ No newline at end of file diff --git a/tests/neg/i21652.check b/tests/neg/i21652.check new file mode 100644 index 000000000000..6cc024e1bb55 --- /dev/null +++ b/tests/neg/i21652.check @@ -0,0 +1,4 @@ +-- Error: tests/neg/i21652.scala:1:8 ----------------------------------------------------------------------------------- +1 |def k: [A] => (=> A) => A = // error + | ^^^^^^^^^^^^^^^^^ + |Implementation restriction: PolyFunction apply must have exactly one parameter list and optionally type arguments. No by-name nor varags are allowed. diff --git a/tests/neg/i21652.scala b/tests/neg/i21652.scala new file mode 100644 index 000000000000..a49d7f0eb1ce --- /dev/null +++ b/tests/neg/i21652.scala @@ -0,0 +1,2 @@ +def k: [A] => (=> A) => A = // error + [A] => a => a diff --git a/tests/neg/i21696.check b/tests/neg/i21696.check new file mode 100644 index 000000000000..9195263040b3 --- /dev/null +++ b/tests/neg/i21696.check @@ -0,0 +1,13 @@ +-- [E202] Staging Issue Error: tests/neg/i21696.scala:7:52 ------------------------------------------------------------- +7 |def foo[T](using Quotes): Expr[Thing[T]] = '{ Thing[T]() } // error + | ^ + | Reference to T within quotes requires a given scala.quoted.Type[T] in scope + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Referencing `T` inside a quoted expression requires a `scala.quoted.Type[T]` to be in scope. + | Since Scala is subject to erasure at runtime, the type information will be missing during the execution of the code. + | `scala.quoted.Type[T]` is therefore needed to carry `T`'s type information into the quoted code. + | Without an implicit `scala.quoted.Type[T]`, the type `T` cannot be properly referenced within the expression. + | To resolve this, ensure that a `scala.quoted.Type[T]` is available, either through a context-bound or explicitly. 
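The context-bound fix that the i21696 explanation above recommends, sketched with a hypothetical Thing2 class:

import scala.quoted.{Expr, Quotes, Type}

case class Thing2[T]()

// The `T: Type` context bound puts a scala.quoted.Type[T] in scope,
// so T can be referenced inside the quote.
def fooOk[T: Type](using Quotes): Expr[Thing2[T]] = '{ Thing2[T]() }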
+ --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/i21696.scala b/tests/neg/i21696.scala new file mode 100644 index 000000000000..7ec30a8a2e41 --- /dev/null +++ b/tests/neg/i21696.scala @@ -0,0 +1,7 @@ +//> using options -explain + +import scala.quoted.{Expr, Quotes} + +case class Thing[T]() + +def foo[T](using Quotes): Expr[Thing[T]] = '{ Thing[T]() } // error diff --git a/tests/neg/i21760.scala b/tests/neg/i21760.scala new file mode 100644 index 000000000000..625e03520dfb --- /dev/null +++ b/tests/neg/i21760.scala @@ -0,0 +1 @@ +open object O // error \ No newline at end of file diff --git a/tests/neg/i21786.check b/tests/neg/i21786.check new file mode 100644 index 000000000000..47f7e2456c3d --- /dev/null +++ b/tests/neg/i21786.check @@ -0,0 +1,6 @@ +-- [E103] Syntax Error: tests/neg/i21786.scala:1:0 --------------------------------------------------------------------- +1 |into class X // error + |^^^^ + |Illegal start of toplevel definition + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i21786.scala b/tests/neg/i21786.scala new file mode 100644 index 000000000000..c5bb9c595d32 --- /dev/null +++ b/tests/neg/i21786.scala @@ -0,0 +1 @@ +into class X // error diff --git a/tests/neg/i21952.scala b/tests/neg/i21952.scala new file mode 100644 index 000000000000..0365d82463c0 --- /dev/null +++ b/tests/neg/i21952.scala @@ -0,0 +1 @@ +val _ = (new Function[(Int, Int), Int] {def apply(a: Int, b: Int): Int = a * b})(2, 3) // error diff --git a/tests/neg/i5004.scala b/tests/neg/i5004.scala index 02105104efd1..ba1abe77f5bf 100644 --- a/tests/neg/i5004.scala +++ b/tests/neg/i5004.scala @@ -2,5 +2,5 @@ object i0 { 1 match { def this(): Int // error def this() -} // error +} } diff --git a/tests/neg/i5397.scala b/tests/neg/i5397.scala index d38b0e67bff9..ebe89875b3df 100644 --- a/tests/neg/i5397.scala +++ b/tests/neg/i5397.scala @@ -16,8 +16,10 @@ object Test { rec3 // error: not in tail position }) - @tailrec def rec4: Unit = { - def local = rec4 // error: not in tail position + // This is technically not breaching tail recursion as rec4 does not call itself, local does + // This instead fails due to having no tail recursion at all + @tailrec def rec4: Unit = { // error: no recursive calls + def local = rec4 } @tailrec def rec5: Int = { diff --git a/tests/neg/i5978.scala b/tests/neg/i5978.scala index 5dddfafb8726..b7e0344ec1c9 100644 --- a/tests/neg/i5978.scala +++ b/tests/neg/i5978.scala @@ -5,7 +5,7 @@ opaque type Position[Buffer] = Int trait TokenParser[Token, R] object TextParser { - given TP: TokenParser[Char, Position[CharSequence]] with {} + given TP: TokenParser[Char, Position[CharSequence]]() given FromCharToken(using T: TokenParser[Char, Position[CharSequence]]) : Conversion[Char, Position[CharSequence]] = ??? 
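Several of the test updates in this diff replace `given ... with` instances by the Scala 3.6 syntax; a minimal side-by-side sketch with a hypothetical Show type class:

trait Show[A]:
  def show(a: A): String

// pre-3.6:  given Show[Int] with { def show(a: Int) = a.toString }
// 3.6 drops `with`:
given Show[Int]:
  def show(a: Int) = a.toString

class Token
// pre-3.6:  given tok: Token with {}
// 3.6:
given tok: Token()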
diff --git a/tests/neg/i6716-source-3.4.scala b/tests/neg/i6716-source-3.4.scala new file mode 100644 index 000000000000..f6f1961b67a4 --- /dev/null +++ b/tests/neg/i6716-source-3.4.scala @@ -0,0 +1,19 @@ +//> using options -Xfatal-warnings -source 3.4 + +trait Monad[T]: + def id: String +class Foo +object Foo { + given Monad[Foo] with { def id = "Foo" } +} + +opaque type Bar = Foo +object Bar { + given Monad[Bar] = summon[Monad[Foo]] // warn +} + +object Test extends App { + println(summon[Monad[Foo]].id) + println(summon[Monad[Bar]].id) +} +// nopos-error: No warnings can be incurred under -Werror (or -Xfatal-warnings) \ No newline at end of file diff --git a/tests/neg/i6716.check b/tests/neg/i6716.check deleted file mode 100644 index 0144f539f53c..000000000000 --- a/tests/neg/i6716.check +++ /dev/null @@ -1,14 +0,0 @@ --- Error: tests/neg/i6716.scala:11:39 ---------------------------------------------------------------------------------- -11 | given Monad[Bar] = summon[Monad[Foo]] // error - | ^ - | Result of implicit search for Monad[Foo] will change. - | Current result Bar.given_Monad_Bar will be no longer eligible - | because it is not defined before the search position. - | Result with new rules: Foo.given_Monad_Foo. - | To opt into the new rules, compile with `-source future` or use - | the `scala.language.future` language import. - | - | To fix the problem without the language import, you could try one of the following: - | - use a `given ... with` clause as the enclosing given, - | - rearrange definitions so that Bar.given_Monad_Bar comes earlier, - | - use an explicit argument. diff --git a/tests/neg/i6716.scala b/tests/neg/i6716.scala index 8b37d4e223ac..a9826cb901c0 100644 --- a/tests/neg/i6716.scala +++ b/tests/neg/i6716.scala @@ -1,17 +1,12 @@ - -trait Monad[T]: - def id: String class Foo -object Foo { - given Monad[Foo] with { def id = "Foo" } -} -opaque type Bar = Foo object Bar { - given Monad[Bar] = summon[Monad[Foo]] // error + given Foo() + given List[Foo] = List(summon[Foo]) // ok } -object Test extends App { - println(summon[Monad[Foo]].id) - println(summon[Monad[Bar]].id) +object Baz { + @annotation.nowarn + given List[Foo] = List(summon[Foo]) // error + given Foo() } diff --git a/tests/neg/i7294.check b/tests/neg/i7294.check index d6e559997f78..30c076470899 100644 --- a/tests/neg/i7294.check +++ b/tests/neg/i7294.check @@ -1,25 +1,9 @@ --- Error: tests/neg/i7294.scala:7:10 ----------------------------------------------------------------------------------- -7 | case x: T => x.g(10) // error // error - | ^ - | Result of implicit search for scala.reflect.TypeTest[Nothing, T] will change. - | Current result foo.f will be no longer eligible - | because it is not defined before the search position. - | Result with new rules: No Matching Implicit. - | To opt into the new rules, compile with `-source future` or use - | the `scala.language.future` language import. - | - | To fix the problem without the language import, you could try one of the following: - | - use a `given ... with` clause as the enclosing given, - | - rearrange definitions so that foo.f comes earlier, - | - use an explicit argument. 
- | - | where: T is a type in given instance f with bounds <: foo.Foo --- [E007] Type Mismatch Error: tests/neg/i7294.scala:7:18 -------------------------------------------------------------- -7 | case x: T => x.g(10) // error // error - | ^^^^^^^ - | Found: Any - | Required: T - | - | where: T is a type in given instance f with bounds <: foo.Foo +-- [E007] Type Mismatch Error: tests/neg/i7294.scala:7:15 -------------------------------------------------------------- +7 | case x: T => x.g(10) // error + | ^ + | Found: (x : Nothing) + | Required: ?{ g: ? } + | Note that implicit conversions were not tried because the result of an implicit conversion + | must be more specific than ?{ g: [applied to (10) returning T] } | | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i7294.scala b/tests/neg/i7294.scala index fbb00f9b7e89..2725109e79e8 100644 --- a/tests/neg/i7294.scala +++ b/tests/neg/i7294.scala @@ -4,7 +4,7 @@ package foo trait Foo { def g(x: Any): Any } inline given f[T <: Foo]: T = ??? match { - case x: T => x.g(10) // error // error + case x: T => x.g(10) // error } @main def Test = f diff --git a/tests/neg/i7459.scala b/tests/neg/i7459.scala index a17f32b15afa..829132e5179d 100644 --- a/tests/neg/i7459.scala +++ b/tests/neg/i7459.scala @@ -22,7 +22,7 @@ trait Eq[T] { } object Eq { - given Eq[Int] with { + given Eq[Int] { def eqv(x: Int, y: Int) = x == y } diff --git a/tests/neg/i8150.scala b/tests/neg/i8150.scala index 2f0505c6265a..b7edceec9426 100644 --- a/tests/neg/i8150.scala +++ b/tests/neg/i8150.scala @@ -1,3 +1,3 @@ trait A trait B -type T = {given(using a: A) as B} // error: refinement cannot be `given` \ No newline at end of file +type T = {given x(using a: A): B} // error: refinement cannot be `given` \ No newline at end of file diff --git a/tests/neg/i8896-a.scala b/tests/neg/i8896-a.scala index ae2cd6e88f6c..9f0953bfe939 100644 --- a/tests/neg/i8896-a.scala +++ b/tests/neg/i8896-a.scala @@ -4,8 +4,7 @@ trait Foo[A] object Example { - given Foo[Int] with { - } + given Foo[Int]() def foo0[A: Foo]: A => A = identity def foo1[A](implicit foo: Foo[A]): A => A = identity diff --git a/tests/neg/i8896-b.scala b/tests/neg/i8896-b.scala index a2559b00b3cc..f562d2d3b719 100644 --- a/tests/neg/i8896-b.scala +++ b/tests/neg/i8896-b.scala @@ -4,8 +4,7 @@ trait Foo[A] object Example { - given Foo[Int] with { - } + given Foo[Int]() def foo0[A: Foo]: A => A = identity def foo1[A](implicit foo: Foo[A]): A => A = identity diff --git a/tests/neg/i9185.scala b/tests/neg/i9185.scala index 34727eff1c46..8ec28135c1b9 100644 --- a/tests/neg/i9185.scala +++ b/tests/neg/i9185.scala @@ -1,8 +1,8 @@ trait M[F[_]] { def pure[A](x: A): F[A] } object M { extension [A, F[A]](x: A) def pure(using m: M[F]): F[A] = m.pure(x) - given listMonad: M[List] with { def pure[A](x: A): List[A] = List(x) } - given optionMonad: M[Option] with { def pure[A](x: A): Option[A] = Some(x) } + given listMonad: M[List] { def pure[A](x: A): List[A] = List(x) } + given optionMonad: M[Option] { def pure[A](x: A): Option[A] = Some(x) } val value1: List[String] = "ola".pure val value2 = "ola".pure // error val value3 = M.pure("ola") // error diff --git a/tests/neg/i9928.scala b/tests/neg/i9928.scala index a1034b1f20e9..cfe9cf663a85 100644 --- a/tests/neg/i9928.scala +++ b/tests/neg/i9928.scala @@ -2,7 +2,7 @@ trait Magic[F]: extension (x: Int) def read: F object Magic: - given Magic[String] with + given Magic[String]: extension(x: Int) def read: String = println("In string") s"$x" @@ -12,7 +12,7 
@@ object Foo: import Magic.given def apply(s: String): Foo = s - given Magic[Foo] with + given Magic[Foo]: extension (x: Int) def read: Foo = println("In foo") Foo(s"$x") diff --git a/tests/neg/illegal-match-types.check b/tests/neg/illegal-match-types.check index f5f0f2d07c51..36862f3b9b92 100644 --- a/tests/neg/illegal-match-types.check +++ b/tests/neg/illegal-match-types.check @@ -3,6 +3,7 @@ | ^ | The match type contains an illegal case: | case Inv[Cov[t]] => t + | The pattern contains an unaccounted type parameter `t`. | (this error can be ignored for now with `-source:3.3`) 8 | case Inv[Cov[t]] => t -- [E191] Type Error: tests/neg/illegal-match-types.scala:10:26 -------------------------------------------------------- @@ -10,6 +11,7 @@ | ^ | The match type contains an illegal case: | case Contra[Cov[t]] => t + | The pattern contains an unaccounted type parameter `t`. | (this error can be ignored for now with `-source:3.3`) 11 | case Contra[Cov[t]] => t -- [E191] Type Error: tests/neg/illegal-match-types.scala:15:22 -------------------------------------------------------- @@ -17,6 +19,7 @@ | ^ | The match type contains an illegal case: | case t & Seq[Any] => t + | The pattern contains an unaccounted type parameter `t`. | (this error can be ignored for now with `-source:3.3`) 16 | case t & Seq[Any] => t -- [E191] Type Error: tests/neg/illegal-match-types.scala:22:33 -------------------------------------------------------- @@ -24,19 +27,22 @@ | ^ | The match type contains an illegal case: | case IsSeq[t] => t + | The pattern contains a type alias `IsSeq`. | (this error can be ignored for now with `-source:3.3`) 23 | case IsSeq[t] => t -- [E191] Type Error: tests/neg/illegal-match-types.scala:29:34 -------------------------------------------------------- 29 |type TypeMemberExtractorMT[X] = X match // error | ^ - | The match type contains an illegal case: - | case TypeMemberAux[t] => t - | (this error can be ignored for now with `-source:3.3`) + | The match type contains an illegal case: + | case TypeMemberAux[t] => t + | The pattern contains an abstract type member `TypeMember` that does not refine a member in its parent. + | (this error can be ignored for now with `-source:3.3`) 30 | case TypeMemberAux[t] => t -- [E191] Type Error: tests/neg/illegal-match-types.scala:40:35 -------------------------------------------------------- 40 |type TypeMemberExtractorMT2[X] = X match // error | ^ - | The match type contains an illegal case: - | case TypeMemberAux2[t] => t - | (this error can be ignored for now with `-source:3.3`) + | The match type contains an illegal case: + | case TypeMemberAux2[t] => t + | The pattern contains an abstract type member `TypeMember` with bounds that need verification. 
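For contrast with the illegal match-type cases diagnosed above, a sketch of a case whose captured type parameter is accounted for, using a hypothetical covariant Cov class:

class Cov[+T]

type Unwrap[X] = X match
  case Cov[t] => t // legal: t is captured directly under a covariant type constructor
  case _ => X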
+ | (this error can be ignored for now with `-source:3.3`) 41 | case TypeMemberAux2[t] => t diff --git a/tests/neg/implicit-package-object.scala b/tests/neg/implicit-package-object.scala index 7b73d620b9b8..6d8c5d9ddff0 100644 --- a/tests/neg/implicit-package-object.scala +++ b/tests/neg/implicit-package-object.scala @@ -13,7 +13,7 @@ package A { given ToString[AB] = ab => println(ab) opaque type AC = String - given ToString[AC] with { + given ToString[AC] { def print(ac: AC): Unit = println(ac) } } @@ -31,7 +31,7 @@ package B { opaque type BC = String object BC { - given ToString[BC] with { + given ToString[BC] { def print(bc: BC): Unit = println(bc) } } diff --git a/tests/neg/implied-for.scala b/tests/neg/implied-for.scala index 87f762870400..cb43f799ee50 100644 --- a/tests/neg/implied-for.scala +++ b/tests/neg/implied-for.scala @@ -3,8 +3,8 @@ class B extends T class C extends T object A { - given b: B with {} - given c: C with {} + given b: B() + given c: C() } object Test extends App { diff --git a/tests/neg/import-given.scala b/tests/neg/import-given.scala index 080ed1e77ec5..1e5a9536c605 100644 --- a/tests/neg/import-given.scala +++ b/tests/neg/import-given.scala @@ -1,6 +1,6 @@ class TC object A { - given tc: TC with {} + given tc: TC() def foo(using TC) = () } object B { diff --git a/tests/neg/infix-named-args.check b/tests/neg/infix-named-args.check new file mode 100644 index 000000000000..d960892a9624 --- /dev/null +++ b/tests/neg/infix-named-args.check @@ -0,0 +1,37 @@ +-- [E134] Type Error: tests/neg/infix-named-args.scala:4:13 ------------------------------------------------------------ +4 | def f = 42 + (x = 1) // error // werror + | ^^^^ + | None of the overloaded alternatives of method + in class Int with types + | (x: Double): Double + | (x: Float): Float + | (x: Long): Long + | (x: Int): Int + | (x: Char): Int + | (x: Short): Int + | (x: Byte): Int + | (x: String): String + | match arguments ((x : Int)) (a named tuple) +-- [E204] Syntax Warning: tests/neg/infix-named-args.scala:4:15 -------------------------------------------------------- +4 | def f = 42 + (x = 1) // error // werror + | ^^^^^^^ + |Deprecated syntax: infix named arguments lists are deprecated; in the future it would be interpreted as a single name tuple argument. + |To avoid this warning, either remove the argument names or use dotted selection. + |This can be rewritten automatically under -rewrite -source 3.6-migration. +-- [E204] Syntax Warning: tests/neg/infix-named-args.scala:7:26 -------------------------------------------------------- +7 | def g = new C() `multi` (x = 42, y = 27) // werror + | ^^^^^^^^^^^^^^^^ + |Deprecated syntax: infix named arguments lists are deprecated; in the future it would be interpreted as a single name tuple argument. + |To avoid this warning, either remove the argument names or use dotted selection. + |This can be rewritten automatically under -rewrite -source 3.6-migration. +-- [E204] Syntax Warning: tests/neg/infix-named-args.scala:8:21 -------------------------------------------------------- +8 | def h = new C() ** (x = 42, y = 27) // werror + | ^^^^^^^^^^^^^^^^ + |Deprecated syntax: infix named arguments lists are deprecated; in the future it would be interpreted as a single name tuple argument. + |To avoid this warning, either remove the argument names or use dotted selection. + |This can be rewritten automatically under -rewrite -source 3.6-migration. 
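The rewrites suggested by the infix-named-args warnings above, sketched for a hypothetical class of the same shape:

class C2:
  def multi(x: Int, y: Int): Int = x + y
  def **(x: Int, y: Int): Int = x * y

def callers(c: C2) =
  // deprecated: c ** (x = 42, y = 27)
  val a = c.multi(x = 42, y = 27) // dotted selection keeps the argument names
  val b = c ** (42, 27)           // or keep infix and drop the names
  (a, b)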
+-- [E204] Syntax Warning: tests/neg/infix-named-args.scala:15:18 ------------------------------------------------------- +15 | def f = this ** (x = 2) // werror + | ^^^^^^^ + |Deprecated syntax: infix named arguments lists are deprecated; in the future it would be interpreted as a single name tuple argument. + |To avoid this warning, either remove the argument names or use dotted selection. + |This can be rewritten automatically under -rewrite -source 3.6-migration. diff --git a/tests/neg/infix-named-args.scala b/tests/neg/infix-named-args.scala new file mode 100644 index 000000000000..b0ef555cf965 --- /dev/null +++ b/tests/neg/infix-named-args.scala @@ -0,0 +1,16 @@ +import scala.language.experimental.namedTuples + +class C: + def f = 42 + (x = 1) // error // werror + def multi(x: Int, y: Int): Int = x + y + def **(x: Int, y: Int): Int = x + y + def g = new C() `multi` (x = 42, y = 27) // werror + def h = new C() ** (x = 42, y = 27) // werror + +type X = (x: Int) + +class D(d: Int): + def **(x: Int): Int = d * x + def **(x: X): Int = d * x.x + def f = this ** (x = 2) // werror + def g = this ** 2 diff --git a/tests/neg/infix.scala b/tests/neg/infix.scala index dda638c829f9..bfd68a17e656 100644 --- a/tests/neg/infix.scala +++ b/tests/neg/infix.scala @@ -8,7 +8,7 @@ class C: def +(x: Int): Int = ??? object C: - given AnyRef with + given AnyRef: extension (x: C) infix def iop (y: Int) = ??? def mop (y: Int) = ??? diff --git a/tests/neg/interleaving-ab.scala b/tests/neg/interleaving-ab.scala index e446626a2982..afdb2f0a192f 100644 --- a/tests/neg/interleaving-ab.scala +++ b/tests/neg/interleaving-ab.scala @@ -1,11 +1,10 @@ -import scala.language.experimental.clauseInterleaving object Ab: given String = "" given Double = 0 def illegal[A][B](x: A)(using B): B = summon[B] // error: Type parameter lists must be separated by a term or using parameter list - + def ab[A](x: A)[B](using B): B = summon[B] def test = ab[Int](0: Int) // error diff --git a/tests/neg/interleaving-params.scala b/tests/neg/interleaving-params.scala index dc6762cf0214..20f6bbb98d3d 100644 --- a/tests/neg/interleaving-params.scala +++ b/tests/neg/interleaving-params.scala @@ -1,4 +1,3 @@ -import scala.language.experimental.clauseInterleaving class Params{ def bar[T](x: T)[T]: String = ??? 
// error diff --git a/tests/neg/interleaving-signatureCollision.scala b/tests/neg/interleaving-signatureCollision.scala index a6a729ed3b62..096073e7bda8 100644 --- a/tests/neg/interleaving-signatureCollision.scala +++ b/tests/neg/interleaving-signatureCollision.scala @@ -1,4 +1,3 @@ -import scala.language.experimental.clauseInterleaving object signatureCollision: def f[T](x: T)[U](y: U) = (x,y) diff --git a/tests/neg/interleaving-typeApply.check b/tests/neg/interleaving-typeApply.check index a50c1455bfbb..ca2ab6fa3f3e 100644 --- a/tests/neg/interleaving-typeApply.check +++ b/tests/neg/interleaving-typeApply.check @@ -1,29 +1,29 @@ --- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:10:11 -------------------------------------------- -10 | f3[String]() // error - | ^ - | Type argument String does not conform to upper bound Int - | - | longer explanation available when compiling with `-explain` --- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:11:16 -------------------------------------------- -11 | f5[Int][Unit] // error +-- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:9:11 --------------------------------------------- +9 | f3[String]() // error + | ^ + | Type argument String does not conform to upper bound Int + | + | longer explanation available when compiling with `-explain` +-- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:10:16 -------------------------------------------- +10 | f5[Int][Unit] // error | ^ | Type argument Unit does not conform to upper bound String | | longer explanation available when compiling with `-explain` --- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:12:19 -------------------------------------------- -12 | f5[String][Unit] // error // error +-- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:11:19 -------------------------------------------- +11 | f5[String][Unit] // error // error | ^ | Type argument Unit does not conform to upper bound String | | longer explanation available when compiling with `-explain` --- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:12:11 -------------------------------------------- -12 | f5[String][Unit] // error // error +-- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:11:11 -------------------------------------------- +11 | f5[String][Unit] // error // error | ^ | Type argument String does not conform to upper bound Int | | longer explanation available when compiling with `-explain` --- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:13:11 -------------------------------------------- -13 | f7[String]()[Unit] // error +-- [E057] Type Mismatch Error: tests/neg/interleaving-typeApply.scala:12:11 -------------------------------------------- +12 | f7[String]()[Unit] // error | ^ | Type argument String does not conform to upper bound Int | diff --git a/tests/neg/interleaving-typeApply.scala b/tests/neg/interleaving-typeApply.scala index ad21fe2f0329..5ad6e3dc148e 100644 --- a/tests/neg/interleaving-typeApply.scala +++ b/tests/neg/interleaving-typeApply.scala @@ -1,7 +1,6 @@ -import scala.language.experimental.clauseInterleaving object typeApply: - + def f3[T <: Int](using DummyImplicit)[U <: String](): T => T = ??? def f5[T <: Int](using DummyImplicit)[U <: String]: [X <: Unit] => X => X = ??? def f7[T <: Int](using DummyImplicit)[U <: String]()[X <: Unit]: X => X = ??? 
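Clause interleaving no longer needs the experimental language import that these tests drop; a minimal sketch of an interleaved signature and call:

// Valid without `import scala.language.experimental.clauseInterleaving` as of Scala 3.6.
def pair[A](a: A)[B](b: B): (A, B) = (a, b)

val p: (Int, String) = pair(1)("one")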
diff --git a/tests/neg/interleaving-unmatched.scala b/tests/neg/interleaving-unmatched.scala index 2ce3074d07fa..3a4371798a50 100644 --- a/tests/neg/interleaving-unmatched.scala +++ b/tests/neg/interleaving-unmatched.scala @@ -1,4 +1,3 @@ -import scala.language.experimental.clauseInterleaving object unmatched: def f1[T (x: T)] = ??? // error diff --git a/tests/neg/interleavingExperimental.check b/tests/neg/interleavingExperimental.check new file mode 100644 index 000000000000..a5e10506bdc3 --- /dev/null +++ b/tests/neg/interleavingExperimental.check @@ -0,0 +1,4 @@ +-- [E040] Syntax Error: tests/neg/interleavingExperimental.scala:3:15 -------------------------------------------------- +3 |def ba[A](x: A)[B](using B): B = summon[B] // error: clauseInterleaving was experimental until 3.6 + | ^ + | '=' expected, but '[' found diff --git a/tests/neg/interleavingExperimental.scala b/tests/neg/interleavingExperimental.scala new file mode 100644 index 000000000000..ed13707fcb68 --- /dev/null +++ b/tests/neg/interleavingExperimental.scala @@ -0,0 +1,3 @@ +//> using options --source 3.5 + +def ba[A](x: A)[B](using B): B = summon[B] // error: clauseInterleaving was experimental until 3.6 diff --git a/tests/neg/leak-problem-unboxed.scala b/tests/neg/leak-problem-unboxed.scala new file mode 100644 index 000000000000..7de3d84bfcca --- /dev/null +++ b/tests/neg/leak-problem-unboxed.scala @@ -0,0 +1,32 @@ +import language.experimental.captureChecking +import caps.unbox + +// Some capabilities that should be used locally +trait Async: + // some method + def read(): Unit +def usingAsync[X](op: Async^ => X): X = ??? + +case class Box[+T](get: T) + +def useBoxedAsync(@unbox x: Box[Async^]): Unit = + val t0 = x + val t1 = t0.get // ok + t1.read() + +def useBoxedAsync1(@unbox x: Box[Async^]): Unit = x.get.read() // ok + +def test(): Unit = + + val f: Box[Async^] => Unit = (x: Box[Async^]) => useBoxedAsync(x) // error + val _: Box[Async^] => Unit = useBoxedAsync(_) // error + val _: Box[Async^] => Unit = useBoxedAsync // error + val _ = useBoxedAsync(_) // error + val _ = useBoxedAsync // error + + def boom(x: Async^): () ->{f} Unit = + () => f(Box(x)) + + val leaked = usingAsync[() ->{f} Unit](boom) + + leaked() // scope violation \ No newline at end of file diff --git a/tests/neg/leak-problem.scala b/tests/neg/leak-problem.scala new file mode 100644 index 000000000000..354d54d86707 --- /dev/null +++ b/tests/neg/leak-problem.scala @@ -0,0 +1,31 @@ +import language.experimental.captureChecking + +// Some capabilities that should be used locally +trait Async: + // some method + def read(): Unit +def usingAsync[X](op: Async^ => X): X = ??? 
+ +case class Box[+T](get: T) + +def useBoxedAsync(x: Box[Async^]): Unit = + val t0 = x + val t1 = t0.get // error + t1.read() + +def useBoxedAsync1(x: Box[Async^]): Unit = x.get.read() // error + +def test(): Unit = + val useBoxedAsync2 = (x: Box[Async^]) => + val t0 = x + val t1 = x.get // error + t1.read() + + val f: Box[Async^] => Unit = (x: Box[Async^]) => useBoxedAsync(x) + + def boom(x: Async^): () ->{f} Unit = + () => f(Box(x)) + + val leaked = usingAsync[() ->{f} Unit](boom) + + leaked() // scope violation \ No newline at end of file diff --git a/tests/neg/looping-givens.check b/tests/neg/looping-givens.check deleted file mode 100644 index 1e7ee08d79df..000000000000 --- a/tests/neg/looping-givens.check +++ /dev/null @@ -1,48 +0,0 @@ --- Error: tests/neg/looping-givens.scala:9:22 -------------------------------------------------------------------------- -9 | given aa: A = summon // error - | ^ - | Result of implicit search for T will change. - | Current result ab will be no longer eligible - | because it is not defined before the search position. - | Result with new rules: a. - | To opt into the new rules, compile with `-source future` or use - | the `scala.language.future` language import. - | - | To fix the problem without the language import, you could try one of the following: - | - use a `given ... with` clause as the enclosing given, - | - rearrange definitions so that ab comes earlier, - | - use an explicit argument. - | - | where: T is a type variable with constraint <: A --- Error: tests/neg/looping-givens.scala:10:22 ------------------------------------------------------------------------- -10 | given bb: B = summon // error - | ^ - | Result of implicit search for T will change. - | Current result ab will be no longer eligible - | because it is not defined before the search position. - | Result with new rules: b. - | To opt into the new rules, compile with `-source future` or use - | the `scala.language.future` language import. - | - | To fix the problem without the language import, you could try one of the following: - | - use a `given ... with` clause as the enclosing given, - | - rearrange definitions so that ab comes earlier, - | - use an explicit argument. - | - | where: T is a type variable with constraint <: B --- Error: tests/neg/looping-givens.scala:11:28 ------------------------------------------------------------------------- -11 | given ab: (A & B) = summon // error - | ^ - | Result of implicit search for T will change. - | Current result ab will be no longer eligible - | because it is not defined before the search position. - | Result with new rules: Search Failure: joint(ab, ab). - | To opt into the new rules, compile with `-source future` or use - | the `scala.language.future` language import. - | - | To fix the problem without the language import, you could try one of the following: - | - use a `given ... with` clause as the enclosing given, - | - rearrange definitions so that ab comes earlier, - | - use an explicit argument. - | - | where: T is a type variable with constraint <: A & B diff --git a/tests/neg/looping-givens.scala b/tests/neg/looping-givens.scala deleted file mode 100644 index 57dc95f99aab..000000000000 --- a/tests/neg/looping-givens.scala +++ /dev/null @@ -1,11 +0,0 @@ -//> options -source 3.4 - -class A -class B - -given joint(using a: A, b: B): (A & B) = ??? 
- -def foo(using a: A, b: B) = - given aa: A = summon // error - given bb: B = summon // error - given ab: (A & B) = summon // error diff --git a/tests/neg/main-functions-nameclash.scala b/tests/neg/main-functions-nameclash.scala new file mode 100644 index 000000000000..23a530e28271 --- /dev/null +++ b/tests/neg/main-functions-nameclash.scala @@ -0,0 +1,3 @@ +object foo { + @main def foo(x: Int) = () // error: class foo and object foo produce classes that overwrite one another +} diff --git a/tests/neg/migrate-once.scala b/tests/neg/migrate-once.scala new file mode 100644 index 000000000000..da5b76e4fb8c --- /dev/null +++ b/tests/neg/migrate-once.scala @@ -0,0 +1,5 @@ +//> using options -source:3.5-migration + +object Test: + for Some(x) <- Seq(Option(1)) yield x // error + // was warn before changes, but should warn only until 3.4-migration diff --git a/tests/neg/missing-implicit6.check b/tests/neg/missing-implicit6.check index 8c4cb331808b..7d6e16c0ec93 100644 --- a/tests/neg/missing-implicit6.check +++ b/tests/neg/missing-implicit6.check @@ -1,9 +1,9 @@ --- [E008] Not Found Error: tests/neg/missing-implicit6.scala:34:8 ------------------------------------------------------ -34 | "a".xxx // error, no suggested import +-- [E008] Not Found Error: tests/neg/missing-implicit6.scala:32:8 ------------------------------------------------------ +32 | "a".xxx // error, no suggested import | ^^^^^^^ | value xxx is not a member of String --- [E008] Not Found Error: tests/neg/missing-implicit6.scala:35:8 ------------------------------------------------------ -35 | 123.xxx // error, suggested import +-- [E008] Not Found Error: tests/neg/missing-implicit6.scala:33:8 ------------------------------------------------------ +33 | 123.xxx // error, suggested import | ^^^^^^^ | value xxx is not a member of Int, but could be made available as an extension method. | @@ -11,8 +11,8 @@ | | import Test.Ops.xxx | --- [E008] Not Found Error: tests/neg/missing-implicit6.scala:36:8 ------------------------------------------------------ -36 | 123.yyy // error, suggested import +-- [E008] Not Found Error: tests/neg/missing-implicit6.scala:34:8 ------------------------------------------------------ +34 | 123.yyy // error, suggested import | ^^^^^^^ | value yyy is not a member of Int, but could be made available as an extension method. 
| @@ -20,16 +20,16 @@ | | import Test.Ops.yyy | --- [E008] Not Found Error: tests/neg/missing-implicit6.scala:41:8 ------------------------------------------------------ -41 | 123.xxx // error, no suggested import +-- [E008] Not Found Error: tests/neg/missing-implicit6.scala:39:8 ------------------------------------------------------ +39 | 123.xxx // error, no suggested import | ^^^^^^^ | value xxx is not a member of Int --- [E008] Not Found Error: tests/neg/missing-implicit6.scala:42:8 ------------------------------------------------------ -42 | 123.yyy // error, no suggested import +-- [E008] Not Found Error: tests/neg/missing-implicit6.scala:40:8 ------------------------------------------------------ +40 | 123.yyy // error, no suggested import | ^^^^^^^ | value yyy is not a member of Int --- [E008] Not Found Error: tests/neg/missing-implicit6.scala:43:8 ------------------------------------------------------ -43 | 123.zzz // error, suggested import even though there's no instance of Bar in scope +-- [E008] Not Found Error: tests/neg/missing-implicit6.scala:41:8 ------------------------------------------------------ +41 | 123.zzz // error, suggested import even though there's no instance of Bar in scope | ^^^^^^^ | value zzz is not a member of Int, but could be made available as an extension method. | diff --git a/tests/neg/missing-implicit6.scala b/tests/neg/missing-implicit6.scala index ded6e5ba8fed..874ae77bb50f 100644 --- a/tests/neg/missing-implicit6.scala +++ b/tests/neg/missing-implicit6.scala @@ -7,13 +7,11 @@ trait Bar { } object instances { - given foo: Foo with { + given foo: Foo: type Out = Bar - } - given bar: Bar with { + given bar: Bar: type Out = Int - } } object Test { diff --git a/tests/neg/mt-deskolemize-2.scala b/tests/neg/mt-deskolemize-2.scala index 90d506a42e6f..505e47637ac4 100644 --- a/tests/neg/mt-deskolemize-2.scala +++ b/tests/neg/mt-deskolemize-2.scala @@ -1,5 +1,3 @@ -//> using options -language:experimental.betterMatchTypeExtractors - trait Expr: type Value object Expr: diff --git a/tests/neg/named-tuple-selectable.scala b/tests/neg/named-tuple-selectable.scala new file mode 100644 index 000000000000..5cf7e68654ef --- /dev/null +++ b/tests/neg/named-tuple-selectable.scala @@ -0,0 +1,29 @@ +import scala.language.experimental.namedTuples + +class FromFields extends Selectable: + type Fields = (i: Int) + def selectDynamic(key: String) = + List(1, 2, 3) + +trait FromRefs extends Selectable: + def selectDynamic(key: String) = + List(1, 2, 3) + +def test( + fromFlds: FromFields, + fromRefs: FromRefs { val i: Int } +): Unit = + fromFlds.i(0) // error + fromRefs.i(0) // error + + fromFlds.i.apply(0) // error + fromRefs.i.apply(0) // error + + fromFlds.i[Int](List(1)) // error + fromRefs.i[Int](List(1)) // error + + fromFlds.i(List(1)) // error + fromRefs.i(List(1)) // error + + fromFlds.i.apply(List(1)) // error + fromRefs.i.apply(List(1)) // error diff --git a/tests/neg/namedTypeParams.check b/tests/neg/namedTypeParams.check index 5e0672f20f25..f203f482d117 100644 --- a/tests/neg/namedTypeParams.check +++ b/tests/neg/namedTypeParams.check @@ -92,11 +92,11 @@ | illegal repeated type application | You might have meant something like: | Test.f[Y = String, Int] --- [E102] Syntax Error: tests/neg/namedTypeParams.scala:33:9 ----------------------------------------------------------- -33 | f2[Y = String][X = Int](1, "") // error: Y is undefined +-- [E102] Syntax Error: tests/neg/namedTypeParams.scala:32:9 ----------------------------------------------------------- +32 | 
f2[Y = String][X = Int](1, "") // error: Y is undefined | ^^^^^^ | Type parameter Y is undefined. Expected one of X. --- [E102] Syntax Error: tests/neg/namedTypeParams.scala:34:9 ----------------------------------------------------------- -34 | f2[Y = String](1, "") // error: Y is undefined +-- [E102] Syntax Error: tests/neg/namedTypeParams.scala:33:9 ----------------------------------------------------------- +33 | f2[Y = String](1, "") // error: Y is undefined | ^^^^^^ | Type parameter Y is undefined. Expected one of X. diff --git a/tests/neg/namedTypeParams.scala b/tests/neg/namedTypeParams.scala index 53ef14188e12..489ac1e8cdb6 100644 --- a/tests/neg/namedTypeParams.scala +++ b/tests/neg/namedTypeParams.scala @@ -27,7 +27,6 @@ object Test: object TestInterleaving: import language.experimental.namedTypeArguments - import language.experimental.clauseInterleaving def f2[X](using DummyImplicit)[Y](x: X, y: Y): Int = ??? f2[Y = String][X = Int](1, "") // error: Y is undefined diff --git a/tests/neg/overrides.scala b/tests/neg/overrides.scala index 8016f5646d09..c8f577103a6a 100644 --- a/tests/neg/overrides.scala +++ b/tests/neg/overrides.scala @@ -44,8 +44,6 @@ class A[T] { def next: T = ??? - import scala.language.experimental.clauseInterleaving - def b[U <: T](x: Int)[V >: T](y: String) = false } @@ -57,8 +55,6 @@ class B extends A[Int] { override def next(): Int = ??? // error: incompatible type - import scala.language.experimental.clauseInterleaving - override def b[T <: Int](x: Int)(y: String) = true // error } @@ -68,8 +64,6 @@ class C extends A[String] { override def next: Int = ??? // error: incompatible type - import scala.language.experimental.clauseInterleaving - override def b[T <: String](x: Int)[U >: Int](y: String) = true // error: incompatible type } diff --git a/tests/neg/parser-stability-1.scala b/tests/neg/parser-stability-1.scala index 661ab87e31e5..560b9cf116e3 100644 --- a/tests/neg/parser-stability-1.scala +++ b/tests/neg/parser-stability-1.scala @@ -1,4 +1,3 @@ object x0 { x1 match // error def this // error -// error \ No newline at end of file diff --git a/tests/neg/runtimeChecked-2.check b/tests/neg/runtimeChecked-2.check new file mode 100644 index 000000000000..1b30d637a6b9 --- /dev/null +++ b/tests/neg/runtimeChecked-2.check @@ -0,0 +1,5 @@ +-- [E030] Match case Unreachable Warning: tests/neg/runtimeChecked-2.scala:10:11 --------------------------------------- +10 | case is: Some[t] => ??? // unreachable + | ^^^^^^^^^^^ + | Unreachable case +No warnings can be incurred under -Werror (or -Xfatal-warnings) diff --git a/tests/neg/runtimeChecked-2.scala b/tests/neg/runtimeChecked-2.scala new file mode 100644 index 000000000000..bfb5aff2b1ba --- /dev/null +++ b/tests/neg/runtimeChecked-2.scala @@ -0,0 +1,13 @@ +//> using options -Werror -source:future -experimental + +object Foo { + + val xs: Option[Int] = Some(1) + + def test: Int = + xs.runtimeChecked match { // this test asserts that reachability is not avoided by runtimeChecked + case is: Some[t] => is.get + case is: Some[t] => ??? 
// unreachable + } +} +// nopos-error: No warnings can be incurred under -Werror (or -Xfatal-warnings) diff --git a/tests/neg/runtimeChecked.check b/tests/neg/runtimeChecked.check new file mode 100644 index 000000000000..3d984e08517d --- /dev/null +++ b/tests/neg/runtimeChecked.check @@ -0,0 +1,7 @@ +-- [E092] Pattern Match Unchecked Warning: tests/neg/runtimeChecked.scala:11:11 ---------------------------------------- +11 | case is: ::[Int/* can not be checked so still err */] => is.head + | ^ + |the type test for ::[Int] cannot be checked at runtime because its type arguments can't be determined from List[Any] + | + | longer explanation available when compiling with `-explain` +No warnings can be incurred under -Werror (or -Xfatal-warnings) diff --git a/tests/neg/runtimeChecked.scala b/tests/neg/runtimeChecked.scala new file mode 100644 index 000000000000..d3c1a91844cc --- /dev/null +++ b/tests/neg/runtimeChecked.scala @@ -0,0 +1,14 @@ +//> using options -Werror -source:future -experimental + +object Foo { + + val xs: List[Any] = List(1: Any) + + def test: Int = + xs.runtimeChecked match { // this test asserts that unsound type tests still require @unchecked + // tests/run/runtimeChecked.scala adds @unchecked to the + // unsound type test to avoid the warning. + case is: ::[Int/* can not be checked so still err */] => is.head + } +} +// nopos-error: No warnings can be incurred under -Werror (or -Xfatal-warnings) diff --git a/tests/neg/scala-uri.check b/tests/neg/scala-uri.check index b6d52d6fffd0..91bcd7ab6a6c 100644 --- a/tests/neg/scala-uri.check +++ b/tests/neg/scala-uri.check @@ -1,5 +1,5 @@ --- [E172] Type Error: tests/neg/scala-uri.scala:31:59 ------------------------------------------------------------------ -31 |@main def Test = summon[QueryKeyValue[(String, None.type)]] // error +-- [E172] Type Error: tests/neg/scala-uri.scala:30:59 ------------------------------------------------------------------ +30 |@main def Test = summon[QueryKeyValue[(String, None.type)]] // error | ^ |No best given instance of type QueryKeyValue[(String, None.type)] was found for parameter x of method summon in object Predef. 
|I found: diff --git a/tests/neg/scala-uri.scala b/tests/neg/scala-uri.scala index f3bff269234f..3820f8cf5613 100644 --- a/tests/neg/scala-uri.scala +++ b/tests/neg/scala-uri.scala @@ -1,4 +1,3 @@ -//> using options -source:3.6 import scala.language.implicitConversions trait QueryKey[A] diff --git a/tests/neg/struct-given.scala b/tests/neg/struct-given.scala index 9bcd1630d448..13e0d98795fa 100644 --- a/tests/neg/struct-given.scala +++ b/tests/neg/struct-given.scala @@ -1,5 +1,5 @@ class C -given c[T]: C with +given c: [T] => C: def foo = 1 given d[T]: C = new C { def foo = 1 } diff --git a/tests/neg/tracked.check b/tests/neg/tracked.check index ae734e7aa0b4..14a4d2a08300 100644 --- a/tests/neg/tracked.check +++ b/tests/neg/tracked.check @@ -22,10 +22,10 @@ 17 | tracked type T = Int // error // error | ^^^^ | end of statement expected but 'type' found --- Error: tests/neg/tracked.scala:20:29 -------------------------------------------------------------------------------- -20 | given g2(using tracked val x: Int): C = C(x) // error - | ^^^^^^^^^^^^^^^^^^ - | method parameter x may not be a `val` +-- Error: tests/neg/tracked.scala:20:25 -------------------------------------------------------------------------------- +20 | given g2: (tracked val x: Int) => C = C(x) // error + | ^^^^^^^^^^^^^^^^^^ + | method parameter x may not be a `val` -- Error: tests/neg/tracked.scala:4:21 --------------------------------------------------------------------------------- 4 |class C2(tracked var x: Int) // error | ^ diff --git a/tests/neg/tracked.scala b/tests/neg/tracked.scala index 8d315a7b89ac..9f874ca3c0da 100644 --- a/tests/neg/tracked.scala +++ b/tests/neg/tracked.scala @@ -17,4 +17,4 @@ object D: tracked type T = Int // error // error object E: - given g2(using tracked val x: Int): C = C(x) // error + given g2: (tracked val x: Int) => C = C(x) // error diff --git a/tests/neg/tuple-filter-compat.scala b/tests/neg/tuple-filter-compat.scala new file mode 100644 index 000000000000..f50837fc1d4b --- /dev/null +++ b/tests/neg/tuple-filter-compat.scala @@ -0,0 +1,12 @@ + +type OldFilter[Tup <: Tuple, P[_] <: Boolean] = Nothing +type NewFilter[Tup <: Tuple, P[_ <: Tuple.Union[Tup]] <: Boolean] = Nothing + +trait A: + type X >: OldFilter <: OldFilter + +trait B1 extends A: + type X = OldFilter // ok + +trait B2 extends A: + type X = NewFilter // error: breaking change diff --git a/tests/neg/typeclass-encoding3.scala b/tests/neg/typeclass-encoding3.scala deleted file mode 100644 index ff403314cd1a..000000000000 --- a/tests/neg/typeclass-encoding3.scala +++ /dev/null @@ -1,349 +0,0 @@ -/** 1. Simple type classes with monomorphic implementations and direct extensions. 
- - trait SemiGroup extends TypeClass { - def add(that: This): This - } - - trait Monoid extends SemiGroup - common { - def unit: This - } - - extension IntOps for Int : Monoid { - def add(that: Int) = this + that - } - common { - def unit = 0 - } - - extension StringOps for String : Monoid { - def add(that: Int) = this ++ that - } - common { - def unit = "" - } - - enum Nat extends Monoid { - case Z - case S(n: Nat) - - def add(that: Nat): Nat = this match { - case S => that - case S(n) => S(n.add(that)) - } - } - common { - def unit = Z - } - - def sum[T: Monoid](xs: List[T]): T = - xs.foldLeft(Monod.impl[T].unit)(_ `add` _) -*/ -object runtime { - - trait TypeClass { - val commons: TypeClassCommon - type This = commons.This - } - - trait TypeClassCommon { self => - type This - type Instance <: TypeClass - def inject(x: This): Instance { val commons: self.type } - } - - trait TypeClassCompanion { - type Impl[T] <: TypeClassCommon { type This = T } - def impl[T](implicit ev: Impl[T]): Impl[T] = ev - } - - implicit def inject[From](x: From) - (implicit ev: TypeClassCommon { type This = From }): ev.Instance { type This = From } = - ev.inject(x) -} -import runtime.* - -object semiGroups { - - trait SemiGroup extends TypeClass { - val commons: SemiGroupCommon - import commons.* - def add(that: This): This - } - trait SemiGroupCommon extends TypeClassCommon { - type Instance <: SemiGroup - } - object SemiGroup extends TypeClassCompanion { - type Impl[T] = SemiGroupCommon { type This = T } - } - - trait Monoid extends SemiGroup { - val commons: MonoidCommon - import commons.* - } - trait MonoidCommon extends SemiGroupCommon { - type Instance <: Monoid - def unit: This - } - object Monoid extends TypeClassCompanion { - type Impl[T] = MonoidCommon { type This = T } - } - - implicit object IntOps extends MonoidCommon { - type This = Int - type Instance = Monoid - def unit: Int = 0 - def inject($this: Int) = new Monoid { - val commons: IntOps.this.type = IntOps.this - def add(that: this.This): this.This = $this + that - } - } - - implicit object StringOps extends MonoidCommon { - type This = String - type Instance = Monoid - def unit = "" - def inject($this: String) = new Monoid { - val commons: StringOps.this.type = StringOps.this - def add(that: this.This): this.This = $this.concat(that) - } - } - - enum Nat extends Monoid { - case Z - case S(n: Nat) - - def add(that: Nat): Nat = this match { - case Z => that - case S(n) => S(n.add(that)) - } - - val commons: Nat.type = Nat - } - object Nat extends MonoidCommon { - type This = Nat - type Instance = Nat - def unit = Nat.Z - def inject($this: Nat) = $this - } - import Nat.{Z, S} - - implicit def NatOps: Nat.type = Nat - - def sum[T](xs: List[T])(implicit ev: Monoid.Impl[T]) = - xs.foldLeft(Monoid.impl[T].unit)((x, y) => x `add` y) - - sum(List(1, 2, 3)) - sum(List("hello ", "world!")) - sum(List(Z, S(Z), S(S(Z)))) -} - -/** 2. Generic implementations of simple type classes. 
- - trait Ord extends TypeClass { - def compareTo(that: This): Int - def < (that: This) = compareTo(that) < 0 - def > (that: This) = compareTo(that) > 0 - } - common { - val minimum: This - } - - extension IntOrd for Int : Ord { - def compareTo(that: Int) = - if (this < that) -1 else if (this > that) +1 else 0 - } - common { - val minimum = Int.MinValue - } - - extension ListOrd[T : Ord] for List[T] : Ord { - def compareTo(that: List[T]): Int = (this, that) match { - case (Nil, Nil) => 0 - case (Nil, _) => -1 - case (_, Nil) => +1 - case (x :: xs, y :: ys) => - val fst = x.compareTo(y) - if (fst != 0) fst else xs.compareTo(ys) - } - } - common { - val minimum = Nil - } - - def min[T: Ord](x: T, y: T) = if (x < y) x else y - - def inf[T: Ord](xs: List[T]): T = (Ord.impl[T].minimum /: xs)(min) -*/ -object ord { - - trait Ord extends TypeClass { - val commons: OrdCommon - import commons.* - def compareTo(that: This): Int - def < (that: This) = compareTo(that) < 0 - def > (that: This) = compareTo(that) > 0 - } - trait OrdCommon extends TypeClassCommon { - type Instance <: Ord - def minimum: This - } - object Ord extends TypeClassCompanion { - type Impl[T] = OrdCommon { type This = T } - } - - implicit object IntOrd extends OrdCommon { - type This = Int - type Instance = Ord - val minimum: Int = Int.MinValue - def inject($this: Int) = new Ord { - val commons: IntOrd.this.type = IntOrd.this - import commons.* - def compareTo(that: this.This): Int = - if (this < that) -1 else if (this > that) +1 else 0 - } - } - - class ListOrd[T](implicit ev: Ord.Impl[T]) extends OrdCommon { self => - type This = List[T] - type Instance = Ord - def minimum: List[T] = Nil - def inject($this: List[T]) = new Ord { - val commons: self.type = self - import commons.* - def compareTo(that: List[T]): Int = ($this, that) match { - case (Nil, Nil) => 0 - case (Nil, _) => -1 - case (_, Nil) => +1 - case (x :: xs, y :: ys) => - val fst = x.compareTo(y) - if (fst != 0) fst else xs.compareTo(ys) - } - } - } - - implicit def listOrd[T](implicit ev: Ord.Impl[T]): ListOrd[T] = - new ListOrd[T] - - def min[T](x: T, y: T)(implicit ev: Ord.Impl[T]): T = - if (x < y) x else y - - def inf[T](xs: List[T])(implicit ev: Ord.Impl[T]): T = { - val smallest = Ord.impl[T].minimum - xs.foldLeft(smallest)(min) - } - - inf(List[Int]()) - inf(List(List(1, 2), List(1, 2, 3))) - inf(List(List(List(1), List(2)), List(List(1), List(2), List(3)))) -} - -/** 3. 
Higher-kinded type classes - - trait Functor[A] extends TypeClass1 { - def map[B](f: A => B): This[B] - } - common { - def pure[A](x: A): This[A] - } - - // Generically, `pure[A]{.map(f)}^n` - def develop[A, F[X] : Functor[X]](n: Int, f: A => A): F[A] = - if (n == 0) Functor.impl[F].pure[A] - else develop[A, F](n - 1, f).map(f) - - trait Monad[A] extends Functor[A] { - def flatMap[B](f: A => This[B]): This[B] - def map[B](f: A => B) = this.flatMap(f.andThen(pure)) - } - - extension ListMonad[T] for List[T] : Monad[T] { - static def pure[A] = Nil - - def flatMap[B](f: A => List[B]): List[B] = this match { - case x :: xs => f(x) ++ xs.flatMap(f) - case Nil => Nil - } - } - - extension MonadFlatten[T[X]: Monad[X]] for T[T[A]] { - def flatten: T[A] = this.flatMap(identity) - } -*/ -object runtime1 { - - trait TypeClass1 { - val commons: TypeClassCommon1 - type This = [X] =>> commons.This[X] - } - - trait TypeClassCommon1 { self => - type This[X] - type Instance[X] <: TypeClass1 - def inject[A](x: This[A]): Instance[A] { val commons: self.type } - } - - trait TypeClassCompanion1 { - type Impl[T[_]] <: TypeClassCommon1 { type This = [X] =>> T[X] } - def impl[T[_]](implicit ev: Impl[T]): Impl[T] = ev - } - - implicit def inject1[A, From[_]](x: From[A]) - (implicit ev: TypeClassCommon1 { - type This = [X] =>> From[X] - }): ev.Instance[A] { type This = [X] =>> From[X] } = - ev.inject(x) -} -import runtime1.* - -object functors { - - trait Functor[A] extends TypeClass1 { - val commons: FunctorCommon - import commons.* - def map[B](f: A => B): This[B] - } - trait FunctorCommon extends TypeClassCommon1 { - type Instance[X] <: Functor[X] - def pure[A](x: A): This[A] - } - object Functor extends TypeClassCompanion1 { - type Impl[T[_]] = FunctorCommon { type This = [X] =>> T[X] } - } - - trait Monad[A] extends Functor[A] { - val commons: MonadCommon - import commons.* - def flatMap[B](f: A => This[B]): This[B] - def map[B](f: A => B) = this.flatMap(f.andThen(commons.pure)) - } - trait MonadCommon extends FunctorCommon { - type Instance[X] <: Monad[X] - } - object Monad extends TypeClassCompanion1 { - type Impl[T[_]] = MonadCommon { type This = [X] =>> T[X] } - } - - def develop[A, F[X]](n: Int, x: A, f: A => A)(implicit ev: Functor.Impl[F]): F[A] = - if (n == 0) Functor.impl[F].pure(x) - else develop(n - 1, x, f).map(f).asInstanceOf - - implicit object ListMonad extends MonadCommon { - type This[+X] = List[X] - type Instance[X] = Monad[X] - def pure[A](x: A) = x :: Nil - def inject[A]($this: List[A]) = new Monad[A] { - val commons: ListMonad.this.type = ListMonad - import commons.* - def flatMap[B](f: A => List[B]): List[B] = $this.flatMap(f) - } - } - - object MonadFlatten { - def flattened[T[_], A]($this: T[T[A]])(implicit ev: Monad.Impl[T]): T[A] = - ??? 
// $this.flatMap[A](identity) disabled since it does not typecheck - } - - MonadFlatten.flattened(List(List(1, 2, 3), List(4, 5))) // ok, synthesizes (using ListMonad) - MonadFlatten.flattened(List(List(1, 2, 3), List(4, 5)))(using ListMonad) // error -} \ No newline at end of file diff --git a/tests/neg/unsound-reach-4.check b/tests/neg/unsound-reach-4.check deleted file mode 100644 index 47256baf408a..000000000000 --- a/tests/neg/unsound-reach-4.check +++ /dev/null @@ -1,5 +0,0 @@ --- Error: tests/neg/unsound-reach-4.scala:20:19 ------------------------------------------------------------------------ -20 | escaped = boom.use(f) // error - | ^^^^^^^^ - | Reach capability backdoor* and universal capability cap cannot both - | appear in the type (x: F): box File^{backdoor*} of this expression diff --git a/tests/neg/unsound-reach-4.scala b/tests/neg/unsound-reach-4.scala deleted file mode 100644 index fa395fa117ca..000000000000 --- a/tests/neg/unsound-reach-4.scala +++ /dev/null @@ -1,20 +0,0 @@ -import language.experimental.captureChecking -trait File: - def close(): Unit - -def withFile[R](path: String)(op: File^ => R): R = ??? - -type F = File^ - -trait Foo[+X]: - def use(x: F): X -class Bar extends Foo[File^]: - def use(x: F): File^ = x - -def bad(): Unit = - val backdoor: Foo[File^] = new Bar - val boom: Foo[File^{backdoor*}] = backdoor - - var escaped: File^{backdoor*} = null - withFile("hello.txt"): f => - escaped = boom.use(f) // error diff --git a/tests/neg/unsound-reach.check b/tests/neg/unsound-reach.check deleted file mode 100644 index fd5c401416d1..000000000000 --- a/tests/neg/unsound-reach.check +++ /dev/null @@ -1,5 +0,0 @@ --- Error: tests/neg/unsound-reach.scala:18:9 --------------------------------------------------------------------------- -18 | boom.use(f): (f1: File^{backdoor*}) => // error - | ^^^^^^^^ - | Reach capability backdoor* and universal capability cap cannot both - | appear in the type (x: File^)(op: box File^{backdoor*} => Unit): Unit of this expression diff --git a/tests/neg/unsound-reach.scala b/tests/neg/unsound-reach.scala deleted file mode 100644 index 468730168019..000000000000 --- a/tests/neg/unsound-reach.scala +++ /dev/null @@ -1,20 +0,0 @@ -import language.experimental.captureChecking -trait File: - def close(): Unit - -def withFile[R](path: String)(op: File^ => R): R = ??? 
- -trait Foo[+X]: - def use(x: File^)(op: X => Unit): Unit -class Bar extends Foo[File^]: - def use(x: File^)(op: File^ => Unit): Unit = op(x) - -def bad(): Unit = - val backdoor: Foo[File^] = new Bar - val boom: Foo[File^{backdoor*}] = backdoor - - var escaped: File^{backdoor*} = null - withFile("hello.txt"): f => - boom.use(f): (f1: File^{backdoor*}) => // error - escaped = f1 - diff --git a/tests/new/test.scala b/tests/new/test.scala index 16a823547553..18644422ab06 100644 --- a/tests/new/test.scala +++ b/tests/new/test.scala @@ -2,8 +2,9 @@ import language.experimental.namedTuples type Person = (name: String, age: Int) -def test = - val bob = (name = "Bob", age = 33): (name: String, age: Int) +trait A: + type T + +class B: + type U =:= A { type T = U } - val silly = bob match - case (name = n, age = a) => n.length + a diff --git a/tests/old-tasty-interpreter-prototype/interpreter/TreeInterpreter.scala b/tests/old-tasty-interpreter-prototype/interpreter/TreeInterpreter.scala index a76379e22313..7d43463cd569 100644 --- a/tests/old-tasty-interpreter-prototype/interpreter/TreeInterpreter.scala +++ b/tests/old-tasty-interpreter-prototype/interpreter/TreeInterpreter.scala @@ -146,7 +146,7 @@ abstract class TreeInterpreter[Q <: Quotes & Singleton](using val q: Q) { } case Assign(lhs, rhs) => - log("", tree)(localValue(lhs.symbol).update(eval(rhs))) + log("", tree)(localValue(lhs.symbol).update(eval(rhs))) case If(cond, thenp, elsep) => log("interpretIf", tree)(interpretIf(cond, thenp, elsep)) case While(cond, body) => log("interpretWhile", tree)(interpretWhile(cond, body)) diff --git a/tests/patmat/i13931.scala b/tests/patmat/i13931.scala index 0d8d9eb9dcd3..562f059771c1 100644 --- a/tests/patmat/i13931.scala +++ b/tests/patmat/i13931.scala @@ -3,5 +3,5 @@ class Test: case Seq() => println("empty") case _ => println("non-empty") - def test2 = IndexedSeq() match { case IndexedSeq() => case _ => } + def test2 = IndexedSeq() match { case IndexedSeq() => case null => } def test3 = IndexedSeq() match { case IndexedSeq(1) => case _ => } diff --git a/tests/patmat/i6088.scala b/tests/patmat/i6088.scala index 8d8f676c0101..c88ae35a0c2f 100644 --- a/tests/patmat/i6088.scala +++ b/tests/patmat/i6088.scala @@ -17,7 +17,7 @@ enum ExprF[R[_],I] { /** Companion. 
*/ object ExprF { - given hfunctor: HFunctor[ExprF] with { + given hfunctor: HFunctor[ExprF] { def hmap[A[_], B[_]](nt: A ~> B): ([x] =>> ExprF[A,x]) ~> ([x] =>> ExprF[B,x]) = { new ~>[[x] =>> ExprF[A,x], [x] =>> ExprF[B,x]] { def apply[I](fa: ExprF[A,I]): ExprF[B,I] = fa match { diff --git a/tests/patmat/null.check b/tests/patmat/null.check index d9c265adf377..3b860ddfd850 100644 --- a/tests/patmat/null.check +++ b/tests/patmat/null.check @@ -1,3 +1,4 @@ -6: Pattern Match +6: Match case Unreachable 13: Pattern Match 20: Pattern Match +21: Match case Unreachable diff --git a/tests/patmat/null.scala b/tests/patmat/null.scala index b918109c0cc5..9cff29a5c4e8 100644 --- a/tests/patmat/null.scala +++ b/tests/patmat/null.scala @@ -18,5 +18,6 @@ class Test { case Some(null) => case None => case y => + case _ => } } \ No newline at end of file diff --git a/tests/plugins/run/scriptWrapper/Framework_1.scala b/tests/plugins/run/scriptWrapper/Framework_1.scala new file mode 100644 index 000000000000..c8a15de8342b --- /dev/null +++ b/tests/plugins/run/scriptWrapper/Framework_1.scala @@ -0,0 +1,3 @@ +package framework + +class entrypoint extends scala.annotation.Annotation diff --git a/tests/plugins/run/scriptWrapper/LineNumberPlugin_1.scala b/tests/plugins/run/scriptWrapper/LineNumberPlugin_1.scala new file mode 100644 index 000000000000..888d5f95838d --- /dev/null +++ b/tests/plugins/run/scriptWrapper/LineNumberPlugin_1.scala @@ -0,0 +1,68 @@ +package scriptWrapper + +import dotty.tools.dotc.* +import core.* +import Contexts.Context +import Contexts.ctx +import plugins.* +import ast.tpd +import util.SourceFile + +class LineNumberPlugin extends StandardPlugin { + val name: String = "linenumbers" + val description: String = "adjusts line numbers of script files" + + override def initialize(options: List[String])(using Context): List[PluginPhase] = FixLineNumbers() :: Nil +} + +// Loosely follows Mill linenumbers plugin (scan for marker with "original" source, adjust line numbers to match) +class FixLineNumbers extends PluginPhase { + + val codeMarker = "//USER_CODE_HERE" + + def phaseName: String = "fixLineNumbers" + override def runsAfter: Set[String] = Set("posttyper") + override def runsBefore: Set[String] = Set("pickler") + + override def transformUnit(tree: tpd.Tree)(using Context): tpd.Tree = { + val sourceContent = ctx.source.content() + val lines = new String(sourceContent).linesWithSeparators.toVector + val codeMarkerLine = lines.indexWhere(_.startsWith(codeMarker)) + + if codeMarkerLine < 0 then + tree + else + val adjustedFile = lines.collectFirst { + case s"//USER_SRC_FILE:./$file" => file.trim + }.getOrElse("") + + val adjustedSrc = ctx.source.file.container.lookupName(adjustedFile, directory = false) match + case null => + report.error(s"could not find file $adjustedFile", tree.sourcePos) + return tree + case file => + SourceFile(file, scala.io.Codec.UTF8) + + val userCodeOffset = ctx.source.lineToOffset(codeMarkerLine + 1) // lines.take(codeMarkerLine).map(_.length).sum + val lineMapper = LineMapper(codeMarkerLine, userCodeOffset, adjustedSrc) + lineMapper.transform(tree) + } + +} + +class LineMapper(markerLine: Int, userCodeOffset: Int, adjustedSrc: SourceFile) extends tpd.TreeMapWithPreciseStatContexts() { + + override def transform(tree: tpd.Tree)(using Context): tpd.Tree = { + val tree0 = super.transform(tree) + val pos = tree0.sourcePos + if pos.exists && pos.start >= userCodeOffset then + val tree1 = tree0.cloneIn(adjustedSrc).withSpan(pos.span.shift(-userCodeOffset)) + // if 
tree1.show.toString == "???" then + // val pos1 = tree1.sourcePos + // sys.error(s"rewrote ??? at ${pos1.source}:${pos1.line + 1}:${pos1.column + 1} (sourced from ${markerLine + 2})") + tree1 + else + tree0 + } + +} diff --git a/tests/plugins/run/scriptWrapper/Test_3.scala b/tests/plugins/run/scriptWrapper/Test_3.scala new file mode 100644 index 000000000000..341af27ee433 --- /dev/null +++ b/tests/plugins/run/scriptWrapper/Test_3.scala @@ -0,0 +1,25 @@ +@main def Test: Unit = { + val mainCls = Class.forName("foo_sc") + val mainMethod = mainCls.getMethod("main", classOf[Array[String]]) + val stackTrace: Array[String] = { + try + mainMethod.invoke(null, Array.empty[String]) + sys.error("Expected an exception") + catch + case e: java.lang.reflect.InvocationTargetException => + val cause = e.getCause + if cause != null then + cause.getStackTrace.map(_.toString) + else + throw e + } + + val expected = Set( + "foo_sc$.getRandom(foo_2.scala:3)", // adjusted line number (11 -> 3) + "foo_sc$.brokenRandom(foo_2.scala:5)", // adjusted line number (13 -> 5) + "foo_sc$.run(foo_2.scala:8)", // adjusted line number (16 -> 8) + ) + + val missing = expected -- stackTrace + assert(missing.isEmpty, s"Missing: $missing") +} diff --git a/tests/plugins/run/scriptWrapper/foo_2.scala b/tests/plugins/run/scriptWrapper/foo_2.scala new file mode 100644 index 000000000000..02e3f034e757 --- /dev/null +++ b/tests/plugins/run/scriptWrapper/foo_2.scala @@ -0,0 +1,18 @@ +// generated code +// script: foo.sc +object foo_sc { +def main(args: Array[String]): Unit = { + run // assume some macro generates this by scanning for @entrypoint +} +//USER_SRC_FILE:./foo_original_2.scala +//USER_CODE_HERE +import framework.* + +def getRandom: Int = brokenRandom // LINE 3; + +def brokenRandom: Int = ??? // LINE 5; + +@entrypoint +def run = println("Hello, here is a random number: " + getRandom) // LINE 8; +//END_USER_CODE_HERE +} diff --git a/tests/plugins/run/scriptWrapper/foo_original_2.scala b/tests/plugins/run/scriptWrapper/foo_original_2.scala new file mode 100644 index 000000000000..162ddd1724a1 --- /dev/null +++ b/tests/plugins/run/scriptWrapper/foo_original_2.scala @@ -0,0 +1,8 @@ +import framework.* + +def getRandom: Int = brokenRandom // LINE 3; + +def brokenRandom: Int = ??? // LINE 5; + +@entrypoint +def run = println("Hello, here is a random number: " + getRandom) // LINE 8; diff --git a/tests/plugins/run/scriptWrapper/plugin.properties b/tests/plugins/run/scriptWrapper/plugin.properties new file mode 100644 index 000000000000..f1fc6067e611 --- /dev/null +++ b/tests/plugins/run/scriptWrapper/plugin.properties @@ -0,0 +1 @@ +pluginClass=scriptWrapper.LineNumberPlugin diff --git a/tests/pos-custom-args/captures/Buffer.scala b/tests/pos-custom-args/captures/Buffer.scala new file mode 100644 index 000000000000..2412e5b388ca --- /dev/null +++ b/tests/pos-custom-args/captures/Buffer.scala @@ -0,0 +1,22 @@ +import language.experimental.captureChecking + +// Extract of the problem in collection.mutable.Buffers +trait Buffer[A]: + + def apply(i: Int): A = ??? + + def flatMapInPlace(f: A => IterableOnce[A]^): this.type = { + val g = f + val s = 10 + // capture checking: we need the copy since we box/unbox on g* on the next line + // TODO: This looks fishy, need to investigate + // Alternative would be to mark `f` as @unbox. It's not inferred + // since `^ appears in a function result, not under a box. 
+ val newElems = new Array[(IterableOnce[A]^{f})](s) + var i = 0 + while i < s do + val x = g(this(i)) + newElems(i) = x + i += 1 + this + } \ No newline at end of file diff --git a/tests/pos-custom-args/captures/boxed1.scala b/tests/pos-custom-args/captures/boxed1.scala index 8c6b63ef0134..e2ff69c305d2 100644 --- a/tests/pos-custom-args/captures/boxed1.scala +++ b/tests/pos-custom-args/captures/boxed1.scala @@ -1,6 +1,6 @@ class Box[T](val x: T) -@annotation.capability class Cap +class Cap extends caps.Capability def foo(x: => Int): Unit = () diff --git a/tests/pos-custom-args/captures/cap-problem.scala b/tests/pos-custom-args/captures/cap-problem.scala new file mode 100644 index 000000000000..483b4e938b1b --- /dev/null +++ b/tests/pos-custom-args/captures/cap-problem.scala @@ -0,0 +1,13 @@ +import language.experimental.captureChecking + +trait Suspend: + type Suspension + + def resume(s: Suspension): Unit + +import caps.Capability + +trait Async(val support: Suspend) extends Capability + +class CancelSuspension(ac: Async, suspension: ac.support.Suspension): + ac.support.resume(suspension) diff --git a/tests/pos-custom-args/captures/cap-refine.scala b/tests/pos-custom-args/captures/cap-refine.scala new file mode 100644 index 000000000000..ed0b4d018b88 --- /dev/null +++ b/tests/pos-custom-args/captures/cap-refine.scala @@ -0,0 +1,12 @@ +//> using options -Werror +import caps.Capability + +trait Buffer[T] extends Capability: + def append(x: T): this.type + +def f(buf: Buffer[Int]) = + val buf1 = buf.append(1).append(2) + val buf2: Buffer[Int]^{buf1} = buf1 + + + diff --git a/tests/pos-custom-args/captures/capt-capability.scala b/tests/pos-custom-args/captures/capt-capability.scala index 830d341c7bca..03b5cb1bbabf 100644 --- a/tests/pos-custom-args/captures/capt-capability.scala +++ b/tests/pos-custom-args/captures/capt-capability.scala @@ -1,7 +1,6 @@ -import annotation.capability +import caps.Capability -@capability class Cap -def f1(c: Cap): () ->{c} c.type = () => c // ok +def f1(c: Capability): () ->{c} c.type = () => c // ok def f2: Int = val g: Boolean => Int = ??? @@ -15,15 +14,15 @@ def f3: Int = x def foo() = - val x: Cap = ??? - val y: Cap = x - val x2: () ->{x} Cap = ??? - val y2: () ->{x} Cap = x2 + val x: Capability = ??? + val y: Capability = x + val x2: () ->{x} Capability = ??? 
+ val y2: () ->{x} Capability = x2 - val z1: () => Cap = f1(x) + val z1: () => Capability = f1(x) def h[X](a: X)(b: X) = a val z2 = - if x == null then () => x else () => Cap() + if x == null then () => x else () => new Capability() {} val _ = x diff --git a/tests/pos-custom-args/captures/capt-test.scala b/tests/pos-custom-args/captures/capt-test.scala index e229c685d846..49f199f106f1 100644 --- a/tests/pos-custom-args/captures/capt-test.scala +++ b/tests/pos-custom-args/captures/capt-test.scala @@ -36,3 +36,4 @@ def test(c: Cap, d: Cap) = val a4 = zs.map(identity) val a4c: LIST[Cap ->{d, y} Unit] = a4 + val a5: LIST[Cap ->{d, y} Unit] = zs.map(identity) diff --git a/tests/pos-custom-args/captures/caseclass.scala b/tests/pos-custom-args/captures/caseclass.scala index ffbf878dca49..fe7e02b1b6c2 100644 --- a/tests/pos-custom-args/captures/caseclass.scala +++ b/tests/pos-custom-args/captures/caseclass.scala @@ -1,4 +1,4 @@ -@annotation.capability class C +class C extends caps.Capability object test1: case class Ref(x: String^) @@ -31,4 +31,4 @@ object test2: val y4 = y3 match case Ref(xx) => xx - val y4c: () ->{x3} Unit = y4 + val y4c: () ->{y3} Unit = y4 diff --git a/tests/pos-custom-args/captures/casts.scala b/tests/pos-custom-args/captures/casts.scala new file mode 100644 index 000000000000..572b58d008f6 --- /dev/null +++ b/tests/pos-custom-args/captures/casts.scala @@ -0,0 +1,4 @@ +import language.experimental.captureChecking +def Test = + val x: Any = ??? + val y = x.asInstanceOf[Int => Int] diff --git a/tests/pos-custom-args/captures/cc-poly-varargs.scala b/tests/pos-custom-args/captures/cc-poly-varargs.scala new file mode 100644 index 000000000000..ac76c47d6dd5 --- /dev/null +++ b/tests/pos-custom-args/captures/cc-poly-varargs.scala @@ -0,0 +1,20 @@ +trait Cancellable + +abstract class Source[+T, Cap^] + +extension[T, Cap^](src: Source[T, Cap]^) + def transformValuesWith[U](f: (T -> U)^{Cap^}): Source[U, Cap]^{src, f} = ??? + +def race[T, Cap^](sources: Source[T, Cap]^{Cap^}*): Source[T, Cap]^{Cap^} = ??? + +def either[T1, T2, Cap^](src1: Source[T1, Cap]^{Cap^}, src2: Source[T2, Cap]^{Cap^}): Source[Either[T1, T2], Cap]^{Cap^} = + val left = src1.transformValuesWith(Left(_)) + val right = src2.transformValuesWith(Right(_)) + race(left, right) + + + + + + + diff --git a/tests/pos-custom-args/captures/cc-this.scala b/tests/pos-custom-args/captures/cc-this.scala index 12c62e99d186..d9705df76c55 100644 --- a/tests/pos-custom-args/captures/cc-this.scala +++ b/tests/pos-custom-args/captures/cc-this.scala @@ -1,4 +1,4 @@ -@annotation.capability class Cap +class Cap extends caps.Capability def eff(using Cap): Unit = () diff --git a/tests/pos-custom-args/captures/checkbounds.scala b/tests/pos-custom-args/captures/checkbounds.scala new file mode 100644 index 000000000000..f9cd76ce8b1a --- /dev/null +++ b/tests/pos-custom-args/captures/checkbounds.scala @@ -0,0 +1,22 @@ +trait Dsl: + + sealed trait Nat + case object Zero extends Nat + case class Succ[N <: Nat](n: N) extends Nat + + type Stable[+l <: Nat, +b <: Nat, +A] + type Now[+l <: Nat, +b <: Nat, +A] + type Box[+A] + def stable[l <: Nat, b <: Nat, A](e: Stable[l, b, A]): Now[l, b, Box[A]] + + def program[A](prog: Now[Zero.type, Zero.type, A]): Now[Zero.type, Zero.type, A] + + //val conforms: Zero.type <:< Nat = summon + // ^ need to uncomment this line to compile with captureChecking enabled + + def test: Any = + program[Box[Int]]: + val v : Stable[Zero.type, Zero.type, Int] = ??? 
+ stable[Zero.type, Zero.type, Int](v) +// ^ +// Type argument Dsl.this.Zero.type does not conform to upper bound Dsl.this.Nat \ No newline at end of file diff --git a/tests/pos-custom-args/captures/curried-closures.scala b/tests/pos-custom-args/captures/curried-closures.scala index 0ad729375b3c..262dd4b66b92 100644 --- a/tests/pos-custom-args/captures/curried-closures.scala +++ b/tests/pos-custom-args/captures/curried-closures.scala @@ -1,6 +1,7 @@ -//> using options -experimental +import annotation.experimental +import language.experimental.captureChecking -object Test: +@experimental object Test: def map2(xs: List[Int])(f: Int => Int): List[Int] = xs.map(f) val f1 = map2 val fc1: List[Int] -> (Int => Int) -> List[Int] = f1 diff --git a/tests/pos-custom-args/captures/dep-reach.scala b/tests/pos-custom-args/captures/dep-reach.scala index 56343fbf8e53..c81197aa738d 100644 --- a/tests/pos-custom-args/captures/dep-reach.scala +++ b/tests/pos-custom-args/captures/dep-reach.scala @@ -1,9 +1,10 @@ +import caps.unbox object Test: class C type Proc = () => Unit def f(c: C^, d: C^): () ->{c, d} Unit = - def foo(xs: Proc*): () ->{xs*} Unit = + def foo(@unbox xs: Proc*): () ->{xs*} Unit = xs.head val a: () ->{c} Unit = () => () val b: () ->{d} Unit = () => () @@ -12,7 +13,7 @@ object Test: def g(c: C^, d: C^): () ->{c, d} Unit = - def foo(xs: Seq[() => Unit]): () ->{xs*} Unit = + def foo(@unbox xs: Seq[() => Unit]): () ->{xs*} Unit = xs.head val a: () ->{c} Unit = () => () diff --git a/tests/pos-custom-args/captures/eta-expansions.scala b/tests/pos-custom-args/captures/eta-expansions.scala index 1aac7ded1b50..b4e38cdf0856 100644 --- a/tests/pos-custom-args/captures/eta-expansions.scala +++ b/tests/pos-custom-args/captures/eta-expansions.scala @@ -1,4 +1,4 @@ -@annotation.capability class Cap +class Cap extends caps.Capability def test(d: Cap) = def map2(xs: List[Int])(f: Int => Int): List[Int] = xs.map(f) diff --git a/tests/neg-custom-args/captures/extending-impure-function.scala b/tests/pos-custom-args/captures/extending-impure-function.scala similarity index 100% rename from tests/neg-custom-args/captures/extending-impure-function.scala rename to tests/pos-custom-args/captures/extending-impure-function.scala diff --git a/tests/pos-custom-args/captures/filevar-expanded.scala b/tests/pos-custom-args/captures/filevar-expanded.scala new file mode 100644 index 000000000000..58e7a0e67e0a --- /dev/null +++ b/tests/pos-custom-args/captures/filevar-expanded.scala @@ -0,0 +1,38 @@ +import language.experimental.captureChecking +import language.experimental.modularity +import compiletime.uninitialized + +object test1: + class File: + def write(x: String): Unit = ??? + + class Service(f: File^): + def log = f.write("log") + + def withFile[T](op: (f: File^) => T): T = + op(new File) + + def test = + withFile: f => + val o = Service(f) + o.log + +object test2: + class IO + + class File: + def write(x: String): Unit = ??? + + class Service(tracked val io: IO^): + var file: File^{io} = uninitialized + def log = file.write("log") + + def withFile[T](io2: IO^)(op: (f: File^{io2}) => T): T = + op(new File) + + def test(io3: IO^) = + withFile(io3): f => + val o = Service(io3) + o.file = f // this is a bit dubious. It's legal since we treat class refinements + // as capture set variables that can be made to include refs coming from outside. 
+ o.log diff --git a/tests/pos-custom-args/captures/filevar-tracked.scala b/tests/pos-custom-args/captures/filevar-tracked.scala new file mode 100644 index 000000000000..dc8d0b18908b --- /dev/null +++ b/tests/pos-custom-args/captures/filevar-tracked.scala @@ -0,0 +1,37 @@ +import language.experimental.captureChecking +import language.experimental.modularity +import compiletime.uninitialized + +object test1: + class File: + def write(x: String): Unit = ??? + + class Service(f: File^): + def log = f.write("log") + + def withFile[T](op: (f: File^) => T): T = + op(new File) + + def test = + withFile: f => + val o = Service(f) + o.log + +object test2: + class IO extends caps.Capability + + class File: + def write(x: String): Unit = ??? + + class Service(tracked val io: IO): + var file: File^{io} = uninitialized + def log = file.write("log") + + def withFile[T](io2: IO)(op: (f: File^{io2}) => T): T = + op(new File) + + def test(io3: IO) = + withFile(io3): f => + val o = Service(io3) + o.file = f + o.log diff --git a/tests/pos-custom-args/captures/filevar.scala b/tests/pos-custom-args/captures/filevar.scala index a6cc7ca9ff47..dc8d0b18908b 100644 --- a/tests/pos-custom-args/captures/filevar.scala +++ b/tests/pos-custom-args/captures/filevar.scala @@ -1,5 +1,5 @@ import language.experimental.captureChecking -import annotation.capability +import language.experimental.modularity import compiletime.uninitialized object test1: @@ -18,20 +18,20 @@ object test1: o.log object test2: - @capability class IO + class IO extends caps.Capability class File: def write(x: String): Unit = ??? - class Service(io: IO): + class Service(tracked val io: IO): var file: File^{io} = uninitialized def log = file.write("log") - def withFile[T](io: IO)(op: (f: File^{io}) => T): T = + def withFile[T](io2: IO)(op: (f: File^{io2}) => T): T = op(new File) - def test(io: IO) = - withFile(io): f => - val o = Service(io) + def test(io3: IO) = + withFile(io3): f => + val o = Service(io3) o.file = f o.log diff --git a/tests/pos-custom-args/captures/i15749.scala b/tests/pos-custom-args/captures/i15749.scala index 0a552ae1a3c5..58274c7cc817 100644 --- a/tests/pos-custom-args/captures/i15749.scala +++ b/tests/pos-custom-args/captures/i15749.scala @@ -1,3 +1,5 @@ +//> using options -source 3.4 +// (to make sure we use the sealed policy) class Unit object unit extends Unit @@ -12,4 +14,4 @@ type BoxedLazyVal[T] = Foo[LazyVal[T]] def force[A](v: BoxedLazyVal[A]): A = // Γ ⊢ v.x : □ {cap} Unit -> A - v.x(unit) // was error: (unbox v.x)(unit), where (unbox v.x) should be untypable, now ok \ No newline at end of file + v.x(unit) // should be error: (unbox v.x)(unit), where (unbox v.x) should be untypable, now ok \ No newline at end of file diff --git a/tests/pos-custom-args/captures/i15923-cases.scala b/tests/pos-custom-args/captures/i15923-cases.scala index 7c5635f7b3dd..4b5a36f208ec 100644 --- a/tests/pos-custom-args/captures/i15923-cases.scala +++ b/tests/pos-custom-args/captures/i15923-cases.scala @@ -2,10 +2,6 @@ trait Cap { def use(): Int } type Id[X] = [T] -> (op: X => T) -> T def mkId[X](x: X): Id[X] = [T] => (op: X => T) => op(x) -def foo(x: Id[Cap^]) = { - x(_.use()) // was error, now OK -} - def bar(io: Cap^, x: Id[Cap^{io}]) = { x(_.use()) } diff --git a/tests/pos-custom-args/captures/i15925.scala b/tests/pos-custom-args/captures/i15925.scala index 63b6962ff9f8..1c448c7377c2 100644 --- a/tests/pos-custom-args/captures/i15925.scala +++ b/tests/pos-custom-args/captures/i15925.scala @@ -1,4 +1,5 @@ import 
language.experimental.captureChecking +import annotation.unchecked.uncheckedCaptures class Unit object u extends Unit @@ -6,8 +7,8 @@ object u extends Unit type Foo[X] = [T] -> (op: X => T) -> T type Lazy[X] = Unit => X -def force[X](fx: Foo[Lazy[X]]): X = +def force[X](fx: Foo[Lazy[X] @uncheckedCaptures]): X = fx[X](f => f(u)) -def force2[X](fx: Foo[Unit => X]): X = +def force2[X](fx: Foo[(Unit => X) @uncheckedCaptures]): X = fx[X](f => f(u)) diff --git a/tests/pos-custom-args/captures/i16116.scala b/tests/pos-custom-args/captures/i16116.scala index 0311e744f146..fdc386ac40e1 100644 --- a/tests/pos-custom-args/captures/i16116.scala +++ b/tests/pos-custom-args/captures/i16116.scala @@ -9,14 +9,13 @@ trait CpsMonad[F[_]] { object CpsMonad { type Aux[F[_],C] = CpsMonad[F] { type Context = C } - given CpsMonad[Future] with {} + given CpsMonad[Future]() } @experimental object Test { - @capability - class CpsTransform[F[_]] { + class CpsTransform[F[_]] extends caps.Capability { def await[T](ft: F[T]): T^{ this } = ??? } diff --git a/tests/pos-custom-args/captures/i16226.scala b/tests/pos-custom-args/captures/i16226.scala index 4cd7f0ceea81..071eefbd3420 100644 --- a/tests/pos-custom-args/captures/i16226.scala +++ b/tests/pos-custom-args/captures/i16226.scala @@ -1,4 +1,4 @@ -@annotation.capability class Cap +class Cap extends caps.Capability class LazyRef[T](val elem: () => T): val get: () ->{elem} T = elem diff --git a/tests/pos-custom-args/captures/i19751.scala b/tests/pos-custom-args/captures/i19751.scala index b6023cc0ff87..b41017f4f3e7 100644 --- a/tests/pos-custom-args/captures/i19751.scala +++ b/tests/pos-custom-args/captures/i19751.scala @@ -1,9 +1,8 @@ import language.experimental.captureChecking -import annotation.capability import caps.cap trait Ptr[A] -@capability trait Scope: +trait Scope extends caps.Capability: def allocate(size: Int): Ptr[Unit]^{this} diff --git a/tests/pos-custom-args/captures/i21313.scala b/tests/pos-custom-args/captures/i21313.scala new file mode 100644 index 000000000000..b388b6487cb5 --- /dev/null +++ b/tests/pos-custom-args/captures/i21313.scala @@ -0,0 +1,20 @@ +import caps.CapSet + +trait Async: + def await[T, Cap^](using caps.Contains[Cap, this.type])(src: Source[T, Cap]^): T = + val x: Async^{this} = ??? + val y: Async^{Cap^} = x + val ac: Async^ = ??? + def f(using caps.Contains[Cap, ac.type]) = + val x2: Async^{this} = ??? + val y2: Async^{Cap^} = x2 + val x3: Async^{ac} = ??? + val y3: Async^{Cap^} = x3 + ??? + +trait Source[+T, Cap^]: + final def await(using ac: Async^{Cap^}) = ac.await[T, Cap](this) // Contains[Cap, ac] is assured because {ac} <: Cap. + +def test(using ac1: Async^, ac2: Async^, x: String) = + val src1 = new Source[Int, CapSet^{ac1}] {} + ac1.await(src1) diff --git a/tests/pos-custom-args/captures/i21347.scala b/tests/pos-custom-args/captures/i21347.scala new file mode 100644 index 000000000000..e74c15bff8c1 --- /dev/null +++ b/tests/pos-custom-args/captures/i21347.scala @@ -0,0 +1,11 @@ +//> using scala 3.6.0-RC1-bin-SNAPSHOT + +import language.experimental.captureChecking + +class Box[Cap^] {} + +def run[Cap^](f: Box[Cap]^{Cap^} => Unit): Box[Cap]^{Cap^} = ??? 
+ +def main() = + val b = run(_ => ()) + // val b = run[caps.CapSet](_ => ()) // this compiles \ No newline at end of file diff --git a/tests/pos-custom-args/captures/i21507.scala b/tests/pos-custom-args/captures/i21507.scala new file mode 100644 index 000000000000..bb80dafb3b45 --- /dev/null +++ b/tests/pos-custom-args/captures/i21507.scala @@ -0,0 +1,10 @@ +import language.experimental.captureChecking + +trait Box[Cap^]: + def store(f: (() -> Unit)^{Cap^}): Unit + +def run[Cap^](f: Box[Cap]^{Cap^} => Unit): Box[Cap]^{Cap^} = + new Box[Cap]: + private var item: () ->{Cap^} Unit = () => () + def store(f: () ->{Cap^} Unit): Unit = + item = f // was error, now ok diff --git a/tests/pos-custom-args/captures/i21620.scala b/tests/pos-custom-args/captures/i21620.scala new file mode 100644 index 000000000000..b2c382aa4c75 --- /dev/null +++ b/tests/pos-custom-args/captures/i21620.scala @@ -0,0 +1,11 @@ +class C +def test(x: C^) = + def foo() = + x + () + val f = () => + // println() // uncomenting would give an error, but with + // a different way of handling curried functions should be OK + () => foo() + val _: () -> () ->{x} Unit = f + () diff --git a/tests/pos-custom-args/captures/inline-problem.scala b/tests/pos-custom-args/captures/inline-problem.scala new file mode 100644 index 000000000000..78034c20050a --- /dev/null +++ b/tests/pos-custom-args/captures/inline-problem.scala @@ -0,0 +1,5 @@ +trait Listener[+T] + +inline def consume[T](f: T => Unit): Listener[T]^{f} = ??? + +val consumePure = consume(_ => ()) \ No newline at end of file diff --git a/tests/pos-custom-args/captures/lazyref.scala b/tests/pos-custom-args/captures/lazyref.scala index 3dae51b491b4..2e3a0030bcdc 100644 --- a/tests/pos-custom-args/captures/lazyref.scala +++ b/tests/pos-custom-args/captures/lazyref.scala @@ -1,4 +1,4 @@ -@annotation.capability class Cap +class Cap extends caps.Capability class LazyRef[T](val elem: () => T): val get: () ->{elem} T = elem diff --git a/tests/pos-custom-args/captures/levels.scala b/tests/pos-custom-args/captures/levels.scala new file mode 100644 index 000000000000..cabd537442a5 --- /dev/null +++ b/tests/pos-custom-args/captures/levels.scala @@ -0,0 +1,23 @@ +class CC + +def test1(cap1: CC^) = + + class Ref[T](init: T): + private var v: T = init + def setV(x: T): Unit = v = x + def getV: T = v + +def test2(cap1: CC^) = + + class Ref[T](init: T): + private var v: T = init + def setV(x: T): Unit = v = x + def getV: T = v + + val _ = Ref[String => String]((x: String) => x) // ok + val r = Ref((x: String) => x) + + def scope(cap3: CC^) = + def g(x: String): String = if cap3 == cap3 then "" else "a" + r.setV(g) // error + () diff --git a/tests/pos-custom-args/captures/lists.scala b/tests/pos-custom-args/captures/lists.scala index 99505f0bb7a2..5f4991c6be54 100644 --- a/tests/pos-custom-args/captures/lists.scala +++ b/tests/pos-custom-args/captures/lists.scala @@ -18,7 +18,7 @@ object NIL extends LIST[Nothing]: def map[A, B](f: A => B)(xs: LIST[A]): LIST[B] = xs.map(f) -@annotation.capability class Cap +class Cap extends caps.Capability def test(c: Cap, d: Cap, e: Cap) = def f(x: Cap): Unit = if c == x then () @@ -30,7 +30,7 @@ def test(c: Cap, d: Cap, e: Cap) = CONS(z, ys) val zsc: LIST[Cap ->{d, y} Unit] = zs val z1 = zs.head - val z1c: Cap^ ->{y, d} Unit = z1 + val z1c: Cap ->{y, d} Unit = z1 val ys1 = zs.tail val y1 = ys1.head diff --git a/tests/pos-custom-args/captures/logger-tracked.scala b/tests/pos-custom-args/captures/logger-tracked.scala new file mode 100644 index 
000000000000..053731de444d --- /dev/null +++ b/tests/pos-custom-args/captures/logger-tracked.scala @@ -0,0 +1,67 @@ +import language.experimental.saferExceptions +import language.experimental.modularity + +class FileSystem extends caps.Capability + +class Logger(using tracked val fs: FileSystem): + def log(s: String): Unit = ??? + +def test(using fs: FileSystem) = + val l: Logger^{fs} = Logger(using fs) + l.log("hello world!") + val xs: LazyList[Int]^{l} = + LazyList.from(1) + .map { i => + l.log(s"computing elem # $i") + i * i + } + +trait LazyList[+A]: + def isEmpty: Boolean + def head: A + def tail: LazyList[A]^{this} + +object LazyNil extends LazyList[Nothing]: + def isEmpty: Boolean = true + def head = ??? + def tail = ??? + +final class LazyCons[+T](val x: T, val xs: () => LazyList[T]^) extends LazyList[T]: + def isEmpty = false + def head = x + def tail: LazyList[T]^{this} = xs() +end LazyCons + +extension [A](x: A) + def #::(xs1: => LazyList[A]^): LazyList[A]^{xs1} = + LazyCons(x, () => xs1) + +extension [A](xs: LazyList[A]^) + def map[B](f: A => B): LazyList[B]^{xs, f} = + if xs.isEmpty then LazyNil + else f(xs.head) #:: xs.tail.map(f) + +object LazyList: + def from(start: Int): LazyList[Int] = + start #:: from(start + 1) + +class Pair[+A, +B](x: A, y: B): + def fst: A = x + def snd: B = y + +def test2(ct: CanThrow[Exception], fs: FileSystem) = + def x: Int ->{ct} String = ??? + def y: Logger^{fs} = ??? + def p = Pair[Int ->{ct} String, Logger^{fs}](x, y) + def p3 = Pair(x, y) + def f = () => p.fst + + +/* + val l1: Int => String = ??? + val l2: Object^{c} = ??? + val pd = () => Pair(l1, l2) + val p2: Pair[Int => String, Object]^{c} = pd() + val hd = () => p2.fst + +*/ \ No newline at end of file diff --git a/tests/pos-custom-args/captures/logger.scala b/tests/pos-custom-args/captures/logger.scala index d95eeaae74cf..c2cfed0462b6 100644 --- a/tests/pos-custom-args/captures/logger.scala +++ b/tests/pos-custom-args/captures/logger.scala @@ -1,7 +1,6 @@ -import annotation.capability import language.experimental.saferExceptions -@capability class FileSystem +class FileSystem extends caps.Capability class Logger(using fs: FileSystem): def log(s: String): Unit = ??? diff --git a/tests/pos-custom-args/captures/nested-classes-tracked.scala b/tests/pos-custom-args/captures/nested-classes-tracked.scala new file mode 100644 index 000000000000..1c81441f321b --- /dev/null +++ b/tests/pos-custom-args/captures/nested-classes-tracked.scala @@ -0,0 +1,22 @@ +import language.experimental.captureChecking +import language.experimental.modularity +import annotation.{capability, constructorOnly} + +class IO extends caps.Capability +class Blah +class Pkg(using tracked val io: IO): + class Foo: + def m(foo: Blah^{io}) = ??? +class Pkg2(using tracked val io: IO): + class Foo: + def m(foo: Blah^{io}): Any = io; ??? + +def main(using io: IO) = + val pkg = Pkg() + val f = pkg.Foo() + f.m(???) + val pkg2 = Pkg2() + val f2 = pkg2.Foo() + f2.m(???) + + diff --git a/tests/pos-custom-args/captures/nested-classes.scala b/tests/pos-custom-args/captures/nested-classes.scala index b16fc4365183..1c624d37cee1 100644 --- a/tests/pos-custom-args/captures/nested-classes.scala +++ b/tests/pos-custom-args/captures/nested-classes.scala @@ -1,9 +1,9 @@ import language.experimental.captureChecking import annotation.{capability, constructorOnly} -@capability class IO +class IO extends caps.Capability class Blah -class Pkg(using @constructorOnly io: IO): +class Pkg(using io: IO): class Foo: def m(foo: Blah^{io}) = ??? 
class Pkg2(using io: IO): @@ -13,9 +13,9 @@ class Pkg2(using io: IO): def main(using io: IO) = val pkg = Pkg() val f = pkg.Foo() - f.m(???) + val x1 = f.m(???) val pkg2 = Pkg2() val f2 = pkg2.Foo() - f2.m(???) + val x2 = f2.m(???) diff --git a/tests/pos-custom-args/captures/null-logger.scala b/tests/pos-custom-args/captures/null-logger.scala index 0b32d045778c..d532b5f74b38 100644 --- a/tests/pos-custom-args/captures/null-logger.scala +++ b/tests/pos-custom-args/captures/null-logger.scala @@ -1,7 +1,6 @@ -import annotation.capability import annotation.constructorOnly -@capability class FileSystem +class FileSystem extends caps.Capability class NullLogger(using @constructorOnly fs: FileSystem) diff --git a/tests/pos-custom-args/captures/opaque-cap.scala b/tests/pos-custom-args/captures/opaque-cap.scala new file mode 100644 index 000000000000..dc3d48a2d311 --- /dev/null +++ b/tests/pos-custom-args/captures/opaque-cap.scala @@ -0,0 +1,6 @@ +import language.experimental.captureChecking + +trait A extends caps.Capability + +object O: + opaque type B = A \ No newline at end of file diff --git a/tests/pos-custom-args/captures/opaque-inline-problem.scala b/tests/pos-custom-args/captures/opaque-inline-problem.scala new file mode 100644 index 000000000000..ed482e3fc164 --- /dev/null +++ b/tests/pos-custom-args/captures/opaque-inline-problem.scala @@ -0,0 +1,11 @@ +trait Async extends caps.Capability: + def group: Int + +object Async: + inline def current(using async: Async): async.type = async + opaque type Spawn <: Async = Async + def blocking[T](f: Spawn ?=> T): T = ??? + +def main() = + Async.blocking: + val a = Async.current.group \ No newline at end of file diff --git a/tests/pos-custom-args/captures/pairs.scala b/tests/pos-custom-args/captures/pairs.scala index e15a76970c29..da7f30185ad3 100644 --- a/tests/pos-custom-args/captures/pairs.scala +++ b/tests/pos-custom-args/captures/pairs.scala @@ -1,6 +1,6 @@ //class CC //type Cap = CC^ -@annotation.capability class Cap +class Cap extends caps.Capability object Generic: @@ -13,6 +13,6 @@ object Generic: def g(x: Cap): Unit = if d == x then () val p = Pair(f, g) val x1 = p.fst - val x1c: Cap^ ->{c} Unit = x1 + val x1c: Cap ->{c} Unit = x1 val y1 = p.snd - val y1c: Cap^ ->{d} Unit = y1 + val y1c: Cap ->{d} Unit = y1 diff --git a/tests/pos-custom-args/captures/path-use.scala b/tests/pos-custom-args/captures/path-use.scala new file mode 100644 index 000000000000..5eb2b60fd218 --- /dev/null +++ b/tests/pos-custom-args/captures/path-use.scala @@ -0,0 +1,19 @@ +import language.experimental.namedTuples + +class IO + +class C(val f: IO^): + val procs: List[Proc] = ??? + +type Proc = () => Unit + +def test(io: IO^) = + val c = C(io) + val f = () => println(c.f) + val _: () ->{c.f} Unit = f + + val x = c.procs + val _: List[() ->{c.procs*} Unit] = x + + val g = () => println(c.procs.head) + val _: () ->{c.procs*} Unit = g diff --git a/tests/pos-custom-args/captures/reaches.scala b/tests/pos-custom-args/captures/reaches.scala index f17c25712c39..ab0da9b67d18 100644 --- a/tests/pos-custom-args/captures/reaches.scala +++ b/tests/pos-custom-args/captures/reaches.scala @@ -1,3 +1,5 @@ +import caps.unbox + class C def f(xs: List[C^]) = val y = xs @@ -20,7 +22,7 @@ extension [A](x: A) def :: (xs: List[A]): List[A] = ??? 
object Nil extends List[Nothing] -def runAll(xs: List[Proc]): Unit = +def runAll(@unbox xs: List[Proc]): Unit = var cur: List[() ->{xs*} Unit] = xs // OK, by revised VAR while cur.nonEmpty do val next: () ->{xs*} Unit = cur.head @@ -45,10 +47,10 @@ def compose1[A, B, C](f: A => B, g: B => C): A ->{f, g} C = def compose2[A, B, C](f: A => B, g: B => C): A => C = z => g(f(z)) -def mapCompose[A](ps: List[(A => A, A => A)]): List[A ->{ps*} A] = - ps.map((x, y) => compose1(x, y)) // Does not work if map takes an impure function, see reaches in neg +//def mapCompose[A](ps: List[(A => A, A => A)]): List[A ->{ps*} A] = +// ps.map((x, y) => compose1(x, y)) // Does not work, see neg-customargs/../reaches2.scala -@annotation.capability class IO +class IO extends caps.Capability def test(io: IO) = val a: () ->{io} Unit = () => () diff --git a/tests/pos-custom-args/captures/try3.scala b/tests/pos-custom-args/captures/try3.scala index b44ea57ccae4..a1a1bab8724a 100644 --- a/tests/pos-custom-args/captures/try3.scala +++ b/tests/pos-custom-args/captures/try3.scala @@ -1,8 +1,7 @@ import language.experimental.erasedDefinitions -import annotation.capability import java.io.IOException -@annotation.capability class CanThrow[-E] +class CanThrow[-E] extends caps.Capability def handle[E <: Exception, T](op: CanThrow[E] ?=> T)(handler: E => T): T = val x: CanThrow[E] = ??? diff --git a/tests/pos-custom-args/captures/unsafe-captures.scala b/tests/pos-custom-args/captures/unsafe-captures.scala new file mode 100644 index 000000000000..5e0144331344 --- /dev/null +++ b/tests/pos-custom-args/captures/unsafe-captures.scala @@ -0,0 +1,8 @@ +import annotation.unchecked.uncheckedCaptures +class LL[+A] private (private var lazyState: (() => LL.State[A]^) @uncheckedCaptures): + private val res = lazyState() // without unchecked captures we get a van't unbox cap error + + +object LL: + + private trait State[+A] diff --git a/tests/pos-custom-args/captures/untracked-captures.scala b/tests/pos-custom-args/captures/untracked-captures.scala new file mode 100644 index 000000000000..7a090a5dd24f --- /dev/null +++ b/tests/pos-custom-args/captures/untracked-captures.scala @@ -0,0 +1,34 @@ +import caps.untrackedCaptures +class LL[+A] private (@untrackedCaptures lazyState: () => LL.State[A]^): + private val res = lazyState() + + +object LL: + + private trait State[+A] + private object State: + object Empty extends State[Nothing] + + private def newLL[A](state: () => State[A]^): LL[A]^{state} = ??? + + private def sCons[A](hd: A, tl: LL[A]^): State[A]^{tl} = ??? + + def filterImpl[A](ll: LL[A]^, p: A => Boolean): LL[A]^{ll, p} = + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef: LL[A]^{ll} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric + + val cl = () => + var elem: A = null.asInstanceOf[A] + var found = false + var rest = restRef // Without untracked captures a type ascription would be needed here + // because the compiler tries to keep track of lazyState in refinements + // of LL and gets confused (c.f Setup.addCaptureRefinements) + + while !found do + found = p(elem) + rest = rest + restRef = rest + val res = if found then sCons(elem, filterImpl(rest, p)) else State.Empty + ??? 
: State[A]^{ll, p} + val nll = newLL(cl) + nll diff --git a/tests/pos-custom-args/captures/vars.scala b/tests/pos-custom-args/captures/vars.scala index a335be96fed1..5c9598fab508 100644 --- a/tests/pos-custom-args/captures/vars.scala +++ b/tests/pos-custom-args/captures/vars.scala @@ -1,4 +1,4 @@ -@annotation.capability class Cap +class Cap extends caps.Capability def test(cap1: Cap, cap2: Cap) = def f(x: String): String = if cap1 == cap1 then "" else "a" diff --git a/tests/pos-deep-subtype/i20516.scala b/tests/pos-deep-subtype/i20516.scala new file mode 100644 index 000000000000..ff755177bda8 --- /dev/null +++ b/tests/pos-deep-subtype/i20516.scala @@ -0,0 +1,205 @@ +object Main { + trait A {} + trait B {} + trait C {} + trait D {} + trait E {} + trait F {} + trait G {} + trait H {} + trait I {} + trait J {} + trait K {} + trait L {} + trait M {} + trait N {} + trait O {} + trait P {} + trait Q {} + trait R {} + trait S {} + trait T {} + trait U {} + trait V {} + trait W {} + trait X {} + trait Y {} + trait Z {} + + type AlphabeticServices = A & B & C & D & E & F & G & H & I & J & K & L & M & N & O & P & Q & R & S & T & U & V & W & X & Y & Z + + type EnvOutA = B & C & D & E & F & G & H & I & J & K & L & M & N & O & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutB = A & C & D & E & F & G & H & I & J & K & L & M & N & O & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutC = A & B & D & E & F & G & H & I & J & K & L & M & N & O & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutD = A & B & C & E & F & G & H & I & J & K & L & M & N & O & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutE = A & B & C & D & F & G & H & I & J & K & L & M & N & O & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutF = A & B & C & D & E & G & H & I & J & K & L & M & N & O & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutG = A & B & C & D & E & F & H & I & J & K & L & M & N & O & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutH = A & B & C & D & E & F & G & I & J & K & L & M & N & O & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutI = A & B & C & D & E & F & G & H & J & K & L & M & N & O & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutJ = A & B & C & D & E & F & G & H & I & K & L & M & N & O & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutK = A & B & C & D & E & F & G & H & I & J & L & M & N & O & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutL = A & B & C & D & E & F & G & H & I & J & K & M & N & O & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutM = A & B & C & D & E & F & G & H & I & J & K & L & N & O & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutN = A & B & C & D & E & F & G & H & I & J & K & L & M & O & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutO = A & B & C & D & E & F & G & H & I & J & K & L & M & N & P & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutP = A & B & C & D & E & F & G & H & I & J & K & L & M & N & O & Q & R & S & T & U & V & W & X & Y & Z + type EnvOutQ = A & B & C & D & E & F & G & H & I & J & K & L & M & N & O & P & R & S & T & U & V & W & X & Y & Z + type EnvOutR = A & B & C & D & E & F & G & H & I & J & K & L & M & N & O & P & Q & S & T & U & V & W & X & Y & Z + type EnvOutS = A & B & C & D & E & F & G & H & I & J & K & L & M & N & O & P & Q & R & T & U & V & W & X & Y & Z + type EnvOutT = A & B & C & D & E & F & G & H & I & J & K & L & M & N & O & P & Q & R & S & U & V & W & X & Y & Z + type EnvOutU = A & B & C & D & E & F & G & H & I & J & K & L & M & N & O & P & Q & R & S & 
T & V & W & X & Y & Z + type EnvOutV = A & B & C & D & E & F & G & H & I & J & K & L & M & N & O & P & Q & R & S & T & U & W & X & Y & Z + type EnvOutW = A & B & C & D & E & F & G & H & I & J & K & L & M & N & O & P & Q & R & S & T & U & V & X & Y & Z + type EnvOutX = A & B & C & D & E & F & G & H & I & J & K & L & M & N & O & P & Q & R & S & T & U & V & W & Y & Z + type EnvOutY = A & B & C & D & E & F & G & H & I & J & K & L & M & N & O & P & Q & R & S & T & U & V & W & X & Z + type EnvOutZ = A & B & C & D & E & F & G & H & I & J & K & L & M & N & O & P & Q & R & S & T & U & V & W & X & Y + + trait Reader[-E, A] { + def map[B](f: A => B): Reader[E, B] = ??? + def flatMap[E2 <: E, B](f: A => Reader[E2, B]): Reader[E2, B] = ??? + } + + def e1: Reader[EnvOutA, Unit] = ??? + def e2: Reader[EnvOutB, Unit] = ??? + def e3: Reader[EnvOutC, Unit] = ??? + def e4: Reader[EnvOutD, Unit] = ??? + def e5: Reader[EnvOutE, Unit] = ??? + def e6: Reader[EnvOutF, Unit] = ??? + def e7: Reader[EnvOutG, Unit] = ??? + def e8: Reader[EnvOutH, Unit] = ??? + def e9: Reader[EnvOutI, Unit] = ??? + def e10: Reader[EnvOutJ, Unit] = ??? + def e11: Reader[EnvOutK, Unit] = ??? + def e12: Reader[EnvOutL, Unit] = ??? + def e13: Reader[EnvOutM, Unit] = ??? + def e14: Reader[EnvOutN, Unit] = ??? + def e15: Reader[EnvOutO, Unit] = ??? + def e16: Reader[EnvOutP, Unit] = ??? + def e17: Reader[EnvOutQ, Unit] = ??? + def e18: Reader[EnvOutR, Unit] = ??? + def e19: Reader[EnvOutS, Unit] = ??? + def e20: Reader[EnvOutT, Unit] = ??? + def e21: Reader[EnvOutU, Unit] = ??? + def e22: Reader[EnvOutV, Unit] = ??? + def e23: Reader[EnvOutW, Unit] = ??? + def e24: Reader[EnvOutX, Unit] = ??? + def e25: Reader[EnvOutY, Unit] = ??? + def e26: Reader[EnvOutZ, Unit] = ??? + + def program: Reader[AlphabeticServices, Unit] = for { + //1 + _ <- e1 + _ <- e2 + _ <- e3 + _ <- e4 + _ <- e5 + _ <- e6 + _ <- e7 + _ <- e8 + _ <- e8 + _ <- e9 + _ <- e10 + _ <- e11 + _ <- e12 + _ <- e13 + _ <- e14 + _ <- e15 + _ <- e16 + _ <- e17 + _ <- e18 + _ <- e19 + _ <- e20 + _ <- e21 + _ <- e22 + _ <- e23 + _ <- e24 + _ <- e25 + _ <- e26 + // 2 + _ <- e1 + _ <- e2 + _ <- e3 + _ <- e4 + _ <- e5 + _ <- e6 + _ <- e7 + _ <- e8 + _ <- e8 + _ <- e9 + _ <- e10 + _ <- e11 + _ <- e12 + _ <- e13 + _ <- e14 + _ <- e15 + _ <- e16 + _ <- e17 + _ <- e18 + _ <- e19 + _ <- e20 + _ <- e21 + _ <- e22 + _ <- e23 + _ <- e24 + _ <- e25 + _ <- e26 + // TODO: optimize the subtype checking for large intersection types further + //3 + // _ <- e1 + // _ <- e2 + // _ <- e3 + // _ <- e4 + // _ <- e5 + // _ <- e6 + // _ <- e7 + // _ <- e8 + // _ <- e8 + // _ <- e9 + // _ <- e10 + // _ <- e11 + // _ <- e12 + // _ <- e13 + // _ <- e14 + // _ <- e15 + // _ <- e16 + // _ <- e17 + // _ <- e18 + // _ <- e19 + // _ <- e20 + // _ <- e21 + // _ <- e22 + // _ <- e23 + // _ <- e24 + // _ <- e25 + // _ <- e26 + // 4 + // _ <- e1 + // _ <- e2 + // _ <- e3 + // _ <- e4 + // _ <- e5 + // _ <- e6 + // _ <- e7 + // _ <- e8 + // _ <- e8 + // _ <- e9 + // _ <- e10 + // _ <- e11 + // _ <- e12 + // _ <- e13 + // _ <- e14 + // _ <- e15 + // _ <- e16 + // _ <- e17 + // _ <- e18 + // _ <- e19 + // _ <- e20 + // _ <- e21 + // _ <- e22 + // _ <- e23 + // _ <- e24 + // _ <- e25 + // _ <- e26 + } yield () +} \ No newline at end of file diff --git a/tests/pos-deep-subtype/i21015.scala b/tests/pos-deep-subtype/i21015.scala new file mode 100644 index 000000000000..390462f19df4 --- /dev/null +++ b/tests/pos-deep-subtype/i21015.scala @@ -0,0 +1,36 @@ + +type Init[Coll[_], A, T <: Tuple] = T match + case EmptyTuple => A + case 
head *: rest => InitCons[Coll, A, head, rest] + +type InitCons[Coll[_], A, H, Rest <: Tuple] = H match + case Int => Init[Coll, Coll[A], Rest] + case _ => Unit + +def fillVector[A, T <: Tuple](dims: T)(x: => A): Init[Vector, A, T] = + dims match + case _: EmptyTuple => x + case (p : (head *: rest)) => + val (head *: rest) = p + head match + case size: Int => fillVector(rest)(Vector.fill(size)(x)) + case _ => () + + +object Minimization: + + type M1[A] = Int match + case 1 => M2[A] + + type M2[A] = Int match + case 2 => M1[Option[A]] + + def m1[A](x: A): M1[A] = ??? + + val _: M1[Int] = m1(1) // was error + val _: M1[Int] = m1[Int](1) // ok + val _: M1[Int] = + val x = m1(1) + x // ok + +end Minimization diff --git a/tests/pos-java-interop-separate/i6868/MyScala_2.scala b/tests/pos-java-interop-separate/i6868/MyScala_2.scala index e0fd84008f39..607eefafa6a3 100644 --- a/tests/pos-java-interop-separate/i6868/MyScala_2.scala +++ b/tests/pos-java-interop-separate/i6868/MyScala_2.scala @@ -1,4 +1,4 @@ -@MyJava_1("MyScala1", typeA = MyJava_1.MyClassTypeA.B) +@MyJava_1(value = "MyScala1", typeA = MyJava_1.MyClassTypeA.B) object MyScala { def a(mj: MyJava_1): Unit = { println("MyJava") diff --git a/tests/pos-macros/exprSummonWithTypeVar/Macro_1.scala b/tests/pos-macros/exprSummonWithTypeVar/Macro_1.scala index 72bcbe8b6515..2fac9b9a7f8d 100644 --- a/tests/pos-macros/exprSummonWithTypeVar/Macro_1.scala +++ b/tests/pos-macros/exprSummonWithTypeVar/Macro_1.scala @@ -2,7 +2,7 @@ import scala.compiletime.{erasedValue, summonFrom} import scala.quoted._ -inline given summonAfterTypeMatch[T]: Any = +inline given summonAfterTypeMatch: [T] => Any = ${ summonAfterTypeMatchExpr[T] } private def summonAfterTypeMatchExpr[T: Type](using Quotes): Expr[Any] = @@ -10,4 +10,4 @@ private def summonAfterTypeMatchExpr[T: Type](using Quotes): Expr[Any] = trait Foo[T] -given IntFoo[T <: Int]: Foo[T] = ??? +given IntFoo: [T <: Int] => Foo[T] = ??? diff --git a/tests/pos-macros/i13021/Width.scala b/tests/pos-macros/i13021/Width.scala index a163e1b5ebf1..60c1b47b99d8 100644 --- a/tests/pos-macros/i13021/Width.scala +++ b/tests/pos-macros/i13021/Width.scala @@ -3,7 +3,7 @@ import scala.quoted.* trait Width[T]: type Out <: Int object Width: - transparent inline given [T]: Width[T] = ${ getWidthMacro[T] } + transparent inline given [T] => Width[T] = ${ getWidthMacro[T] } def getWidthMacro[T](using Quotes, Type[T]): Expr[Width[T]] = '{ new Width[T] { diff --git a/tests/pos-macros/i16963/Macro_1.scala b/tests/pos-macros/i16963/Macro_1.scala new file mode 100644 index 000000000000..317d8947abd3 --- /dev/null +++ b/tests/pos-macros/i16963/Macro_1.scala @@ -0,0 +1,14 @@ +import scala.quoted.* + +inline def myMacro = ${ myMacroExpr } + +def myMacroExpr(using Quotes) = + import quotes.reflect.* + + '{ def innerMethod = (_: String) ?=> ???; () }.asTerm match + case block @ Inlined(_, _, Block(List(defdef: DefDef), _)) => + val rhs = + given Quotes = defdef.symbol.asQuotes + '{ (x: String) ?=> ??? 
}.asTerm + + Block(List(DefDef(defdef.symbol, _ => Some(rhs))), '{}.asTerm).asExprOf[Unit] diff --git a/tests/pos-macros/i16963/Test_2.scala b/tests/pos-macros/i16963/Test_2.scala new file mode 100644 index 000000000000..389f9e3233a2 --- /dev/null +++ b/tests/pos-macros/i16963/Test_2.scala @@ -0,0 +1 @@ +def method: Unit = myMacro diff --git a/tests/pos-macros/i18228.scala b/tests/pos-macros/i18228.scala index f0b8226fc135..2127ea7686f5 100644 --- a/tests/pos-macros/i18228.scala +++ b/tests/pos-macros/i18228.scala @@ -3,7 +3,7 @@ import scala.quoted.* case class QueryMeta[T](map: Map[String, String]) object QueryMeta: - given [T: Type]: FromExpr[QueryMeta[T]] = new FromExpr[QueryMeta[T]]: + given [T: Type] => FromExpr[QueryMeta[T]] = new FromExpr[QueryMeta[T]]: def unapply(expr: Expr[QueryMeta[T]])(using q: Quotes): Option[QueryMeta[T]] = import q.reflect.* expr match diff --git a/tests/pos-macros/i18517/Caller.scala b/tests/pos-macros/i18517/Caller.scala new file mode 100644 index 000000000000..3f5ce9eee903 --- /dev/null +++ b/tests/pos-macros/i18517/Caller.scala @@ -0,0 +1,17 @@ +package dummy + +trait BG { + val description: { type Structure } + type Structure = description.Structure +} + +abstract class Caller extends BG { + type Foo >: this.type <: this.type + + transparent inline def generate2() = + ${Macro.impl() } + + final val description = { + generate2() + } +} diff --git a/tests/pos-macros/i18517/Macro.scala b/tests/pos-macros/i18517/Macro.scala new file mode 100644 index 000000000000..d18b07e910a5 --- /dev/null +++ b/tests/pos-macros/i18517/Macro.scala @@ -0,0 +1,7 @@ +package dummy + +import scala.quoted.* + +object Macro: + def impl()(using quotes:Quotes) : Expr[Any] = + '{ null } diff --git a/tests/pos-macros/i18517/User.scala b/tests/pos-macros/i18517/User.scala new file mode 100644 index 000000000000..8216c581937b --- /dev/null +++ b/tests/pos-macros/i18517/User.scala @@ -0,0 +1,6 @@ +package dummy + +trait User: + final def bar(cell:Any) : Unit = + (cell: cell.type) match + case c: (Caller & cell.type) => () diff --git a/tests/pos-macros/i19436/Macro_1.scala b/tests/pos-macros/i19436/Macro_1.scala new file mode 100644 index 000000000000..689f64203131 --- /dev/null +++ b/tests/pos-macros/i19436/Macro_1.scala @@ -0,0 +1,18 @@ + +import scala.quoted.* +import scala.compiletime.summonInline + +trait SomeImplicits: + given int: Int + +object Macro: + + transparent inline def testSummon: SomeImplicits => Int = ${ testSummonImpl } + + private def testSummonImpl(using Quotes): Expr[SomeImplicits => Int] = + import quotes.reflect.* + '{ + (x: SomeImplicits) => + import x.given + summonInline[Int] + } \ No newline at end of file diff --git a/tests/pos-macros/i19436/Test_2.scala b/tests/pos-macros/i19436/Test_2.scala new file mode 100644 index 000000000000..aedaf1cb87fb --- /dev/null +++ b/tests/pos-macros/i19436/Test_2.scala @@ -0,0 +1,2 @@ + +def fn: Unit = Macro.testSummon diff --git a/tests/pos-macros/i20309/Macro_1.scala b/tests/pos-macros/i20309/Macro_1.scala new file mode 100644 index 000000000000..e92e623ea775 --- /dev/null +++ b/tests/pos-macros/i20309/Macro_1.scala @@ -0,0 +1,24 @@ +import scala.quoted.* +import scala.compiletime.* + +trait Context +object Scope: + def spawn[A](f: Context ?=> A): A = ??? 
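+// Minimal Context/Scope fixture: transformContextLambda below only traverses the context-function body and returns a stub lambda.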
+ +type Contextual[T] = Context ?=> T + +object Macros { + inline def transformContextLambda[T](inline expr: Context ?=> T): Context => T = + ${ transformContextLambdaImpl[T]('expr) } + + def transformContextLambdaImpl[T: Type]( + cexpr: Expr[Context ?=> T] + )(using Quotes): Expr[Context => T] = { + import quotes.reflect.* + val tree = asTerm(cexpr) + val traverse = new TreeMap() {} + println(tree.show) + traverse.transformTree(tree)(tree.symbol) + '{ _ => ??? } + } +} diff --git a/tests/pos-macros/i20309/Test_2.scala b/tests/pos-macros/i20309/Test_2.scala new file mode 100644 index 000000000000..6b01708d7ae0 --- /dev/null +++ b/tests/pos-macros/i20309/Test_2.scala @@ -0,0 +1,10 @@ + +transparent inline def inScope[T](inline expr: Context ?=> T): T = + val fn = Macros.transformContextLambda[T](expr) + fn(new Context {}) + +@main def Test = { + inScope { + Scope.spawn[Unit] { () } + } +} diff --git a/tests/pos-macros/i20353/Macro_1.scala b/tests/pos-macros/i20353/Macro_1.scala new file mode 100644 index 000000000000..5845a33ab144 --- /dev/null +++ b/tests/pos-macros/i20353/Macro_1.scala @@ -0,0 +1,22 @@ +//> using options -experimental + +import scala.annotation.{experimental, MacroAnnotation} +import scala.quoted.* + +class ImplicitValue + +object ImplicitValue: + inline given ImplicitValue = + ${ makeImplicitValue } + + def makeImplicitValue(using Quotes) = + import quotes.reflect.* + '{ ImplicitValue() } +end ImplicitValue + +@experimental +class Test extends MacroAnnotation: + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]) = + import quotes.reflect.* + Implicits.search(TypeRepr.of[ImplicitValue]) + List(definition) diff --git a/tests/pos-macros/i20353/Test_2.scala b/tests/pos-macros/i20353/Test_2.scala new file mode 100644 index 000000000000..ee1704b691f7 --- /dev/null +++ b/tests/pos-macros/i20353/Test_2.scala @@ -0,0 +1,17 @@ +//> using options -experimental + +class OuterClass: + @Test + class InnerClass + + @Test + object InnerObject +end OuterClass + +object OuterObject: + @Test + class InnerClass + + @Test + object InnerObject +end OuterObject diff --git a/tests/pos-macros/i20458/Macro_1.scala b/tests/pos-macros/i20458/Macro_1.scala new file mode 100644 index 000000000000..803eff68062a --- /dev/null +++ b/tests/pos-macros/i20458/Macro_1.scala @@ -0,0 +1,12 @@ +import scala.quoted._ + +inline def matchCustom[F](): Unit = ${ matchCustomImpl[F] } + +private def matchCustomImpl[F: Type](using q: Quotes): Expr[Unit] = { + import q.reflect.* + val any = TypeRepr.of[Any].typeSymbol + assert(!any.termRef.widenTermRefByName.toString.contains("ClassInfo")) + any.termRef.widenTermRefByName.asType match + case '[t] => () + '{ () } +} diff --git a/tests/pos-macros/i20458/Test_2.scala b/tests/pos-macros/i20458/Test_2.scala new file mode 100644 index 000000000000..1118f4483e23 --- /dev/null +++ b/tests/pos-macros/i20458/Test_2.scala @@ -0,0 +1 @@ +def main() = matchCustom() diff --git a/tests/pos-macros/i20471/Macro_1.scala b/tests/pos-macros/i20471/Macro_1.scala new file mode 100644 index 000000000000..2fd940dbc4e2 --- /dev/null +++ b/tests/pos-macros/i20471/Macro_1.scala @@ -0,0 +1,63 @@ +import scala.annotation.experimental +import scala.quoted.* +import scala.annotation.tailrec + +object FlatMap { + @experimental inline def derived[F[_]]: FlatMap[F] = MacroFlatMap.derive +} +trait FlatMap[F[_]]{ + def tailRecM[A, B](a: A)(f: A => F[Either[A, B]]): F[B] +} + +@experimental +object MacroFlatMap: + + inline def derive[F[_]]: 
FlatMap[F] = ${ flatMap } + + def flatMap[F[_]: Type](using Quotes): Expr[FlatMap[F]] = '{ + new FlatMap[F]: + def tailRecM[A, B](a: A)(f: A => F[Either[A, B]]): F[B] = + ${ deriveTailRecM('{ a }, '{ f }) } + } + + def deriveTailRecM[F[_]: Type, A: Type, B: Type]( + a: Expr[A], + f: Expr[A => F[Either[A, B]]] + )(using q: Quotes): Expr[F[B]] = + import quotes.reflect.* + + val body: PartialFunction[(Symbol, TypeRepr), Term] = { + case (method, tpe) => { + given q2: Quotes = method.asQuotes + '{ + def step(x: A): B = ??? + ??? + }.asTerm + } + } + + val term = '{ $f($a) }.asTerm + val name = Symbol.freshName("$anon") + val parents = List(TypeTree.of[Object], TypeTree.of[F[B]]) + + extension (sym: Symbol) def overridableMembers: List[Symbol] = + val member1 = sym.methodMember("abstractEffect")(0) + val member2 = sym.methodMember("concreteEffect")(0) + def meth(member: Symbol) = Symbol.newMethod(sym, member.name, This(sym).tpe.memberType(member), Flags.Override, Symbol.noSymbol) + List(meth(member1), meth(member2)) + + val cls = Symbol.newClass(Symbol.spliceOwner, name, parents.map(_.tpe), _.overridableMembers, None) + + def transformDef(method: DefDef)(argss: List[List[Tree]]): Option[Term] = + val sym = method.symbol + Some(body.apply((sym, method.returnTpt.tpe))) + + val members = cls.declarations + .filterNot(_.isClassConstructor) + .map: sym => + sym.tree match + case method: DefDef => DefDef(sym, transformDef(method)) + case _ => report.errorAndAbort(s"Not supported: $sym in ${sym.owner}") + + val newCls = New(TypeIdent(cls)).select(cls.primaryConstructor).appliedToNone + Block(ClassDef(cls, parents, members) :: Nil, newCls).asExprOf[F[B]] diff --git a/tests/pos-macros/i20471/Main_2.scala b/tests/pos-macros/i20471/Main_2.scala new file mode 100644 index 000000000000..bdd1cd32ea26 --- /dev/null +++ b/tests/pos-macros/i20471/Main_2.scala @@ -0,0 +1,7 @@ +import scala.annotation.experimental + +@experimental +object autoFlatMapTests: + trait TestAlgebra[T] derives FlatMap: + def abstractEffect(a: String): T + def concreteEffect(a: String): T = abstractEffect(a + " concreteEffect") diff --git a/tests/pos-macros/i20574/Exports.scala b/tests/pos-macros/i20574/Exports.scala new file mode 100644 index 000000000000..328d832fad88 --- /dev/null +++ b/tests/pos-macros/i20574/Exports.scala @@ -0,0 +1,3 @@ +object Exports{ + export OverloadedInline.* +} diff --git a/tests/pos-macros/i20574/Macros.scala b/tests/pos-macros/i20574/Macros.scala new file mode 100644 index 000000000000..a40c1f361ce1 --- /dev/null +++ b/tests/pos-macros/i20574/Macros.scala @@ -0,0 +1,20 @@ +import scala.quoted.* + +object Macros{ + + inline def A() : String = { + ${ A_impl } + } + + def A_impl(using Quotes): Expr[String] = { + Expr("Whatever") + } + + inline def B[T]: Int = { + ${ B_Impl[T] } + } + + def B_Impl[T](using Quotes): Expr[Int] = { + Expr(0) + } +} diff --git a/tests/pos-macros/i20574/OverloadedInline.scala b/tests/pos-macros/i20574/OverloadedInline.scala new file mode 100644 index 000000000000..5bf2347c45c0 --- /dev/null +++ b/tests/pos-macros/i20574/OverloadedInline.scala @@ -0,0 +1,13 @@ +import Macros.* + +object OverloadedInline{ + + A() + inline def overloaded_inline[T]: Unit = { + overloaded_inline[T](0) + } + + inline def overloaded_inline[T](dummy: Int): Unit = { + val crash = B[T] + } +} diff --git a/tests/pos-macros/i20574/Test.scala b/tests/pos-macros/i20574/Test.scala new file mode 100644 index 000000000000..abc2b4eb0bc9 --- /dev/null +++ b/tests/pos-macros/i20574/Test.scala @@ -0,0 +1,5 @@ +import 
Exports.* + +object Test { + overloaded_inline[Unit] +} diff --git a/tests/pos-macros/i21271/Macro.scala b/tests/pos-macros/i21271/Macro.scala new file mode 100644 index 000000000000..09d29ecc65af --- /dev/null +++ b/tests/pos-macros/i21271/Macro.scala @@ -0,0 +1,12 @@ +import scala.quoted.* + +trait Schema +object Schema: + lazy val sampleDate: String = "" // lazy val requried to reproduce + + inline def derived: Schema = + annotations + new Schema {} + +inline def annotations: Int = ${ annotationsImpl } +def annotationsImpl(using Quotes): Expr[Int] = Expr(1) diff --git a/tests/pos-macros/i21271/Test.scala b/tests/pos-macros/i21271/Test.scala new file mode 100644 index 000000000000..c0ba38212b09 --- /dev/null +++ b/tests/pos-macros/i21271/Test.scala @@ -0,0 +1 @@ +val inputValueSchema = Schema.derived diff --git a/tests/pos-macros/i21672/Macro_1.scala b/tests/pos-macros/i21672/Macro_1.scala new file mode 100644 index 000000000000..2e17631d6cf4 --- /dev/null +++ b/tests/pos-macros/i21672/Macro_1.scala @@ -0,0 +1,10 @@ +object Repro { + inline def apply(): Unit = ${ applyImpl } + + import scala.quoted.* + def applyImpl(using q: Quotes): Expr[Unit] = { + import q.reflect.* + report.info(TypeRepr.of[Some[String]].typeSymbol.pos.toString) + '{ () } + } +} diff --git a/tests/pos-macros/i21672/Test_2.scala b/tests/pos-macros/i21672/Test_2.scala new file mode 100644 index 000000000000..b164962100af --- /dev/null +++ b/tests/pos-macros/i21672/Test_2.scala @@ -0,0 +1,3 @@ +//> using options -Xfatal-warnings +object Test: + Repro() diff --git a/tests/pos-macros/i21802/Macro.scala b/tests/pos-macros/i21802/Macro.scala new file mode 100644 index 000000000000..e2eb1287c727 --- /dev/null +++ b/tests/pos-macros/i21802/Macro.scala @@ -0,0 +1,15 @@ +class MetricsGroup[A] +object MetricsGroup: + import scala.quoted.* + + transparent inline final def refine[A]: MetricsGroup[A] = + ${ refineImpl[A] } + + private def refineImpl[A](using qctx: Quotes, tpe: Type[A]): Expr[MetricsGroup[A]] = + import qctx.reflect.* + + val mt = MethodType(Nil)(_ => Nil, _ => TypeRepr.of[A]) + val tpe = Refinement(TypeRepr.of[MetricsGroup[A]], "apply", mt).asType + tpe match + case '[tpe] => + '{ MetricsGroup[A]().asInstanceOf[MetricsGroup[A] & tpe] } diff --git a/tests/pos-macros/i21802/Test.scala b/tests/pos-macros/i21802/Test.scala new file mode 100644 index 000000000000..70063653c43c --- /dev/null +++ b/tests/pos-macros/i21802/Test.scala @@ -0,0 +1,13 @@ +//> using options -experimental -Ydebug + +class ProbeFailedException(cause: Exception) extends Exception(cause) +trait Probing: + self: Metrics => + val probeFailureCounter: MetricsGroup[Counter] = + counters("ustats_probe_failures_count").labelled + + +trait Counter +class Metrics: + class counters(name: String): + transparent inline final def labelled: MetricsGroup[Counter] = MetricsGroup.refine[Counter] diff --git a/tests/pos-macros/macro-docs.scala b/tests/pos-macros/macro-docs.scala index f3cd6e3ef00a..820aae451486 100644 --- a/tests/pos-macros/macro-docs.scala +++ b/tests/pos-macros/macro-docs.scala @@ -2,12 +2,12 @@ import scala.quoted.* object MacrosMD_ToExpr { - given ToExpr[Boolean] with { + given ToExpr[Boolean] { def apply(b: Boolean)(using Quotes) = if (b) '{ true } else '{ false } } - given ToExpr[Int] with { + given ToExpr[Int] { def apply(n: Int)(using Quotes) = n match { case Int.MinValue => '{ Int.MinValue } case _ if n < 0 => '{ - ${ apply(-n) } } @@ -17,7 +17,7 @@ object MacrosMD_ToExpr { } } - given [T: ToExpr : Type]: ToExpr[List[T]] with { + given [T: 
ToExpr : Type] => ToExpr[List[T]] { def apply(xs: List[T])(using Quotes) = xs match { case head :: tail => '{ ${ Expr(head) } :: ${ apply(tail) } } case Nil => '{ Nil: List[T] } diff --git a/tests/pos-macros/nil-liftable.scala b/tests/pos-macros/nil-liftable.scala index a3277510d7bf..c95fc7698e5a 100644 --- a/tests/pos-macros/nil-liftable.scala +++ b/tests/pos-macros/nil-liftable.scala @@ -1,7 +1,7 @@ import scala.quoted.* class Test: - given NilToExpr: ToExpr[Nil.type] with { + given NilToExpr: ToExpr[Nil.type] { def apply(xs: Nil.type)(using Quotes): Expr[Nil.type] = '{ Nil } } diff --git a/tests/pos-macros/quote-sym-newboundedtype/Macro_1.scala b/tests/pos-macros/quote-sym-newboundedtype/Macro_1.scala new file mode 100644 index 000000000000..97b7d7566e9a --- /dev/null +++ b/tests/pos-macros/quote-sym-newboundedtype/Macro_1.scala @@ -0,0 +1,49 @@ +//> using options -experimental -Yno-experimental +import scala.quoted.* + +inline def testMacro = ${ testImpl } + +transparent inline def transparentTestMacro = ${ testImpl } + +def testImpl(using Quotes): Expr[Object] = { + import quotes.reflect.* + + def makeBasicType(owner: Symbol): Symbol = + Symbol.newBoundedType(owner, "tpe", Flags.EmptyFlags, TypeBounds.lower(TypeRepr.of[String]), Symbol.noSymbol) + + def makeTypesForClass(owner: Symbol): List[Symbol] = + val typeLambda = TypeLambda.apply(List("X"), _ => List(TypeBounds.empty), _ => TypeRepr.of[Int]) + List( + makeBasicType(owner), + // type Bla >: Nothing <: [X] =>> Int + Symbol.newBoundedType( + owner, + "tpe1", + Flags.EmptyFlags, + TypeBounds.upper(typeLambda), + Symbol.noSymbol + ), + // type Bar >: [X] =>> Int <: [X] =>> Int + Symbol.newBoundedType( + owner, + "tpe2", + Flags.EmptyFlags, + TypeBounds(typeLambda, typeLambda), + Symbol.noSymbol + ) + ) + + val typeDef = TypeDef(makeBasicType(Symbol.spliceOwner)) + // Expr printer does not work here, see comment: + // https://github.com/scala/scala3/pull/20347#issuecomment-2096824617 + println(typeDef.toString) + assert(typeDef.toString == "TypeDef(tpe,TypeTree[TypeBounds(TypeRef(TermRef(ThisType(TypeRef(NoPrefix,module class java)),object lang),String),TypeRef(ThisType(TypeRef(NoPrefix,module class scala)),class Any))])") + + val clsSymbol = Symbol.newClass(Symbol.spliceOwner, "CLS", List(TypeRepr.of[Object]), sym => makeTypesForClass(sym), None) + val classDef: ClassDef = ClassDef(clsSymbol, List(TypeTree.of[Object]), List( + TypeDef(clsSymbol.typeMember("tpe")), + TypeDef(clsSymbol.typeMember("tpe1")), + TypeDef(clsSymbol.typeMember("tpe2")), + )) + Block(List(classDef), Apply(Select(New(TypeIdent(clsSymbol)), clsSymbol.primaryConstructor), List.empty)).asExprOf[Object] +} diff --git a/tests/pos-macros/quote-sym-newboundedtype/Test_2.scala b/tests/pos-macros/quote-sym-newboundedtype/Test_2.scala new file mode 100644 index 000000000000..2d479a09695a --- /dev/null +++ b/tests/pos-macros/quote-sym-newboundedtype/Test_2.scala @@ -0,0 +1,4 @@ +//> using options -experimental -Yno-experimental +def test = + testMacro + transparentTestMacro diff --git a/tests/pos-macros/quote-sym-newtype-in-trait/Macro_1.scala b/tests/pos-macros/quote-sym-newtype-in-trait/Macro_1.scala new file mode 100644 index 000000000000..60f0587b85a7 --- /dev/null +++ b/tests/pos-macros/quote-sym-newtype-in-trait/Macro_1.scala @@ -0,0 +1,35 @@ +//> using options -experimental -Yno-experimental +import scala.quoted.* + +inline def testMacro = ${ testImpl } + +transparent inline def transparentTestMacro = ${ testImpl } + +def testImpl(using Quotes): Expr[Object] = { + 
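+  // Registers two alias type members ("tpe" = String, "tpe1" = a type lambda) on a synthetic class and returns an instance of it.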
import quotes.reflect.* + + def makeBasicType(owner: Symbol): Symbol = + Symbol.newTypeAlias(owner, "tpe", Flags.EmptyFlags, TypeRepr.of[String], Symbol.noSymbol) + + def makeTypesForClass(owner: Symbol): List[Symbol] = + val typeLambda = TypeLambda.apply(List("X"), _ => List(TypeBounds.empty), _ => TypeRepr.of[Int]) + List( + makeBasicType(owner), + // type Foo = [X] =>> Int + Symbol.newTypeAlias( + owner, + "tpe1", + Flags.EmptyFlags, + typeLambda, + Symbol.noSymbol + ), + ) + + val clsSymbol = Symbol.newClass(Symbol.spliceOwner, "CLS", List(TypeRepr.of[Object]), sym => makeTypesForClass(sym), None) + val classDef: ClassDef = ClassDef(clsSymbol, List(TypeTree.of[Object]), List( + TypeDef(clsSymbol.typeMember("tpe")), + TypeDef(clsSymbol.typeMember("tpe1")), + )) + + Block(List(classDef), Apply(Select(New(TypeIdent(clsSymbol)), clsSymbol.primaryConstructor), List.empty)).asExprOf[Object] +} diff --git a/tests/pos-macros/quote-sym-newtype-in-trait/Test_2.scala b/tests/pos-macros/quote-sym-newtype-in-trait/Test_2.scala new file mode 100644 index 000000000000..2d479a09695a --- /dev/null +++ b/tests/pos-macros/quote-sym-newtype-in-trait/Test_2.scala @@ -0,0 +1,4 @@ +//> using options -experimental -Yno-experimental +def test = + testMacro + transparentTestMacro diff --git a/tests/pos-macros/quote-sym-newtype/Macro_1.scala b/tests/pos-macros/quote-sym-newtype/Macro_1.scala new file mode 100644 index 000000000000..9973ba1e047e --- /dev/null +++ b/tests/pos-macros/quote-sym-newtype/Macro_1.scala @@ -0,0 +1,13 @@ +//> using options -experimental -Yno-experimental +import scala.quoted.* + +inline def testMacro = ${ testImpl } + +def testImpl(using Quotes): Expr[Unit] = { + import quotes.reflect.* + val sym = Symbol.newTypeAlias(Symbol.spliceOwner, "mytype", Flags.EmptyFlags, TypeRepr.of[String], Symbol.noSymbol) + val typeDef = TypeDef(sym) + assert(typeDef.show == "type mytype = java.lang.String") + + Block(List(typeDef), '{()}.asTerm).asExprOf[Unit] +} diff --git a/tests/pos-macros/quote-sym-newtype/Test_2.scala b/tests/pos-macros/quote-sym-newtype/Test_2.scala new file mode 100644 index 000000000000..5a272acbdda4 --- /dev/null +++ b/tests/pos-macros/quote-sym-newtype/Test_2.scala @@ -0,0 +1,2 @@ +//> using options -experimental -Yno-experimental +def test = testMacro diff --git a/tests/pos-macros/quoted-patten-with-type-params.scala b/tests/pos-macros/quoted-patten-with-type-params.scala new file mode 100644 index 000000000000..030e3415476e --- /dev/null +++ b/tests/pos-macros/quoted-patten-with-type-params.scala @@ -0,0 +1,14 @@ +import scala.quoted.* +import scala.language.experimental.quotedPatternsWithPolymorphicFunctions + +def test(body: Expr[Any])(using Quotes): Expr[String] = + body match + case '{ [A] => (x : A, y : A) => (x, y) } => ??? + // Bounded type parameters are allowed when they are not used in + // higher-order patterns + case '{ [A <: Iterable[Int]] => (x : A) => x } => ??? 
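+    // The remaining cases use higher-order patterns: $b captures a polymorphic function that is re-applied at concrete types.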
+ case '{ [A] => (x : A, y : A) => $b[A](x, y) : A } => + '{ $b[String]("truthy", "falsy") } + case '{ [A, B] => (x : A, f : A => B) => $b[A, B](x, f) : B} => + '{ $b[Int, String](10, (x:Int)=>x.toHexString) } + case _ => Expr("not matched") diff --git a/tests/pos-macros/skolem/Macro_1.scala b/tests/pos-macros/skolem/Macro_1.scala new file mode 100644 index 000000000000..65b14cffbc5b --- /dev/null +++ b/tests/pos-macros/skolem/Macro_1.scala @@ -0,0 +1,10 @@ +import scala.quoted.* + +object Macro { + + def impl(expr: Expr[Any])(using Quotes): Expr[Unit] = + println(expr.show) + '{ () } + + inline def macr(inline x: Any): Unit = ${impl('x)} +} diff --git a/tests/pos-macros/skolem/Test_2.scala b/tests/pos-macros/skolem/Test_2.scala new file mode 100644 index 000000000000..e243b8844c23 --- /dev/null +++ b/tests/pos-macros/skolem/Test_2.scala @@ -0,0 +1,9 @@ +trait Foo: + val x: Int + def ho(p: x.type => x.type): Unit = () + +object Test { + var f: Foo = ??? + Macro.macr: + f.ho(arg => arg) +} diff --git a/tests/pos-with-compiler-cc/backend/ScalaPrimitivesOps.scala b/tests/pos-with-compiler-cc/backend/ScalaPrimitivesOps.scala deleted file mode 100644 index 6b5bfbc3e00e..000000000000 --- a/tests/pos-with-compiler-cc/backend/ScalaPrimitivesOps.scala +++ /dev/null @@ -1,232 +0,0 @@ -package dotty.tools -package backend - -object ScalaPrimitivesOps extends ScalaPrimitivesOps - -class ScalaPrimitivesOps { - // Arithmetic unary operations - inline val POS = 1 // +x - inline val NEG = 2 // -x - inline val NOT = 3 // ~x - - // Arithmetic binary operations - inline val ADD = 10 // x + y - inline val SUB = 11 // x - y - inline val MUL = 12 // x * y - inline val DIV = 13 // x / y - inline val MOD = 14 // x % y - - // Bitwise operations - inline val OR = 20 // x | y - inline val XOR = 21 // x ^ y - inline val AND = 22 // x & y - - // Shift operations - inline val LSL = 30 // x << y - inline val LSR = 31 // x >>> y - inline val ASR = 32 // x >> y - - // Comparison operations - inline val ID = 40 // x eq y - inline val NI = 41 // x ne y - inline val EQ = 42 // x == y - inline val NE = 43 // x != y - inline val LT = 44 // x < y - inline val LE = 45 // x <= y - inline val GT = 46 // x > y - inline val GE = 47 // x >= y - - // Boolean unary operations - inline val ZNOT = 50 // !x - - // Boolean binary operations - inline val ZOR = 60 // x || y - inline val ZAND = 61 // x && y - - // Array operations - inline val LENGTH = 70 // x.length - inline val APPLY = 71 // x(y) - inline val UPDATE = 72 // x(y) = z - - // Any operations - inline val IS = 80 // x.is[y] - inline val AS = 81 // x.as[y] - inline val HASH = 87 // x.## - - // AnyRef operations - inline val SYNCHRONIZED = 90 // x.synchronized(y) - - // String operations - inline val CONCAT = 100 // String.valueOf(x)+String.valueOf(y) - - // coercions - inline val COERCE = 101 - - // RunTime operations - inline val BOX = 110 // RunTime.box_(x) - inline val UNBOX = 111 // RunTime.unbox_(x) - inline val NEW_ZARRAY = 112 // RunTime.zarray(x) - inline val NEW_BARRAY = 113 // RunTime.barray(x) - inline val NEW_SARRAY = 114 // RunTime.sarray(x) - inline val NEW_CARRAY = 115 // RunTime.carray(x) - inline val NEW_IARRAY = 116 // RunTime.iarray(x) - inline val NEW_LARRAY = 117 // RunTime.larray(x) - inline val NEW_FARRAY = 118 // RunTime.farray(x) - inline val NEW_DARRAY = 119 // RunTime.darray(x) - inline val NEW_OARRAY = 120 // RunTime.oarray(x) - - inline val ZARRAY_LENGTH = 131 // RunTime.zarray_length(x) - inline val BARRAY_LENGTH = 132 // RunTime.barray_length(x) - 
inline val SARRAY_LENGTH = 133 // RunTime.sarray_length(x) - inline val CARRAY_LENGTH = 134 // RunTime.carray_length(x) - inline val IARRAY_LENGTH = 135 // RunTime.iarray_length(x) - inline val LARRAY_LENGTH = 136 // RunTime.larray_length(x) - inline val FARRAY_LENGTH = 137 // RunTime.farray_length(x) - inline val DARRAY_LENGTH = 138 // RunTime.darray_length(x) - inline val OARRAY_LENGTH = 139 // RunTime.oarray_length(x) - - inline val ZARRAY_GET = 140 // RunTime.zarray_get(x,y) - inline val BARRAY_GET = 141 // RunTime.barray_get(x,y) - inline val SARRAY_GET = 142 // RunTime.sarray_get(x,y) - inline val CARRAY_GET = 143 // RunTime.carray_get(x,y) - inline val IARRAY_GET = 144 // RunTime.iarray_get(x,y) - inline val LARRAY_GET = 145 // RunTime.larray_get(x,y) - inline val FARRAY_GET = 146 // RunTime.farray_get(x,y) - inline val DARRAY_GET = 147 // RunTime.darray_get(x,y) - inline val OARRAY_GET = 148 // RunTime.oarray_get(x,y) - - inline val ZARRAY_SET = 150 // RunTime.zarray(x,y,z) - inline val BARRAY_SET = 151 // RunTime.barray(x,y,z) - inline val SARRAY_SET = 152 // RunTime.sarray(x,y,z) - inline val CARRAY_SET = 153 // RunTime.carray(x,y,z) - inline val IARRAY_SET = 154 // RunTime.iarray(x,y,z) - inline val LARRAY_SET = 155 // RunTime.larray(x,y,z) - inline val FARRAY_SET = 156 // RunTime.farray(x,y,z) - inline val DARRAY_SET = 157 // RunTime.darray(x,y,z) - inline val OARRAY_SET = 158 // RunTime.oarray(x,y,z) - - inline val B2B = 200 // RunTime.b2b(x) - inline val B2S = 201 // RunTime.b2s(x) - inline val B2C = 202 // RunTime.b2c(x) - inline val B2I = 203 // RunTime.b2i(x) - inline val B2L = 204 // RunTime.b2l(x) - inline val B2F = 205 // RunTime.b2f(x) - inline val B2D = 206 // RunTime.b2d(x) - - inline val S2B = 210 // RunTime.s2b(x) - inline val S2S = 211 // RunTime.s2s(x) - inline val S2C = 212 // RunTime.s2c(x) - inline val S2I = 213 // RunTime.s2i(x) - inline val S2L = 214 // RunTime.s2l(x) - inline val S2F = 215 // RunTime.s2f(x) - inline val S2D = 216 // RunTime.s2d(x) - - inline val C2B = 220 // RunTime.c2b(x) - inline val C2S = 221 // RunTime.c2s(x) - inline val C2C = 222 // RunTime.c2c(x) - inline val C2I = 223 // RunTime.c2i(x) - inline val C2L = 224 // RunTime.c2l(x) - inline val C2F = 225 // RunTime.c2f(x) - inline val C2D = 226 // RunTime.c2d(x) - - inline val I2B = 230 // RunTime.i2b(x) - inline val I2S = 231 // RunTime.i2s(x) - inline val I2C = 232 // RunTime.i2c(x) - inline val I2I = 233 // RunTime.i2i(x) - inline val I2L = 234 // RunTime.i2l(x) - inline val I2F = 235 // RunTime.i2f(x) - inline val I2D = 236 // RunTime.i2d(x) - - inline val L2B = 240 // RunTime.l2b(x) - inline val L2S = 241 // RunTime.l2s(x) - inline val L2C = 242 // RunTime.l2c(x) - inline val L2I = 243 // RunTime.l2i(x) - inline val L2L = 244 // RunTime.l2l(x) - inline val L2F = 245 // RunTime.l2f(x) - inline val L2D = 246 // RunTime.l2d(x) - - inline val F2B = 250 // RunTime.f2b(x) - inline val F2S = 251 // RunTime.f2s(x) - inline val F2C = 252 // RunTime.f2c(x) - inline val F2I = 253 // RunTime.f2i(x) - inline val F2L = 254 // RunTime.f2l(x) - inline val F2F = 255 // RunTime.f2f(x) - inline val F2D = 256 // RunTime.f2d(x) - - inline val D2B = 260 // RunTime.d2b(x) - inline val D2S = 261 // RunTime.d2s(x) - inline val D2C = 262 // RunTime.d2c(x) - inline val D2I = 263 // RunTime.d2i(x) - inline val D2L = 264 // RunTime.d2l(x) - inline val D2F = 265 // RunTime.d2f(x) - inline val D2D = 266 // RunTime.d2d(x) - - /** Check whether the given operation code is an array operation. 
*/ - def isArrayOp(code: Int): Boolean = - isArrayNew(code) | isArrayLength(code) | isArrayGet(code) | isArraySet(code) - - def isArrayNew(code: Int): Boolean = code match { - case NEW_ZARRAY | NEW_BARRAY | NEW_SARRAY | NEW_CARRAY | - NEW_IARRAY | NEW_LARRAY | NEW_FARRAY | NEW_DARRAY | - NEW_OARRAY => true - case _ => false - } - - def isArrayLength(code: Int): Boolean = code match { - case ZARRAY_LENGTH | BARRAY_LENGTH | SARRAY_LENGTH | CARRAY_LENGTH | - IARRAY_LENGTH | LARRAY_LENGTH | FARRAY_LENGTH | DARRAY_LENGTH | - OARRAY_LENGTH | LENGTH => true - case _ => false - } - - def isArrayGet(code: Int): Boolean = code match { - case ZARRAY_GET | BARRAY_GET | SARRAY_GET | CARRAY_GET | - IARRAY_GET | LARRAY_GET | FARRAY_GET | DARRAY_GET | - OARRAY_GET | APPLY => true - case _ => false - } - - def isArraySet(code: Int): Boolean = code match { - case ZARRAY_SET | BARRAY_SET | SARRAY_SET | CARRAY_SET | - IARRAY_SET | LARRAY_SET | FARRAY_SET | DARRAY_SET | - OARRAY_SET | UPDATE => true - case _ => false - } - - /** Check whether the given code is a comparison operator */ - def isComparisonOp(code: Int): Boolean = code match { - case ID | NI | EQ | NE | - LT | LE | GT | GE => true - - case _ => false - } - def isUniversalEqualityOp(code: Int): Boolean = (code == EQ) || (code == NE) - def isReferenceEqualityOp(code: Int): Boolean = (code == ID) || (code == NI) - - def isArithmeticOp(code: Int): Boolean = code match { - case POS | NEG | NOT => true; // unary - case ADD | SUB | MUL | - DIV | MOD => true; // binary - case OR | XOR | AND | - LSL | LSR | ASR => true; // bitwise - case _ => false - } - - def isLogicalOp(code: Int): Boolean = code match { - case ZNOT | ZAND | ZOR => true - case _ => false - } - - def isShiftOp(code: Int): Boolean = code match { - case LSL | LSR | ASR => true - case _ => false - } - - def isBitwiseOp(code: Int): Boolean = code match { - case OR | XOR | AND => true - case _ => false - } - - def isCoercion(code: Int): Boolean = (code >= B2B) && (code <= D2D) - -} diff --git a/tests/pos-with-compiler-cc/backend/WorklistAlgorithm.scala b/tests/pos-with-compiler-cc/backend/WorklistAlgorithm.scala deleted file mode 100644 index b3d98d425b2a..000000000000 --- a/tests/pos-with-compiler-cc/backend/WorklistAlgorithm.scala +++ /dev/null @@ -1,57 +0,0 @@ -package dotty.tools -package backend - -/** - * Simple implementation of a worklist algorithm. A processing - * function is applied repeatedly to the first element in the - * worklist, as long as the stack is not empty. - * - * The client class should mix-in this class and initialize the worklist - * field and define the `processElement` method. Then call the `run` method - * providing a function that initializes the worklist. - * - * @author Martin Odersky - * @version 1.0 - * @see [[scala.tools.nsc.backend.icode.Linearizers]] - */ -trait WorklistAlgorithm { - type Elem - class WList { - private var list: List[Elem] = Nil - def isEmpty = list.isEmpty - def nonEmpty = !isEmpty - def push(e: Elem): Unit = { list = e :: list } - def pop(): Elem = { - val head = list.head - list = list.tail - head - } - def pushAll(xs: Iterable[Elem]): Unit = xs.foreach(push) - def clear(): Unit = list = Nil - - } - - val worklist: WList - - /** - * Run the iterative algorithm until the worklist remains empty. - * The initializer is run once before the loop starts and should - * initialize the worklist. 
- */ - def run(initWorklist: => Unit) = { - initWorklist - - while (worklist.nonEmpty) - processElement(dequeue) - } - - /** - * Process the current element from the worklist. - */ - def processElement(e: Elem): Unit - - /** - * Remove and return the first element to be processed from the worklist. - */ - def dequeue: Elem -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/AsmUtils.scala b/tests/pos-with-compiler-cc/backend/jvm/AsmUtils.scala deleted file mode 100644 index e6393ce82054..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/AsmUtils.scala +++ /dev/null @@ -1,65 +0,0 @@ -package dotty.tools -package backend -package jvm - -import scala.language.unsafeNulls - -import scala.tools.asm.tree.{AbstractInsnNode} -import java.io.PrintWriter -import scala.tools.asm.util.{TraceClassVisitor, TraceMethodVisitor, Textifier} -import scala.tools.asm.ClassReader - -object AsmUtils { - - /** - * Print the bytecode of methods generated by GenBCode to the standard output. Only methods - * whose name contains `traceMethodPattern` are traced. - */ - final val traceMethodEnabled = sys.env.contains("printBCODE") - final val traceMethodPattern = sys.env.getOrElse("printBCODE", "") - - /** - * Print the bytecode of classes generated by GenBCode to the standard output. - */ - inline val traceClassEnabled = false - inline val traceClassPattern = "" - - /** - * Print the bytedcode of classes as they are serialized by the ASM library. The serialization - * performed by `asm.ClassWriter` can change the code generated by GenBCode. For example, it - * introduces stack map frames, it computes the maximal stack sizes, and it replaces dead - * code by NOPs (see also https://github.com/scala/scala/pull/3726#issuecomment-42861780). - */ - inline val traceSerializedClassEnabled = false - inline val traceSerializedClassPattern = "" - - def traceMethod(mnode: MethodNode1): Unit = { - println(s"Bytecode for method ${mnode.name}") - val p = new Textifier - val tracer = new TraceMethodVisitor(p) - mnode.accept(tracer) - val w = new PrintWriter(System.out) - p.print(w) - w.flush() - } - - def traceClass(cnode: ClassNode1): Unit = { - println(s"Bytecode for class ${cnode.name}") - val w = new PrintWriter(System.out) - cnode.accept(new TraceClassVisitor(w)) - w.flush() - } - - def traceClass(bytes: Array[Byte]): Unit = traceClass(readClass(bytes)) - - def readClass(bytes: Array[Byte]): ClassNode1 = { - val node = new ClassNode1() - new ClassReader(bytes).accept(node, 0) - node - } - - def instructionString(instruction: AbstractInsnNode): String = instruction.getOpcode match { - case -1 => instruction.toString - case op => scala.tools.asm.util.Printer.OPCODES(op) - } -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/BCodeAsmCommon.scala b/tests/pos-with-compiler-cc/backend/jvm/BCodeAsmCommon.scala deleted file mode 100644 index d95638be2695..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/BCodeAsmCommon.scala +++ /dev/null @@ -1,158 +0,0 @@ -package dotty.tools -package backend -package jvm - -import scala.language.unsafeNulls - -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.report - -/** - * This trait contains code shared between GenBCode and GenASM that depends on types defined in - * the compiler cake (Global). - */ -final class BCodeAsmCommon[I <: DottyBackendInterface](val interface: I) { - import interface.given - import DottyBackendInterface.symExtensions - - /** - * True if `classSym` is an anonymous class or a local class. 
I.e., false if `classSym` is a - * member class. This method is used to decide if we should emit an EnclosingMethod attribute. - * It is also used to decide whether the "owner" field in the InnerClass attribute should be - * null. - */ - def isAnonymousOrLocalClass(classSym: Symbol): Boolean = { - assert(classSym.isClass, s"not a class: $classSym") - // Here used to be an `assert(!classSym.isDelambdafyFunction)`: delambdafy lambda classes are - // always top-level. However, SI-8900 shows an example where the weak name-based implementation - // of isDelambdafyFunction failed (for a function declared in a package named "lambda"). - classSym.isAnonymousClass || { - val originalOwner = classSym.originalOwner - originalOwner != NoSymbol && !originalOwner.isClass - } - } - - /** - * Returns the enclosing method for non-member classes. In the following example - * - * class A { - * def f = { - * class B { - * class C - * } - * } - * } - * - * the method returns Some(f) for B, but None for C, because C is a member class. For non-member - * classes that are not enclosed by a method, it returns None: - * - * class A { - * { class B } - * } - * - * In this case, for B, we return None. - * - * The EnclosingMethod attribute needs to be added to non-member classes (see doc in BTypes). - * This is a source-level property, so we need to use the originalOwner chain to reconstruct it. - */ - private def enclosingMethodForEnclosingMethodAttribute(classSym: Symbol): Option[Symbol] = { - assert(classSym.isClass, classSym) - def enclosingMethod(sym: Symbol): Option[Symbol] = { - if (sym.isClass || sym == NoSymbol) None - else if (sym.is(Method)) Some(sym) - else enclosingMethod(sym.originalOwner) - } - enclosingMethod(classSym.originalOwner) - } - - /** - * The enclosing class for emitting the EnclosingMethod attribute. Since this is a source-level - * property, this method looks at the originalOwner chain. See doc in BTypes. - */ - private def enclosingClassForEnclosingMethodAttribute(classSym: Symbol): Symbol = { - assert(classSym.isClass, classSym) - def enclosingClass(sym: Symbol): Symbol = { - if (sym.isClass) sym - else enclosingClass(sym.originalOwner.originalLexicallyEnclosingClass) - } - enclosingClass(classSym.originalOwner.originalLexicallyEnclosingClass) - } - - /*final*/ case class EnclosingMethodEntry(owner: String, name: String, methodDescriptor: String) - - /** - * Data for emitting an EnclosingMethod attribute. None if `classSym` is a member class (not - * an anonymous or local class). See doc in BTypes. - * - * The class is parametrized by two functions to obtain a bytecode class descriptor for a class - * symbol, and to obtain a method signature descriptor fro a method symbol. These function depend - * on the implementation of GenASM / GenBCode, so they need to be passed in. 
- */ - def enclosingMethodAttribute(classSym: Symbol, classDesc: Symbol => String, methodDesc: Symbol => String): Option[EnclosingMethodEntry] = { - if (isAnonymousOrLocalClass(classSym)) { - val methodOpt = enclosingMethodForEnclosingMethodAttribute(classSym) - report.debuglog(s"enclosing method for $classSym is $methodOpt (in ${methodOpt.map(_.enclosingClass)})") - Some(EnclosingMethodEntry( - classDesc(enclosingClassForEnclosingMethodAttribute(classSym)), - methodOpt.map(_.javaSimpleName).orNull, - methodOpt.map(methodDesc).orNull)) - } else { - None - } - } -} - -object BCodeAsmCommon{ - def ubytesToCharArray(bytes: Array[Byte]): Array[Char] = { - val ca = new Array[Char](bytes.length) - var idx = 0 - while(idx < bytes.length) { - val b: Byte = bytes(idx) - assert((b & ~0x7f) == 0) - ca(idx) = b.asInstanceOf[Char] - idx += 1 - } - - ca - } - - final def arrEncode(bSeven: Array[Byte]): Array[String] = { - var strs: List[String] = Nil - // chop into slices of at most 65535 bytes, counting 0x00 as taking two bytes (as per JVMS 4.4.7 The CONSTANT_Utf8_info Structure) - var prevOffset = 0 - var offset = 0 - var encLength = 0 - while(offset < bSeven.length) { - val deltaEncLength = (if(bSeven(offset) == 0) 2 else 1) - val newEncLength = encLength.toLong + deltaEncLength - if(newEncLength >= 65535) { - val ba = bSeven.slice(prevOffset, offset) - strs ::= new java.lang.String(ubytesToCharArray(ba)) - encLength = 0 - prevOffset = offset - } else { - encLength += deltaEncLength - offset += 1 - } - } - if(prevOffset < offset) { - assert(offset == bSeven.length) - val ba = bSeven.slice(prevOffset, offset) - strs ::= new java.lang.String(ubytesToCharArray(ba)) - } - assert(strs.size > 1, "encode instead as one String via strEncode()") // TODO too strict? - strs.reverse.toArray - } - - - def strEncode(bSeven: Array[Byte]): String = { - val ca = ubytesToCharArray(bSeven) - new java.lang.String(ca) - // debug val bvA = new asm.ByteVector; bvA.putUTF8(s) - // debug val enc: Array[Byte] = scala.reflect.internal.pickling.ByteCodecs.encode(bytes) - // debug assert(enc(idx) == bvA.getByte(idx + 2)) - // debug assert(bvA.getLength == enc.size + 2) - } - -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/BCodeBodyBuilder.scala b/tests/pos-with-compiler-cc/backend/jvm/BCodeBodyBuilder.scala deleted file mode 100644 index 6f067a0e5ef0..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/BCodeBodyBuilder.scala +++ /dev/null @@ -1,1776 +0,0 @@ -package dotty.tools -package backend -package jvm - -import scala.language.unsafeNulls - -import scala.annotation.switch -import scala.collection.mutable.SortedMap - -import scala.tools.asm -import scala.tools.asm.{Handle, Opcodes} -import BCodeHelpers.InvokeStyle - -import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.CompilationUnit -import dotty.tools.dotc.core.Constants._ -import dotty.tools.dotc.core.Flags.{Label => LabelFlag, _} -import dotty.tools.dotc.core.Types._ -import dotty.tools.dotc.core.StdNames.{nme, str} -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.transform.Erasure -import dotty.tools.dotc.transform.SymUtils._ -import dotty.tools.dotc.util.Spans._ -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Phases._ -import dotty.tools.dotc.core.Decorators.em -import dotty.tools.dotc.report - -/* - * - * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/ - * @version 1.0 - * - */ -trait BCodeBodyBuilder extends BCodeSkelBuilder { - // import global._ - // import definitions._ - 
import tpd._ - import int.{_, given} - import DottyBackendInterface.symExtensions - import bTypes._ - import coreBTypes._ - - protected val primitives: DottyPrimitives - - /* - * Functionality to build the body of ASM MethodNode, except for `synchronized` and `try` expressions. - */ - abstract class PlainBodyBuilder(cunit: CompilationUnit) extends PlainSkelBuilder(cunit) { - - import Primitives.TestOp - - /* ---------------- helper utils for generating methods and code ---------------- */ - - def emit(opc: Int): Unit = { mnode.visitInsn(opc) } - - def emitZeroOf(tk: BType): Unit = { - tk match { - case BOOL => bc.boolconst(false) - case BYTE | - SHORT | - CHAR | - INT => bc.iconst(0) - case LONG => bc.lconst(0) - case FLOAT => bc.fconst(0) - case DOUBLE => bc.dconst(0) - case UNIT => () - case _ => emit(asm.Opcodes.ACONST_NULL) - } - } - - /* - * Emits code that adds nothing to the operand stack. - * Two main cases: `tree` is an assignment, - * otherwise an `adapt()` to UNIT is performed if needed. - */ - def genStat(tree: Tree): Unit = { - lineNumber(tree) - - tree match { - case Assign(lhs @ DesugaredSelect(qual, _), rhs) => - val isStatic = lhs.symbol.isStaticMember - if (!isStatic) { genLoadQualifier(lhs) } - genLoad(rhs, symInfoTK(lhs.symbol)) - lineNumber(tree) - // receiverClass is used in the bytecode to access the field. using sym.owner may lead to IllegalAccessError - val receiverClass = qual.tpe.typeSymbol - fieldStore(lhs.symbol, receiverClass) - - case Assign(lhs, rhs) => - val s = lhs.symbol - val Local(tk, _, idx, _) = locals.getOrMakeLocal(s) - - rhs match { - case Apply(Select(larg: Ident, nme.ADD), Literal(x) :: Nil) - if larg.symbol == s && tk.isIntSizedType && x.isShortRange => - lineNumber(tree) - bc.iinc(idx, x.intValue) - - case Apply(Select(larg: Ident, nme.SUB), Literal(x) :: Nil) - if larg.symbol == s && tk.isIntSizedType && Constant(-x.intValue).isShortRange => - lineNumber(tree) - bc.iinc(idx, -x.intValue) - - case _ => - genLoad(rhs, tk) - lineNumber(tree) - bc.store(idx, tk) - } - - case _ => - genLoad(tree, UNIT) - } - } - - /* Generate code for primitive arithmetic operations. 
*/ - def genArithmeticOp(tree: Tree, code: Int): BType = tree match{ - case Apply(fun @ DesugaredSelect(larg, _), args) => - var resKind = tpeTK(larg) - - assert(resKind.isNumericType || (resKind == BOOL), - s"$resKind is not a numeric or boolean type [operation: ${fun.symbol}]") - - import ScalaPrimitivesOps._ - - args match { - // unary operation - case Nil => - genLoad(larg, resKind) - code match { - case POS => () // nothing - case NEG => bc.neg(resKind) - case NOT => bc.genPrimitiveArithmetic(Primitives.NOT, resKind) - case _ => abort(s"Unknown unary operation: ${fun.symbol.showFullName} code: $code") - } - - // binary operation - case rarg :: Nil => - val isShift = isShiftOp(code) - resKind = tpeTK(larg).maxType(if (isShift) INT else tpeTK(rarg)) - - if (isShift || isBitwiseOp(code)) { - assert(resKind.isIntegralType || (resKind == BOOL), - s"$resKind incompatible with arithmetic modulo operation.") - } - - genLoad(larg, resKind) - genLoad(rarg, if (isShift) INT else resKind) - - (code: @switch) match { - case ADD => bc add resKind - case SUB => bc sub resKind - case MUL => bc mul resKind - case DIV => bc div resKind - case MOD => bc rem resKind - - case OR | XOR | AND => bc.genPrimitiveLogical(code, resKind) - - case LSL | LSR | ASR => bc.genPrimitiveShift(code, resKind) - - case _ => abort(s"Unknown primitive: ${fun.symbol}[$code]") - } - - case _ => - abort(s"Too many arguments for primitive function: $tree") - } - lineNumber(tree) - resKind - } - - /* Generate primitive array operations. */ - def genArrayOp(tree: Tree, code: Int, expectedType: BType): BType = tree match{ - - case Apply(DesugaredSelect(arrayObj, _), args) => - import ScalaPrimitivesOps._ - val k = tpeTK(arrayObj) - genLoad(arrayObj, k) - val elementType = typeOfArrayOp.getOrElse[bTypes.BType](code, abort(s"Unknown operation on arrays: $tree code: $code")) - - var generatedType = expectedType - - if (isArrayGet(code)) { - // load argument on stack - assert(args.length == 1, s"Too many arguments for array get operation: $tree"); - genLoad(args.head, INT) - generatedType = k.asArrayBType.componentType - bc.aload(elementType) - } - else if (isArraySet(code)) { - val List(a1, a2) = args - genLoad(a1, INT) - genLoad(a2) - generatedType = UNIT - bc.astore(elementType) - } else { - generatedType = INT - emit(asm.Opcodes.ARRAYLENGTH) - } - lineNumber(tree) - - generatedType - } - - def genLoadIfTo(tree: If, expectedType: BType, dest: LoadDestination): BType = tree match{ - case If(condp, thenp, elsep) => - - val success = new asm.Label - val failure = new asm.Label - - val hasElse = !elsep.isEmpty && (elsep match { - case Literal(value) if value.tag == UnitTag => false - case _ => true - }) - - genCond(condp, success, failure, targetIfNoJump = success) - markProgramPoint(success) - - if dest == LoadDestination.FallThrough then - if hasElse then - val thenKind = tpeTK(thenp) - val elseKind = tpeTK(elsep) - def hasUnitBranch = (thenKind == UNIT || elseKind == UNIT) && expectedType == UNIT - val resKind = if (hasUnitBranch) UNIT else tpeTK(tree) - - val postIf = new asm.Label - genLoadTo(thenp, resKind, LoadDestination.Jump(postIf)) - markProgramPoint(failure) - genLoadTo(elsep, resKind, LoadDestination.FallThrough) - markProgramPoint(postIf) - resKind - else - genLoad(thenp, UNIT) - markProgramPoint(failure) - UNIT - end if - else - genLoadTo(thenp, expectedType, dest) - markProgramPoint(failure) - if hasElse then - genLoadTo(elsep, expectedType, dest) - else - genAdaptAndSendToDest(UNIT, expectedType, dest) - expectedType - 
end if - } - - def genPrimitiveOp(tree: Apply, expectedType: BType): BType = (tree: @unchecked) match { - case Apply(fun @ DesugaredSelect(receiver, _), _) => - val sym = tree.symbol - - val code = primitives.getPrimitive(tree, receiver.tpe) - - import ScalaPrimitivesOps._ - - if (isArithmeticOp(code)) genArithmeticOp(tree, code) - else if (code == CONCAT) genStringConcat(tree) - else if (code == HASH) genScalaHash(receiver) - else if (isArrayOp(code)) genArrayOp(tree, code, expectedType) - else if (isLogicalOp(code) || isComparisonOp(code)) { - val success, failure, after = new asm.Label - genCond(tree, success, failure, targetIfNoJump = success) - // success block - markProgramPoint(success) - bc boolconst true - bc goTo after - // failure block - markProgramPoint(failure) - bc boolconst false - // after - markProgramPoint(after) - - BOOL - } - else if (isCoercion(code)) { - genLoad(receiver) - lineNumber(tree) - genCoercion(code) - coercionTo(code) - } - else abort( - s"Primitive operation not handled yet: ${sym.showFullName}(${fun.symbol.name}) at: ${tree.span}" - ) - } - - def genLoad(tree: Tree): Unit = { - genLoad(tree, tpeTK(tree)) - } - - /* Generate code for trees that produce values on the stack */ - def genLoad(tree: Tree, expectedType: BType): Unit = - genLoadTo(tree, expectedType, LoadDestination.FallThrough) - - /* Generate code for trees that produce values, sent to a given `LoadDestination`. */ - def genLoadTo(tree: Tree, expectedType: BType, dest: LoadDestination): Unit = - var generatedType = expectedType - var generatedDest = LoadDestination.FallThrough - - lineNumber(tree) - - tree match { - case tree@ValDef(_, _, _) => - val sym = tree.symbol - /* most of the time, !locals.contains(sym), unless the current activation of genLoad() is being called - while duplicating a finalizer that contains this ValDef. 
*/ - val loc = locals.getOrMakeLocal(sym) - val Local(tk, _, idx, isSynth) = loc - if (tree.rhs == tpd.EmptyTree) { emitZeroOf(tk) } - else { genLoad(tree.rhs, tk) } - bc.store(idx, tk) - val localVarStart = currProgramPoint() - if (!isSynth) { // there are case ValDef's emitted by patmat - varsInScope ::= (sym -> localVarStart) - } - generatedType = UNIT - - case t @ If(_, _, _) => - generatedType = genLoadIfTo(t, expectedType, dest) - generatedDest = dest - - case t @ Labeled(_, _) => - generatedType = genLabeledTo(t, expectedType, dest) - generatedDest = dest - - case r: Return => - genReturn(r) - generatedDest = LoadDestination.Return - - case t @ WhileDo(_, _) => - generatedDest = genWhileDo(t) - generatedType = UNIT - - case t @ Try(_, _, _) => - generatedType = genLoadTry(t) - - case t: Apply if t.fun.symbol eq defn.throwMethod => - val thrownExpr = t.args.head - val thrownKind = tpeTK(thrownExpr) - genLoadTo(thrownExpr, thrownKind, LoadDestination.Throw) - generatedDest = LoadDestination.Throw - - case New(tpt) => - abort(s"Unexpected New(${tpt.tpe.showSummary()}/$tpt) reached GenBCode.\n" + - " Call was genLoad" + ((tree, expectedType))) - - case t @ Closure(env, call, tpt) => - val functionalInterface: Symbol = - if !tpt.isEmpty then tpt.tpe.classSymbol - else t.tpe.classSymbol - val (fun, args) = call match { - case Apply(fun, args) => (fun, args) - case t @ DesugaredSelect(_, _) => (t, Nil) // TODO: use Select - case t @ Ident(_) => (t, Nil) - } - - if (!fun.symbol.isStaticMember) { - // load receiver of non-static implementation of lambda - - // darkdimius: I haven't found in spec `this` reference should go - // but I was able to derrive it by reading - // AbstractValidatingLambdaMetafactory.validateMetafactoryArgs - - val DesugaredSelect(prefix, _) = fun: @unchecked - genLoad(prefix) - } - - genLoadArguments(env, fun.symbol.info.firstParamTypes map toTypeKind) - generatedType = genInvokeDynamicLambda(NoSymbol, fun.symbol, env.size, functionalInterface) - - case app @ Apply(_, _) => - generatedType = genApply(app, expectedType) - - case This(qual) => - val symIsModuleClass = tree.symbol.is(ModuleClass) - assert(tree.symbol == claszSymbol || symIsModuleClass, - s"Trying to access the this of another class: tree.symbol = ${tree.symbol}, class symbol = $claszSymbol compilation unit: $cunit") - if (symIsModuleClass && tree.symbol != claszSymbol) { - generatedType = genLoadModule(tree) - } - else { - mnode.visitVarInsn(asm.Opcodes.ALOAD, 0) - // When compiling Array.scala, the constructor invokes `Array.this.super.`. The expectedType - // is `[Object` (computed by typeToBType, the type of This(Array) is `Array[T]`). If we would set - // the generatedType to `Array` below, the call to adapt at the end would fail. The situation is - // similar for primitives (`I` vs `Int`). - if (tree.symbol != defn.ArrayClass && !tree.symbol.isPrimitiveValueClass) { - generatedType = classBTypeFromSymbol(claszSymbol) - } - } - - case DesugaredSelect(Ident(nme.EMPTY_PACKAGE), module) => - assert(tree.symbol.is(Module), s"Selection of non-module from empty package: $tree sym: ${tree.symbol} at: ${tree.span}") - genLoadModule(tree) - - case DesugaredSelect(qualifier, _) => - val sym = tree.symbol - generatedType = symInfoTK(sym) - val qualSafeToElide = tpd.isIdempotentExpr(qualifier) - - def genLoadQualUnlessElidable(): Unit = { if (!qualSafeToElide) { genLoadQualifier(tree) } } - - // receiverClass is used in the bytecode to access the field. 
using sym.owner may lead to IllegalAccessError - def receiverClass = qualifier.tpe.typeSymbol - if (sym.is(Module)) { - genLoadQualUnlessElidable() - genLoadModule(tree) - } else if (sym.isStaticMember) { - genLoadQualUnlessElidable() - fieldLoad(sym, receiverClass) - } else { - genLoadQualifier(tree) - fieldLoad(sym, receiverClass) - } - - case t @ Ident(name) => - val sym = tree.symbol - val tk = symInfoTK(sym) - generatedType = tk - - val desugared = cachedDesugarIdent(t) - desugared match { - case None => - if (!sym.is(Package)) { - if (sym.is(Module)) genLoadModule(sym) - else locals.load(sym) - } - case Some(t) => - genLoad(t, generatedType) - } - - case Literal(value) => - if (value.tag != UnitTag) (value.tag, expectedType) match { - case (IntTag, LONG ) => bc.lconst(value.longValue); generatedType = LONG - case (FloatTag, DOUBLE) => bc.dconst(value.doubleValue); generatedType = DOUBLE - case (NullTag, _ ) => bc.emit(asm.Opcodes.ACONST_NULL); generatedType = srNullRef - case _ => genConstant(value); generatedType = tpeTK(tree) - } - - case blck @ Block(stats, expr) => - if(stats.isEmpty) - genLoadTo(expr, expectedType, dest) - else - genBlockTo(blck, expectedType, dest) - generatedDest = dest - - case Typed(Super(_, _), _) => - genLoadTo(tpd.This(claszSymbol.asClass), expectedType, dest) - generatedDest = dest - - case Typed(expr, _) => - genLoadTo(expr, expectedType, dest) - generatedDest = dest - - case Assign(_, _) => - generatedType = UNIT - genStat(tree) - - case av @ ArrayValue(_, _) => - generatedType = genArrayValue(av) - - case mtch @ Match(_, _) => - generatedType = genMatchTo(mtch, expectedType, dest) - generatedDest = dest - - case tpd.EmptyTree => if (expectedType != UNIT) { emitZeroOf(expectedType) } - - - case t: TypeApply => // dotty specific - generatedType = genTypeApply(t) - - case _ => abort(s"Unexpected tree in genLoad: $tree/${tree.getClass} at: ${tree.span}") - } - - // emit conversion and send to the right destination - if generatedDest == LoadDestination.FallThrough then - genAdaptAndSendToDest(generatedType, expectedType, dest) - end genLoadTo - - def genAdaptAndSendToDest(generatedType: BType, expectedType: BType, dest: LoadDestination): Unit = - if generatedType != expectedType then - adapt(generatedType, expectedType) - - dest match - case LoadDestination.FallThrough => - () - case LoadDestination.Jump(label) => - bc goTo label - case LoadDestination.Return => - bc emitRETURN returnType - case LoadDestination.Throw => - val thrownType = expectedType - // `throw null` is valid although scala.Null (as defined in src/libray-aux) isn't a subtype of Throwable. - // Similarly for scala.Nothing (again, as defined in src/libray-aux). 
- assert(thrownType.isNullType || thrownType.isNothingType || thrownType.asClassBType.isSubtypeOf(jlThrowableRef)) - emit(asm.Opcodes.ATHROW) - end genAdaptAndSendToDest - - // ---------------- field load and store ---------------- - - /* - * must-single-thread - */ - def fieldLoad( field: Symbol, hostClass: Symbol = null): Unit = fieldOp(field, isLoad = true, hostClass) - - /* - * must-single-thread - */ - def fieldStore(field: Symbol, hostClass: Symbol = null): Unit = fieldOp(field, isLoad = false, hostClass) - - /* - * must-single-thread - */ - private def fieldOp(field: Symbol, isLoad: Boolean, specificReceiver: Symbol): Unit = { - val useSpecificReceiver = specificReceiver != null && !field.isScalaStatic - - val owner = internalName(if (useSpecificReceiver) specificReceiver else field.owner) - val fieldJName = field.javaSimpleName - val fieldDescr = symInfoTK(field).descriptor - val isStatic = field.isStaticMember - val opc = - if (isLoad) { if (isStatic) asm.Opcodes.GETSTATIC else asm.Opcodes.GETFIELD } - else { if (isStatic) asm.Opcodes.PUTSTATIC else asm.Opcodes.PUTFIELD } - mnode.visitFieldInsn(opc, owner, fieldJName, fieldDescr) - - } - - // ---------------- emitting constant values ---------------- - - /* - * For ClazzTag: - * must-single-thread - * Otherwise it's safe to call from multiple threads. - */ - def genConstant(const: Constant): Unit = { - (const.tag/*: @switch*/) match { - - case BooleanTag => bc.boolconst(const.booleanValue) - - case ByteTag => bc.iconst(const.byteValue) - case ShortTag => bc.iconst(const.shortValue) - case CharTag => bc.iconst(const.charValue) - case IntTag => bc.iconst(const.intValue) - - case LongTag => bc.lconst(const.longValue) - case FloatTag => bc.fconst(const.floatValue) - case DoubleTag => bc.dconst(const.doubleValue) - - case UnitTag => () - - case StringTag => - assert(const.value != null, const) // TODO this invariant isn't documented in `case class Constant` - mnode.visitLdcInsn(const.stringValue) // `stringValue` special-cases null, but not for a const with StringTag - - case NullTag => emit(asm.Opcodes.ACONST_NULL) - - case ClazzTag => - val tp = toTypeKind(const.typeValue) - if tp.isPrimitive then - val boxedClass = boxedClassOfPrimitive(tp.asPrimitiveBType) - mnode.visitFieldInsn( - asm.Opcodes.GETSTATIC, - boxedClass.internalName, - "TYPE", // field name - jlClassRef.descriptor - ) - else - mnode.visitLdcInsn(tp.toASMType) - - case _ => abort(s"Unknown constant value: $const") - } - } - - private def genLabeledTo(tree: Labeled, expectedType: BType, dest: LoadDestination): BType = tree match { - case Labeled(bind, expr) => - - val labelSym = bind.symbol - - if dest == LoadDestination.FallThrough then - val resKind = tpeTK(tree) - val jumpTarget = new asm.Label - registerJumpDest(labelSym, resKind, LoadDestination.Jump(jumpTarget)) - genLoad(expr, resKind) - markProgramPoint(jumpTarget) - resKind - else - registerJumpDest(labelSym, expectedType, dest) - genLoadTo(expr, expectedType, dest) - expectedType - end if - } - - private def genReturn(r: Return): Unit = { - val expr: Tree = r.expr - val fromSym: Symbol = if (r.from.symbol.is(LabelFlag)) r.from.symbol else NoSymbol - - if (NoSymbol == fromSym) { - // return from enclosing method - cleanups match { - case Nil => - // not an assertion: !shouldEmitCleanup (at least not yet, pendingCleanups() may still have to run, and reset `shouldEmitCleanup`. 
- genLoadTo(expr, returnType, LoadDestination.Return) - case nextCleanup :: rest => - genLoad(expr, returnType) - lineNumber(r) - val saveReturnValue = (returnType != UNIT) - if (saveReturnValue) { - // regarding return value, the protocol is: in place of a `return-stmt`, a sequence of `adapt, store, jump` are inserted. - if (earlyReturnVar == null) { - earlyReturnVar = locals.makeLocal(returnType, "earlyReturnVar", expr.tpe, expr.span) - } - locals.store(earlyReturnVar) - } - bc goTo nextCleanup - shouldEmitCleanup = true - } - } else { - // return from labeled - assert(fromSym.is(LabelFlag), fromSym) - assert(!fromSym.is(Method), fromSym) - - /* TODO At the moment, we disregard cleanups, because by construction we don't have return-from-labels - * that cross cleanup boundaries. However, in theory such crossings are valid, so we should take care - * of them. - */ - val (exprExpectedType, exprDest) = findJumpDest(fromSym) - genLoadTo(expr, exprExpectedType, exprDest) - } - } // end of genReturn() - - def genWhileDo(tree: WhileDo): LoadDestination = tree match{ - case WhileDo(cond, body) => - - val isInfinite = cond == tpd.EmptyTree - - val loop = new asm.Label - markProgramPoint(loop) - - if isInfinite then - val dest = LoadDestination.Jump(loop) - genLoadTo(body, UNIT, dest) - dest - else - body match - case Literal(value) if value.tag == UnitTag => - // this is the shape of do..while loops - val exitLoop = new asm.Label - genCond(cond, loop, exitLoop, targetIfNoJump = exitLoop) - markProgramPoint(exitLoop) - case _ => - val success = new asm.Label - val failure = new asm.Label - genCond(cond, success, failure, targetIfNoJump = success) - markProgramPoint(success) - genLoadTo(body, UNIT, LoadDestination.Jump(loop)) - markProgramPoint(failure) - end match - LoadDestination.FallThrough - } - - def genTypeApply(t: TypeApply): BType = (t: @unchecked) match { - case TypeApply(fun@DesugaredSelect(obj, _), targs) => - - val sym = fun.symbol - val cast = - if (sym == defn.Any_isInstanceOf) false - else if (sym == defn.Any_asInstanceOf) true - else abort(s"Unexpected type application $fun[sym: ${sym.showFullName}] in: $t") - val l = tpeTK(obj) - val r = tpeTK(targs.head) - genLoadQualifier(fun) - - // TODO @lry make pattern match - if (l.isPrimitive && r.isPrimitive) - genConversion(l, r, cast) - else if (l.isPrimitive) { - bc drop l - if (cast) { - mnode.visitTypeInsn(asm.Opcodes.NEW, jlClassCastExceptionRef.internalName) - bc dup ObjectRef - emit(asm.Opcodes.ATHROW) - } else { - bc boolconst false - } - } - else if (r.isPrimitive && cast) { - abort(s"Erasure should have added an unboxing operation to prevent this cast. Tree: $t") - } - else if (r.isPrimitive) { - bc isInstance boxedClassOfPrimitive(r.asPrimitiveBType) - } - else { - assert(r.isRef, r) // ensure that it's not a method - genCast(r.asRefBType, cast) - } - - if (cast) r else BOOL - } // end of genTypeApply() - - - private def mkArrayConstructorCall(arr: ArrayBType, app: Apply, args: List[Tree]) = { - val dims = arr.dimension - var elemKind = arr.elementType - val argsSize = args.length - if (argsSize > dims) { - report.error(em"too many arguments for array constructor: found ${args.length} but array has only $dims dimension(s)", ctx.source.atSpan(app.span)) - } - if (argsSize < dims) { - /* In one step: - * elemKind = new BType(BType.ARRAY, arr.off + argsSize, arr.len - argsSize) - * however the above does not enter a TypeName for each nested arrays in chrs. 
- */ - for (i <- args.length until dims) elemKind = ArrayBType(elemKind) - } - genLoadArguments(args, List.fill(args.size)(INT)) - (argsSize /*: @switch*/) match { - case 1 => bc newarray elemKind - case _ => - val descr = ("[" * argsSize) + elemKind.descriptor // denotes the same as: arrayN(elemKind, argsSize).descriptor - mnode.visitMultiANewArrayInsn(descr, argsSize) - } - } - - - private def genApply(app: Apply, expectedType: BType): BType = { - var generatedType = expectedType - lineNumber(app) - app match { - case Apply(_, args) if app.symbol eq defn.newArrayMethod => - val List(elemClaz, Literal(c: Constant), ArrayValue(_, dims)) = args: @unchecked - - generatedType = toTypeKind(c.typeValue) - mkArrayConstructorCall(generatedType.asArrayBType, app, dims) - case Apply(t :TypeApply, _) => - generatedType = - if (t.symbol ne defn.Object_synchronized) genTypeApply(t) - else genSynchronized(app, expectedType) - - case Apply(fun @ DesugaredSelect(Super(superQual, _), _), args) => - // 'super' call: Note: since constructors are supposed to - // return an instance of what they construct, we have to take - // special care. On JVM they are 'void', and Scala forbids (syntactically) - // to call super constructors explicitly and/or use their 'returned' value. - // therefore, we can ignore this fact, and generate code that leaves nothing - // on the stack (contrary to what the type in the AST says). - - // scala/bug#10290: qual can be `this.$outer()` (not just `this`), so we call genLoad (not just ALOAD_0) - genLoad(superQual) - genLoadArguments(args, paramTKs(app)) - generatedType = genCallMethod(fun.symbol, InvokeStyle.Super, app.span) - - // 'new' constructor call: Note: since constructors are - // thought to return an instance of what they construct, - // we have to 'simulate' it by DUPlicating the freshly created - // instance (on JVM, methods return VOID). 
- case Apply(fun @ DesugaredSelect(New(tpt), nme.CONSTRUCTOR), args) => - val ctor = fun.symbol - assert(ctor.isClassConstructor, s"'new' call to non-constructor: ${ctor.name}") - - generatedType = toTypeKind(tpt.tpe) - assert(generatedType.isRef, s"Non reference type cannot be instantiated: $generatedType") - - generatedType match { - case arr: ArrayBType => - mkArrayConstructorCall(arr, app, args) - - case rt: ClassBType => - assert(classBTypeFromSymbol(ctor.owner) == rt, s"Symbol ${ctor.owner.showFullName} is different from $rt") - mnode.visitTypeInsn(asm.Opcodes.NEW, rt.internalName) - bc dup generatedType - genLoadArguments(args, paramTKs(app)) - genCallMethod(ctor, InvokeStyle.Special, app.span) - - case _ => - abort(s"Cannot instantiate $tpt of kind: $generatedType") - } - - case Apply(fun, List(expr)) if Erasure.Boxing.isBox(fun.symbol) && fun.symbol.denot.owner != defn.UnitModuleClass => - val nativeKind = tpeTK(expr) - genLoad(expr, nativeKind) - val MethodNameAndType(mname, methodType) = asmBoxTo(nativeKind) - bc.invokestatic(srBoxesRuntimeRef.internalName, mname, methodType.descriptor, itf = false) - generatedType = boxResultType(fun.symbol) // was toTypeKind(fun.symbol.tpe.resultType) - - case Apply(fun, List(expr)) if Erasure.Boxing.isUnbox(fun.symbol) && fun.symbol.denot.owner != defn.UnitModuleClass => - genLoad(expr) - val boxType = unboxResultType(fun.symbol) // was toTypeKind(fun.symbol.owner.linkedClassOfClass.tpe) - generatedType = boxType - val MethodNameAndType(mname, methodType) = asmUnboxTo(boxType) - bc.invokestatic(srBoxesRuntimeRef.internalName, mname, methodType.descriptor, itf = false) - - case app @ Apply(fun, args) => - val sym = fun.symbol - - if (isPrimitive(fun)) { // primitive method call - generatedType = genPrimitiveOp(app, expectedType) - } else { // normal method call - val invokeStyle = - if (sym.isStaticMember) InvokeStyle.Static - else if (sym.is(Private) || sym.isClassConstructor) InvokeStyle.Special - else if (app.hasAttachment(BCodeHelpers.UseInvokeSpecial)) InvokeStyle.Special - else InvokeStyle.Virtual - - if (invokeStyle.hasInstance) genLoadQualifier(fun) - genLoadArguments(args, paramTKs(app)) - - val DesugaredSelect(qual, name) = fun: @unchecked // fun is a Select, also checked in genLoadQualifier - val isArrayClone = name == nme.clone_ && qual.tpe.widen.isInstanceOf[JavaArrayType] - if (isArrayClone) { - // Special-case Array.clone, introduced in 36ef60e. The goal is to generate this call - // as "[I.clone" instead of "java/lang/Object.clone". This is consistent with javac. - // Arrays have a public method `clone` (jls 10.7). - // - // The JVMS is not explicit about this, but that receiver type can be an array type - // descriptor (instead of a class internal name): - // invokevirtual #2; //Method "[I".clone:()Ljava/lang/Object - // - // Note that using `Object.clone()` would work as well, but only because the JVM - // relaxes protected access specifically if the receiver is an array: - // http://hg.openjdk.java.net/jdk8/jdk8/hotspot/file/87ee5ee27509/src/share/vm/interpreter/linkResolver.cpp#l439 - // Example: `class C { override def clone(): Object = "hi" }` - // Emitting `def f(c: C) = c.clone()` as `Object.clone()` gives a VerifyError. 
- val target: String = tpeTK(qual).asRefBType.classOrArrayType - val methodBType = asmMethodType(sym) - bc.invokevirtual(target, sym.javaSimpleName, methodBType.descriptor) - generatedType = methodBType.returnType - } else { - val receiverClass = if (!invokeStyle.isVirtual) null else { - // receiverClass is used in the bytecode to as the method receiver. using sym.owner - // may lead to IllegalAccessErrors, see 9954eaf / aladdin bug 455. - val qualSym = qual.tpe.typeSymbol - if (qualSym == defn.ArrayClass) { - // For invocations like `Array(1).hashCode` or `.wait()`, use Object as receiver - // in the bytecode. Using the array descriptor (like we do for clone above) seems - // to work as well, but it seems safer not to change this. Javac also uses Object. - // Note that array apply/update/length are handled by isPrimitive (above). - assert(sym.owner == defn.ObjectClass, s"unexpected array call: $app") - defn.ObjectClass - } else qualSym - } - generatedType = genCallMethod(sym, invokeStyle, app.span, receiverClass) - } - } - } - - generatedType - } // end of genApply() - - private def genArrayValue(av: tpd.JavaSeqLiteral): BType = { - val ArrayValue(tpt, elems) = av: @unchecked - - lineNumber(av) - genArray(elems, tpt) - } - - private def genArray(elems: List[Tree], elemType: Type): BType = { - val elmKind = toTypeKind(elemType) - val generatedType = ArrayBType(elmKind) - - bc iconst elems.length - bc newarray elmKind - - var i = 0 - var rest = elems - while (!rest.isEmpty) { - bc dup generatedType - bc iconst i - genLoad(rest.head, elmKind) - bc astore elmKind - rest = rest.tail - i = i + 1 - } - - generatedType - } - - /* A Match node contains one or more case clauses, each case clause lists one or more - * Int/String values to use as keys, and a code block. The exception is the "default" case - * clause which doesn't list any key (there is exactly one of these per match). - */ - private def genMatchTo(tree: Match, expectedType: BType, dest: LoadDestination): BType = tree match { - case Match(selector, cases) => - lineNumber(tree) - - val (generatedType, postMatch, postMatchDest) = - if dest == LoadDestination.FallThrough then - val postMatch = new asm.Label - (tpeTK(tree), postMatch, LoadDestination.Jump(postMatch)) - else - (expectedType, null, dest) - - // Only two possible selector types exist in `Match` trees at this point: Int and String - if (tpeTK(selector) == INT) { - - /* On a first pass over the case clauses, we flatten the keys and their - * targets (the latter represented with asm.Labels). That representation - * allows JCodeMethodV to emit a lookupswitch or a tableswitch. - * - * On a second pass, we emit the switch blocks, one for each different target. - */ - - var flatKeys: List[Int] = Nil - var targets: List[asm.Label] = Nil - var default: asm.Label = null - var switchBlocks: List[(asm.Label, Tree)] = Nil - - genLoad(selector, INT) - - // collect switch blocks and their keys, but don't emit yet any switch-block. 
- for (caze @ CaseDef(pat, guard, body) <- cases) { - assert(guard == tpd.EmptyTree, guard) - val switchBlockPoint = new asm.Label - switchBlocks ::= (switchBlockPoint, body) - pat match { - case Literal(value) => - flatKeys ::= value.intValue - targets ::= switchBlockPoint - case Ident(nme.WILDCARD) => - assert(default == null, s"multiple default targets in a Match node, at ${tree.span}") - default = switchBlockPoint - case Alternative(alts) => - alts foreach { - case Literal(value) => - flatKeys ::= value.intValue - targets ::= switchBlockPoint - case _ => - abort(s"Invalid alternative in alternative pattern in Match node: $tree at: ${tree.span}") - } - case _ => - abort(s"Invalid pattern in Match node: $tree at: ${tree.span}") - } - } - - bc.emitSWITCH(mkArrayReverse(flatKeys), mkArrayL(targets.reverse), default, MIN_SWITCH_DENSITY) - - // emit switch-blocks. - for (sb <- switchBlocks.reverse) { - val (caseLabel, caseBody) = sb - markProgramPoint(caseLabel) - genLoadTo(caseBody, generatedType, postMatchDest) - } - } else { - - /* Since the JVM doesn't have a way to switch on a string, we switch - * on the `hashCode` of the string then do an `equals` check (with a - * possible second set of jumps if blocks can be reach from multiple - * string alternatives). - * - * This mirrors the way that Java compiles `switch` on Strings. - */ - - var default: asm.Label = null - var indirectBlocks: List[(asm.Label, Tree)] = Nil - - - // Cases grouped by their hashCode - val casesByHash = SortedMap.empty[Int, List[(String, Either[asm.Label, Tree])]] - var caseFallback: Tree = null - - for (caze @ CaseDef(pat, guard, body) <- cases) { - assert(guard == tpd.EmptyTree, guard) - pat match { - case Literal(value) => - val strValue = value.stringValue - casesByHash.updateWith(strValue.##) { existingCasesOpt => - val newCase = (strValue, Right(body)) - Some(newCase :: existingCasesOpt.getOrElse(Nil)) - } - case Ident(nme.WILDCARD) => - assert(default == null, s"multiple default targets in a Match node, at ${tree.span}") - default = new asm.Label - indirectBlocks ::= (default, body) - case Alternative(alts) => - // We need an extra basic block since multiple strings can lead to this code - val indirectCaseGroupLabel = new asm.Label - indirectBlocks ::= (indirectCaseGroupLabel, body) - alts foreach { - case Literal(value) => - val strValue = value.stringValue - casesByHash.updateWith(strValue.##) { existingCasesOpt => - val newCase = (strValue, Left(indirectCaseGroupLabel)) - Some(newCase :: existingCasesOpt.getOrElse(Nil)) - } - case _ => - abort(s"Invalid alternative in alternative pattern in Match node: $tree at: ${tree.span}") - } - - case _ => - abort(s"Invalid pattern in Match node: $tree at: ${tree.span}") - } - } - - // Organize the hashCode options into switch cases - var flatKeys: List[Int] = Nil - var targets: List[asm.Label] = Nil - var hashBlocks: List[(asm.Label, List[(String, Either[asm.Label, Tree])])] = Nil - for ((hashValue, hashCases) <- casesByHash) { - val switchBlockPoint = new asm.Label - hashBlocks ::= (switchBlockPoint, hashCases) - flatKeys ::= hashValue - targets ::= switchBlockPoint - } - - // Push the hashCode of the string (or `0` it is `null`) onto the stack and switch on it - genLoadIfTo( - If( - tree.selector.select(defn.Any_==).appliedTo(nullLiteral), - Literal(Constant(0)), - tree.selector.select(defn.Any_hashCode).appliedToNone - ), - INT, - LoadDestination.FallThrough - ) - bc.emitSWITCH(mkArrayReverse(flatKeys), mkArrayL(targets.reverse), default, MIN_SWITCH_DENSITY) - - 
// emit blocks for each hash case - for ((hashLabel, caseAlternatives) <- hashBlocks.reverse) { - markProgramPoint(hashLabel) - for ((caseString, indirectLblOrBody) <- caseAlternatives) { - val comparison = if (caseString == null) defn.Any_== else defn.Any_equals - val condp = Literal(Constant(caseString)).select(defn.Any_==).appliedTo(tree.selector) - val keepGoing = new asm.Label - indirectLblOrBody match { - case Left(jump) => - genCond(condp, jump, keepGoing, targetIfNoJump = keepGoing) - - case Right(caseBody) => - val thisCaseMatches = new asm.Label - genCond(condp, thisCaseMatches, keepGoing, targetIfNoJump = thisCaseMatches) - markProgramPoint(thisCaseMatches) - genLoadTo(caseBody, generatedType, postMatchDest) - } - markProgramPoint(keepGoing) - } - bc goTo default - } - - // emit blocks for common patterns - for ((caseLabel, caseBody) <- indirectBlocks.reverse) { - markProgramPoint(caseLabel) - genLoadTo(caseBody, generatedType, postMatchDest) - } - } - - if postMatch != null then - markProgramPoint(postMatch) - generatedType - } - - def genBlockTo(tree: Block, expectedType: BType, dest: LoadDestination): Unit = tree match { - case Block(stats, expr) => - - val savedScope = varsInScope - varsInScope = Nil - stats foreach genStat - genLoadTo(expr, expectedType, dest) - emitLocalVarScopes() - varsInScope = savedScope - } - - /** Add entries to the `LocalVariableTable` JVM attribute for all the vars in - * `varsInScope`, ending at the current program point. - */ - def emitLocalVarScopes(): Unit = - if (emitVars) { - val end = currProgramPoint() - for ((sym, start) <- varsInScope.reverse) { - emitLocalVarScope(sym, start, end) - } - } - end emitLocalVarScopes - - def adapt(from: BType, to: BType): Unit = { - if (!from.conformsTo(to)) { - to match { - case UNIT => bc drop from - case _ => bc.emitT2T(from, to) - } - } else if (from.isNothingType) { - /* There are two possibilities for from.isNothingType: emitting a "throw e" expressions and - * loading a (phantom) value of type Nothing. - * - * The Nothing type in Scala's type system does not exist in the JVM. In bytecode, Nothing - * is mapped to scala.runtime.Nothing$. To the JVM, a call to Predef.??? looks like it would - * return an object of type Nothing$. We need to do something with that phantom object on - * the stack. "Phantom" because it never exists: such methods always throw, but the JVM does - * not know that. - * - * Note: The two verifiers (old: type inference, new: type checking) have different - * requirements. Very briefly: - * - * Old (http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.10.2.1): at - * each program point, no matter what branches were taken to get there - * - Stack is same size and has same typed values - * - Local and stack values need to have consistent types - * - In practice, the old verifier seems to ignore unreachable code and accept any - * instructions after an ATHROW. For example, there can be another ATHROW (without - * loading another throwable first). - * - * New (http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.10.1) - * - Requires consistent stack map frames. GenBCode generates stack frames if -target:jvm-1.6 - * or higher. - * - In practice: the ASM library computes stack map frames for us (ClassWriter). Emitting - * correct frames after an ATHROW is probably complex, so ASM uses the following strategy: - * - Every time when generating an ATHROW, a new basic block is started. 
- * - During classfile writing, such basic blocks are found to be dead: no branches go there - * - Eliminating dead code would probably require complex shifts in the output byte buffer - * - But there's an easy solution: replace all code in the dead block with with - * `nop; nop; ... nop; athrow`, making sure the bytecode size stays the same - * - The corresponding stack frame can be easily generated: on entering a dead the block, - * the frame requires a single Throwable on the stack. - * - Since there are no branches to the dead block, the frame requirements are never violated. - * - * To summarize the above: it does matter what we emit after an ATHROW. - * - * NOW: if we end up here because we emitted a load of a (phantom) value of type Nothing$, - * there was no ATHROW emitted. So, we have to make the verifier happy and do something - * with that value. Since Nothing$ extends Throwable, the easiest is to just emit an ATHROW. - * - * If we ended up here because we generated a "throw e" expression, we know the last - * emitted instruction was an ATHROW. As explained above, it is OK to emit a second ATHROW, - * the verifiers will be happy. - */ - if (lastInsn.getOpcode != asm.Opcodes.ATHROW) - emit(asm.Opcodes.ATHROW) - } else if (from.isNullType) { - /* After loading an expression of type `scala.runtime.Null$`, introduce POP; ACONST_NULL. - * This is required to pass the verifier: in Scala's type system, Null conforms to any - * reference type. In bytecode, the type Null is represented by scala.runtime.Null$, which - * is not a subtype of all reference types. Example: - * - * def nl: Null = null // in bytecode, nl has return type scala.runtime.Null$ - * val a: String = nl // OK for Scala but not for the JVM, scala.runtime.Null$ does not conform to String - * - * In order to fix the above problem, the value returned by nl is dropped and ACONST_NULL is - * inserted instead - after all, an expression of type scala.runtime.Null$ can only be null. - */ - if (lastInsn.getOpcode != asm.Opcodes.ACONST_NULL) { - bc drop from - emit(asm.Opcodes.ACONST_NULL) - } - } - else (from, to) match { - case (BYTE, LONG) | (SHORT, LONG) | (CHAR, LONG) | (INT, LONG) => bc.emitT2T(INT, LONG) - case _ => () - } - } - - /* Emit code to Load the qualifier of `tree` on top of the stack. 
*/ - def genLoadQualifier(tree: Tree): Unit = { - lineNumber(tree) - tree match { - case DesugaredSelect(qualifier, _) => genLoad(qualifier) - case t: Ident => // dotty specific - cachedDesugarIdent(t) match { - case Some(sel) => genLoadQualifier(sel) - case None => - assert(t.symbol.owner == this.claszSymbol) - } - case _ => abort(s"Unknown qualifier $tree") - } - } - - def genLoadArguments(args: List[Tree], btpes: List[BType]): Unit = - args match - case arg :: args1 => - btpes match - case btpe :: btpes1 => - genLoad(arg, btpe) - genLoadArguments(args1, btpes1) - case _ => - case _ => - - def genLoadModule(tree: Tree): BType = { - val module = ( - if (!tree.symbol.is(PackageClass)) tree.symbol - else tree.symbol.info.member(nme.PACKAGE).symbol match { - case NoSymbol => abort(s"SI-5604: Cannot use package as value: $tree") - case s => abort(s"SI-5604: found package class where package object expected: $tree") - } - ) - lineNumber(tree) - genLoadModule(module) - symInfoTK(module) - } - - def genLoadModule(module: Symbol): Unit = { - def inStaticMethod = methSymbol != null && methSymbol.isStaticMember - if (claszSymbol == module.moduleClass && jMethodName != "readResolve" && !inStaticMethod) { - mnode.visitVarInsn(asm.Opcodes.ALOAD, 0) - } else { - val mbt = symInfoTK(module).asClassBType - mnode.visitFieldInsn( - asm.Opcodes.GETSTATIC, - mbt.internalName /* + "$" */ , - str.MODULE_INSTANCE_FIELD, - mbt.descriptor // for nostalgics: toTypeKind(module.tpe).descriptor - ) - } - } - - def genConversion(from: BType, to: BType, cast: Boolean): Unit = { - if (cast) { bc.emitT2T(from, to) } - else { - bc drop from - bc boolconst (from == to) - } - } - - def genCast(to: RefBType, cast: Boolean): Unit = { - if (cast) { bc checkCast to } - else { bc isInstance to } - } - - /* Is the given symbol a primitive operation? */ - def isPrimitive(fun: Tree): Boolean = { - primitives.isPrimitive(fun) - } - - /* Generate coercion denoted by "code" */ - def genCoercion(code: Int): Unit = { - import ScalaPrimitivesOps._ - (code: @switch) match { - case B2B | S2S | C2C | I2I | L2L | F2F | D2D => () - case _ => - val from = coercionFrom(code) - val to = coercionTo(code) - bc.emitT2T(from, to) - } - } - - /* Generate string concatenation - * - * On JDK 8: create and append using `StringBuilder` - * On JDK 9+: use `invokedynamic` with `StringConcatFactory` - */ - def genStringConcat(tree: Tree): BType = { - lineNumber(tree) - liftStringConcat(tree) match { - // Optimization for expressions of the form "" + x - case List(Literal(Constant("")), arg) => - genLoad(arg, ObjectRef) - genCallMethod(defn.String_valueOf_Object, InvokeStyle.Static) - - case concatenations => - val concatArguments = concatenations.view - .filter { - case Literal(Constant("")) => false // empty strings are no-ops in concatenation - case _ => true - } - .map { - case Apply(boxOp, value :: Nil) if Erasure.Boxing.isBox(boxOp.symbol) && boxOp.symbol.denot.owner != defn.UnitModuleClass => - // Eliminate boxing of primitive values. Boxing is introduced by erasure because - // there's only a single synthetic `+` method "added" to the string class. 
- value - case other => other - } - .toList - - // `StringConcatFactory` only got added in JDK 9, so use `StringBuilder` for lower - if (classfileVersion < asm.Opcodes.V9) { - - // Estimate capacity needed for the string builder - val approxBuilderSize = concatArguments.view.map { - case Literal(Constant(s: String)) => s.length - case Literal(c @ Constant(_)) if c.isNonUnitAnyVal => String.valueOf(c).length - case _ => 0 - }.sum - bc.genNewStringBuilder(approxBuilderSize) - - for (elem <- concatArguments) { - val elemType = tpeTK(elem) - genLoad(elem, elemType) - bc.genStringBuilderAppend(elemType) - } - bc.genStringBuilderEnd - } else { - - /* `StringConcatFactory#makeConcatWithConstants` accepts max 200 argument slots. If - * the string concatenation is longer (unlikely), we spill into multiple calls - */ - val MaxIndySlots = 200 - val TagArg = '\u0001' // indicates a hole (in the recipe string) for an argument - val TagConst = '\u0002' // indicates a hole (in the recipe string) for a constant - - val recipe = new StringBuilder() - val argTypes = Seq.newBuilder[asm.Type] - val constVals = Seq.newBuilder[String] - var totalArgSlots = 0 - var countConcats = 1 // ie. 1 + how many times we spilled - - for (elem <- concatArguments) { - val tpe = tpeTK(elem) - val elemSlots = tpe.size - - // Unlikely spill case - if (totalArgSlots + elemSlots >= MaxIndySlots) { - bc.genIndyStringConcat(recipe.toString, argTypes.result(), constVals.result()) - countConcats += 1 - totalArgSlots = 0 - recipe.setLength(0) - argTypes.clear() - constVals.clear() - } - - elem match { - case Literal(Constant(s: String)) => - if (s.contains(TagArg) || s.contains(TagConst)) { - totalArgSlots += elemSlots - recipe.append(TagConst) - constVals += s - } else { - recipe.append(s) - } - - case other => - totalArgSlots += elemSlots - recipe.append(TagArg) - val tpe = tpeTK(elem) - argTypes += tpe.toASMType - genLoad(elem, tpe) - } - } - bc.genIndyStringConcat(recipe.toString, argTypes.result(), constVals.result()) - - // If we spilled, generate one final concat - if (countConcats > 1) { - bc.genIndyStringConcat( - TagArg.toString * countConcats, - Seq.fill(countConcats)(StringRef.toASMType), - Seq.empty - ) - } - } - } - StringRef - } - - /** - * Generate a method invocation. If `specificReceiver != null`, it is used as receiver in the - * invocation instruction, otherwise `method.owner`. A specific receiver class is needed to - * prevent an IllegalAccessError, (aladdin bug 455). - */ - def genCallMethod(method: Symbol, style: InvokeStyle, pos: Span = NoSpan, specificReceiver: Symbol = null): BType = { - val methodOwner = method.owner - - // the class used in the invocation's method descriptor in the classfile - val receiverClass = { - if (specificReceiver != null) - assert(style.isVirtual || specificReceiver == methodOwner, s"specificReceiver can only be specified for virtual calls. $method - $specificReceiver") - - val useSpecificReceiver = specificReceiver != null && !defn.isBottomClass(specificReceiver) && !method.isScalaStatic - val receiver = if (useSpecificReceiver) specificReceiver else methodOwner - - // workaround for a JVM bug: https://bugs.openjdk.java.net/browse/JDK-8154587 - // when an interface method overrides a member of Object (note that all interfaces implicitly - // have superclass Object), the receiver needs to be the interface declaring the override (and - // not a sub-interface that inherits it). 
example: - // trait T { override def clone(): Object = "" } - // trait U extends T - // class C extends U - // class D { def f(u: U) = u.clone() } - // The invocation `u.clone()` needs `T` as a receiver: - // - using Object is illegal, as Object.clone is protected - // - using U results in a `NoSuchMethodError: U.clone. This is the JVM bug. - // Note that a mixin forwarder is generated, so the correct method is executed in the end: - // class C { override def clone(): Object = super[T].clone() } - val isTraitMethodOverridingObjectMember = { - receiver != methodOwner && // fast path - the boolean is used to pick either of these two, if they are the same it does not matter - style.isVirtual && - isEmittedInterface(receiver) && - defn.ObjectType.decl(method.name).symbol.exists && { // fast path - compute overrideChain on the next line only if necessary - val syms = method.allOverriddenSymbols.toList - !syms.isEmpty && syms.last.owner == defn.ObjectClass - } - } - if (isTraitMethodOverridingObjectMember) methodOwner else receiver - } - - receiverClass.info // ensure types the type is up to date; erasure may add lateINTERFACE to traits - val receiverName = internalName(receiverClass) - - val jname = method.javaSimpleName - val bmType = asmMethodType(method) - val mdescr = bmType.descriptor - - val isInterface = isEmittedInterface(receiverClass) - import InvokeStyle._ - if (style == Super) { - if (isInterface && !method.is(JavaDefined)) { - val args = new Array[BType](bmType.argumentTypes.length + 1) - val ownerBType = toTypeKind(method.owner.info) - bmType.argumentTypes.copyToArray(args, 1) - val staticDesc = MethodBType(ownerBType :: bmType.argumentTypes, bmType.returnType).descriptor - val staticName = traitSuperAccessorName(method) - bc.invokestatic(receiverName, staticName, staticDesc, isInterface) - } else { - bc.invokespecial(receiverName, jname, mdescr, isInterface) - } - } else { - val opc = style match { - case Static => Opcodes.INVOKESTATIC - case Special => Opcodes.INVOKESPECIAL - case Virtual => if (isInterface) Opcodes.INVOKEINTERFACE else Opcodes.INVOKEVIRTUAL - } - bc.emitInvoke(opc, receiverName, jname, mdescr, isInterface) - } - - bmType.returnType - } // end of genCallMethod() - - /* Generate the scala ## method. */ - def genScalaHash(tree: Tree): BType = { - genLoad(tree, ObjectRef) - genCallMethod(NoSymbol, InvokeStyle.Static) // used to dispatch ## on primitives to ScalaRuntime.hash. Should be implemented by a miniphase - } - - /* - * Returns a list of trees that each should be concatenated, from left to right. - * It turns a chained call like "a".+("b").+("c") into a list of arguments. - */ - def liftStringConcat(tree: Tree): List[Tree] = tree match { - case tree @ Apply(fun @ DesugaredSelect(larg, method), rarg) => - if (isPrimitive(fun) && - primitives.getPrimitive(tree, larg.tpe) == ScalaPrimitivesOps.CONCAT) - liftStringConcat(larg) ::: rarg - else - tree :: Nil - case _ => - tree :: Nil - } - - /* Emit code to compare the two top-most stack values using the 'op' operator. 
*/ - private def genCJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType, targetIfNoJump: asm.Label, negated: Boolean = false): Unit = { - if (targetIfNoJump == success) genCJUMP(failure, success, op.negate(), tk, targetIfNoJump, negated = !negated) - else { - if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT - bc.emitIF_ICMP(op, success) - } else if (tk.isRef) { // REFERENCE(_) | ARRAY(_) - bc.emitIF_ACMP(op, success) - } else { - import Primitives._ - def useCmpG = if (negated) op == GT || op == GE else op == LT || op == LE - (tk: @unchecked) match { - case LONG => emit(asm.Opcodes.LCMP) - case FLOAT => emit(if (useCmpG) asm.Opcodes.FCMPG else asm.Opcodes.FCMPL) - case DOUBLE => emit(if (useCmpG) asm.Opcodes.DCMPG else asm.Opcodes.DCMPL) - } - bc.emitIF(op, success) - } - if (targetIfNoJump != failure) bc goTo failure - } - } - - /* Emits code to compare (and consume) stack-top and zero using the 'op' operator */ - private def genCZJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType, targetIfNoJump: asm.Label, negated: Boolean = false): Unit = { - import Primitives._ - if (targetIfNoJump == success) genCZJUMP(failure, success, op.negate(), tk, targetIfNoJump, negated = !negated) - else { - if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT - bc.emitIF(op, success) - } else if (tk.isRef) { // REFERENCE(_) | ARRAY(_) - (op: @unchecked) match { // references are only compared with EQ and NE - case EQ => bc emitIFNULL success - case NE => bc emitIFNONNULL success - } - } else { - def useCmpG = if (negated) op == GT || op == GE else op == LT || op == LE - (tk: @unchecked) match { - case LONG => - emit(asm.Opcodes.LCONST_0) - emit(asm.Opcodes.LCMP) - case FLOAT => - emit(asm.Opcodes.FCONST_0) - emit(if (useCmpG) asm.Opcodes.FCMPG else asm.Opcodes.FCMPL) - case DOUBLE => - emit(asm.Opcodes.DCONST_0) - emit(if (useCmpG) asm.Opcodes.DCMPG else asm.Opcodes.DCMPL) - } - bc.emitIF(op, success) - } - if (targetIfNoJump != failure) bc goTo failure - } - } - - def testOpForPrimitive(primitiveCode: Int) = (primitiveCode: @switch) match { - case ScalaPrimitivesOps.ID => Primitives.EQ - case ScalaPrimitivesOps.NI => Primitives.NE - case ScalaPrimitivesOps.EQ => Primitives.EQ - case ScalaPrimitivesOps.NE => Primitives.NE - case ScalaPrimitivesOps.LT => Primitives.LT - case ScalaPrimitivesOps.LE => Primitives.LE - case ScalaPrimitivesOps.GT => Primitives.GT - case ScalaPrimitivesOps.GE => Primitives.GE - } - - /* - * Generate code for conditional expressions. - * The jump targets success/failure of the test are `then-target` and `else-target` resp. 
- */ - private def genCond(tree: Tree, success: asm.Label, failure: asm.Label, targetIfNoJump: asm.Label): Unit = { - - def genComparisonOp(l: Tree, r: Tree, code: Int): Unit = { - val op = testOpForPrimitive(code) - def isNull(t: Tree): Boolean = t match { - case Literal(Constant(null)) => true - case _ => false - } - def ifOneIsNull(l: Tree, r: Tree): Tree = if (isNull(l)) r else if (isNull(r)) l else null - val nonNullSide = if (ScalaPrimitivesOps.isReferenceEqualityOp(code)) ifOneIsNull(l, r) else null - if (nonNullSide != null) { - // special-case reference (in)equality test for null (null eq x, x eq null) - genLoad(nonNullSide, ObjectRef) - genCZJUMP(success, failure, op, ObjectRef, targetIfNoJump) - } else { - val tk = tpeTK(l).maxType(tpeTK(r)) - genLoad(l, tk) - genLoad(r, tk) - genCJUMP(success, failure, op, tk, targetIfNoJump) - } - } - - def loadAndTestBoolean() = { - genLoad(tree, BOOL) - genCZJUMP(success, failure, Primitives.NE, BOOL, targetIfNoJump) - } - - lineNumber(tree) - tree match { - - case tree @ Apply(fun, args) if primitives.isPrimitive(fun.symbol) => - import ScalaPrimitivesOps.{ ZNOT, ZAND, ZOR, EQ } - - // lhs and rhs of test - lazy val DesugaredSelect(lhs, _) = fun: @unchecked - val rhs = if (args.isEmpty) tpd.EmptyTree else args.head // args.isEmpty only for ZNOT - - def genZandOrZor(and: Boolean): Unit = { - // reaching "keepGoing" indicates the rhs should be evaluated too (ie not short-circuited). - val keepGoing = new asm.Label - - if (and) genCond(lhs, keepGoing, failure, targetIfNoJump = keepGoing) - else genCond(lhs, success, keepGoing, targetIfNoJump = keepGoing) - - markProgramPoint(keepGoing) - genCond(rhs, success, failure, targetIfNoJump) - } - - primitives.getPrimitive(fun.symbol) match { - case ZNOT => genCond(lhs, failure, success, targetIfNoJump) - case ZAND => genZandOrZor(and = true) - case ZOR => genZandOrZor(and = false) - case code => - if (ScalaPrimitivesOps.isUniversalEqualityOp(code) && tpeTK(lhs).isClass) { - // rewrite `==` to null tests and `equals`. not needed for arrays (`equals` is reference equality). - if (code == EQ) genEqEqPrimitive(lhs, rhs, success, failure, targetIfNoJump) - else genEqEqPrimitive(lhs, rhs, failure, success, targetIfNoJump) - } else if (ScalaPrimitivesOps.isComparisonOp(code)) { - genComparisonOp(lhs, rhs, code) - } else - loadAndTestBoolean() - } - - case Block(stats, expr) => - /* Push the decision further down the `expr`. - * This is particularly effective for the shape of do..while loops. - */ - val savedScope = varsInScope - varsInScope = Nil - stats foreach genStat - genCond(expr, success, failure, targetIfNoJump) - emitLocalVarScopes() - varsInScope = savedScope - - case If(condp, thenp, elsep) => - val innerSuccess = new asm.Label - val innerFailure = new asm.Label - genCond(condp, innerSuccess, innerFailure, targetIfNoJump = innerSuccess) - markProgramPoint(innerSuccess) - genCond(thenp, success, failure, targetIfNoJump = innerFailure) - markProgramPoint(innerFailure) - genCond(elsep, success, failure, targetIfNoJump) - - case _ => loadAndTestBoolean() - } - - } // end of genCond() - - /* - * Generate the "==" code for object references. 
It is equivalent of - * if (l eq null) r eq null else l.equals(r); - * - * @param l left-hand-side of the '==' - * @param r right-hand-side of the '==' - */ - def genEqEqPrimitive(l: Tree, r: Tree, success: asm.Label, failure: asm.Label, targetIfNoJump: asm.Label): Unit = { - - /* True if the equality comparison is between values that require the use of the rich equality - * comparator (scala.runtime.Comparator.equals). This is the case when either side of the - * comparison might have a run-time type subtype of java.lang.Number or java.lang.Character. - * When it is statically known that both sides are equal and subtypes of Number of Character, - * not using the rich equality is possible (their own equals method will do ok.) - */ - val mustUseAnyComparator: Boolean = { - val areSameFinals = l.tpe.typeSymbol.is(Final) && r.tpe.typeSymbol.is(Final) && (l.tpe =:= r.tpe) - // todo: remove - def isMaybeBoxed(sym: Symbol): Boolean = { - (sym == defn.ObjectClass) || - (sym == defn.JavaSerializableClass) || - (sym == defn.ComparableClass) || - (sym derivesFrom defn.BoxedNumberClass) || - (sym derivesFrom defn.BoxedCharClass) || - (sym derivesFrom defn.BoxedBooleanClass) - } - !areSameFinals && isMaybeBoxed(l.tpe.typeSymbol) && isMaybeBoxed(r.tpe.typeSymbol) - } - def isNull(t: Tree): Boolean = t match { - case Literal(Constant(null)) => true - case _ => false - } - def isNonNullExpr(t: Tree): Boolean = t.isInstanceOf[Literal] || ((t.symbol ne null) && t.symbol.is(Module)) - - if (mustUseAnyComparator) { - val equalsMethod: Symbol = { - if (l.tpe <:< defn.BoxedNumberClass.info) { - if (r.tpe <:< defn.BoxedNumberClass.info) defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumNum) - else if (r.tpe <:< defn.BoxedCharClass.info) defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumChar) - else defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumObject) - } else defn.BoxesRunTimeModule_externalEquals - } - - genLoad(l, ObjectRef) - genLoad(r, ObjectRef) - genCallMethod(equalsMethod, InvokeStyle.Static) - genCZJUMP(success, failure, Primitives.NE, BOOL, targetIfNoJump) - } - else { - if (isNull(l)) { - // null == expr -> expr eq null - genLoad(r, ObjectRef) - genCZJUMP(success, failure, Primitives.EQ, ObjectRef, targetIfNoJump) - } else if (isNull(r)) { - // expr == null -> expr eq null - genLoad(l, ObjectRef) - genCZJUMP(success, failure, Primitives.EQ, ObjectRef, targetIfNoJump) - } else if (isNonNullExpr(l)) { - // SI-7852 Avoid null check if L is statically non-null. 
- genLoad(l, ObjectRef) - genLoad(r, ObjectRef) - genCallMethod(defn.Any_equals, InvokeStyle.Virtual) - genCZJUMP(success, failure, Primitives.NE, BOOL, targetIfNoJump) - } else { - // l == r -> if (l eq null) r eq null else l.equals(r) - val eqEqTempLocal = locals.makeLocal(ObjectRef, nme.EQEQ_LOCAL_VAR.mangledString, defn.ObjectType, r.span) - val lNull = new asm.Label - val lNonNull = new asm.Label - - genLoad(l, ObjectRef) - genLoad(r, ObjectRef) - locals.store(eqEqTempLocal) - bc dup ObjectRef - genCZJUMP(lNull, lNonNull, Primitives.EQ, ObjectRef, targetIfNoJump = lNull) - - markProgramPoint(lNull) - bc drop ObjectRef - locals.load(eqEqTempLocal) - genCZJUMP(success, failure, Primitives.EQ, ObjectRef, targetIfNoJump = lNonNull) - - markProgramPoint(lNonNull) - locals.load(eqEqTempLocal) - genCallMethod(defn.Any_equals, InvokeStyle.Virtual) - genCZJUMP(success, failure, Primitives.NE, BOOL, targetIfNoJump) - } - } - } - - - def genSynchronized(tree: Apply, expectedType: BType): BType - def genLoadTry(tree: Try): BType - - def genInvokeDynamicLambda(ctor: Symbol, lambdaTarget: Symbol, environmentSize: Int, functionalInterface: Symbol): BType = { - import java.lang.invoke.LambdaMetafactory.{FLAG_BRIDGES, FLAG_SERIALIZABLE} - - report.debuglog(s"Using invokedynamic rather than `new ${ctor.owner}`") - val generatedType = classBTypeFromSymbol(functionalInterface) - // Lambdas should be serializable if they implement a SAM that extends Serializable or if they - // implement a scala.Function* class. - val isSerializable = functionalInterface.isSerializable || defn.isFunctionClass(functionalInterface) - val isInterface = isEmittedInterface(lambdaTarget.owner) - val invokeStyle = - if (lambdaTarget.isStaticMember) asm.Opcodes.H_INVOKESTATIC - else if (lambdaTarget.is(Private) || lambdaTarget.isClassConstructor) asm.Opcodes.H_INVOKESPECIAL - else if (isInterface) asm.Opcodes.H_INVOKEINTERFACE - else asm.Opcodes.H_INVOKEVIRTUAL - - val targetHandle = - new asm.Handle(invokeStyle, - classBTypeFromSymbol(lambdaTarget.owner).internalName, - lambdaTarget.javaSimpleName, - asmMethodType(lambdaTarget).descriptor, - /* itf = */ isInterface) - - val (a,b) = lambdaTarget.info.firstParamTypes.splitAt(environmentSize) - var (capturedParamsTypes, lambdaParamTypes) = (a,b) - - if (invokeStyle != asm.Opcodes.H_INVOKESTATIC) capturedParamsTypes = lambdaTarget.owner.info :: capturedParamsTypes - - // Requires https://github.com/scala/scala-java8-compat on the runtime classpath - val returnUnit = lambdaTarget.info.resultType.typeSymbol == defn.UnitClass - val functionalInterfaceDesc: String = generatedType.descriptor - val desc = capturedParamsTypes.map(tpe => toTypeKind(tpe)).mkString(("("), "", ")") + functionalInterfaceDesc - // TODO specialization - val instantiatedMethodType = new MethodBType(lambdaParamTypes.map(p => toTypeKind(p)), toTypeKind(lambdaTarget.info.resultType)).toASMType - - val samMethod = atPhase(erasurePhase) { - val samMethods = toDenot(functionalInterface).info.possibleSamMethods.toList - samMethods match { - case x :: Nil => x.symbol - case Nil => abort(s"${functionalInterface.show} is not a functional interface. It doesn't have abstract methods") - case xs => abort(s"${functionalInterface.show} is not a functional interface. 
" + - s"It has the following abstract methods: ${xs.map(_.name).mkString(", ")}") - } - } - - val methodName = samMethod.javaSimpleName - val samMethodType = asmMethodType(samMethod).toASMType - // scala/bug#10334: make sure that a lambda object for `T => U` has a method `apply(T)U`, not only the `(Object)Object` - // version. Using the lambda a structural type `{def apply(t: T): U}` causes a reflective lookup for this method. - val needsGenericBridge = samMethodType != instantiatedMethodType - val bridgeMethods = atPhase(erasurePhase){ - samMethod.allOverriddenSymbols.toList - } - val overriddenMethodTypes = bridgeMethods.map(b => asmMethodType(b).toASMType) - - // any methods which `samMethod` overrides need bridges made for them - // this is done automatically during erasure for classes we generate, but LMF needs to have them explicitly mentioned - // so we have to compute them at this relatively late point. - val bridgeTypes = ( - if (needsGenericBridge) - instantiatedMethodType +: overriddenMethodTypes - else - overriddenMethodTypes - ).distinct.filterNot(_ == samMethodType) - - val needsBridges = bridgeTypes.nonEmpty - - def flagIf(b: Boolean, flag: Int): Int = if (b) flag else 0 - val flags = flagIf(isSerializable, FLAG_SERIALIZABLE) | flagIf(needsBridges, FLAG_BRIDGES) - - val bsmArgs0 = Seq(samMethodType, targetHandle, instantiatedMethodType) - val bsmArgs1 = if (flags != 0) Seq(Int.box(flags)) else Seq.empty - val bsmArgs2 = if needsBridges then bridgeTypes.length +: bridgeTypes else Seq.empty - - val bsmArgs = bsmArgs0 ++ bsmArgs1 ++ bsmArgs2 - - val metafactory = - if (flags != 0) - jliLambdaMetaFactoryAltMetafactoryHandle // altMetafactory required to be able to pass the flags and additional arguments if needed - else - jliLambdaMetaFactoryMetafactoryHandle - - bc.jmethod.visitInvokeDynamicInsn(methodName, desc, metafactory, bsmArgs: _*) - - generatedType - } - } - - /** Does this symbol actually correspond to an interface that will be emitted? - * In the backend, this should be preferred over `isInterface` because it - * also returns true for the symbols of the fake companion objects we - * create for Java-defined classes as well as for Java annotations - * which we represent as classes. 
- */ - private def isEmittedInterface(sym: Symbol): Boolean = sym.isInterface || - sym.is(JavaDefined) && (toDenot(sym).isAnnotation || sym.is(ModuleClass) && (sym.companionClass.is(PureInterface)) || sym.companionClass.is(Trait)) - - -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/BCodeHelpers.scala b/tests/pos-with-compiler-cc/backend/jvm/BCodeHelpers.scala deleted file mode 100644 index 5ad6a99f6055..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/BCodeHelpers.scala +++ /dev/null @@ -1,960 +0,0 @@ -package dotty.tools -package backend -package jvm - -import scala.language.unsafeNulls - -import scala.annotation.threadUnsafe -import scala.tools.asm -import scala.tools.asm.AnnotationVisitor -import scala.tools.asm.ClassWriter -import scala.collection.mutable - -import dotty.tools.dotc.CompilationUnit -import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.ast.Trees -import dotty.tools.dotc.core.Annotations._ -import dotty.tools.dotc.core.Constants._ -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Phases._ -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.Names.Name -import dotty.tools.dotc.core.NameKinds.ExpandedName -import dotty.tools.dotc.core.Signature -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.core.NameKinds -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Types -import dotty.tools.dotc.core.Types._ -import dotty.tools.dotc.core.TypeErasure -import dotty.tools.dotc.transform.GenericSignatures -import dotty.tools.dotc.transform.ElimErasedValueType -import dotty.tools.io.AbstractFile -import dotty.tools.dotc.report - -import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions - -/* - * Traits encapsulating functionality to convert Scala AST Trees into ASM ClassNodes. - * - * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded - * @version 1.0 - * - */ -trait BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { - // for some reason singleton types aren't allowed in constructor calls. 
will need several casts in code to enforce - - //import global._ - //import bTypes._ - //import coreBTypes._ - import bTypes._ - import tpd._ - import coreBTypes._ - import int.{_, given} - import DottyBackendInterface._ - - def ScalaATTRName: String = "Scala" - def ScalaSignatureATTRName: String = "ScalaSig" - - @threadUnsafe lazy val AnnotationRetentionAttr: ClassSymbol = requiredClass("java.lang.annotation.Retention") - @threadUnsafe lazy val AnnotationRetentionSourceAttr: TermSymbol = requiredClass("java.lang.annotation.RetentionPolicy").linkedClass.requiredValue("SOURCE") - @threadUnsafe lazy val AnnotationRetentionClassAttr: TermSymbol = requiredClass("java.lang.annotation.RetentionPolicy").linkedClass.requiredValue("CLASS") - @threadUnsafe lazy val AnnotationRetentionRuntimeAttr: TermSymbol = requiredClass("java.lang.annotation.RetentionPolicy").linkedClass.requiredValue("RUNTIME") - - val bCodeAsmCommon: BCodeAsmCommon[int.type] = new BCodeAsmCommon(int) - - /* - * must-single-thread - */ - def getFileForClassfile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = { - getFile(base, clsName, suffix) - } - - /* - * must-single-thread - */ - def getOutFolder(csym: Symbol, cName: String): AbstractFile = { - try { - outputDirectory - } catch { - case ex: Throwable => - report.error(em"Couldn't create file for class $cName\n${ex.getMessage}", ctx.source.atSpan(csym.span)) - null - } - } - - final def traitSuperAccessorName(sym: Symbol): String = { - val nameString = sym.javaSimpleName.toString - if (sym.name == nme.TRAIT_CONSTRUCTOR) nameString - else nameString + "$" - } - - // ----------------------------------------------------------------------------------------- - // finding the least upper bound in agreement with the bytecode verifier (given two internal names handed by ASM) - // Background: - // http://gallium.inria.fr/~xleroy/publi/bytecode-verification-JAR.pdf - // http://comments.gmane.org/gmane.comp.java.vm.languages/2293 - // https://issues.scala-lang.org/browse/SI-3872 - // ----------------------------------------------------------------------------------------- - - /* An `asm.ClassWriter` that uses `jvmWiseLUB()` - * The internal name of the least common ancestor of the types given by inameA and inameB. - * It's what ASM needs to know in order to compute stack map frames, http://asm.ow2.org/doc/developer-guide.html#controlflow - */ - final class CClassWriter(flags: Int) extends asm.ClassWriter(flags) { - - /** - * This method is thread-safe: it depends only on the BTypes component, which does not depend - * on global. TODO @lry move to a different place where no global is in scope, on bTypes. - */ - override def getCommonSuperClass(inameA: String, inameB: String): String = { - val a = classBTypeFromInternalName(inameA) - val b = classBTypeFromInternalName(inameB) - val lub = a.jvmWiseLUB(b) - val lubName = lub.internalName - assert(lubName != "scala/Any") - lubName // ASM caches the answer during the lifetime of a ClassWriter. We outlive that. Not sure whether caching on our side would improve things. - } - } - - /* - * must-single-thread - */ - def initBytecodeWriter(): BytecodeWriter = { - (None: Option[AbstractFile] /*getSingleOutput*/) match { // todo: implement - case Some(f) if f.hasExtension("jar") => - new DirectToJarfileWriter(f.file) - case _ => - factoryNonJarBytecodeWriter() - } - } - - /* - * Populates the InnerClasses JVM attribute with `refedInnerClasses`. See also the doc on inner - * classes in BTypes.scala. 
- * - * `refedInnerClasses` may contain duplicates, need not contain the enclosing inner classes of - * each inner class it lists (those are looked up and included). - * - * This method serializes in the InnerClasses JVM attribute in an appropriate order, - * not necessarily that given by `refedInnerClasses`. - * - * can-multi-thread - */ - final def addInnerClasses(jclass: asm.ClassVisitor, declaredInnerClasses: List[ClassBType], refedInnerClasses: List[ClassBType]): Unit = { - // sorting ensures nested classes are listed after their enclosing class thus satisfying the Eclipse Java compiler - val allNestedClasses = new mutable.TreeSet[ClassBType]()(Ordering.by(_.internalName)) - allNestedClasses ++= declaredInnerClasses - refedInnerClasses.foreach(allNestedClasses ++= _.enclosingNestedClassesChain) - for nestedClass <- allNestedClasses - do { - // Extract the innerClassEntry - we know it exists, enclosingNestedClassesChain only returns nested classes. - val Some(e) = nestedClass.innerClassAttributeEntry: @unchecked - jclass.visitInnerClass(e.name, e.outerName, e.innerName, e.flags) - } - } - - /* - * can-multi-thread - */ - def createJAttribute(name: String, b: Array[Byte], offset: Int, len: Int): asm.Attribute = { - new asm.Attribute(name) { - override def write(classWriter: ClassWriter, code: Array[Byte], - codeLength: Int, maxStack: Int, maxLocals: Int): asm.ByteVector = { - val byteVector = new asm.ByteVector(len) - byteVector.putByteArray(b, offset, len) - byteVector - } - } - } - - /* - * Custom attribute (JVMS 4.7.1) "ScalaSig" used as marker only - * i.e., the pickle is contained in a custom annotation, see: - * (1) `addAnnotations()`, - * (2) SID # 10 (draft) - Storage of pickled Scala signatures in class files, http://www.scala-lang.org/sid/10 - * (3) SID # 5 - Internals of Scala Annotations, http://www.scala-lang.org/sid/5 - * That annotation in turn is not related to the "java-generic-signature" (JVMS 4.7.9) - * other than both ending up encoded as attributes (JVMS 4.7) - * (with the caveat that the "ScalaSig" attribute is associated to some classes, - * while the "Signature" attribute can be associated to classes, methods, and fields.) - * - */ - trait BCPickles { - - import dotty.tools.dotc.core.unpickleScala2.{ PickleFormat, PickleBuffer } - - val versionPickle = { - val vp = new PickleBuffer(new Array[Byte](16), -1, 0) - assert(vp.writeIndex == 0, vp) - vp writeNat PickleFormat.MajorVersion - vp writeNat PickleFormat.MinorVersion - vp writeNat 0 - vp - } - - /* - * can-multi-thread - */ - def pickleMarkerLocal = { - createJAttribute(ScalaSignatureATTRName, versionPickle.bytes, 0, versionPickle.writeIndex) - } - - /* - * can-multi-thread - */ - def pickleMarkerForeign = { - createJAttribute(ScalaATTRName, new Array[Byte](0), 0, 0) - } - } // end of trait BCPickles - - trait BCInnerClassGen extends Pure { - - def debugLevel = 3 // 0 -> no debug info; 1-> filename; 2-> lines; 3-> varnames - - final val emitSource = debugLevel >= 1 - final val emitLines = debugLevel >= 2 - final val emitVars = debugLevel >= 3 - - /** - * The class internal name for a given class symbol. - */ - final def internalName(sym: Symbol): String = { - // For each java class, the scala compiler creates a class and a module (thus a module class). - // If the `sym` is a java module class, we use the java class instead. This ensures that the - // ClassBType is created from the main class (instead of the module class). - // The two symbols have the same name, so the resulting internalName is the same. 
- val classSym = if (sym.is(JavaDefined) && sym.is(ModuleClass)) sym.linkedClass else sym - getClassBType(classSym).internalName - } - - private def assertClassNotArray(sym: Symbol): Unit = { - assert(sym.isClass, sym) - assert(sym != defn.ArrayClass || compilingArray, sym) - } - - private def assertClassNotArrayNotPrimitive(sym: Symbol): Unit = { - assertClassNotArray(sym) - assert(!primitiveTypeMap.contains(sym) || isCompilingPrimitive, sym) - } - - /** - * The ClassBType for a class symbol. - * - * The class symbol scala.Nothing is mapped to the class scala.runtime.Nothing$. Similarly, - * scala.Null is mapped to scala.runtime.Null$. This is because there exist no class files - * for the Nothing / Null. If used for example as a parameter type, we use the runtime classes - * in the classfile method signature. - * - * Note that the referenced class symbol may be an implementation class. For example when - * compiling a mixed-in method that forwards to the static method in the implementation class, - * the class descriptor of the receiver (the implementation class) is obtained by creating the - * ClassBType. - */ - final def getClassBType(sym: Symbol): ClassBType = { - assertClassNotArrayNotPrimitive(sym) - - if (sym == defn.NothingClass) srNothingRef - else if (sym == defn.NullClass) srNullRef - else classBTypeFromSymbol(sym) - } - - /* - * must-single-thread - */ - final def asmMethodType(msym: Symbol): MethodBType = { - assert(msym.is(Method), s"not a method-symbol: $msym") - val resT: BType = - if (msym.isClassConstructor || msym.isConstructor) UNIT - else toTypeKind(msym.info.resultType) - MethodBType(msym.info.firstParamTypes map toTypeKind, resT) - } - - /** - * The jvm descriptor of a type. - */ - final def typeDescriptor(t: Type): String = { toTypeKind(t).descriptor } - - /** - * The jvm descriptor for a symbol. 
- */ - final def symDescriptor(sym: Symbol): String = getClassBType(sym).descriptor - - final def toTypeKind(tp: Type): BType = typeToTypeKind(tp)(BCodeHelpers.this)(this) - - } // end of trait BCInnerClassGen - - trait BCAnnotGen extends BCInnerClassGen { - - /* - * must-single-thread - */ - def emitAnnotations(cw: asm.ClassVisitor, annotations: List[Annotation]): Unit = - for(annot <- annotations; if shouldEmitAnnotation(annot)) { - val typ = annot.tree.tpe - val assocs = assocsFromApply(annot.tree) - val av = cw.visitAnnotation(typeDescriptor(typ), isRuntimeVisible(annot)) - emitAssocs(av, assocs, BCodeHelpers.this)(this) - } - - /* - * must-single-thread - */ - def emitAnnotations(mw: asm.MethodVisitor, annotations: List[Annotation]): Unit = - for(annot <- annotations; if shouldEmitAnnotation(annot)) { - val typ = annot.tree.tpe - val assocs = assocsFromApply(annot.tree) - val av = mw.visitAnnotation(typeDescriptor(typ), isRuntimeVisible(annot)) - emitAssocs(av, assocs, BCodeHelpers.this)(this) - } - - /* - * must-single-thread - */ - def emitAnnotations(fw: asm.FieldVisitor, annotations: List[Annotation]): Unit = - for(annot <- annotations; if shouldEmitAnnotation(annot)) { - val typ = annot.tree.tpe - val assocs = assocsFromApply(annot.tree) - val av = fw.visitAnnotation(typeDescriptor(typ), isRuntimeVisible(annot)) - emitAssocs(av, assocs, BCodeHelpers.this)(this) - } - - /* - * must-single-thread - */ - def emitParamNames(jmethod: asm.MethodVisitor, params: List[Symbol]) = - for param <- params do - var access = asm.Opcodes.ACC_FINAL - if param.is(Artifact) then access |= asm.Opcodes.ACC_SYNTHETIC - jmethod.visitParameter(param.name.mangledString, access) - - /* - * must-single-thread - */ - def emitParamAnnotations(jmethod: asm.MethodVisitor, pannotss: List[List[Annotation]]): Unit = - val annotationss = pannotss map (_ filter shouldEmitAnnotation) - if (annotationss forall (_.isEmpty)) return - for ((annots, idx) <- annotationss.zipWithIndex; - annot <- annots) { - val typ = annot.tree.tpe - val assocs = assocsFromApply(annot.tree) - val pannVisitor: asm.AnnotationVisitor = jmethod.visitParameterAnnotation(idx, typeDescriptor(typ.asInstanceOf[Type]), isRuntimeVisible(annot)) - emitAssocs(pannVisitor, assocs, BCodeHelpers.this)(this) - } - - - private def shouldEmitAnnotation(annot: Annotation): Boolean = { - annot.symbol.is(JavaDefined) && - retentionPolicyOf(annot) != AnnotationRetentionSourceAttr - } - - private def emitAssocs(av: asm.AnnotationVisitor, assocs: List[(Name, Object)], bcodeStore: BCodeHelpers) - (innerClasesStore: bcodeStore.BCInnerClassGen) = { - for ((name, value) <- assocs) - emitArgument(av, name.mangledString, value.asInstanceOf[Tree], bcodeStore)(innerClasesStore) - av.visitEnd() - } - - private def emitArgument(av: AnnotationVisitor, - name: String, - arg: Tree, bcodeStore: BCodeHelpers)(innerClasesStore: bcodeStore.BCInnerClassGen): Unit = { - val narg = normalizeArgument(arg) - // Transformation phases are not run on annotation trees, so we need to run - // `constToLiteral` at this point. 
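For readers less familiar with the ASM annotation API that emitAssocs and emitArgument drive, the sketch below shows how constant, enum, array and nested-annotation arguments map onto AnnotationVisitor calls. It is illustrative only: it uses the upstream org.objectweb.asm artifact rather than the compiler's repackaged scala.tools.asm, and the annotation descriptors are made up.

import org.objectweb.asm.{ClassWriter, Opcodes}

object AnnotationVisitorSketch {
  def main(args: Array[String]): Unit = {
    val cw = new ClassWriter(0)
    cw.visit(Opcodes.V1_8, Opcodes.ACC_PUBLIC, "p/Sample", null, "java/lang/Object", null)
    val av = cw.visitAnnotation("Lexample/Marker;", true) // visible = runtime retention
    av.visit("count", Integer.valueOf(3))                 // constants go through visit(name, value)
    av.visitEnum("level", "Lexample/Level;", "HIGH")      // enum values go through visitEnum
    val arr = av.visitArray("names")                      // array elements: the name argument is ignored
    arr.visit(null, "a")
    arr.visit(null, "b")
    arr.visitEnd()
    av.visitAnnotation("nested", "Lexample/Inner;").visitEnd() // nested annotations recurse
    av.visitEnd()
    cw.visitEnd()
  }
}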
- val t = atPhase(erasurePhase)(constToLiteral(narg)) - t match { - case Literal(const @ Constant(_)) => - const.tag match { - case BooleanTag | ByteTag | ShortTag | CharTag | IntTag | LongTag | FloatTag | DoubleTag => av.visit(name, const.value) - case StringTag => - assert(const.value != null, const) // TODO this invariant isn't documented in `case class Constant` - av.visit(name, const.stringValue) // `stringValue` special-cases null, but that execution path isn't exercised for a const with StringTag - case ClazzTag => av.visit(name, typeToTypeKind(TypeErasure.erasure(const.typeValue))(bcodeStore)(innerClasesStore).toASMType) - } - case Ident(nme.WILDCARD) => - // An underscore argument indicates that we want to use the default value for this parameter, so do not emit anything - case t: tpd.RefTree if t.symbol.owner.linkedClass.isAllOf(JavaEnum) => - val edesc = innerClasesStore.typeDescriptor(t.tpe) // the class descriptor of the enumeration class. - val evalue = t.symbol.javaSimpleName // value the actual enumeration value. - av.visitEnum(name, edesc, evalue) - case t: SeqLiteral => - val arrAnnotV: AnnotationVisitor = av.visitArray(name) - for (arg <- t.elems) { emitArgument(arrAnnotV, null, arg, bcodeStore)(innerClasesStore) } - arrAnnotV.visitEnd() - - case Apply(fun, args) if fun.symbol == defn.ArrayClass.primaryConstructor || - toDenot(fun.symbol).owner == defn.ArrayClass.linkedClass && fun.symbol.name == nme.apply => - val arrAnnotV: AnnotationVisitor = av.visitArray(name) - - var actualArgs = if (fun.tpe.isImplicitMethod) { - // generic array method, need to get implicit argument out of the way - fun.asInstanceOf[Apply].args - } else args - - val flatArgs = actualArgs.flatMap { arg => - normalizeArgument(arg) match { - case t: tpd.SeqLiteral => t.elems - case e => List(e) - } - } - for(arg <- flatArgs) { - emitArgument(arrAnnotV, null, arg, bcodeStore)(innerClasesStore) - } - arrAnnotV.visitEnd() - /* - case sb @ ScalaSigBytes(bytes) => - // see http://www.scala-lang.org/sid/10 (Storage of pickled Scala signatures in class files) - // also JVMS Sec. 4.7.16.1 The element_value structure and JVMS Sec. 4.4.7 The CONSTANT_Utf8_info Structure. - if (sb.fitsInOneString) { - av.visit(name, BCodeAsmCommon.strEncode(sb)) - } else { - val arrAnnotV: asm.AnnotationVisitor = av.visitArray(name) - for(arg <- BCodeAsmCommon.arrEncode(sb)) { arrAnnotV.visit(name, arg) } - arrAnnotV.visitEnd() - } // for the lazy val in ScalaSigBytes to be GC'ed, the invoker of emitAnnotations() should hold the ScalaSigBytes in a method-local var that doesn't escape. 
- */ - case t @ Apply(constr, args) if t.tpe.classSymbol.is(JavaAnnotation) => - val typ = t.tpe.classSymbol.denot.info - val assocs = assocsFromApply(t) - val desc = innerClasesStore.typeDescriptor(typ) // the class descriptor of the nested annotation class - val nestedVisitor = av.visitAnnotation(name, desc) - emitAssocs(nestedVisitor, assocs, bcodeStore)(innerClasesStore) - - case t => - report.error(em"Annotation argument is not a constant", t.sourcePos) - } - } - - private def normalizeArgument(arg: Tree): Tree = arg match { - case Trees.NamedArg(_, arg1) => normalizeArgument(arg1) - case Trees.Typed(arg1, _) => normalizeArgument(arg1) - case _ => arg - } - - private def isRuntimeVisible(annot: Annotation): Boolean = - if (toDenot(annot.tree.tpe.typeSymbol).hasAnnotation(AnnotationRetentionAttr)) - retentionPolicyOf(annot) == AnnotationRetentionRuntimeAttr - else { - // SI-8926: if the annotation class symbol doesn't have a @RetentionPolicy annotation, the - // annotation is emitted with visibility `RUNTIME` - // dotty bug: #389 - true - } - - private def retentionPolicyOf(annot: Annotation): Symbol = - annot.tree.tpe.typeSymbol.getAnnotation(AnnotationRetentionAttr). - flatMap(_.argument(0).map(_.tpe.termSymbol)).getOrElse(AnnotationRetentionClassAttr) - - private def assocsFromApply(tree: Tree): List[(Name, Tree)] = { - tree match { - case Block(_, expr) => assocsFromApply(expr) - case Apply(fun, args) => - fun.tpe.widen match { - case MethodType(names) => - (names zip args).filter { - case (_, t: tpd.Ident) if (t.tpe.normalizedPrefix eq NoPrefix) => false - case _ => true - } - } - } - } - } // end of trait BCAnnotGen - - trait BCJGenSigGen { - import int.given - - def getCurrentCUnit(): CompilationUnit - - /** - * Generates the generic signature for `sym` before erasure. - * - * @param sym The symbol for which to generate a signature. - * @param owner The owner of `sym`. - * @return The generic signature of `sym` before erasure, as specified in the Java Virtual - * Machine Specification, §4.3.4, or `null` if `sym` doesn't need a generic signature. - * @see https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.3.4 - */ - def getGenericSignature(sym: Symbol, owner: Symbol): String = { - atPhase(erasurePhase) { - val memberTpe = - if (sym.is(Method)) sym.denot.info - else owner.denot.thisType.memberInfo(sym) - getGenericSignatureHelper(sym, owner, memberTpe).orNull - } - } - - } // end of trait BCJGenSigGen - - trait BCForwardersGen extends BCAnnotGen with BCJGenSigGen { - - /* Add a forwarder for method m. Used only from addForwarders(). - * - * must-single-thread - */ - private def addForwarder(jclass: asm.ClassVisitor, module: Symbol, m: Symbol, isSynthetic: Boolean): Unit = { - val moduleName = internalName(module) - val methodInfo = module.thisType.memberInfo(m) - val paramJavaTypes: List[BType] = methodInfo.firstParamTypes map toTypeKind - // val paramNames = 0 until paramJavaTypes.length map ("x_" + _) - - /* Forwarders must not be marked final, - * as the JVM will not allow redefinition of a final static method, - * and we don't know what classes might be subclassing the companion class. See SI-4827. - */ - // TODO: evaluate the other flags we might be dropping on the floor here. - val flags = GenBCodeOps.PublicStatic | ( - if (m.is(JavaVarargs)) asm.Opcodes.ACC_VARARGS else 0 - ) | ( - if (isSynthetic) asm.Opcodes.ACC_SYNTHETIC else 0 - ) - - // TODO needed? 
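The retention handling above, restated as a small decision table for Java-defined annotations (the only ones this path emits). This is a self-contained sketch over plain strings rather than compiler Symbols; the None case reflects the SI-8926 behaviour of treating a missing @Retention as runtime-visible.

// Returns None when the annotation is dropped, otherwise Some(visibleAtRuntime).
def classfileAnnotationEmission(retention: Option[String]): Option[Boolean] =
  retention match {
    case Some("SOURCE")         => None         // never written to the classfile
    case Some("RUNTIME") | None => Some(true)   // RuntimeVisibleAnnotations
    case _                      => Some(false)  // CLASS and anything else: RuntimeInvisibleAnnotations
  }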
for(ann <- m.annotations) { ann.symbol.initialize } - val jgensig = getStaticForwarderGenericSignature(m, module) - val (throws, others) = m.annotations.partition(_.symbol eq defn.ThrowsAnnot) - val thrownExceptions: List[String] = getExceptions(throws) - - val jReturnType = toTypeKind(methodInfo.resultType) - val mdesc = MethodBType(paramJavaTypes, jReturnType).descriptor - val mirrorMethodName = m.javaSimpleName - val mirrorMethod: asm.MethodVisitor = jclass.visitMethod( - flags, - mirrorMethodName, - mdesc, - jgensig, - mkArrayS(thrownExceptions) - ) - - emitAnnotations(mirrorMethod, others) - val params: List[Symbol] = Nil // backend uses this to emit annotations on parameter lists of forwarders - // to static methods of companion class - // Old assumption: in Dotty this link does not exists: there is no way to get from method type - // to inner symbols of DefDef - // TODO: now we have paramSymss and could use it here. - emitParamAnnotations(mirrorMethod, params.map(_.annotations)) - - mirrorMethod.visitCode() - - mirrorMethod.visitFieldInsn(asm.Opcodes.GETSTATIC, moduleName, str.MODULE_INSTANCE_FIELD, symDescriptor(module)) - - var index = 0 - for(jparamType <- paramJavaTypes) { - mirrorMethod.visitVarInsn(jparamType.typedOpcode(asm.Opcodes.ILOAD), index) - assert(!jparamType.isInstanceOf[MethodBType], jparamType) - index += jparamType.size - } - - mirrorMethod.visitMethodInsn(asm.Opcodes.INVOKEVIRTUAL, moduleName, mirrorMethodName, asmMethodType(m).descriptor, false) - mirrorMethod.visitInsn(jReturnType.typedOpcode(asm.Opcodes.IRETURN)) - - mirrorMethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments - mirrorMethod.visitEnd() - - } - - /* Add forwarders for all methods defined in `module` that don't conflict - * with methods in the companion class of `module`. A conflict arises when - * a method with the same name is defined both in a class and its companion object: - * method signature is not taken into account. - * - * must-single-thread - */ - def addForwarders(jclass: asm.ClassVisitor, jclassName: String, moduleClass: Symbol): Unit = { - assert(moduleClass.is(ModuleClass), moduleClass) - report.debuglog(s"Dumping mirror class for object: $moduleClass") - - val linkedClass = moduleClass.companionClass - lazy val conflictingNames: Set[Name] = { - (linkedClass.info.allMembers.collect { case d if d.name.isTermName => d.name }).toSet - } - report.debuglog(s"Potentially conflicting names for forwarders: $conflictingNames") - - for (m0 <- sortedMembersBasedOnFlags(moduleClass.info, required = Method, excluded = ExcludedForwarder)) { - val m = if (m0.is(Bridge)) m0.nextOverriddenSymbol else m0 - if (m == NoSymbol) - report.log(s"$m0 is a bridge method that overrides nothing, something went wrong in a previous phase.") - else if (m.isType || m.is(Deferred) || (m.owner eq defn.ObjectClass) || m.isConstructor || m.name.is(ExpandedName)) - report.debuglog(s"No forwarder for '$m' from $jclassName to '$moduleClass'") - else if (conflictingNames(m.name)) - report.log(s"No forwarder for $m due to conflict with ${linkedClass.info.member(m.name)}") - else if (m.accessBoundary(defn.RootClass) ne defn.RootClass) - report.log(s"No forwarder for non-public member $m") - else { - report.log(s"Adding static forwarder for '$m' from $jclassName to '$moduleClass'") - // It would be simpler to not generate forwarders for these methods, - // but that wouldn't be binary-compatible with Scala 3.0.0, so instead - // we generate ACC_SYNTHETIC forwarders so Java compilers ignore them. 
- val isSynthetic = - m0.name.is(NameKinds.SyntheticSetterName) || - // Only hide bridges generated at Erasure, mixin forwarders are also - // marked as bridge but shouldn't be hidden since they don't have a - // non-bridge overload. - m0.is(Bridge) && m0.initial.validFor.firstPhaseId == erasurePhase.next.id - addForwarder(jclass, moduleClass, m, isSynthetic) - } - } - } - - /** The members of this type that have all of `required` flags but none of `excluded` flags set. - * The members are sorted by name and signature to guarantee a stable ordering. - */ - private def sortedMembersBasedOnFlags(tp: Type, required: Flag, excluded: FlagSet): List[Symbol] = { - // The output of `memberNames` is a Set, sort it to guarantee a stable ordering. - val names = tp.memberNames(takeAllFilter).toSeq.sorted - val buffer = mutable.ListBuffer[Symbol]() - names.foreach { name => - buffer ++= tp.memberBasedOnFlags(name, required, excluded) - .alternatives.sortBy(_.signature)(Signature.lexicographicOrdering).map(_.symbol) - } - buffer.toList - } - - /* - * Quoting from JVMS 4.7.5 The Exceptions Attribute - * "The Exceptions attribute indicates which checked exceptions a method may throw. - * There may be at most one Exceptions attribute in each method_info structure." - * - * The contents of that attribute are determined by the `String[] exceptions` argument to ASM's ClassVisitor.visitMethod() - * This method returns such list of internal names. - * - * must-single-thread - */ - def getExceptions(excs: List[Annotation]): List[String] = { - for (case ThrownException(exc) <- excs.distinct) - yield internalName(TypeErasure.erasure(exc).classSymbol) - } - } // end of trait BCForwardersGen - - trait BCClassGen extends BCInnerClassGen { - - // Used as threshold above which a tableswitch bytecode instruction is preferred over a lookupswitch. - // There's a space tradeoff between these multi-branch instructions (details in the JVM spec). - // The particular value in use for `MIN_SWITCH_DENSITY` reflects a heuristic. - val MIN_SWITCH_DENSITY = 0.7 - - /* - * Add public static final field serialVersionUID with value `id` - * - * can-multi-thread - */ - def addSerialVUID(id: Long, jclass: asm.ClassVisitor): Unit = { - // add static serialVersionUID field if `clasz` annotated with `@SerialVersionUID(uid: Long)` - jclass.visitField( - GenBCodeOps.PrivateStaticFinal, - "serialVersionUID", - "J", - null, // no java-generic-signature - java.lang.Long.valueOf(id) - ).visitEnd() - } - } // end of trait BCClassGen - - /* functionality for building plain and mirror classes */ - abstract class JCommonBuilder - extends BCInnerClassGen - with BCAnnotGen - with BCForwardersGen - with BCPickles { } - - /* builder of mirror classes */ - class JMirrorBuilder extends JCommonBuilder { - - private var cunit: CompilationUnit = _ - def getCurrentCUnit(): CompilationUnit = cunit; - - /* Generate a mirror class for a top-level module. A mirror class is a class - * containing only static methods that forward to the corresponding method - * on the MODULE instance of the given Scala object. It will only be - * generated if there is no companion class: if there is, an attempt will - * instead be made to add the forwarder methods to the companion class. 
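To make the forwarder machinery above concrete, here is a hedged sketch of the classfile shape produced for a simple top-level object with no companion class; the object and the javap-style pseudo-bytecode are assumed examples, not actual backend output.

object Greeter {
  def greet(name: String): String = "Hello, " + name
}
// A mirror class `Greeter` is emitted whose only content is static forwarders such as:
//
//   public static String greet(String);        // in class Greeter
//     GETSTATIC     Greeter$.MODULE$ : LGreeter$;
//     ALOAD         0                           // parameters of a static method start at slot 0
//     INVOKEVIRTUAL Greeter$.greet:(Ljava/lang/String;)Ljava/lang/String;
//     ARETURN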
- * - * must-single-thread - */ - def genMirrorClass(moduleClass: Symbol, cunit: CompilationUnit): asm.tree.ClassNode = { - assert(moduleClass.is(ModuleClass)) - assert(moduleClass.companionClass == NoSymbol, moduleClass) - this.cunit = cunit - val bType = mirrorClassBTypeFromSymbol(moduleClass) - val moduleName = internalName(moduleClass) // + "$" - val mirrorName = bType.internalName - - val mirrorClass = new asm.tree.ClassNode - mirrorClass.visit( - classfileVersion, - bType.info.flags, - mirrorName, - null /* no java-generic-signature */, - ObjectRef.internalName, - EMPTY_STRING_ARRAY - ) - - if (emitSource) { - mirrorClass.visitSource("" + cunit.source.file.name, - null /* SourceDebugExtension */) - } - - val ssa = None // getAnnotPickle(mirrorName, if (moduleClass.is(Module)) moduleClass.companionClass else moduleClass.companionModule) - mirrorClass.visitAttribute(if (ssa.isDefined) pickleMarkerLocal else pickleMarkerForeign) - emitAnnotations(mirrorClass, moduleClass.annotations ++ ssa) - - addForwarders(mirrorClass, mirrorName, moduleClass) - mirrorClass.visitEnd() - - moduleClass.name // this side-effect is necessary, really. - - mirrorClass - } - - } // end of class JMirrorBuilder - - trait JAndroidBuilder { - self: BCInnerClassGen => - - /* From the reference documentation of the Android SDK: - * The `Parcelable` interface identifies classes whose instances can be written to and restored from a `Parcel`. - * Classes implementing the `Parcelable` interface must also have a static field called `CREATOR`, - * which is an object implementing the `Parcelable.Creator` interface. - */ - val androidFieldName = "CREATOR".toTermName - - lazy val AndroidParcelableInterface : Symbol = NoSymbol // getClassIfDefined("android.os.Parcelable") - lazy val AndroidCreatorClass : Symbol = NoSymbol // getClassIfDefined("android.os.Parcelable$Creator") - - /* - * must-single-thread - */ - def isAndroidParcelableClass(sym: Symbol) = - (AndroidParcelableInterface != NoSymbol) && - (sym.info.parents.map(_.typeSymbol) contains AndroidParcelableInterface) - - /* - * must-single-thread - */ - def legacyAddCreatorCode(clinit: asm.MethodVisitor, cnode: asm.tree.ClassNode, thisName: String): Unit = { - val androidCreatorType = getClassBType(AndroidCreatorClass) - val tdesc_creator = androidCreatorType.descriptor - - cnode.visitField( - GenBCodeOps.PublicStaticFinal, - "CREATOR", - tdesc_creator, - null, // no java-generic-signature - null // no initial value - ).visitEnd() - - val moduleName = (thisName + "$") - - // GETSTATIC `moduleName`.MODULE$ : `moduleName`; - clinit.visitFieldInsn( - asm.Opcodes.GETSTATIC, - moduleName, - str.MODULE_INSTANCE_FIELD, - "L" + moduleName + ";" - ) - - // INVOKEVIRTUAL `moduleName`.CREATOR() : android.os.Parcelable$Creator; - val bt = MethodBType(Nil, androidCreatorType) - clinit.visitMethodInsn( - asm.Opcodes.INVOKEVIRTUAL, - moduleName, - "CREATOR", - bt.descriptor, - false - ) - - // PUTSTATIC `thisName`.CREATOR; - clinit.visitFieldInsn( - asm.Opcodes.PUTSTATIC, - thisName, - "CREATOR", - tdesc_creator - ) - } - - } // end of trait JAndroidBuilder - - /** - * This method returns the BType for a type reference, for example a parameter type. - * - * If the result is a ClassBType for a nested class, it is added to the innerClassBufferASM. - * - * If `t` references a class, toTypeKind ensures that the class is not an implementation class. - * See also comment on getClassBTypeAndRegisterInnerClass, which is invoked for implementation - * classes. 
- */ - private def typeToTypeKind(tp: Type)(ct: BCodeHelpers)(storage: ct.BCInnerClassGen): ct.bTypes.BType = { - import ct.bTypes._ - val defn = ctx.definitions - import coreBTypes._ - import Types._ - /** - * Primitive types are represented as TypeRefs to the class symbol of, for example, scala.Int. - * The `primitiveTypeMap` maps those class symbols to the corresponding PrimitiveBType. - */ - def primitiveOrClassToBType(sym: Symbol): BType = { - assert(sym.isClass, sym) - assert(sym != defn.ArrayClass || compilingArray, sym) - primitiveTypeMap.getOrElse(sym, storage.getClassBType(sym)).asInstanceOf[BType] - } - - /** - * When compiling Array.scala, the type parameter T is not erased and shows up in method - * signatures, e.g. `def apply(i: Int): T`. A TyperRef to T is replaced by ObjectReference. - */ - def nonClassTypeRefToBType(sym: Symbol): ClassBType = { - assert(sym.isType && compilingArray, sym) - ObjectRef.asInstanceOf[ct.bTypes.ClassBType] - } - - tp.widenDealias match { - case JavaArrayType(el) =>ArrayBType(typeToTypeKind(el)(ct)(storage)) // Array type such as Array[Int] (kept by erasure) - case t: TypeRef => - t.info match { - - case _ => - if (!t.symbol.isClass) nonClassTypeRefToBType(t.symbol) // See comment on nonClassTypeRefToBType - else primitiveOrClassToBType(t.symbol) // Common reference to a type such as scala.Int or java.lang.String - } - case Types.ClassInfo(_, sym, _, _, _) => primitiveOrClassToBType(sym) // We get here, for example, for genLoadModule, which invokes toTypeKind(moduleClassSymbol.info) - - /* AnnotatedType should (probably) be eliminated by erasure. However we know it happens for - * meta-annotated annotations (@(ann @getter) val x = 0), so we don't emit a warning. - * The type in the AnnotationInfo is an AnnotatedTpe. Tested in jvm/annotations.scala. - */ - case a @ AnnotatedType(t, _) => - report.debuglog(s"typeKind of annotated type $a") - typeToTypeKind(t)(ct)(storage) - - /* The cases below should probably never occur. They are kept for now to avoid introducing - * new compiler crashes, but we added a warning. The compiler / library bootstrap and the - * test suite don't produce any warning. - */ - - case tp => - report.warning( - s"an unexpected type representation reached the compiler backend while compiling ${ctx.compilationUnit}: $tp. " + - "If possible, please file a bug on https://github.com/scala/scala3/issues") - - tp match { - case tp: ThisType if tp.cls == defn.ArrayClass => ObjectRef.asInstanceOf[ct.bTypes.ClassBType] // was introduced in 9b17332f11 to fix SI-999, but this code is not reached in its test, or any other test - case tp: ThisType => storage.getClassBType(tp.cls) - // case t: SingletonType => primitiveOrClassToBType(t.classSymbol) - case t: SingletonType => typeToTypeKind(t.underlying)(ct)(storage) - case t: RefinedType => typeToTypeKind(t.parent)(ct)(storage) //parents.map(_.toTypeKind(ct)(storage).asClassBType).reduceLeft((a, b) => a.jvmWiseLUB(b)) - } - } - } - - private def getGenericSignatureHelper(sym: Symbol, owner: Symbol, memberTpe: Type)(using Context): Option[String] = { - if (needsGenericSignature(sym)) { - val erasedTypeSym = TypeErasure.fullErasure(sym.denot.info).typeSymbol - if (erasedTypeSym.isPrimitiveValueClass) { - // Suppress signatures for symbols whose types erase in the end to primitive - // value types. This is needed to fix #7416. 
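One situation the primitive-erasure check just above guards against, shown as an assumed example: a value class whose full erasure is a primitive, where a Java generic signature phrased in terms of the value class would contradict the erased method descriptor.

class Meter(val underlying: Int) extends AnyVal
// After erasure this method has the plain descriptor (I)I, so no generic signature is emitted for it.
def double(m: Meter): Meter = new Meter(m.underlying * 2)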
- None - } else { - val jsOpt = GenericSignatures.javaSig(sym, memberTpe) - if (ctx.settings.XverifySignatures.value) { - jsOpt.foreach(verifySignature(sym, _)) - } - - jsOpt - } - } else { - None - } - } - - private def verifySignature(sym: Symbol, sig: String)(using Context): Unit = { - import scala.tools.asm.util.CheckClassAdapter - def wrap(body: => Unit): Unit = { - try body - catch { - case ex: Throwable => - report.error( - em"""|compiler bug: created invalid generic signature for $sym in ${sym.denot.owner.showFullName} - |signature: $sig - |if this is reproducible, please report bug at https://github.com/scala/scala3/issues - """, sym.sourcePos) - throw ex - } - } - - wrap { - if (sym.is(Method)) { - CheckClassAdapter.checkMethodSignature(sig) - } - else if (sym.isTerm) { - CheckClassAdapter.checkFieldSignature(sig) - } - else { - CheckClassAdapter.checkClassSignature(sig) - } - } - } - - // @M don't generate java generics sigs for (members of) implementation - // classes, as they are monomorphic (TODO: ok?) - private final def needsGenericSignature(sym: Symbol): Boolean = !( - // pp: this condition used to include sym.hasexpandedname, but this leads - // to the total loss of generic information if a private member is - // accessed from a closure: both the field and the accessor were generated - // without it. This is particularly bad because the availability of - // generic information could disappear as a consequence of a seemingly - // unrelated change. - ctx.base.settings.YnoGenericSig.value - || sym.is(Artifact) - || sym.isAllOf(LiftedMethod) - || sym.is(Bridge) - ) - - private def getStaticForwarderGenericSignature(sym: Symbol, moduleClass: Symbol): String = { - // scala/bug#3452 Static forwarder generation uses the same erased signature as the method if forwards to. - // By rights, it should use the signature as-seen-from the module class, and add suitable - // primitive and value-class boxing/unboxing. - // But for now, just like we did in mixin, we just avoid writing a wrong generic signature - // (one that doesn't erase to the actual signature). See run/t3452b for a test case. - - val memberTpe = atPhase(erasurePhase) { moduleClass.denot.thisType.memberInfo(sym) } - val erasedMemberType = ElimErasedValueType.elimEVT(TypeErasure.transformInfo(sym, memberTpe)) - if (erasedMemberType =:= sym.denot.info) - getGenericSignatureHelper(sym, moduleClass, memberTpe).orNull - else null - } - - def abort(msg: String): Nothing = { - report.error(msg) - throw new RuntimeException(msg) - } - - private def compilingArray(using Context) = - ctx.compilationUnit.source.file.name == "Array.scala" -} - -object BCodeHelpers { - - class InvokeStyle(val style: Int) extends AnyVal { - import InvokeStyle._ - def isVirtual: Boolean = this == Virtual - def isStatic : Boolean = this == Static - def isSpecial: Boolean = this == Special - def isSuper : Boolean = this == Super - - def hasInstance = this != Static - } - - object InvokeStyle { - val Virtual = new InvokeStyle(0) // InvokeVirtual or InvokeInterface - val Static = new InvokeStyle(1) // InvokeStatic - val Special = new InvokeStyle(2) // InvokeSpecial (private methods, constructors) - val Super = new InvokeStyle(3) // InvokeSpecial (super calls) - } - - /** An attachment on Apply nodes indicating that it should be compiled with - * `invokespecial` instead of `invokevirtual`. This is used for static - * forwarders. - * See BCodeSkelBuilder.makeStaticForwarder for more details. 
- */ - val UseInvokeSpecial = new dotc.util.Property.Key[Unit] - -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/BCodeIdiomatic.scala b/tests/pos-with-compiler-cc/backend/jvm/BCodeIdiomatic.scala deleted file mode 100644 index 9b8d81bbdbd1..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/BCodeIdiomatic.scala +++ /dev/null @@ -1,727 +0,0 @@ -package dotty.tools -package backend -package jvm - -import scala.language.unsafeNulls - -import scala.tools.asm -import scala.annotation.switch -import Primitives.{NE, EQ, TestOp, ArithmeticOp} -import scala.tools.asm.tree.MethodInsnNode -import dotty.tools.dotc.report - -/* - * A high-level facade to the ASM API for bytecode generation. - * - * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded - * @version 1.0 - * - */ -trait BCodeIdiomatic extends Pure { - val int: DottyBackendInterface - final lazy val bTypes = new BTypesFromSymbols[int.type](int) - - import int.{_, given} - import bTypes._ - import coreBTypes._ - - - - lazy val target = - val releaseValue = Option(ctx.settings.javaOutputVersion.value).filter(_.nonEmpty) - val targetValue = Option(ctx.settings.XuncheckedJavaOutputVersion.value).filter(_.nonEmpty) - val defaultTarget = "8" - (releaseValue, targetValue) match - case (Some(release), None) => release - case (None, Some(target)) => target - case (Some(release), Some(_)) => - report.warning(s"The value of ${ctx.settings.XuncheckedJavaOutputVersion.name} was overridden by ${ctx.settings.javaOutputVersion.name}") - release - case (None, None) => "8" // least supported version by default - - - // Keep synchronized with `minTargetVersion` and `maxTargetVersion` in ScalaSettings - lazy val classfileVersion: Int = target match { - case "8" => asm.Opcodes.V1_8 - case "9" => asm.Opcodes.V9 - case "10" => asm.Opcodes.V10 - case "11" => asm.Opcodes.V11 - case "12" => asm.Opcodes.V12 - case "13" => asm.Opcodes.V13 - case "14" => asm.Opcodes.V14 - case "15" => asm.Opcodes.V15/* - case "16" => asm.Opcodes.V16 - case "17" => asm.Opcodes.V17 - case "18" => asm.Opcodes.V18 - case "19" => asm.Opcodes.V19 - case "20" => asm.Opcodes.V20 - case "21" => asm.Opcodes.V21 - case "22" => asm.Opcodes.V22*/ - } - - lazy val majorVersion: Int = (classfileVersion & 0xFF) - lazy val emitStackMapFrame = (majorVersion >= 50) - - val extraProc: Int = - import GenBCodeOps.addFlagIf - asm.ClassWriter.COMPUTE_MAXS - .addFlagIf(emitStackMapFrame, asm.ClassWriter.COMPUTE_FRAMES) - - lazy val JavaStringBuilderClassName = jlStringBuilderRef.internalName - - val CLASS_CONSTRUCTOR_NAME = "" - val INSTANCE_CONSTRUCTOR_NAME = "" - - val EMPTY_STRING_ARRAY = Array.empty[String] - val EMPTY_INT_ARRAY = Array.empty[Int] - val EMPTY_LABEL_ARRAY = Array.empty[asm.Label] - val EMPTY_BTYPE_ARRAY = Array.empty[BType] - - /* can-multi-thread */ - final def mkArrayB(xs: List[BType]): Array[BType] = { - if (xs.isEmpty) { return EMPTY_BTYPE_ARRAY } - val a = new Array[BType](xs.size); xs.copyToArray(a); a - } - /* can-multi-thread */ - final def mkArrayS(xs: List[String]): Array[String] = { - if (xs.isEmpty) { return EMPTY_STRING_ARRAY } - val a = new Array[String](xs.size); xs.copyToArray(a); a - } - /* can-multi-thread */ - final def mkArrayL(xs: List[asm.Label]): Array[asm.Label] = { - if (xs.isEmpty) { return EMPTY_LABEL_ARRAY } - val a = new Array[asm.Label](xs.size); xs.copyToArray(a); a - } - - /* - * can-multi-thread - */ - final def mkArrayReverse(xs: List[String]): Array[String] = { - val len = xs.size - if (len == 0) { return 
EMPTY_STRING_ARRAY } - val a = new Array[String](len) - var i = len - 1 - var rest = xs - while (!rest.isEmpty) { - a(i) = rest.head - rest = rest.tail - i -= 1 - } - a - } - - /* - * can-multi-thread - */ - final def mkArrayReverse(xs: List[Int]): Array[Int] = { - val len = xs.size - if (len == 0) { return EMPTY_INT_ARRAY } - val a = new Array[Int](len) - var i = len - 1 - var rest = xs - while (!rest.isEmpty) { - a(i) = rest.head - rest = rest.tail - i -= 1 - } - a - } - - /* Just a namespace for utilities that encapsulate MethodVisitor idioms. - * In the ASM world, org.objectweb.asm.commons.InstructionAdapter plays a similar role, - * but the methods here allow choosing when to transition from ICode to ASM types - * (including not at all, e.g. for performance). - */ - abstract class JCodeMethodN { - - def jmethod: asm.tree.MethodNode - - import asm.Opcodes; - - final def emit(opc: Int): Unit = { jmethod.visitInsn(opc) } - - /* - * can-multi-thread - */ - final def genPrimitiveArithmetic(op: ArithmeticOp, kind: BType): Unit = { - - import Primitives.{ ADD, SUB, MUL, DIV, REM, NOT } - - op match { - - case ADD => add(kind) - case SUB => sub(kind) - case MUL => mul(kind) - case DIV => div(kind) - case REM => rem(kind) - - case NOT => - if (kind.isIntSizedType) { - emit(Opcodes.ICONST_M1) - emit(Opcodes.IXOR) - } else if (kind == LONG) { - jmethod.visitLdcInsn(java.lang.Long.valueOf(-1)) - jmethod.visitInsn(Opcodes.LXOR) - } else { - abort(s"Impossible to negate an $kind") - } - - case _ => - abort(s"Unknown arithmetic primitive $op") - } - - } // end of method genPrimitiveArithmetic() - - /* - * can-multi-thread - */ - final def genPrimitiveLogical(op: /* LogicalOp */ Int, kind: BType): Unit = { - - import ScalaPrimitivesOps.{ AND, OR, XOR } - - ((op, kind): @unchecked) match { - case (AND, LONG) => emit(Opcodes.LAND) - case (AND, INT) => emit(Opcodes.IAND) - case (AND, _) => - emit(Opcodes.IAND) - if (kind != BOOL) { emitT2T(INT, kind) } - - case (OR, LONG) => emit(Opcodes.LOR) - case (OR, INT) => emit(Opcodes.IOR) - case (OR, _) => - emit(Opcodes.IOR) - if (kind != BOOL) { emitT2T(INT, kind) } - - case (XOR, LONG) => emit(Opcodes.LXOR) - case (XOR, INT) => emit(Opcodes.IXOR) - case (XOR, _) => - emit(Opcodes.IXOR) - if (kind != BOOL) { emitT2T(INT, kind) } - } - - } // end of method genPrimitiveLogical() - - /* - * can-multi-thread - */ - final def genPrimitiveShift(op: /* ShiftOp */ Int, kind: BType): Unit = { - - import ScalaPrimitivesOps.{ LSL, ASR, LSR } - - ((op, kind): @unchecked) match { - case (LSL, LONG) => emit(Opcodes.LSHL) - case (LSL, INT) => emit(Opcodes.ISHL) - case (LSL, _) => - emit(Opcodes.ISHL) - emitT2T(INT, kind) - - case (ASR, LONG) => emit(Opcodes.LSHR) - case (ASR, INT) => emit(Opcodes.ISHR) - case (ASR, _) => - emit(Opcodes.ISHR) - emitT2T(INT, kind) - - case (LSR, LONG) => emit(Opcodes.LUSHR) - case (LSR, INT) => emit(Opcodes.IUSHR) - case (LSR, _) => - emit(Opcodes.IUSHR) - emitT2T(INT, kind) - } - - } // end of method genPrimitiveShift() - - /* Creates a new `StringBuilder` instance with the requested capacity - * - * can-multi-thread - */ - final def genNewStringBuilder(size: Int): Unit = { - jmethod.visitTypeInsn(Opcodes.NEW, JavaStringBuilderClassName) - jmethod.visitInsn(Opcodes.DUP) - jmethod.visitLdcInsn(Integer.valueOf(size)) - invokespecial( - JavaStringBuilderClassName, - INSTANCE_CONSTRUCTOR_NAME, - "(I)V", - itf = false - ) - } - - /* Issue a call to `StringBuilder#append` for the right element type - * - * can-multi-thread - */ - final def 
genStringBuilderAppend(elemType: BType): Unit = { - val paramType = elemType match { - case ct: ClassBType if ct.isSubtypeOf(StringRef) => StringRef - case ct: ClassBType if ct.isSubtypeOf(jlStringBufferRef) => jlStringBufferRef - case ct: ClassBType if ct.isSubtypeOf(jlCharSequenceRef) => jlCharSequenceRef - // Don't match for `ArrayBType(CHAR)`, even though StringBuilder has such an overload: - // `"a" + Array('b')` should NOT be "ab", but "a[C@...". - case _: RefBType => ObjectRef - // jlStringBuilder does not have overloads for byte and short, but we can just use the int version - case BYTE | SHORT => INT - case pt: PrimitiveBType => pt - } - val bt = MethodBType(List(paramType), jlStringBuilderRef) - invokevirtual(JavaStringBuilderClassName, "append", bt.descriptor) - } - - /* Extract the built `String` from the `StringBuilder` - * - * can-multi-thread - */ - final def genStringBuilderEnd: Unit = { - invokevirtual(JavaStringBuilderClassName, "toString", genStringBuilderEndDesc) - } - // Use ClassBType refs instead of plain string literal to make sure that needed ClassBTypes are initialized and reachable - private lazy val genStringBuilderEndDesc = MethodBType(Nil, StringRef).descriptor - - /* Concatenate top N arguments on the stack with `StringConcatFactory#makeConcatWithConstants` - * (only works for JDK 9+) - * - * can-multi-thread - */ - final def genIndyStringConcat( - recipe: String, - argTypes: Seq[asm.Type], - constants: Seq[String] - ): Unit = { - jmethod.visitInvokeDynamicInsn( - "makeConcatWithConstants", - asm.Type.getMethodDescriptor(StringRef.toASMType, argTypes:_*), - coreBTypes.jliStringConcatFactoryMakeConcatWithConstantsHandle, - (recipe +: constants):_* - ) - } - - /* - * Emits one or more conversion instructions based on the types given as arguments. - * - * @param from The type of the value to be converted into another type. - * @param to The type the value will be converted into. 
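A quick aside on genIndyStringConcat above: the recipe string follows java.lang.invoke.StringConcatFactory's encoding, in which '\u0001' stands for the next dynamic (stack) argument and '\u0002' for the next bootstrap constant, while ordinary characters are embedded literally. For an assumed expression such as "id=" + id + "/" + name, the invokedynamic call would receive two stack arguments and this recipe:

val recipe: String = "id=\u0001/\u0001"  // literal "id=", argument, literal "/", argument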
- * - * can-multi-thread - */ - final def emitT2T(from: BType, to: BType): Unit = { - - assert( - from.isNonVoidPrimitiveType && to.isNonVoidPrimitiveType, - s"Cannot emit primitive conversion from $from to $to" - ) - - def pickOne(opcs: Array[Int]): Unit = { // TODO index on to.sort - val chosen = (to: @unchecked) match { - case BYTE => opcs(0) - case SHORT => opcs(1) - case CHAR => opcs(2) - case INT => opcs(3) - case LONG => opcs(4) - case FLOAT => opcs(5) - case DOUBLE => opcs(6) - } - if (chosen != -1) { emit(chosen) } - } - - if (from == to) { return } - // the only conversion involving BOOL that is allowed is (BOOL -> BOOL) - assert(from != BOOL && to != BOOL, s"inconvertible types : $from -> $to") - - // We're done with BOOL already - from match { - - // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match" - - case BYTE => pickOne(JCodeMethodN.fromByteT2T) - case SHORT => pickOne(JCodeMethodN.fromShortT2T) - case CHAR => pickOne(JCodeMethodN.fromCharT2T) - case INT => pickOne(JCodeMethodN.fromIntT2T) - - case FLOAT => - import asm.Opcodes.{ F2L, F2D, F2I } - to match { - case LONG => emit(F2L) - case DOUBLE => emit(F2D) - case _ => emit(F2I); emitT2T(INT, to) - } - - case LONG => - import asm.Opcodes.{ L2F, L2D, L2I } - to match { - case FLOAT => emit(L2F) - case DOUBLE => emit(L2D) - case _ => emit(L2I); emitT2T(INT, to) - } - - case DOUBLE => - import asm.Opcodes.{ D2L, D2F, D2I } - to match { - case FLOAT => emit(D2F) - case LONG => emit(D2L) - case _ => emit(D2I); emitT2T(INT, to) - } - } - } // end of emitT2T() - - // can-multi-thread - final def boolconst(b: Boolean): Unit = { iconst(if (b) 1 else 0) } - - // can-multi-thread - final def iconst(cst: Int): Unit = { - if (cst >= -1 && cst <= 5) { - emit(Opcodes.ICONST_0 + cst) - } else if (cst >= java.lang.Byte.MIN_VALUE && cst <= java.lang.Byte.MAX_VALUE) { - jmethod.visitIntInsn(Opcodes.BIPUSH, cst) - } else if (cst >= java.lang.Short.MIN_VALUE && cst <= java.lang.Short.MAX_VALUE) { - jmethod.visitIntInsn(Opcodes.SIPUSH, cst) - } else { - jmethod.visitLdcInsn(Integer.valueOf(cst)) - } - } - - // can-multi-thread - final def lconst(cst: Long): Unit = { - if (cst == 0L || cst == 1L) { - emit(Opcodes.LCONST_0 + cst.asInstanceOf[Int]) - } else { - jmethod.visitLdcInsn(java.lang.Long.valueOf(cst)) - } - } - - // can-multi-thread - final def fconst(cst: Float): Unit = { - val bits: Int = java.lang.Float.floatToIntBits(cst) - if (bits == 0L || bits == 0x3f800000 || bits == 0x40000000) { // 0..2 - emit(Opcodes.FCONST_0 + cst.asInstanceOf[Int]) - } else { - jmethod.visitLdcInsn(java.lang.Float.valueOf(cst)) - } - } - - // can-multi-thread - final def dconst(cst: Double): Unit = { - val bits: Long = java.lang.Double.doubleToLongBits(cst) - if (bits == 0L || bits == 0x3ff0000000000000L) { // +0.0d and 1.0d - emit(Opcodes.DCONST_0 + cst.asInstanceOf[Int]) - } else { - jmethod.visitLdcInsn(java.lang.Double.valueOf(cst)) - } - } - - // can-multi-thread - final def newarray(elem: BType): Unit = { - elem match { - case c: RefBType => - /* phantom type at play in `Array(null)`, SI-1513. On the other hand, Array(()) has element type `scala.runtime.BoxedUnit` which isObject. 
*/ - jmethod.visitTypeInsn(Opcodes.ANEWARRAY, c.classOrArrayType) - case _ => - assert(elem.isNonVoidPrimitiveType) - val rand = { - // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match" - elem match { - case BOOL => Opcodes.T_BOOLEAN - case BYTE => Opcodes.T_BYTE - case SHORT => Opcodes.T_SHORT - case CHAR => Opcodes.T_CHAR - case INT => Opcodes.T_INT - case LONG => Opcodes.T_LONG - case FLOAT => Opcodes.T_FLOAT - case DOUBLE => Opcodes.T_DOUBLE - } - } - jmethod.visitIntInsn(Opcodes.NEWARRAY, rand) - } - } - - - final def load( idx: Int, tk: BType): Unit = { emitVarInsn(Opcodes.ILOAD, idx, tk) } // can-multi-thread - final def store(idx: Int, tk: BType): Unit = { emitVarInsn(Opcodes.ISTORE, idx, tk) } // can-multi-thread - final def iinc( idx: Int, increment: Int): Unit = jmethod.visitIincInsn(idx, increment) // can-multi-thread - - final def aload( tk: BType): Unit = { emitTypeBased(JCodeMethodN.aloadOpcodes, tk) } // can-multi-thread - final def astore(tk: BType): Unit = { emitTypeBased(JCodeMethodN.astoreOpcodes, tk) } // can-multi-thread - - final def neg(tk: BType): Unit = { emitPrimitive(JCodeMethodN.negOpcodes, tk) } // can-multi-thread - final def add(tk: BType): Unit = { emitPrimitive(JCodeMethodN.addOpcodes, tk) } // can-multi-thread - final def sub(tk: BType): Unit = { emitPrimitive(JCodeMethodN.subOpcodes, tk) } // can-multi-thread - final def mul(tk: BType): Unit = { emitPrimitive(JCodeMethodN.mulOpcodes, tk) } // can-multi-thread - final def div(tk: BType): Unit = { emitPrimitive(JCodeMethodN.divOpcodes, tk) } // can-multi-thread - final def rem(tk: BType): Unit = { emitPrimitive(JCodeMethodN.remOpcodes, tk) } // can-multi-thread - - // can-multi-thread - final def invokespecial(owner: String, name: String, desc: String, itf: Boolean): Unit = { - emitInvoke(Opcodes.INVOKESPECIAL, owner, name, desc, itf) - } - // can-multi-thread - final def invokestatic(owner: String, name: String, desc: String, itf: Boolean): Unit = { - emitInvoke(Opcodes.INVOKESTATIC, owner, name, desc, itf) - } - // can-multi-thread - final def invokeinterface(owner: String, name: String, desc: String): Unit = { - emitInvoke(Opcodes.INVOKEINTERFACE, owner, name, desc, itf = true) - } - // can-multi-thread - final def invokevirtual(owner: String, name: String, desc: String): Unit = { - emitInvoke(Opcodes.INVOKEVIRTUAL, owner, name, desc, itf = false) - } - - def emitInvoke(opcode: Int, owner: String, name: String, desc: String, itf: Boolean): Unit = { - val node = new MethodInsnNode(opcode, owner, name, desc, itf) - jmethod.instructions.add(node) - } - - - // can-multi-thread - final def goTo(label: asm.Label): Unit = { jmethod.visitJumpInsn(Opcodes.GOTO, label) } - // can-multi-thread - final def emitIF(cond: TestOp, label: asm.Label): Unit = { jmethod.visitJumpInsn(cond.opcodeIF(), label) } - // can-multi-thread - final def emitIF_ICMP(cond: TestOp, label: asm.Label): Unit = { jmethod.visitJumpInsn(cond.opcodeIFICMP(), label) } - // can-multi-thread - final def emitIF_ACMP(cond: TestOp, label: asm.Label): Unit = { - assert((cond == EQ) || (cond == NE), cond) - val opc = (if (cond == EQ) Opcodes.IF_ACMPEQ else Opcodes.IF_ACMPNE) - jmethod.visitJumpInsn(opc, label) - } - // can-multi-thread - final def emitIFNONNULL(label: asm.Label): Unit = { jmethod.visitJumpInsn(Opcodes.IFNONNULL, label) } - // can-multi-thread - final def emitIFNULL (label: asm.Label): Unit = { jmethod.visitJumpInsn(Opcodes.IFNULL, label) } - - // 
can-multi-thread - final def emitRETURN(tk: BType): Unit = { - if (tk == UNIT) { emit(Opcodes.RETURN) } - else { emitTypeBased(JCodeMethodN.returnOpcodes, tk) } - } - - /* Emits one of tableswitch or lookoupswitch. - * - * can-multi-thread - */ - final def emitSWITCH(keys: Array[Int], branches: Array[asm.Label], defaultBranch: asm.Label, minDensity: Double): Unit = { - assert(keys.length == branches.length) - - // For empty keys, it makes sense emitting LOOKUPSWITCH with defaultBranch only. - // Similar to what javac emits for a switch statement consisting only of a default case. - if (keys.length == 0) { - jmethod.visitLookupSwitchInsn(defaultBranch, keys, branches) - return - } - - // sort `keys` by increasing key, keeping `branches` in sync. TODO FIXME use quicksort - var i = 1 - while (i < keys.length) { - var j = 1 - while (j <= keys.length - i) { - if (keys(j) < keys(j - 1)) { - val tmp = keys(j) - keys(j) = keys(j - 1) - keys(j - 1) = tmp - val tmpL = branches(j) - branches(j) = branches(j - 1) - branches(j - 1) = tmpL - } - j += 1 - } - i += 1 - } - - // check for duplicate keys to avoid "VerifyError: unsorted lookupswitch" (SI-6011) - i = 1 - while (i < keys.length) { - if (keys(i-1) == keys(i)) { - abort("duplicate keys in SWITCH, can't pick arbitrarily one of them to evict, see SI-6011.") - } - i += 1 - } - - val keyMin = keys(0) - val keyMax = keys(keys.length - 1) - - val isDenseEnough: Boolean = { - /* Calculate in long to guard against overflow. TODO what overflow? */ - val keyRangeD: Double = (keyMax.asInstanceOf[Long] - keyMin + 1).asInstanceOf[Double] - val klenD: Double = keys.length - val kdensity: Double = (klenD / keyRangeD) - - kdensity >= minDensity - } - - if (isDenseEnough) { - // use a table in which holes are filled with defaultBranch. 
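The density check above can be read in isolation; this self-contained sketch reproduces the tableswitch-versus-lookupswitch decision, using the MIN_SWITCH_DENSITY value of 0.7 defined in BCodeHelpers earlier in this diff.

def prefersTableSwitch(sortedKeys: Array[Int], minDensity: Double = 0.7): Boolean =
  sortedKeys.nonEmpty && {
    val keyRange = sortedKeys.last.toLong - sortedKeys.head.toLong + 1  // long arithmetic guards against overflow
    sortedKeys.length.toDouble / keyRange.toDouble >= minDensity
  }

// prefersTableSwitch(Array(1, 2, 3, 5))   == true   (density 4/5 = 0.8: tableswitch, hole at 4 goes to default)
// prefersTableSwitch(Array(1, 100, 1000)) == false  (density 3/1000: lookupswitch)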
- val keyRange = (keyMax - keyMin + 1) - val newBranches = new Array[asm.Label](keyRange) - var oldPos = 0 - var i = 0 - while (i < keyRange) { - val key = keyMin + i; - if (keys(oldPos) == key) { - newBranches(i) = branches(oldPos) - oldPos += 1 - } else { - newBranches(i) = defaultBranch - } - i += 1 - } - assert(oldPos == keys.length, "emitSWITCH") - jmethod.visitTableSwitchInsn(keyMin, keyMax, defaultBranch, newBranches: _*) - } else { - jmethod.visitLookupSwitchInsn(defaultBranch, keys, branches) - } - } - - // internal helpers -- not part of the public API of `jcode` - // don't make private otherwise inlining will suffer - - // can-multi-thread - final def emitVarInsn(opc: Int, idx: Int, tk: BType): Unit = { - assert((opc == Opcodes.ILOAD) || (opc == Opcodes.ISTORE), opc) - jmethod.visitVarInsn(tk.typedOpcode(opc), idx) - } - - // ---------------- array load and store ---------------- - - // can-multi-thread - final def emitTypeBased(opcs: Array[Int], tk: BType): Unit = { - assert(tk != UNIT, tk) - val opc = { - if (tk.isRef) { opcs(0) } - else if (tk.isIntSizedType) { - (tk: @unchecked) match { - case BOOL | BYTE => opcs(1) - case SHORT => opcs(2) - case CHAR => opcs(3) - case INT => opcs(4) - } - } else { - (tk: @unchecked) match { - case LONG => opcs(5) - case FLOAT => opcs(6) - case DOUBLE => opcs(7) - } - } - } - emit(opc) - } - - // ---------------- primitive operations ---------------- - - // can-multi-thread - final def emitPrimitive(opcs: Array[Int], tk: BType): Unit = { - val opc = { - // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match" - tk match { - case LONG => opcs(1) - case FLOAT => opcs(2) - case DOUBLE => opcs(3) - case _ => opcs(0) - } - } - emit(opc) - } - - // can-multi-thread - final def drop(tk: BType): Unit = { emit(if (tk.isWideType) Opcodes.POP2 else Opcodes.POP) } - - // can-multi-thread - final def dup(tk: BType): Unit = { emit(if (tk.isWideType) Opcodes.DUP2 else Opcodes.DUP) } - - // ---------------- type checks and casts ---------------- - - // can-multi-thread - final def isInstance(tk: RefBType): Unit = { - jmethod.visitTypeInsn(Opcodes.INSTANCEOF, tk.classOrArrayType) - } - - // can-multi-thread - final def checkCast(tk: RefBType): Unit = { - // TODO ICode also requires: but that's too much, right? assert(!isBoxedType(tk), "checkcast on boxed type: " + tk) - jmethod.visitTypeInsn(Opcodes.CHECKCAST, tk.classOrArrayType) - } - - def abort(msg: String): Nothing = { - report.error(msg) - throw new RuntimeException(msg) - } - - } // end of class JCodeMethodN - - /* Constant-valued val-members of JCodeMethodN at the companion object, so as to avoid re-initializing them multiple times. 
*/ - object JCodeMethodN { - - import asm.Opcodes._ - - // ---------------- conversions ---------------- - - val fromByteT2T = { Array( -1, -1, I2C, -1, I2L, I2F, I2D) } // do nothing for (BYTE -> SHORT) and for (BYTE -> INT) - val fromCharT2T = { Array(I2B, I2S, -1, -1, I2L, I2F, I2D) } // for (CHAR -> INT) do nothing - val fromShortT2T = { Array(I2B, -1, I2C, -1, I2L, I2F, I2D) } // for (SHORT -> INT) do nothing - val fromIntT2T = { Array(I2B, I2S, I2C, -1, I2L, I2F, I2D) } - - // ---------------- array load and store ---------------- - - val aloadOpcodes = { Array(AALOAD, BALOAD, SALOAD, CALOAD, IALOAD, LALOAD, FALOAD, DALOAD) } - val astoreOpcodes = { Array(AASTORE, BASTORE, SASTORE, CASTORE, IASTORE, LASTORE, FASTORE, DASTORE) } - val returnOpcodes = { Array(ARETURN, IRETURN, IRETURN, IRETURN, IRETURN, LRETURN, FRETURN, DRETURN) } - - // ---------------- primitive operations ---------------- - - val negOpcodes: Array[Int] = { Array(INEG, LNEG, FNEG, DNEG) } - val addOpcodes: Array[Int] = { Array(IADD, LADD, FADD, DADD) } - val subOpcodes: Array[Int] = { Array(ISUB, LSUB, FSUB, DSUB) } - val mulOpcodes: Array[Int] = { Array(IMUL, LMUL, FMUL, DMUL) } - val divOpcodes: Array[Int] = { Array(IDIV, LDIV, FDIV, DDIV) } - val remOpcodes: Array[Int] = { Array(IREM, LREM, FREM, DREM) } - - } // end of object JCodeMethodN - - // ---------------- adapted from scalaPrimitives ---------------- - - /* Given `code` reports the src TypeKind of the coercion indicated by `code`. - * To find the dst TypeKind, `ScalaPrimitivesOps.generatedKind(code)` can be used. - * - * can-multi-thread - */ - final def coercionFrom(code: Int): BType = { - import ScalaPrimitivesOps._ - (code: @switch) match { - case B2B | B2C | B2S | B2I | B2L | B2F | B2D => BYTE - case S2B | S2S | S2C | S2I | S2L | S2F | S2D => SHORT - case C2B | C2S | C2C | C2I | C2L | C2F | C2D => CHAR - case I2B | I2S | I2C | I2I | I2L | I2F | I2D => INT - case L2B | L2S | L2C | L2I | L2L | L2F | L2D => LONG - case F2B | F2S | F2C | F2I | F2L | F2F | F2D => FLOAT - case D2B | D2S | D2C | D2I | D2L | D2F | D2D => DOUBLE - } - } - - /* If code is a coercion primitive, the result type. 
- * - * can-multi-thread - */ - final def coercionTo(code: Int): BType = { - import ScalaPrimitivesOps._ - (code: @switch) match { - case B2B | C2B | S2B | I2B | L2B | F2B | D2B => BYTE - case B2C | C2C | S2C | I2C | L2C | F2C | D2C => CHAR - case B2S | C2S | S2S | I2S | L2S | F2S | D2S => SHORT - case B2I | C2I | S2I | I2I | L2I | F2I | D2I => INT - case B2L | C2L | S2L | I2L | L2L | F2L | D2L => LONG - case B2F | C2F | S2F | I2F | L2F | F2F | D2F => FLOAT - case B2D | C2D | S2D | I2D | L2D | F2D | D2D => DOUBLE - } - } - - implicit class InsnIterMethodNode(mnode: asm.tree.MethodNode) { - @`inline` final def foreachInsn(f: (asm.tree.AbstractInsnNode) => Unit): Unit = { mnode.instructions.foreachInsn(f) } - } - - implicit class InsnIterInsnList(lst: asm.tree.InsnList) { - - @`inline` final def foreachInsn(f: (asm.tree.AbstractInsnNode) => Unit): Unit = { - val insnIter = lst.iterator() - while (insnIter.hasNext) { - f(insnIter.next()) - } - } - } -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/BCodeSkelBuilder.scala b/tests/pos-with-compiler-cc/backend/jvm/BCodeSkelBuilder.scala deleted file mode 100644 index 125ee26b0528..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/BCodeSkelBuilder.scala +++ /dev/null @@ -1,908 +0,0 @@ -package dotty.tools -package backend -package jvm - -import scala.language.unsafeNulls - -import scala.annotation.tailrec - -import scala.collection.{ mutable, immutable } - -import scala.tools.asm -import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.ast.TreeTypeMap -import dotty.tools.dotc.CompilationUnit -import dotty.tools.dotc.core.Decorators._ -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.StdNames._ -import dotty.tools.dotc.core.NameKinds._ -import dotty.tools.dotc.core.Names.TermName -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Types._ -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.util.Spans._ -import dotty.tools.dotc.report -import dotty.tools.dotc.transform.SymUtils._ - -/* - * - * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/ - * @version 1.0 - * - */ -trait BCodeSkelBuilder extends BCodeHelpers { - import int.{_, given} - import DottyBackendInterface.{symExtensions, _} - import tpd._ - import bTypes._ - import coreBTypes._ - import bCodeAsmCommon._ - - lazy val NativeAttr: Symbol = requiredClass[scala.native] - - /** The destination of a value generated by `genLoadTo`. */ - enum LoadDestination: - /** The value is put on the stack, and control flows through to the next opcode. */ - case FallThrough - /** The value is put on the stack, and control flow is transferred to the given `label`. */ - case Jump(label: asm.Label) - /** The value is RETURN'ed from the enclosing method. */ - case Return - /** The value is ATHROW'n. */ - case Throw - end LoadDestination - - /* - * There's a dedicated PlainClassBuilder for each CompilationUnit, - * which simplifies the initialization of per-class data structures in `genPlainClass()` which in turn delegates to `initJClass()` - * - * The entry-point to emitting bytecode instructions is `genDefDef()` where the per-method data structures are initialized, - * including `resetMethodBookkeeping()` and `initJMethod()`. - * Once that's been done, and assuming the method being visited isn't abstract, `emitNormalMethodBody()` populates - * the ASM MethodNode instance with ASM AbstractInsnNodes. 
- * - * Given that CleanUp delivers trees that produce values on the stack, - * the entry-point to all-things instruction-emit is `genLoad()`. - * There, an operation taking N arguments results in recursively emitting instructions to lead each of them, - * followed by emitting instructions to process those arguments (to be found at run-time on the operand-stack). - * - * In a few cases the above recipe deserves more details, as provided in the documentation for: - * - `genLoadTry()` - * - `genSynchronized() - * - `jumpDest` , `cleanups` , `labelDefsAtOrUnder` - */ - abstract class PlainSkelBuilder(cunit: CompilationUnit) - extends BCClassGen - with BCAnnotGen - with BCInnerClassGen - with JAndroidBuilder - with BCForwardersGen - with BCPickles - with BCJGenSigGen { - - // Strangely I can't find this in the asm code 255, but reserving 1 for "this" - inline val MaximumJvmParameters = 254 - - // current class - var cnode: ClassNode1 = null - var thisName: String = null // the internal name of the class being emitted - - var claszSymbol: Symbol = null - var isCZParcelable = false - var isCZStaticModule = false - - /* ---------------- idiomatic way to ask questions to typer ---------------- */ - - def paramTKs(app: Apply, take: Int = -1): List[BType] = app match { - case Apply(fun, _) => - val funSym = fun.symbol - (funSym.info.firstParamTypes map toTypeKind) // this tracks mentioned inner classes (in innerClassBufferASM) - } - - def symInfoTK(sym: Symbol): BType = { - toTypeKind(sym.info) // this tracks mentioned inner classes (in innerClassBufferASM) - } - - def tpeTK(tree: Tree): BType = { toTypeKind(tree.tpe) } - - override def getCurrentCUnit(): CompilationUnit = { cunit } - - /* ---------------- helper utils for generating classes and fields ---------------- */ - - def genPlainClass(cd0: TypeDef) = cd0 match { - case TypeDef(_, impl: Template) => - assert(cnode == null, "GenBCode detected nested methods.") - - claszSymbol = cd0.symbol - isCZParcelable = isAndroidParcelableClass(claszSymbol) - isCZStaticModule = claszSymbol.isStaticModuleClass - thisName = internalName(claszSymbol) - - cnode = new ClassNode1() - - initJClass(cnode) - - val cd = if (isCZStaticModule) { - // Move statements from the primary constructor following the superclass constructor call to - // a newly synthesised tree representing the "", which also assigns the MODULE$ field. - // Because the assigments to both the module instance fields, and the fields of the module itself - // are in the , these fields can be static + final. - - // Should we do this transformation earlier, say in Constructors? Or would that just cause - // pain for scala-{js, native}? - // - // @sjrd (https://github.com/scala/scala3/pull/9181#discussion_r457458205): - // moving that before the back-end would make things significantly more complicated for - // Scala.js and Native. Both have a first-class concept of ModuleClass, and encode the - // singleton pattern of MODULE$ in a completely different way. In the Scala.js IR, there - // even isn't anything that corresponds to MODULE$ per se. - // - // So if you move this before the back-end, then Scala.js and Scala Native will have to - // reverse all the effects of this transformation, which would be counter-productive. - - - // TODO: remove `!f.name.is(LazyBitMapName)` once we change lazy val encoding - // https://github.com/scala/scala3/issues/7140 - // - // Lazy val encoding assumes bitmap fields are non-static - // - // See `tests/run/given-var.scala` - // - - // !!! 
Part of this logic is duplicated in JSCodeGen.genCompilationUnit - claszSymbol.info.decls.foreach { f => - if f.isField && !f.name.is(LazyBitMapName) then - f.setFlag(JavaStatic) - } - - val (clinits, body) = impl.body.partition(stat => stat.isInstanceOf[DefDef] && stat.symbol.isStaticConstructor) - - val (uptoSuperStats, remainingConstrStats) = splitAtSuper(impl.constr.rhs.asInstanceOf[Block].stats) - val clInitSymbol: TermSymbol = - if (clinits.nonEmpty) clinits.head.symbol.asTerm - else newSymbol( - claszSymbol, - nme.STATIC_CONSTRUCTOR, - JavaStatic | Method, - MethodType(Nil)(_ => Nil, _ => defn.UnitType), - privateWithin = NoSymbol, - coord = claszSymbol.coord - ) - - val moduleField = newSymbol( - claszSymbol, - str.MODULE_INSTANCE_FIELD.toTermName, - JavaStatic | Final, - claszSymbol.typeRef, - privateWithin = NoSymbol, - coord = claszSymbol.coord - ).entered - - val thisMap = new TreeMap { - override def transform(tree: Tree)(using Context) = { - val tp = tree.tpe.substThis(claszSymbol.asClass, claszSymbol.sourceModule.termRef) - tree.withType(tp) match { - case tree: This if tree.symbol == claszSymbol => - ref(claszSymbol.sourceModule) - case tree => - super.transform(tree) - } - } - } - - def rewire(stat: Tree) = thisMap.transform(stat).changeOwner(claszSymbol.primaryConstructor, clInitSymbol) - - val callConstructor = New(claszSymbol.typeRef).select(claszSymbol.primaryConstructor).appliedToTermArgs(Nil) - val assignModuleField = Assign(ref(moduleField), callConstructor) - val remainingConstrStatsSubst = remainingConstrStats.map(rewire) - val clinit = clinits match { - case (ddef: DefDef) :: _ => - cpy.DefDef(ddef)(rhs = Block(ddef.rhs :: assignModuleField :: remainingConstrStatsSubst, unitLiteral)) - case _ => - DefDef(clInitSymbol, Block(assignModuleField :: remainingConstrStatsSubst, unitLiteral)) - } - - val constr2 = { - val rhs = Block(uptoSuperStats, impl.constr.rhs.asInstanceOf[Block].expr) - cpy.DefDef(impl.constr)(rhs = rhs) - } - - val impl2 = cpy.Template(impl)(constr = constr2, body = clinit :: body) - cpy.TypeDef(cd0)(rhs = impl2) - } else cd0 - - val hasStaticCtor = isCZStaticModule || cd.symbol.info.decls.exists(_.isStaticConstructor) - if (!hasStaticCtor && isCZParcelable) fabricateStaticInitAndroid() - - val optSerial: Option[Long] = - claszSymbol.getAnnotation(defn.SerialVersionUIDAnnot).flatMap { annot => - if (claszSymbol.is(Trait)) { - report.warning("@SerialVersionUID does nothing on a trait", annot.tree.sourcePos) - None - } else { - val vuid = annot.argumentConstant(0).map(_.longValue) - if (vuid.isEmpty) - report.error("The argument passed to @SerialVersionUID must be a constant", - annot.argument(0).getOrElse(annot.tree).sourcePos) - vuid - } - } - if (optSerial.isDefined) { addSerialVUID(optSerial.get, cnode)} - - addClassFields() - gen(cd.rhs) - - if (AsmUtils.traceClassEnabled && cnode.name.contains(AsmUtils.traceClassPattern)) - AsmUtils.traceClass(cnode) - - cnode.innerClasses - assert(cd.symbol == claszSymbol, "Someone messed up BCodePhase.claszSymbol during genPlainClass().") - - } // end of method genPlainClass() - - /* - * must-single-thread - */ - private def initJClass(jclass: asm.ClassVisitor): Unit = { - - val ps = claszSymbol.info.parents - val superClass: String = if (ps.isEmpty) ObjectRef.internalName else internalName(ps.head.typeSymbol) - val interfaceNames0 = classBTypeFromSymbol(claszSymbol).info.interfaces.map(_.internalName) - /* To avoid deadlocks when combining objects, lambdas and multi-threading, - * lambdas in objects are 
compiled to instance methods of the module class - * instead of static methods (see tests/run/deadlock.scala and - * https://github.com/scala/scala-dev/issues/195 for details). - * This has worked well for us so far but this is problematic for - * serialization: serializing a lambda requires serializing all the values - * it captures, if this lambda is in an object, this means serializing the - * enclosing object, which fails if the object does not extend - * Serializable. - * Because serializing objects is basically free since #5775, it seems like - * the simplest solution is to simply make all objects Serializable, this - * certainly seems preferable to deadlocks. - * This cannot be done earlier because Scala.js would not like it (#9596). - */ - val interfaceNames = - if (claszSymbol.is(ModuleClass) && !interfaceNames0.contains("java/io/Serializable")) - interfaceNames0 :+ "java/io/Serializable" - else - interfaceNames0 - - val flags = javaFlags(claszSymbol) - - val thisSignature = getGenericSignature(claszSymbol, claszSymbol.owner) - cnode.visit(classfileVersion, flags, - thisName, thisSignature, - superClass, interfaceNames.toArray) - - if (emitSource) { - cnode.visitSource(cunit.source.file.name, null /* SourceDebugExtension */) - } - - enclosingMethodAttribute(claszSymbol, internalName, asmMethodType(_).descriptor) match { - case Some(EnclosingMethodEntry(className, methodName, methodDescriptor)) => - cnode.visitOuterClass(className, methodName, methodDescriptor) - case _ => () - } - - val ssa = None // TODO: inlined form `getAnnotPickle(thisName, claszSymbol)`. Should something be done on Dotty? - cnode.visitAttribute(if (ssa.isDefined) pickleMarkerLocal else pickleMarkerForeign) - emitAnnotations(cnode, claszSymbol.annotations ++ ssa) - - if (!isCZStaticModule && !isCZParcelable) { - val skipStaticForwarders = (claszSymbol.is(Module) || ctx.settings.XnoForwarders.value) - if (!skipStaticForwarders) { - val lmoc = claszSymbol.companionModule - // add static forwarders if there are no name conflicts; see bugs #363 and #1735 - if (lmoc != NoSymbol) { - // it must be a top level class (name contains no $s) - val isCandidateForForwarders = (lmoc.is(Module)) && lmoc.isStatic - if (isCandidateForForwarders) { - report.log(s"Adding static forwarders from '$claszSymbol' to implementations in '$lmoc'") - addForwarders(cnode, thisName, lmoc.moduleClass) - } - } - } - - } - - // the invoker is responsible for adding a class-static constructor. - - } // end of method initJClass - - /* - * must-single-thread - */ - private def fabricateStaticInitAndroid(): Unit = { - - val clinit: asm.MethodVisitor = cnode.visitMethod( - GenBCodeOps.PublicStatic, // TODO confirm whether we really don't want ACC_SYNTHETIC nor ACC_DEPRECATED - CLASS_CONSTRUCTOR_NAME, - "()V", - null, // no java-generic-signature - null // no throwable exceptions - ) - clinit.visitCode() - - legacyAddCreatorCode(clinit, cnode, thisName) - - clinit.visitInsn(asm.Opcodes.RETURN) - clinit.visitMaxs(0, 0) // just to follow protocol, dummy arguments - clinit.visitEnd() - } - - def addClassFields(): Unit = { - /* Non-method term members are fields, except for module members. Module - * members can only happen on .NET (no flatten) for inner traits. There, - * a module symbol is generated (transformInfo in mixin) which is used - * as owner for the members of the implementation class (so that the - * backend emits them as static). - * No code is needed for this module symbol. 
- */ - for (f <- claszSymbol.info.decls.filter(p => p.isTerm && !p.is(Method))) { - val javagensig = getGenericSignature(f, claszSymbol) - val flags = javaFieldFlags(f) - - assert(!f.isStaticMember || !claszSymbol.isInterface || !f.is(Mutable), - s"interface $claszSymbol cannot have non-final static field $f") - - val jfield = new asm.tree.FieldNode( - flags, - f.javaSimpleName, - symInfoTK(f).descriptor, - javagensig, - null // no initial value - ) - cnode.fields.add(jfield) - emitAnnotations(jfield, f.annotations) - } - - } // end of method addClassFields() - - // current method - var mnode: MethodNode1 = null - var jMethodName: String = null - var isMethSymStaticCtor = false - var returnType: BType = null - var methSymbol: Symbol = null - // used by genLoadTry() and genSynchronized() - var earlyReturnVar: Symbol = null - var shouldEmitCleanup = false - // line numbers - var lastEmittedLineNr = -1 - - object bc extends JCodeMethodN { - override def jmethod = PlainSkelBuilder.this.mnode - } - - /* ---------------- Part 1 of program points, ie Labels in the ASM world ---------------- */ - - /* - * A jump is represented as a Return node whose `from` symbol denotes a Labeled's Bind node, the target of the jump. - * The `jumpDest` map is used to find the `LoadDestination` at the end of the `Labeled` block, as well as the - * corresponding expected type. The `LoadDestination` can never be `FallThrough` here. - */ - var jumpDest: immutable.Map[ /* Labeled */ Symbol, (BType, LoadDestination) ] = null - def registerJumpDest(labelSym: Symbol, expectedType: BType, dest: LoadDestination): Unit = { - assert(labelSym.is(Label), s"trying to register a jump-dest for a non-label symbol, at: ${labelSym.span}") - assert(dest != LoadDestination.FallThrough, s"trying to register a FallThrough dest for label, at: ${labelSym.span}") - assert(!jumpDest.contains(labelSym), s"trying to register a second jump-dest for label, at: ${labelSym.span}") - jumpDest += (labelSym -> (expectedType, dest)) - } - def findJumpDest(labelSym: Symbol): (BType, LoadDestination) = { - assert(labelSym.is(Label), s"trying to map a non-label symbol to an asm.Label, at: ${labelSym.span}") - jumpDest.getOrElse(labelSym, { - abort(s"unknown label symbol, for label at: ${labelSym.span}") - }) - } - - /* - * A program point may be lexically nested (at some depth) - * (a) in the try-clause of a try-with-finally expression - * (b) in a synchronized block. - * Each of the constructs above establishes a "cleanup block" to execute upon - * both normal-exit, early-return, and abrupt-termination of the instructions it encloses. - * - * The `cleanups` LIFO queue represents the nesting of active (for the current program point) - * pending cleanups. For each such cleanup an asm.Label indicates the start of its cleanup-block. - * At any given time during traversal of the method body, - * the head of `cleanups` denotes the cleanup-block for the closest enclosing try-with-finally or synchronized-expression. - * - * `cleanups` is used: - * - * (1) upon visiting a Return statement. - * In case of pending cleanups, we can't just emit a RETURN instruction, but must instead: - * - store the result (if any) in `earlyReturnVar`, and - * - jump to the next pending cleanup. - * See `genReturn()` - * - * (2) upon emitting a try-with-finally or a synchronized-expr, - * In these cases, the targets of the above jumps are emitted, - * provided an early exit was actually encountered somewhere in the protected clauses. 
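// ---------------------------------------------------------------------------
// Editorial sketch (not part of the deleted source above): the LIFO nesting
// discipline documented here for `cleanups`. Labels are plain strings instead
// of asm.Label, and `earlyReturnTarget` is a hypothetical helper; on an early
// return the real code stores the result in `earlyReturnVar` and jumps to the
// innermost pending cleanup instead of emitting RETURN directly.
// ---------------------------------------------------------------------------
object CleanupStackSketch {
  private var cleanups: List[String] = Nil

  def registerCleanup(lbl: String): Unit = cleanups = lbl :: cleanups

  def unregisterCleanup(lbl: String): Unit = {
    assert(cleanups.headOption.contains(lbl), s"bad nesting: $cleanups vs $lbl")
    cleanups = cleanups.tail
  }

  /** Where an early return must jump, if a cleanup is still pending. */
  def earlyReturnTarget: Option[String] = cleanups.headOption

  def main(args: Array[String]): Unit = {
    registerCleanup("outerFinallyCleanup")
    registerCleanup("innerFinallyCleanup")
    println(earlyReturnTarget) // Some(innerFinallyCleanup): jump there, not RETURN
    unregisterCleanup("innerFinallyCleanup")
    unregisterCleanup("outerFinallyCleanup")
    println(earlyReturnTarget) // None: a plain RETURN is now legal
  }
}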
- * See `genLoadTry()` and `genSynchronized()` - * - * The code thus emitted for jumps and targets covers the early-return case. - * The case of abrupt (ie exceptional) termination is covered by exception handlers - * emitted for that purpose as described in `genLoadTry()` and `genSynchronized()`. - */ - var cleanups: List[asm.Label] = Nil - def registerCleanup(finCleanup: asm.Label): Unit = { - if (finCleanup != null) { cleanups = finCleanup :: cleanups } - } - def unregisterCleanup(finCleanup: asm.Label): Unit = { - if (finCleanup != null) { - assert(cleanups.head eq finCleanup, - s"Bad nesting of cleanup operations: $cleanups trying to unregister: $finCleanup") - cleanups = cleanups.tail - } - } - - /* ---------------- local variables and params ---------------- */ - - case class Local(tk: BType, name: String, idx: Int, isSynth: Boolean) - - /* - * Bookkeeping for method-local vars and method-params. - * - * TODO: use fewer slots. local variable slots are never re-used in separate blocks. - * In the following example, x and y could use the same slot. - * def foo() = { - * { val x = 1 } - * { val y = "a" } - * } - */ - object locals { - - private val slots = mutable.AnyRefMap.empty[Symbol, Local] // (local-or-param-sym -> Local(BType, name, idx, isSynth)) - - private var nxtIdx = -1 // next available index for local-var - - def reset(isStaticMethod: Boolean): Unit = { - slots.clear() - nxtIdx = if (isStaticMethod) 0 else 1 - } - - def contains(locSym: Symbol): Boolean = { slots.contains(locSym) } - - def apply(locSym: Symbol): Local = { slots.apply(locSym) } - - /* Make a fresh local variable, ensuring a unique name. - * The invoker must make sure inner classes are tracked for the sym's tpe. - */ - def makeLocal(tk: BType, name: String, tpe: Type, pos: Span): Symbol = { - - val locSym = newSymbol(methSymbol, name.toTermName, Synthetic, tpe, NoSymbol, pos) - makeLocal(locSym, tk) - locSym - } - - def makeLocal(locSym: Symbol): Local = { - makeLocal(locSym, symInfoTK(locSym)) - } - - def getOrMakeLocal(locSym: Symbol): Local = { - // `getOrElse` below has the same effect as `getOrElseUpdate` because `makeLocal()` adds an entry to the `locals` map. - slots.getOrElse(locSym, makeLocal(locSym)) - } - - def reuseLocal(sym: Symbol, loc: Local): Unit = - val existing = slots.put(sym, loc) - if (existing.isDefined) - report.error("attempt to create duplicate local var.", ctx.source.atSpan(sym.span)) - - def reuseThisSlot(sym: Symbol): Unit = - reuseLocal(sym, Local(symInfoTK(sym), sym.javaSimpleName, 0, sym.is(Synthetic))) - - private def makeLocal(sym: Symbol, tk: BType): Local = { - assert(nxtIdx != -1, "not a valid start index") - val loc = Local(tk, sym.javaSimpleName, nxtIdx, sym.is(Synthetic)) - val existing = slots.put(sym, loc) - if (existing.isDefined) - report.error("attempt to create duplicate local var.", ctx.source.atSpan(sym.span)) - assert(tk.size > 0, "makeLocal called for a symbol whose type is Unit.") - nxtIdx += tk.size - loc - } - - // not to be confused with `fieldStore` and `fieldLoad` which also take a symbol but a field-symbol. - def store(locSym: Symbol): Unit = { - val Local(tk, _, idx, _) = slots(locSym) - bc.store(idx, tk) - } - - def load(locSym: Symbol): Unit = { - val Local(tk, _, idx, _) = slots(locSym) - bc.load(idx, tk) - } - - } - - /* ---------------- Part 2 of program points, ie Labels in the ASM world ---------------- */ - - // bookkeeping the scopes of non-synthetic local vars, to emit debug info (`emitVars`). 
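// ---------------------------------------------------------------------------
// Editorial sketch (not part of the deleted source above): how `locals`
// assigns JVM local-variable indices. Slot 0 is reserved for `this` in
// instance methods, and wide types (long `J`, double `D`) occupy two slots.
// `assignSlots` is a hypothetical helper over plain type descriptors.
// ---------------------------------------------------------------------------
object LocalSlotSketch {
  final case class Local(descriptor: String, idx: Int)

  def assignSlots(isStaticMethod: Boolean, descriptors: List[String]): List[Local] = {
    var nxtIdx = if (isStaticMethod) 0 else 1
    descriptors.map { d =>
      val loc = Local(d, nxtIdx)
      nxtIdx += (if (d == "J" || d == "D") 2 else 1) // long and double are wide
      loc
    }
  }

  def main(args: Array[String]): Unit = {
    // an instance method taking (long, int, double) gets slots 1, 3 and 4
    assignSlots(isStaticMethod = false, List("J", "I", "D")).foreach(println)
  }
}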
- var varsInScope: List[(Symbol, asm.Label)] = null // (local-var-sym -> start-of-scope) - - // helpers around program-points. - def lastInsn: asm.tree.AbstractInsnNode = mnode.instructions.getLast - def currProgramPoint(): asm.Label = { - lastInsn match { - case labnode: asm.tree.LabelNode => labnode.getLabel - case _ => - val pp = new asm.Label - mnode visitLabel pp - pp - } - } - def markProgramPoint(lbl: asm.Label): Unit = { - val skip = (lbl == null) || isAtProgramPoint(lbl) - if (!skip) { mnode visitLabel lbl } - } - def isAtProgramPoint(lbl: asm.Label): Boolean = { - def getNonLineNumberNode(a: asm.tree.AbstractInsnNode): asm.tree.AbstractInsnNode = a match { - case a: asm.tree.LineNumberNode => getNonLineNumberNode(a.getPrevious) // line numbers aren't part of code itself - case _ => a - } - (getNonLineNumberNode(lastInsn) match { - case labnode: asm.tree.LabelNode => (labnode.getLabel == lbl); - case _ => false } ) - } - def lineNumber(tree: Tree): Unit = { - if (!emitLines || !tree.span.exists) return; - val nr = ctx.source.offsetToLine(tree.span.point) + 1 - if (nr != lastEmittedLineNr) { - lastEmittedLineNr = nr - lastInsn match { - case lnn: asm.tree.LineNumberNode => - // overwrite previous landmark as no instructions have been emitted for it - lnn.line = nr - case _ => - mnode.visitLineNumber(nr, currProgramPoint()) - } - } - } - - // on entering a method - def resetMethodBookkeeping(dd: DefDef) = { - val rhs = dd.rhs - locals.reset(isStaticMethod = methSymbol.isStaticMember) - jumpDest = immutable.Map.empty - - // check previous invocation of genDefDef exited as many varsInScope as it entered. - assert(varsInScope == null, "Unbalanced entering/exiting of GenBCode's genBlock().") - // check previous invocation of genDefDef unregistered as many cleanups as it registered. - assert(cleanups == Nil, "Previous invocation of genDefDef didn't unregister as many cleanups as it registered.") - earlyReturnVar = null - shouldEmitCleanup = false - - lastEmittedLineNr = -1 - } - - /* ---------------- top-down traversal invoking ASM Tree API along the way ---------------- */ - - def gen(tree: Tree): Unit = { - tree match { - case tpd.EmptyTree => () - - case ValDef(name, tpt, rhs) => () // fields are added in `genPlainClass()`, via `addClassFields()` - - case dd: DefDef => - /* First generate a static forwarder if this is a non-private trait - * trait method. This is required for super calls to this method, which - * go through the static forwarder in order to work around limitations - * of the JVM. - * - * For the $init$ method, we must not leave it as a default method, but - * instead we must put the whole body in the static method. If we leave - * it as a default method, Java classes cannot extend Scala classes that - * extend several Scala traits, since they then inherit unrelated default - * $init$ methods. See #8599. scalac does the same thing. - * - * In theory, this would go in a separate MiniPhase, but it would have to - * sit in a MegaPhase of its own between GenSJSIR and GenBCode, so the cost - * is not worth it. We directly do it in this back-end instead, which also - * kind of makes sense because it is JVM-specific. 
- */ - val sym = dd.symbol - val needsStaticImplMethod = - claszSymbol.isInterface && !dd.rhs.isEmpty && !sym.isPrivate && !sym.isStaticMember - if needsStaticImplMethod then - if sym.name == nme.TRAIT_CONSTRUCTOR then - genTraitConstructorDefDef(dd) - else - genStaticForwarderForDefDef(dd) - genDefDef(dd) - else - genDefDef(dd) - - case tree: Template => - val body = - if (tree.constr.rhs.isEmpty) tree.body - else tree.constr :: tree.body - body foreach gen - - case _ => abort(s"Illegal tree in gen: $tree") - } - } - - /* - * must-single-thread - */ - def initJMethod(flags: Int, params: List[Symbol]): Unit = { - - val jgensig = getGenericSignature(methSymbol, claszSymbol) - val (excs, others) = methSymbol.annotations.partition(_.symbol eq defn.ThrowsAnnot) - val thrownExceptions: List[String] = getExceptions(excs) - - val bytecodeName = - if (isMethSymStaticCtor) CLASS_CONSTRUCTOR_NAME - else jMethodName - - val mdesc = asmMethodType(methSymbol).descriptor - mnode = cnode.visitMethod( - flags, - bytecodeName, - mdesc, - jgensig, - mkArrayS(thrownExceptions) - ).asInstanceOf[MethodNode1] - - // TODO param names: (m.params map (p => javaName(p.sym))) - - emitAnnotations(mnode, others) - emitParamNames(mnode, params) - emitParamAnnotations(mnode, params.map(_.annotations)) - - } // end of method initJMethod - - private def genTraitConstructorDefDef(dd: DefDef): Unit = - val statifiedDef = makeStatifiedDefDef(dd) - genDefDef(statifiedDef) - - /** Creates a copy of the given DefDef that is static and where an explicit - * self parameter represents the original `this` value. - * - * Example: from - * {{{ - * trait Enclosing { - * def foo(x: Int): String = this.toString() + x - * } - * }}} - * the statified version of `foo` would be - * {{{ - * static def foo($self: Enclosing, x: Int): String = $self.toString() + x - * }}} - */ - private def makeStatifiedDefDef(dd: DefDef): DefDef = - val origSym = dd.symbol.asTerm - val newSym = makeStatifiedDefSymbol(origSym, origSym.name) - tpd.DefDef(newSym, { paramRefss => - val selfParamRef :: regularParamRefs = paramRefss.head: @unchecked - val enclosingClass = origSym.owner.asClass - new TreeTypeMap( - typeMap = _.substThis(enclosingClass, selfParamRef.symbol.termRef) - .subst(dd.termParamss.head.map(_.symbol), regularParamRefs.map(_.symbol.termRef)), - treeMap = { - case tree: This if tree.symbol == enclosingClass => selfParamRef - case tree => tree - }, - oldOwners = origSym :: Nil, - newOwners = newSym :: Nil - ).transform(dd.rhs) - }) - - private def genStaticForwarderForDefDef(dd: DefDef): Unit = - val forwarderDef = makeStaticForwarder(dd) - genDefDef(forwarderDef) - - /* Generates a synthetic static forwarder for a trait method. - * For a method such as - * def foo(...args: Ts): R - * in trait X, we generate the following method: - * static def foo$($this: X, ...args: Ts): R = - * invokespecial $this.X::foo(...args) - * We force an invokespecial with the attachment UseInvokeSpecial. It is - * necessary to make sure that the call will not follow overrides of foo() - * in subtraits and subclasses, since the whole point of this forward is to - * encode super calls. 
- */ - private def makeStaticForwarder(dd: DefDef): DefDef = - val origSym = dd.symbol.asTerm - val name = traitSuperAccessorName(origSym).toTermName - val sym = makeStatifiedDefSymbol(origSym, name) - tpd.DefDef(sym, { paramss => - val params = paramss.head - tpd.Apply(params.head.select(origSym), params.tail) - .withAttachment(BCodeHelpers.UseInvokeSpecial, ()) - }) - - private def makeStatifiedDefSymbol(origSym: TermSymbol, name: TermName): TermSymbol = - val info = origSym.info match - case mt: MethodType => - MethodType(nme.SELF :: mt.paramNames, origSym.owner.typeRef :: mt.paramInfos, mt.resType) - origSym.copy( - name = name.toTermName, - flags = Method | JavaStatic, - info = info - ).asTerm - - def genDefDef(dd: DefDef): Unit = { - val rhs = dd.rhs - val vparamss = dd.termParamss - // the only method whose implementation is not emitted: getClass() - if (dd.symbol eq defn.Any_getClass) { return } - assert(mnode == null, "GenBCode detected nested method.") - - methSymbol = dd.symbol - jMethodName = methSymbol.javaSimpleName - returnType = asmMethodType(dd.symbol).returnType - isMethSymStaticCtor = methSymbol.isStaticConstructor - - resetMethodBookkeeping(dd) - - // add method-local vars for params - - assert(vparamss.isEmpty || vparamss.tail.isEmpty, s"Malformed parameter list: $vparamss") - val params = if (vparamss.isEmpty) Nil else vparamss.head - for (p <- params) { locals.makeLocal(p.symbol) } - // debug assert((params.map(p => locals(p.symbol).tk)) == asmMethodType(methSymbol).getArgumentTypes.toList, "debug") - - if (params.size > MaximumJvmParameters) { - // SI-7324 - report.error(em"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters.", ctx.source.atSpan(methSymbol.span)) - return - } - - val isNative = methSymbol.hasAnnotation(NativeAttr) - val isAbstractMethod = (methSymbol.is(Deferred) || (methSymbol.owner.isInterface && ((methSymbol.is(Deferred)) || methSymbol.isClassConstructor))) - val flags = - import GenBCodeOps.addFlagIf - javaFlags(methSymbol) - .addFlagIf(isAbstractMethod, asm.Opcodes.ACC_ABSTRACT) - .addFlagIf(false /*methSymbol.isStrictFP*/, asm.Opcodes.ACC_STRICT) - .addFlagIf(isNative, asm.Opcodes.ACC_NATIVE) // native methods of objects are generated in mirror classes - - // TODO needed? 
for(ann <- m.symbol.annotations) { ann.symbol.initialize } - val paramSyms = params.map(_.symbol) - initJMethod(flags, paramSyms) - - - if (!isAbstractMethod && !isNative) { - // #14773 Reuse locals slots for tailrec-generated mutable vars - val trimmedRhs: Tree = - @tailrec def loop(stats: List[Tree]): List[Tree] = - stats match - case (tree @ ValDef(TailLocalName(_, _), _, _)) :: rest if tree.symbol.isAllOf(Mutable | Synthetic) => - tree.rhs match - case This(_) => - locals.reuseThisSlot(tree.symbol) - loop(rest) - case rhs: Ident if paramSyms.contains(rhs.symbol) => - locals.reuseLocal(tree.symbol, locals(rhs.symbol)) - loop(rest) - case _ => - stats - case _ => - stats - end loop - - rhs match - case Block(stats, expr) => - val trimmedStats = loop(stats) - if trimmedStats eq stats then - rhs - else - Block(trimmedStats, expr) - case _ => - rhs - end trimmedRhs - - def emitNormalMethodBody(): Unit = { - val veryFirstProgramPoint = currProgramPoint() - - if trimmedRhs == tpd.EmptyTree then - report.error( - em"Concrete method has no definition: $dd${ - if (ctx.settings.Ydebug.value) "(found: " + methSymbol.owner.info.decls.toList.mkString(", ") + ")" - else ""}", - ctx.source.atSpan(NoSpan) - ) - else - genLoadTo(trimmedRhs, returnType, LoadDestination.Return) - - if (emitVars) { - // add entries to LocalVariableTable JVM attribute - val onePastLastProgramPoint = currProgramPoint() - val hasStaticBitSet = ((flags & asm.Opcodes.ACC_STATIC) != 0) - if (!hasStaticBitSet) { - mnode.visitLocalVariable( - "this", - "L" + thisName + ";", - null, - veryFirstProgramPoint, - onePastLastProgramPoint, - 0 - ) - } - for (p <- params) { emitLocalVarScope(p.symbol, veryFirstProgramPoint, onePastLastProgramPoint, force = true) } - } - - if (isMethSymStaticCtor) { appendToStaticCtor(dd) } - } // end of emitNormalMethodBody() - - lineNumber(rhs) - emitNormalMethodBody() - - // Note we don't invoke visitMax, thus there are no FrameNode among mnode.instructions. - // The only non-instruction nodes to be found are LabelNode and LineNumberNode. - } - - if (AsmUtils.traceMethodEnabled && mnode.name.contains(AsmUtils.traceMethodPattern)) - AsmUtils.traceMethod(mnode) - - mnode = null - } // end of method genDefDef() - - /* - * must-single-thread - * - * TODO document, explain interplay with `fabricateStaticInitAndroid()` - */ - private def appendToStaticCtor(dd: DefDef): Unit = { - - def insertBefore( - location: asm.tree.AbstractInsnNode, - i0: asm.tree.AbstractInsnNode, - i1: asm.tree.AbstractInsnNode): Unit = { - if (i0 != null) { - mnode.instructions.insertBefore(location, i0.clone(null)) - mnode.instructions.insertBefore(location, i1.clone(null)) - } - } - - // collect all return instructions - var rets: List[asm.tree.AbstractInsnNode] = Nil - mnode foreachInsn { i => if (i.getOpcode() == asm.Opcodes.RETURN) { rets ::= i } } - if (rets.isEmpty) { return } - - var insnParcA: asm.tree.AbstractInsnNode = null - var insnParcB: asm.tree.AbstractInsnNode = null - // android creator code - if (isCZParcelable) { - // add a static field ("CREATOR") to this class to cache android.os.Parcelable$Creator - val andrFieldDescr = classBTypeFromSymbol(AndroidCreatorClass).descriptor - cnode.visitField( - asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL, - "CREATOR", - andrFieldDescr, - null, - null - ) - // INVOKESTATIC CREATOR(): android.os.Parcelable$Creator; -- TODO where does this Android method come from? 
- val callee = claszSymbol.companionModule.info.member(androidFieldName).symbol - val jowner = internalName(callee.owner) - val jname = callee.javaSimpleName - val jtype = asmMethodType(callee).descriptor - insnParcA = new asm.tree.MethodInsnNode(asm.Opcodes.INVOKESTATIC, jowner, jname, jtype, false) - // PUTSTATIC `thisName`.CREATOR; - insnParcB = new asm.tree.FieldInsnNode(asm.Opcodes.PUTSTATIC, thisName, "CREATOR", andrFieldDescr) - } - - // insert a few instructions for initialization before each return instruction - for(r <- rets) { - insertBefore(r, insnParcA, insnParcB) - } - - } - - def emitLocalVarScope(sym: Symbol, start: asm.Label, end: asm.Label, force: Boolean = false): Unit = { - val Local(tk, name, idx, isSynth) = locals(sym) - if (force || !isSynth) { - mnode.visitLocalVariable(name, tk.descriptor, null, start, end, idx) - } - } - - def genLoadTo(tree: Tree, expectedType: BType, dest: LoadDestination): Unit - - } // end of class PlainSkelBuilder - -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/BCodeSyncAndTry.scala b/tests/pos-with-compiler-cc/backend/jvm/BCodeSyncAndTry.scala deleted file mode 100644 index b5ed27511e7e..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/BCodeSyncAndTry.scala +++ /dev/null @@ -1,426 +0,0 @@ -package dotty.tools -package backend -package jvm - -import scala.language.unsafeNulls - -import scala.collection.immutable -import scala.tools.asm - -import dotty.tools.dotc.CompilationUnit -import dotty.tools.dotc.core.StdNames.nme -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.ast.tpd - -/* - * - * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/ - * @version 1.0 - * - */ -trait BCodeSyncAndTry extends BCodeBodyBuilder { - import int.given - import tpd._ - import bTypes._ - import coreBTypes._ - /* - * Functionality to lower `synchronized` and `try` expressions. - */ - abstract class SyncAndTryBuilder(cunit: CompilationUnit) extends PlainBodyBuilder(cunit) { - - def genSynchronized(tree: Apply, expectedType: BType): BType = (tree: @unchecked) match { - case Apply(TypeApply(fun, _), args) => - val monitor = locals.makeLocal(ObjectRef, "monitor", defn.ObjectType, tree.span) - val monCleanup = new asm.Label - - // if the synchronized block returns a result, store it in a local variable. - // Just leaving it on the stack is not valid in MSIL (stack is cleaned when leaving try-blocks). - val hasResult = (expectedType != UNIT) - val monitorResult: Symbol = if (hasResult) locals.makeLocal(tpeTK(args.head), "monitorResult", defn.ObjectType, tree.span) else null - - /* ------ (1) pushing and entering the monitor, also keeping a reference to it in a local var. ------ */ - genLoadQualifier(fun) - bc dup ObjectRef - locals.store(monitor) - emit(asm.Opcodes.MONITORENTER) - - /* ------ (2) Synchronized block. - * Reached by fall-through from (1). - * Protected by: - * (2.a) the EH-version of the monitor-exit, and - * (2.b) whatever protects the whole synchronized expression. - * ------ - */ - val startProtected = currProgramPoint() - registerCleanup(monCleanup) - genLoad(args.head, expectedType /* toTypeKind(tree.tpe.resultType) */) - unregisterCleanup(monCleanup) - if (hasResult) { locals.store(monitorResult) } - nopIfNeeded(startProtected) - val endProtected = currProgramPoint() - - /* ------ (3) monitor-exit after normal, non-early-return, termination of (2). - * Reached by fall-through from (2). - * Protected by whatever protects the whole synchronized expression. 
- * ------ - */ - locals.load(monitor) - emit(asm.Opcodes.MONITOREXIT) - if (hasResult) { locals.load(monitorResult) } - val postHandler = new asm.Label - bc goTo postHandler - - /* ------ (4) exception-handler version of monitor-exit code. - * Reached upon abrupt termination of (2). - * Protected by whatever protects the whole synchronized expression. - * null => "any" exception in bytecode, like we emit for finally. - * Important not to use j/l/Throwable which dooms the method to a life of interpretation! (SD-233) - * ------ - */ - protect(startProtected, endProtected, currProgramPoint(), null) - locals.load(monitor) - emit(asm.Opcodes.MONITOREXIT) - emit(asm.Opcodes.ATHROW) - - /* ------ (5) cleanup version of monitor-exit code. - * Reached upon early-return from (2). - * Protected by whatever protects the whole synchronized expression. - * ------ - */ - if (shouldEmitCleanup) { - markProgramPoint(monCleanup) - locals.load(monitor) - emit(asm.Opcodes.MONITOREXIT) - pendingCleanups() - } - - /* ------ (6) normal exit of the synchronized expression. - * Reached after normal, non-early-return, termination of (3). - * Protected by whatever protects the whole synchronized expression. - * ------ - */ - mnode visitLabel postHandler - - lineNumber(tree) - - expectedType - } - - /* - * Detects whether no instructions have been emitted since label `lbl` and if so emits a NOP. - * Useful to avoid emitting an empty try-block being protected by exception handlers, - * which results in "java.lang.ClassFormatError: Illegal exception table range". See SI-6102. - */ - def nopIfNeeded(lbl: asm.Label): Unit = { - val noInstructionEmitted = isAtProgramPoint(lbl) - if (noInstructionEmitted) { emit(asm.Opcodes.NOP) } - } - - /* - * Emitting try-catch is easy, emitting try-catch-finally not quite so. - * A finally-block (which always has type Unit, thus leaving the operand stack unchanged) - * affects control-transfer from protected regions, as follows: - * - * (a) `return` statement: - * - * First, the value to return (if any) is evaluated. - * Afterwards, all enclosing finally-blocks are run, from innermost to outermost. - * Only then is the return value (if any) returned. - * - * Some terminology: - * (a.1) Executing a return statement that is protected - * by one or more finally-blocks is called "early return" - * (a.2) the chain of code sections (a code section for each enclosing finally-block) - * to run upon early returns is called "cleanup chain" - * - * As an additional spin, consider a return statement in a finally-block. - * In this case, the value to return depends on how control arrived at that statement: - * in case it arrived via a previous return, the previous return enjoys priority: - * the value to return is given by that statement. - * - * (b) A finally-block protects both the try-clause and the catch-clauses. - * - * Sidenote: - * A try-clause may contain an empty block. On CLR, a finally-block has special semantics - * regarding Abort interruptions; but on the JVM it's safe to elide an exception-handler - * that protects an "empty" range ("empty" as in "containing NOPs only", - * see `asm.optimiz.DanglingExcHandlers` and SI-6720). - * - * This means a finally-block indicates instructions that can be reached: - * (b.1) Upon normal (non-early-returning) completion of the try-clause or a catch-clause - * In this case, the next-program-point is that following the try-catch-finally expression. 
- * (b.2) Upon early-return initiated in the try-clause or a catch-clause - * In this case, the next-program-point is the enclosing cleanup section (if any), otherwise return. - * (b.3) Upon abrupt termination (due to unhandled exception) of the try-clause or a catch-clause - * In this case, the unhandled exception must be re-thrown after running the finally-block. - * - * (c) finally-blocks are implicit to `synchronized` (a finally-block is added to just release the lock) - * that's why `genSynchronized()` too emits cleanup-sections. - * - * A number of code patterns can be emitted to realize the intended semantics. - * - * A popular alternative (GenICode, javac) consists in duplicating the cleanup-chain at each early-return position. - * The principle at work being that once control is transferred to a cleanup-section, - * control will always stay within the cleanup-chain. - * That is, barring an exception being thrown in a cleanup-section, in which case the enclosing try-block - * (reached via abrupt termination) takes over. - * - * The observations above hint at another code layout, less verbose, for the cleanup-chain. - * - * The code layout that GenBCode emits takes into account that once a cleanup section has been reached, - * jumping to the next cleanup-section (and so on, until the outermost one) realizes the correct semantics. - * - * There is still code duplication in that two cleanup-chains are needed (but this is unavoidable, anyway): - * one for normal control flow and another chain consisting of exception handlers. - * The in-line comments below refer to them as - * - "early-return-cleanups" and - * - "exception-handler-version-of-finally-block" respectively. - * - */ - def genLoadTry(tree: Try): BType = tree match { - case Try(block, catches, finalizer) => - val kind = tpeTK(tree) - - val caseHandlers: List[EHClause] = - for (CaseDef(pat, _, caseBody) <- catches) yield { - pat match { - case Typed(Ident(nme.WILDCARD), tpt) => NamelessEH(tpeTK(tpt).asClassBType, caseBody) - case Ident(nme.WILDCARD) => NamelessEH(jlThrowableRef, caseBody) - case Bind(_, _) => BoundEH (pat.symbol, caseBody) - } - } - - // ------ (0) locals used later ------ - - /* - * `postHandlers` is a program point denoting: - * (a) the finally-clause conceptually reached via fall-through from try-catch-finally - * (in case a finally-block is present); or - * (b) the program point right after the try-catch - * (in case there's no finally-block). - * The name choice emphasizes that the code section lies "after all exception handlers", - * where "all exception handlers" includes those derived from catch-clauses as well as from finally-blocks. - */ - val postHandlers = new asm.Label - - val hasFinally = (finalizer != tpd.EmptyTree) - - /* - * used in the finally-clause reached via fall-through from try-catch, if any. - */ - val guardResult = hasFinally && (kind != UNIT) && mayCleanStack(finalizer) - - /* - * please notice `tmp` has type tree.tpe, while `earlyReturnVar` has the method return type. - * Because those two types can be different, dedicated vars are needed. - */ - val tmp = if (guardResult) locals.makeLocal(tpeTK(tree), "tmp", tree.tpe, tree.span) else null - - /* - * upon early return from the try-body or one of its EHs (but not the EH-version of the finally-clause) - * AND hasFinally, a cleanup is needed. 
- */ - val finCleanup = if (hasFinally) new asm.Label else null - - /* ------ (1) try-block, protected by: - * (1.a) the EHs due to case-clauses, emitted in (2), - * (1.b) the EH due to finally-clause, emitted in (3.A) - * (1.c) whatever protects the whole try-catch-finally expression. - * ------ - */ - - val startTryBody = currProgramPoint() - registerCleanup(finCleanup) - genLoad(block, kind) - unregisterCleanup(finCleanup) - nopIfNeeded(startTryBody) - val endTryBody = currProgramPoint() - bc goTo postHandlers - - /** - * A return within a `try` or `catch` block where a `finally` is present ("early return") - * emits a store of the result to a local, jump to a "cleanup" version of the `finally` block, - * and sets `shouldEmitCleanup = true` (see [[PlainBodyBuilder.genReturn]]). - * - * If the try-catch is nested, outer `finally` blocks need to be emitted in a cleanup version - * as well, so the `shouldEmitCleanup` variable remains `true` until the outermost `finally`. - * Nested cleanup `finally` blocks jump to the next enclosing one. For the outermost, we emit - * a read of the local variable, a return, and we set `shouldEmitCleanup = false` (see - * [[pendingCleanups]]). - * - * Now, assume we have - * - * try { return 1 } finally { - * try { println() } finally { println() } - * } - * - * Here, the outer `finally` needs a cleanup version, but the inner one does not. The method - * here makes sure that `shouldEmitCleanup` is only propagated outwards, not inwards to - * nested `finally` blocks. - */ - def withFreshCleanupScope(body: => Unit) = { - val savedShouldEmitCleanup = shouldEmitCleanup - shouldEmitCleanup = false - body - shouldEmitCleanup = savedShouldEmitCleanup || shouldEmitCleanup - } - - /* ------ (2) One EH for each case-clause (this does not include the EH-version of the finally-clause) - * An EH in (2) is reached upon abrupt termination of (1). - * An EH in (2) is protected by: - * (2.a) the EH-version of the finally-clause, if any. - * (2.b) whatever protects the whole try-catch-finally expression. - * ------ - */ - - for (ch <- caseHandlers) withFreshCleanupScope { - - // (2.a) emit case clause proper - val startHandler = currProgramPoint() - var endHandler: asm.Label = null - var excType: ClassBType = null - registerCleanup(finCleanup) - ch match { - case NamelessEH(typeToDrop, caseBody) => - bc drop typeToDrop - genLoad(caseBody, kind) // adapts caseBody to `kind`, thus it can be stored, if `guardResult`, in `tmp`. - nopIfNeeded(startHandler) - endHandler = currProgramPoint() - excType = typeToDrop - - case BoundEH (patSymbol, caseBody) => - // test/files/run/contrib674.scala , a local-var already exists for patSymbol. - // rather than creating on first-access, we do it right away to emit debug-info for the created local var. - val Local(patTK, _, patIdx, _) = locals.getOrMakeLocal(patSymbol) - bc.store(patIdx, patTK) - genLoad(caseBody, kind) - nopIfNeeded(startHandler) - endHandler = currProgramPoint() - emitLocalVarScope(patSymbol, startHandler, endHandler) - excType = patTK.asClassBType - } - unregisterCleanup(finCleanup) - // (2.b) mark the try-body as protected by this case clause. - protect(startTryBody, endTryBody, startHandler, excType) - // (2.c) emit jump to the program point where the finally-clause-for-normal-exit starts, or in effect `after` if no finally-clause was given. 
- bc goTo postHandlers - - } - - // Need to save the state of `shouldEmitCleanup` at this point: while emitting the first - // version of the `finally` block below, the variable may become true. But this does not mean - // that we need a cleanup version for the current block, only for the enclosing ones. - val currentFinallyBlockNeedsCleanup = shouldEmitCleanup - - /* ------ (3.A) The exception-handler-version of the finally-clause. - * Reached upon abrupt termination of (1) or one of the EHs in (2). - * Protected only by whatever protects the whole try-catch-finally expression. - * ------ - */ - - // a note on terminology: this is not "postHandlers", despite appearances. - // "postHandlers" as in the source-code view. And from that perspective, both (3.A) and (3.B) are invisible implementation artifacts. - if (hasFinally) withFreshCleanupScope { - nopIfNeeded(startTryBody) - val finalHandler = currProgramPoint() // version of the finally-clause reached via unhandled exception. - protect(startTryBody, finalHandler, finalHandler, null) - val Local(eTK, _, eIdx, _) = locals(locals.makeLocal(jlThrowableRef, "exc", defn.ThrowableType, finalizer.span)) - bc.store(eIdx, eTK) - emitFinalizer(finalizer, null, isDuplicate = true) - bc.load(eIdx, eTK) - emit(asm.Opcodes.ATHROW) - } - - /* ------ (3.B) Cleanup-version of the finally-clause. - * Reached upon early RETURN from (1) or upon early RETURN from one of the EHs in (2) - * (and only from there, ie reached only upon early RETURN from - * program regions bracketed by registerCleanup/unregisterCleanup). - * Protected only by whatever protects the whole try-catch-finally expression. - * - * Given that control arrives to a cleanup section only upon early RETURN, - * the value to return (if any) is always available. Therefore, a further RETURN - * found in a cleanup section is always ignored (a warning is displayed, @see `genReturn()`). - * In order for `genReturn()` to know whether the return statement is enclosed in a cleanup section, - * the variable `insideCleanupBlock` is used. - * ------ - */ - - // this is not "postHandlers" either. - // `shouldEmitCleanup` can be set, and at the same time this try expression may lack a finally-clause. - // In other words, all combinations of (hasFinally, shouldEmitCleanup) are valid. - if (hasFinally && currentFinallyBlockNeedsCleanup) { - markProgramPoint(finCleanup) - // regarding return value, the protocol is: in place of a `return-stmt`, a sequence of `adapt, store, jump` are inserted. - emitFinalizer(finalizer, null, isDuplicate = true) - pendingCleanups() - } - - /* ------ (4) finally-clause-for-normal-nonEarlyReturn-exit - * Reached upon normal, non-early-return termination of (1) or of an EH in (2). - * Protected only by whatever protects the whole try-catch-finally expression. - * TODO explain what happens upon RETURN contained in (4) - * ------ - */ - - markProgramPoint(postHandlers) - if (hasFinally) { - emitFinalizer(finalizer, tmp, isDuplicate = false) // the only invocation of emitFinalizer with `isDuplicate == false` - } - - kind - } // end of genLoadTry() - - /* if no more pending cleanups, all that remains to do is return. Otherwise jump to the next (outer) pending cleanup. 
*/ - private def pendingCleanups(): Unit = { - cleanups match { - case Nil => - if (earlyReturnVar != null) { - locals.load(earlyReturnVar) - bc.emitRETURN(locals(earlyReturnVar).tk) - } else { - bc emitRETURN UNIT - } - shouldEmitCleanup = false - - case nextCleanup :: _ => - bc goTo nextCleanup - } - } - - def protect(start: asm.Label, end: asm.Label, handler: asm.Label, excType: ClassBType): Unit = { - val excInternalName: String = - if (excType == null) null - else excType.internalName - assert(start != end, "protecting a range of zero instructions leads to illegal class format. Solution: add a NOP to that range.") - mnode.visitTryCatchBlock(start, end, handler, excInternalName) - } - - /* `tmp` (if non-null) is the symbol of the local-var used to preserve the result of the try-body, see `guardResult` */ - def emitFinalizer(finalizer: Tree, tmp: Symbol, isDuplicate: Boolean): Unit = { - var saved: immutable.Map[ /* Labeled */ Symbol, (BType, LoadDestination) ] = null - if (isDuplicate) { - saved = jumpDest - } - // when duplicating, the above guarantees new asm.Labels are used for LabelDefs contained in the finalizer (their vars are reused, that's ok) - if (tmp != null) { locals.store(tmp) } - genLoad(finalizer, UNIT) - if (tmp != null) { locals.load(tmp) } - if (isDuplicate) { - jumpDest = saved - } - } - - /* Does this tree have a try-catch block? */ - def mayCleanStack(tree: Tree): Boolean = tree.find { t => t match { // TODO: use existsSubTree - case Try(_, _, _) => true - case _ => false - } - }.isDefined - - trait EHClause - case class NamelessEH(typeToDrop: ClassBType, caseBody: Tree) extends EHClause - case class BoundEH (patSymbol: Symbol, caseBody: Tree) extends EHClause - - } - -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/BTypes.scala b/tests/pos-with-compiler-cc/backend/jvm/BTypes.scala deleted file mode 100644 index f9a3a3aae105..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/BTypes.scala +++ /dev/null @@ -1,864 +0,0 @@ -package dotty.tools -package backend -package jvm - -import scala.language.unsafeNulls - -import scala.tools.asm - -/** - * The BTypes component defines The BType class hierarchy. BTypes encapsulates all type information - * that is required after building the ASM nodes. This includes optimizations, geneartion of - * InnerClass attributes and generation of stack map frames. - * - * This representation is immutable and independent of the compiler data structures, hence it can - * be queried by concurrent threads. - */ -abstract class BTypes extends Pure { - - val int: DottyBackendInterface - import int.given - /** - * A map from internal names to ClassBTypes. Every ClassBType is added to this map on its - * construction. - * - * This map is used when computing stack map frames. The asm.ClassWriter invokes the method - * `getCommonSuperClass`. In this method we need to obtain the ClassBType for a given internal - * name. The method assumes that every class type that appears in the bytecode exists in the map. - * - * Concurrent because stack map frames are computed when in the class writer, which might run - * on multiple classes concurrently. - */ - protected def classBTypeFromInternalNameMap: collection.concurrent.Map[String, ClassBType] - // NOTE: Should be a lazy val but scalac does not allow abstract lazy vals (dotty does) - - /** - * Obtain a previously constructed ClassBType for a given internal name. 
- */ - def classBTypeFromInternalName(internalName: String) = classBTypeFromInternalNameMap(internalName) - - // Some core BTypes are required here, in class BType, where no Global instance is available. - // The Global is only available in the subclass BTypesFromSymbols. We cannot depend on the actual - // implementation (CoreBTypesProxy) here because it has members that refer to global.Symbol. - val coreBTypes: CoreBTypesProxyGlobalIndependent[this.type] - import coreBTypes._ - - /** - * A BType is either a primitve type, a ClassBType, an ArrayBType of one of these, or a MethodType - * referring to BTypes. - */ - /*sealed*/ trait BType extends Pure { // Not sealed for now due to SI-8546 - final override def toString: String = this match { - case UNIT => "V" - case BOOL => "Z" - case CHAR => "C" - case BYTE => "B" - case SHORT => "S" - case INT => "I" - case FLOAT => "F" - case LONG => "J" - case DOUBLE => "D" - case ClassBType(internalName) => "L" + internalName + ";" - case ArrayBType(component) => "[" + component - case MethodBType(args, res) => args.mkString("(", "", ")" + res) - } - - /** - * @return The Java descriptor of this type. Examples: - * - int: I - * - java.lang.String: Ljava/lang/String; - * - int[]: [I - * - Object m(String s, double d): (Ljava/lang/String;D)Ljava/lang/Object; - */ - final def descriptor = toString - - /** - * @return 0 for void, 2 for long and double, 1 otherwise - */ - final def size: Int = this match { - case UNIT => 0 - case LONG | DOUBLE => 2 - case _ => 1 - } - - final def isPrimitive: Boolean = this.isInstanceOf[PrimitiveBType] - final def isRef: Boolean = this.isInstanceOf[RefBType] - final def isArray: Boolean = this.isInstanceOf[ArrayBType] - final def isClass: Boolean = this.isInstanceOf[ClassBType] - final def isMethod: Boolean = this.isInstanceOf[MethodBType] - - final def isNonVoidPrimitiveType = isPrimitive && this != UNIT - - final def isNullType = this == srNullRef - final def isNothingType = this == srNothingRef - - final def isBoxed = this.isClass && boxedClasses(this.asClassBType) - - final def isIntSizedType = this == BOOL || this == CHAR || this == BYTE || - this == SHORT || this == INT - final def isIntegralType = this == INT || this == BYTE || this == LONG || - this == CHAR || this == SHORT - final def isRealType = this == FLOAT || this == DOUBLE - final def isNumericType = isIntegralType || isRealType - final def isWideType = size == 2 - - /* - * Subtype check `this <:< other` on BTypes that takes into account the JVM built-in numeric - * promotions (e.g. BYTE to INT). Its operation can be visualized more easily in terms of the - * Java bytecode type hierarchy. 
- */ - final def conformsTo(other: BType): Boolean = { - assert(isRef || isPrimitive, s"conformsTo cannot handle $this") - assert(other.isRef || other.isPrimitive, s"conformsTo cannot handle $other") - - this match { - case ArrayBType(component) => - if (other == ObjectRef || other == jlCloneableRef || other == jiSerializableRef) true - else other match { - case ArrayBType(otherComponoent) => component.conformsTo(otherComponoent) - case _ => false - } - - case classType: ClassBType => - if (isBoxed) { - if (other.isBoxed) this == other - else if (other == ObjectRef) true - else other match { - case otherClassType: ClassBType => classType.isSubtypeOf(otherClassType) // e.g., java/lang/Double conforms to java/lang/Number - case _ => false - } - } else if (isNullType) { - if (other.isNothingType) false - else if (other.isPrimitive) false - else true // Null conforms to all classes (except Nothing) and arrays. - } else if (isNothingType) { - true - } else other match { - case otherClassType: ClassBType => classType.isSubtypeOf(otherClassType) - // case ArrayBType(_) => this.isNullType // documentation only, because `if (isNullType)` above covers this case - case _ => - // isNothingType || // documentation only, because `if (isNothingType)` above covers this case - false - } - - case UNIT => - other == UNIT - case BOOL | BYTE | SHORT | CHAR => - this == other || other == INT || other == LONG // TODO Actually, BOOL does NOT conform to LONG. Even with adapt(). - case _ => - assert(isPrimitive && other.isPrimitive, s"Expected primitive types $this - $other") - this == other - } - } - - /** - * Compute the upper bound of two types. - * Takes promotions of numeric primitives into account. - */ - final def maxType(other: BType): BType = this match { - case pt: PrimitiveBType => pt.maxValueType(other) - - case _: ArrayBType | _: ClassBType => - if (isNothingType) return other - if (other.isNothingType) return this - if (this == other) return this - - assert(other.isRef, s"Cannot compute maxType: $this, $other") - // Approximate `lub`. The common type of two references is always ObjectReference. - ObjectRef - } - - /** - * See documentation of [[typedOpcode]]. - * The numbers are taken from asm.Type.VOID_TYPE ff., the values are those shifted by << 8. - */ - private def loadStoreOpcodeOffset: Int = this match { - case UNIT | INT => 0 - case BOOL | BYTE => 5 - case CHAR => 6 - case SHORT => 7 - case FLOAT => 2 - case LONG => 1 - case DOUBLE => 3 - case _ => 4 - } - - /** - * See documentation of [[typedOpcode]]. - * The numbers are taken from asm.Type.VOID_TYPE ff., the values are those shifted by << 16. - */ - private def typedOpcodeOffset: Int = this match { - case UNIT => 5 - case BOOL | CHAR | BYTE | SHORT | INT => 0 - case FLOAT => 2 - case LONG => 1 - case DOUBLE => 3 - case _ => 4 - } - - /** - * Some JVM opcodes have typed variants. This method returns the correct opcode according to - * the type. - * - * @param opcode A JVM instruction opcode. This opcode must be one of ILOAD, ISTORE, IALOAD, - * IASTORE, IADD, ISUB, IMUL, IDIV, IREM, INEG, ISHL, ISHR, IUSHR, IAND, IOR - * IXOR and IRETURN. - * @return The opcode adapted to this java type. For example, if this type is `float` and - * `opcode` is `IRETURN`, this method returns `FRETURN`. - */ - final def typedOpcode(opcode: Int): Int = { - if (opcode == asm.Opcodes.IALOAD || opcode == asm.Opcodes.IASTORE) - opcode + loadStoreOpcodeOffset - else - opcode + typedOpcodeOffset - } - - /** - * The asm.Type corresponding to this BType. 
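// ---------------------------------------------------------------------------
// Editorial sketch (not part of the deleted source above): the offset trick
// behind `typedOpcode`. The opcode numbers are the standard JVM values (the
// same constants exposed by asm.Opcodes); the type names are plain strings
// standing in for the primitive BTypes.
// ---------------------------------------------------------------------------
object TypedOpcodeSketch {
  val IRETURN = 172 // LRETURN = 173, FRETURN = 174, DRETURN = 175, ARETURN = 176, RETURN = 177
  val IADD    = 96  // LADD = 97, FADD = 98, DADD = 99
  val ILOAD   = 21  // LLOAD = 22, FLOAD = 23, DLOAD = 24, ALOAD = 25

  def typedOpcodeOffset(tpe: String): Int = tpe match {
    case "BOOL" | "CHAR" | "BYTE" | "SHORT" | "INT" => 0
    case "LONG"   => 1
    case "FLOAT"  => 2
    case "DOUBLE" => 3
    case "UNIT"   => 5
    case _        => 4 // reference types
  }

  def typedOpcode(tpe: String, opcode: Int): Int = opcode + typedOpcodeOffset(tpe)

  def main(args: Array[String]): Unit = {
    println(typedOpcode("FLOAT", IRETURN)) // 174 == FRETURN
    println(typedOpcode("UNIT", IRETURN))  // 177 == RETURN
    println(typedOpcode("DOUBLE", IADD))   // 99  == DADD
    println(typedOpcode("LONG", ILOAD))    // 22  == LLOAD
  }
}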
- * - * Note about asm.Type.getObjectType (*): For class types, the method expects the internal - * name, i.e. without the surrounding 'L' and ';'. For array types on the other hand, the - * method expects a full descriptor, for example "[Ljava/lang/String;". - * - * See method asm.Type.getType that creates a asm.Type from a type descriptor - * - for an OBJECT type, the 'L' and ';' are not part of the range of the created Type - * - for an ARRAY type, the full descriptor is part of the range - */ - def toASMType: asm.Type = this match { - case UNIT => asm.Type.VOID_TYPE - case BOOL => asm.Type.BOOLEAN_TYPE - case CHAR => asm.Type.CHAR_TYPE - case BYTE => asm.Type.BYTE_TYPE - case SHORT => asm.Type.SHORT_TYPE - case INT => asm.Type.INT_TYPE - case FLOAT => asm.Type.FLOAT_TYPE - case LONG => asm.Type.LONG_TYPE - case DOUBLE => asm.Type.DOUBLE_TYPE - case ClassBType(internalName) => asm.Type.getObjectType(internalName) // see (*) above - case a: ArrayBType => asm.Type.getObjectType(a.descriptor) - case m: MethodBType => asm.Type.getMethodType(m.descriptor) - } - - def asRefBType : RefBType = this.asInstanceOf[RefBType] - def asArrayBType : ArrayBType = this.asInstanceOf[ArrayBType] - def asClassBType : ClassBType = this.asInstanceOf[ClassBType] - def asPrimitiveBType : PrimitiveBType = this.asInstanceOf[PrimitiveBType] - } - - sealed trait PrimitiveBType extends BType { - - /** - * The upper bound of two primitive types. The `other` type has to be either a primitive - * type or Nothing. - * - * The maxValueType of (Char, Byte) and of (Char, Short) is Int, to encompass the negative - * values of Byte and Short. See ticket #2087. - */ - final def maxValueType(other: BType): BType = { - - def uncomparable: Nothing = throw new AssertionError(s"Cannot compute maxValueType: $this, $other") - - if (!other.isPrimitive && !other.isNothingType) uncomparable - - if (other.isNothingType) return this - if (this == other) return this - - this match { - case BYTE => - if (other == CHAR) INT - else if (other.isNumericType) other - else uncomparable - - case SHORT => - other match { - case BYTE => SHORT - case CHAR => INT - case INT | LONG | FLOAT | DOUBLE => other - case _ => uncomparable - } - - case CHAR => - other match { - case BYTE | SHORT => INT - case INT | LONG | FLOAT | DOUBLE => other - case _ => uncomparable - } - - case INT => - other match { - case BYTE | SHORT | CHAR => INT - case LONG | FLOAT | DOUBLE => other - case _ => uncomparable - } - - case LONG => - other match { - case INT | BYTE | LONG | CHAR | SHORT => LONG - case DOUBLE => DOUBLE - case FLOAT => FLOAT - case _ => uncomparable - } - - case FLOAT => - if (other == DOUBLE) DOUBLE - else if (other.isNumericType) FLOAT - else uncomparable - - case DOUBLE => - if (other.isNumericType) DOUBLE - else uncomparable - - case UNIT | BOOL => uncomparable - } - } - } - - case object UNIT extends PrimitiveBType - case object BOOL extends PrimitiveBType - case object CHAR extends PrimitiveBType - case object BYTE extends PrimitiveBType - case object SHORT extends PrimitiveBType - case object INT extends PrimitiveBType - case object FLOAT extends PrimitiveBType - case object LONG extends PrimitiveBType - case object DOUBLE extends PrimitiveBType - - sealed trait RefBType extends BType { - /** - * The class or array type of this reference type. Used for ANEWARRAY, MULTIANEWARRAY, - * INSTANCEOF and CHECKCAST instructions. Also used for emitting invokevirtual calls to - * (a: Array[T]).clone() for any T, see genApply. 
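// ---------------------------------------------------------------------------
// Editorial sketch (not part of the deleted source above): a simplified model
// of the numeric promotion performed by `maxValueType`, covering only the
// numeric primitives (no BOOL, UNIT or Nothing). The `rank` map and helper
// are hypothetical; they merely reproduce the table above for these cases.
// ---------------------------------------------------------------------------
object MaxValueTypeSketch {
  private val rank = Map(
    "BYTE" -> 1, "SHORT" -> 2, "CHAR" -> 2, "INT" -> 3,
    "LONG" -> 4, "FLOAT" -> 5, "DOUBLE" -> 6
  )

  def maxValueType(a: String, b: String): String =
    if (a == b) a
    // Char is unsigned: its upper bound with Byte or Short must be Int (ticket #2087)
    else if (Set(a, b) == Set("CHAR", "BYTE") || Set(a, b) == Set("CHAR", "SHORT")) "INT"
    else if (rank(a) >= rank(b)) a
    else b

  def main(args: Array[String]): Unit = {
    println(maxValueType("CHAR", "BYTE"))  // INT
    println(maxValueType("BYTE", "SHORT")) // SHORT
    println(maxValueType("INT", "FLOAT"))  // FLOAT
    println(maxValueType("LONG", "FLOAT")) // FLOAT, as in the table above
  }
}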
- * - * In contrast to the descriptor, this string does not contain the surrounding 'L' and ';' for - * class types, for example "java/lang/String". - * However, for array types, the full descriptor is used, for example "[Ljava/lang/String;". - * - * This can be verified for example using javap or ASMifier. - */ - def classOrArrayType: String = this match { - case ClassBType(internalName) => internalName - case a: ArrayBType => a.descriptor - } - } - - /** - * InnerClass and EnclosingMethod attributes (EnclosingMethod is displayed as OUTERCLASS in asm). - * - * In this summary, "class" means "class or interface". - * - * JLS: http://docs.oracle.com/javase/specs/jls/se8/html/index.html - * JVMS: http://docs.oracle.com/javase/specs/jvms/se8/html/index.html - * - * Terminology - * ----------- - * - * - Nested class (JLS 8): class whose declaration occurs within the body of another class - * - * - Top-level class (JLS 8): non-nested class - * - * - Inner class (JLS 8.1.3): nested class that is not (explicitly or implicitly) static - * - * - Member class (JLS 8.5): class directly enclosed in the body of a class (and not, for - * example, defined in a method). Member classes cannot be anonymous. May be static. - * - * - Local class (JLS 14.3): nested, non-anonymous class that is not a member of a class - * - cannot be static (therefore they are "inner" classes) - * - can be defined in a method, a constructor or in an initializer block - * - * - Initializer block (JLS 8.6 / 8.7): block of statements in a java class - * - static initializer: executed before constructor body - * - instance initializer: executed when class is initialized (instance creation, static - * field access, ...) - * - * - A static nested class can be defined as - * - a static member class (explicitly static), or - * - a member class of an interface (implicitly static) - * - local classes are never static, even if they are defined in a static method. - * - * Note: it is NOT the case that all inner classes (non-static) have an outer pointer. Example: - * class C { static void foo { class D {} } } - * The class D is an inner class (non-static), but javac does not add an outer pointer to it. - * - * InnerClass - * ---------- - * - * The JVMS 4.7.6 requires an entry for every class mentioned in a CONSTANT_Class_info in the - * constant pool (CP) that is not a member of a package (JLS 7.1). - * - * The JLS 13.1, points 9. / 10. requires: a class must reference (in the CP) - * - its immediately enclosing class - * - all of its member classes - * - all local and anonymous classes that are referenced (or declared) elsewhere (method, - * constructor, initializer block, field initializer) - * - * In a comment, the 4.7.6 spec says: this implies an entry in the InnerClass attribute for - * - All enclosing classes (except the outermost, which is top-level) - * - My comment: not sure how this is implied, below (*) a Java counter-example. - * In any case, the Java compiler seems to add all enclosing classes, even if they are not - * otherwise mentioned in the CP. So we should do the same. - * - All nested classes (including anonymous and local, but not transitively) - * - * Fields in the InnerClass entries: - * - inner class: the (nested) class C we are talking about - * - outer class: the class of which C is a member. Has to be null for non-members, i.e. for - * local and anonymous classes. 
NOTE: this co-incides with the presence of an - * EnclosingMethod attribute (see below) - * - inner name: A string with the simple name of the inner class. Null for anonymous classes. - * - flags: access property flags, details in JVMS, table in 4.7.6. Static flag: see - * discussion below. - * - * - * Note 1: when a nested class is present in the InnerClass attribute, all of its enclosing - * classes have to be present as well (by the rules above). Example: - * - * class Outer { class I1 { class I2 { } } } - * class User { Outer.I1.I2 foo() { } } - * - * The return type "Outer.I1.I2" puts "Outer$I1$I2" in the CP, therefore the class is added to the - * InnerClass attribute. For this entry, the "outer class" field will be "Outer$I1". This in turn - * adds "Outer$I1" to the CP, which requires adding that class to the InnerClass attribute. - * (For local / anonymous classes this would not be the case, since the "outer class" attribute - * would be empty. However, no class (other than the enclosing class) can refer to them, as they - * have no name.) - * - * In the current implementation of the Scala compiler, when adding a class to the InnerClass - * attribute, all of its enclosing classes will be added as well. Javac seems to do the same, - * see (*). - * - * - * Note 2: If a class name is mentioned only in a CONSTANT_Utf8_info, but not in a - * CONSTANT_Class_info, the JVMS does not require an entry in the InnerClass attribute. However, - * the Java compiler seems to add such classes anyway. For example, when using an annotation, the - * annotation class is stored as a CONSTANT_Utf8_info in the CP: - * - * @O.Ann void foo() { } - * - * adds "const #13 = Asciz LO$Ann;;" in the constant pool. The "RuntimeInvisibleAnnotations" - * attribute refers to that constant pool entry. Even though there is no other reference to - * `O.Ann`, the java compiler adds an entry for that class to the InnerClass attribute (which - * entails adding a CONSTANT_Class_info for the class). - * - * - * - * EnclosingMethod - * --------------- - * - * JVMS 4.7.7: the attribute must be present "if and only if it represents a local class - * or an anonymous class" (i.e. not for member classes). - * - * The attribute is mis-named, it should be called "EnclosingClass". It has to be defined for all - * local and anonymous classes, no matter if there is an enclosing method or not. Accordingly, the - * "class" field (see below) must be always defined, while the "method" field may be null. - * - * NOTE: When a EnclosingMethod attribute is required (local and anonymous classes), the "outer" - * field in the InnerClass table must be null. - * - * Fields: - * - class: the enclosing class - * - method: the enclosing method (or constructor). Null if the class is not enclosed by a - * method, i.e. for - * - local or anonymous classes defined in (static or non-static) initializer blocks - * - anonymous classes defined in initializer blocks or field initializers - * - * Note: the field is required for anonymous classes defined within local variable - * initializers (within a method), Java example below (**). - * - * For local and anonymous classes in initializer blocks or field initializers, and - * class-level anonymous classes, the scala compiler sets the "method" field to null. 
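At the bytecode level, the two attributes discussed above (InnerClass and EnclosingMethod) are emitted through ASM's visitor API. A hedged, standalone sketch of what gets written for an anonymous class defined inside a method; the class name `A$1`, owner `A` and method `foo` are invented for illustration and do not come from the backend:

import scala.tools.asm
import scala.tools.asm.Opcodes

def emitAnonymousClassAttributes(): Array[Byte] = {
  val cw = new asm.ClassWriter(0)
  cw.visit(Opcodes.V1_8, Opcodes.ACC_SUPER, "A$1", null, "java/lang/Object", null)
  cw.visitOuterClass("A", "foo", "()V")    // EnclosingMethod: the class is always set, the method may be null
  cw.visitInnerClass("A$1", null, null, 0) // InnerClass entry: outerName and innerName are null for anonymous classes
  cw.visitEnd()
  cw.toByteArray
}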
- * - * - * (*) - * public class Test { - * void foo() { - * class Foo1 { - * // constructor statement block - * { - * class Foo2 { - * class Foo3 { } - * } - * } - * } - * } - * } - * - * The class file Test$1Foo1$1Foo2$Foo3 has no reference to the class Test$1Foo1, however it - * still contains an InnerClass attribute for Test$1Foo1. - * Maybe this is just because the Java compiler follows the JVMS comment ("InnerClasses - * information for each enclosing class"). - * - * - * (**) - * void foo() { - * // anonymous class defined in local variable initializer expression. - * Runnable x = true ? (new Runnable() { - * public void run() { return; } - * }) : null; - * } - * - * The EnclosingMethod attribute of the anonymous class mentions "foo" in the "method" field. - * - * - * Java Compatibility - * ------------------ - * - * In the InnerClass entry for classes in top-level modules, the "outer class" is emitted as the - * mirror class (or the existing companion class), i.e. C1 is nested in T (not T$). - * For classes nested in a nested object, the "outer class" is the module class: C2 is nested in T$N$ - * object T { - * class C1 - * object N { class C2 } - * } - * - * Reason: java compat. It's a "best effort" "solution". If you want to use "C1" from Java, you - * can write "T.C1", and the Java compiler will translate that to the classfile T$C1. - * - * If we would emit the "outer class" of C1 as "T$", then in Java you'd need to write "T$.C1" - * because the java compiler looks at the InnerClass attribute to find if an inner class exists. - * However, the Java compiler would then translate the '.' to '$' and you'd get the class name - * "T$$C1". This class file obviously does not exist. - * - * Directly using the encoded class name "T$C1" in Java does not work: since the classfile - * describes a nested class, the Java compiler hides it from the classpath and will report - * "cannot find symbol T$C1". This means that the class T.N.C2 cannot be referenced from a - * Java source file in any way. - * - * - * STATIC flag - * ----------- - * - * Java: static member classes have the static flag in the InnerClass attribute, for example B in - * class A { static class B { } } - * - * The spec is not very clear about when the static flag should be emitted. It says: "Marked or - * implicitly static in source." - * - * The presence of the static flag does NOT coincide with the absence of an "outer" field in the - * class. The java compiler never puts the static flag for local classes, even if they don't have - * an outer pointer: - * - * class A { - * void f() { class B {} } - * static void g() { calss C {} } - * } - * - * B has an outer pointer, C doesn't. Both B and C are NOT marked static in the InnerClass table. - * - * It seems sane to follow the same principle in the Scala compiler. So: - * - * package p - * object O1 { - * class C1 // static inner class - * object O2 { // static inner module - * def f = { - * class C2 { // non-static inner class, even though there's no outer pointer - * class C3 // non-static, has an outer pointer - * } - * } - * } - * } - * - * Mirror Classes - * -------------- - * - * TODO: innerclass attributes on mirror class - */ - - /** - * A ClassBType represents a class or interface type. The necessary information to build a - * ClassBType is extracted from compiler symbols and types, see BTypesFromSymbols. - * - * The `offset` and `length` fields are used to represent the internal name of the class. They - * are indices into some character array. 
The internal name can be obtained through the method - * `internalNameString`, which is abstract in this component. Name creation is assumed to be - * hash-consed, so if two ClassBTypes have the same internal name, they NEED to have the same - * `offset` and `length`. - * - * The actual implementation in subclass BTypesFromSymbols uses the global `chrs` array from the - * name table. This representation is efficient because the JVM class name is obtained through - * `classSymbol.javaBinaryName`. This already adds the necessary string to the `chrs` array, - * so it makes sense to reuse the same name table in the backend. - * - * ClassBType is not a case class because we want a custom equals method, and because the - * extractor extracts the internalName, which is what you typically need. - */ - final class ClassBType(val internalName: String) extends RefBType { - /** - * Write-once variable allows initializing a cyclic graph of infos. This is required for - * nested classes. Example: for the definition `class A { class B }` we have - * - * B.info.nestedInfo.outerClass == A - * A.info.memberClasses contains B - */ - private var _info: ClassInfo = null - - def info: ClassInfo = { - assert(_info != null, s"ClassBType.info not yet assigned: $this") - _info - } - - def info_=(i: ClassInfo): Unit = { - assert(_info == null, s"Cannot set ClassBType.info multiple times: $this") - _info = i - checkInfoConsistency() - } - - classBTypeFromInternalNameMap(internalName) = this - - private def checkInfoConsistency(): Unit = { - // we assert some properties. however, some of the linked ClassBType (members, superClass, - // interfaces) may not yet have an `_info` (initialization of cyclic structures). so we do a - // best-effort verification. - def ifInit(c: ClassBType)(p: ClassBType => Boolean): Boolean = c._info == null || p(c) - - def isJLO(t: ClassBType) = t.internalName == "java/lang/Object" - - assert(!ClassBType.isInternalPhantomType(internalName), s"Cannot create ClassBType for phantom type $this") - - assert( - if (info.superClass.isEmpty) { isJLO(this) || (DottyBackendInterface.isCompilingPrimitive && ClassBType.hasNoSuper(internalName)) } - else if (isInterface) isJLO(info.superClass.get) - else !isJLO(this) && ifInit(info.superClass.get)(!_.isInterface), - s"Invalid superClass in $this: ${info.superClass}" - ) - assert( - info.interfaces.forall(c => ifInit(c)(_.isInterface)), - s"Invalid interfaces in $this: ${info.interfaces}" - ) - - assert(info.memberClasses.forall(c => ifInit(c)(_.isNestedClass)), info.memberClasses) - } - - /** - * The internal name of a class is the string returned by java.lang.Class.getName, with all '.' - * replaced by '/'. For example "java/lang/String". 
- */ - //def internalName: String = internalNameString(offset, length) - - /** - * @return The class name without the package prefix - */ - def simpleName: String = internalName.split("/").last - - def isInterface = (info.flags & asm.Opcodes.ACC_INTERFACE) != 0 - - def superClassesTransitive: List[ClassBType] = info.superClass match { - case None => Nil - case Some(sc) => sc :: sc.superClassesTransitive - } - - def isNestedClass = info.nestedInfo.isDefined - - def enclosingNestedClassesChain: List[ClassBType] = - if (isNestedClass) this :: info.nestedInfo.get.enclosingClass.enclosingNestedClassesChain - else Nil - - def innerClassAttributeEntry: Option[InnerClassEntry] = info.nestedInfo map { - case NestedInfo(_, outerName, innerName, isStaticNestedClass) => - import GenBCodeOps.addFlagIf - InnerClassEntry( - internalName, - outerName.orNull, - innerName.orNull, - info.flags.addFlagIf(isStaticNestedClass, asm.Opcodes.ACC_STATIC) - & ClassBType.INNER_CLASSES_FLAGS - ) - } - - def isSubtypeOf(other: ClassBType): Boolean = { - if (this == other) return true - - if (isInterface) { - if (other == ObjectRef) return true // interfaces conform to Object - if (!other.isInterface) return false // this is an interface, the other is some class other than object. interfaces cannot extend classes, so the result is false. - // else: this and other are both interfaces. continue to (*) - } else { - val sc = info.superClass - if (sc.isDefined && sc.get.isSubtypeOf(other)) return true // the superclass of this class conforms to other - if (!other.isInterface) return false // this and other are both classes, and the superclass of this does not conform - // else: this is a class, the other is an interface. continue to (*) - } - - // (*) check if some interface of this class conforms to other. - info.interfaces.exists(_.isSubtypeOf(other)) - } - - /** - * Finding the least upper bound in agreement with the bytecode verifier - * Background: - * http://gallium.inria.fr/~xleroy/publi/bytecode-verification-JAR.pdf - * http://comments.gmane.org/gmane.comp.java.vm.languages/2293 - * https://issues.scala-lang.org/browse/SI-3872 - */ - def jvmWiseLUB(other: ClassBType): ClassBType = { - def isNotNullOrNothing(c: ClassBType) = !c.isNullType && !c.isNothingType - assert(isNotNullOrNothing(this) && isNotNullOrNothing(other), s"jvmWiseLub for null or nothing: $this - $other") - - val res: ClassBType = (this.isInterface, other.isInterface) match { - case (true, true) => - // exercised by test/files/run/t4761.scala - if (other.isSubtypeOf(this)) this - else if (this.isSubtypeOf(other)) other - else ObjectRef - - case (true, false) => - if (other.isSubtypeOf(this)) this else ObjectRef - - case (false, true) => - if (this.isSubtypeOf(other)) other else ObjectRef - - case _ => - // TODO @lry I don't really understand the reasoning here. - // Both this and other are classes. The code takes (transitively) all superclasses and - // finds the first common one. 
- // MOST LIKELY the answer can be found here, see the comments and links by Miguel: - // - https://issues.scala-lang.org/browse/SI-3872 - firstCommonSuffix(this :: this.superClassesTransitive, other :: other.superClassesTransitive) - } - - assert(isNotNullOrNothing(res), s"jvmWiseLub computed: $res") - res - } - - private def firstCommonSuffix(as: List[ClassBType], bs: List[ClassBType]): ClassBType = { - var chainA = as - var chainB = bs - var fcs: ClassBType = null - while { - if (chainB contains chainA.head) fcs = chainA.head - else if (chainA contains chainB.head) fcs = chainB.head - else { - chainA = chainA.tail - chainB = chainB.tail - } - fcs == null - } do () - fcs - } - - /** - * Custom equals / hashCode: we only compare the name (offset / length) - */ - override def equals(o: Any): Boolean = (this eq o.asInstanceOf[Object]) || (o match { - case c: ClassBType @unchecked => c.internalName == this.internalName - case _ => false - }) - - override def hashCode: Int = { - import scala.runtime.Statics - var acc: Int = -889275714 - acc = Statics.mix(acc, internalName.hashCode) - Statics.finalizeHash(acc, 2) - } - } - - object ClassBType { - /** - * Pattern matching on a ClassBType extracts the `internalName` of the class. - */ - def unapply(c: ClassBType): Some[String] = Some(c.internalName) - - /** - * Valid flags for InnerClass attribute entry. - * See http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.7.6 - */ - private val INNER_CLASSES_FLAGS = { - asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED | - asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL | asm.Opcodes.ACC_INTERFACE | - asm.Opcodes.ACC_ABSTRACT | asm.Opcodes.ACC_SYNTHETIC | asm.Opcodes.ACC_ANNOTATION | - asm.Opcodes.ACC_ENUM - } - - // Primitive classes have no super class. A ClassBType for those is only created when - // they are actually being compiled (e.g., when compiling scala/Boolean.scala). - private val hasNoSuper = Set( - "scala/Unit", - "scala/Boolean", - "scala/Char", - "scala/Byte", - "scala/Short", - "scala/Int", - "scala/Float", - "scala/Long", - "scala/Double" - ) - - private val isInternalPhantomType = Set( - "scala/Null", - "scala/Nothing" - ) - } - - /** - * The type info for a class. Used for symboltable-independent subtype checks in the backend. - * - * @param superClass The super class, not defined for class java/lang/Object. - * @param interfaces All transitively implemented interfaces, except for those inherited - * through the superclass. - * @param flags The java flags, obtained through `javaFlags`. Used also to derive - * the flags for InnerClass entries. - * @param memberClasses Classes nested in this class. Those need to be added to the - * InnerClass table, see the InnerClass spec summary above. - * @param nestedInfo If this describes a nested class, information for the InnerClass table. - */ - case class ClassInfo(superClass: Option[ClassBType], interfaces: List[ClassBType], flags: Int, - memberClasses: List[ClassBType], nestedInfo: Option[NestedInfo]) - - /** - * Information required to add a class to an InnerClass table. - * The spec summary above explains what information is required for the InnerClass entry. - * - * @param enclosingClass The enclosing class, if it is also nested. When adding a class - * to the InnerClass table, enclosing nested classes are also added. - * @param outerName The outerName field in the InnerClass entry, may be None. - * @param innerName The innerName field, may be None. 
- * @param isStaticNestedClass True if this is a static nested class (not inner class) (*) - * - * (*) Note that the STATIC flag in ClassInfo.flags, obtained through javaFlags(classSym), is not - * correct for the InnerClass entry, see javaFlags. The static flag in the InnerClass describes - * a source-level propety: if the class is in a static context (does not have an outer pointer). - * This is checked when building the NestedInfo. - */ - case class NestedInfo(enclosingClass: ClassBType, - outerName: Option[String], - innerName: Option[String], - isStaticNestedClass: Boolean) - - /** - * This class holds the data for an entry in the InnerClass table. See the InnerClass summary - * above in this file. - * - * There's some overlap with the class NestedInfo, but it's not exactly the same and cleaner to - * keep separate. - * @param name The internal name of the class. - * @param outerName The internal name of the outer class, may be null. - * @param innerName The simple name of the inner class, may be null. - * @param flags The flags for this class in the InnerClass entry. - */ - case class InnerClassEntry(name: String, outerName: String, innerName: String, flags: Int) - - case class ArrayBType(componentType: BType) extends RefBType { - def dimension: Int = componentType match { - case a: ArrayBType => 1 + a.dimension - case _ => 1 - } - - def elementType: BType = componentType match { - case a: ArrayBType => a.elementType - case t => t - } - } - - case class MethodBType(argumentTypes: List[BType], returnType: BType) extends BType - - /* Some definitions that are required for the implementation of BTypes. They are abstract because - * initializing them requires information from types / symbols, which is not accessible here in - * BTypes. - * - * They are defs (not vals) because they are implemented using vars (see comment on CoreBTypes). - */ - - /** - * Just a named pair, used in CoreBTypes.asmBoxTo/asmUnboxTo. - */ - /*final*/ case class MethodNameAndType(name: String, methodType: MethodBType) -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/BTypesFromSymbols.scala b/tests/pos-with-compiler-cc/backend/jvm/BTypesFromSymbols.scala deleted file mode 100644 index d78008d65cc6..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/BTypesFromSymbols.scala +++ /dev/null @@ -1,348 +0,0 @@ -package dotty.tools -package backend -package jvm - -import scala.tools.asm -import scala.annotation.threadUnsafe -import scala.collection.mutable -import scala.collection.mutable.Clearable - -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Phases._ -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Phases.Phase -import dotty.tools.dotc.transform.SymUtils._ -import dotty.tools.dotc.core.StdNames - -/** - * This class mainly contains the method classBTypeFromSymbol, which extracts the necessary - * information from a symbol and its type to create the corresponding ClassBType. It requires - * access to the compiler (global parameter). - * - * The mixin CoreBTypes defines core BTypes that are used in the backend. Building these BTypes - * uses classBTypeFromSymbol, hence requires access to the compiler (global). - * - * BTypesFromSymbols extends BTypes because the implementation of BTypes requires access to some - * of the core btypes. They are declared in BTypes as abstract members. Note that BTypes does - * not have access to the compiler instance. 
- */ -class BTypesFromSymbols[I <: DottyBackendInterface](val int: I) extends BTypes { - import int.{_, given} - import DottyBackendInterface.{symExtensions, _} - - lazy val TransientAttr = requiredClass[scala.transient] - lazy val VolatileAttr = requiredClass[scala.volatile] - - val bCodeAsmCommon: BCodeAsmCommon[int.type ] = new BCodeAsmCommon(int) - import bCodeAsmCommon._ - - // Why the proxy, see documentation of class [[CoreBTypes]]. - val coreBTypes: CoreBTypesProxy[this.type] = new CoreBTypesProxy[this.type](this) - import coreBTypes._ - - final def intializeCoreBTypes(): Unit = { - coreBTypes.setBTypes(new CoreBTypes[this.type](this)) - } - - private[this] val perRunCaches: Caches = new Caches { - def newAnyRefMap[K <: AnyRef, V](): mutable.AnyRefMap[K, V] = new mutable.AnyRefMap[K, V]() - def newWeakMap[K, V](): mutable.WeakHashMap[K, V] = new mutable.WeakHashMap[K, V]() - def recordCache[T <: Clearable](cache: T): T = cache - def newMap[K, V](): mutable.HashMap[K, V] = new mutable.HashMap[K, V]() - def newSet[K](): mutable.Set[K] = new mutable.HashSet[K] - } - - // TODO remove abstraction - private abstract class Caches { - def recordCache[T <: Clearable](cache: T): T - def newWeakMap[K, V](): collection.mutable.WeakHashMap[K, V] - def newMap[K, V](): collection.mutable.HashMap[K, V] - def newSet[K](): collection.mutable.Set[K] - def newAnyRefMap[K <: AnyRef, V](): collection.mutable.AnyRefMap[K, V] - } - - @threadUnsafe protected lazy val classBTypeFromInternalNameMap = { - perRunCaches.recordCache(collection.concurrent.TrieMap.empty[String, ClassBType]) - } - - /** - * Cache for the method classBTypeFromSymbol. - */ - @threadUnsafe private lazy val convertedClasses = perRunCaches.newMap[Symbol, ClassBType]() - - /** - * The ClassBType for a class symbol `sym`. - */ - final def classBTypeFromSymbol(classSym: Symbol): ClassBType = { - assert(classSym != NoSymbol, "Cannot create ClassBType from NoSymbol") - assert(classSym.isClass, s"Cannot create ClassBType from non-class symbol $classSym") - assert( - (!primitiveTypeMap.contains(classSym) || isCompilingPrimitive) && - (classSym != defn.NothingClass && classSym != defn.NullClass), - s"Cannot create ClassBType for special class symbol ${classSym.showFullName}") - - convertedClasses.getOrElse(classSym, { - val internalName = classSym.javaBinaryName - // We first create and add the ClassBType to the hash map before computing its info. This - // allows initializing cylic dependencies, see the comment on variable ClassBType._info. 
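The cache-before-initialize trick described in the comment above (the ClassBType is added to the map before its info is computed, see just below) can be distilled into a tiny standalone sketch. This is a hypothetical illustration independent of the compiler: the entry is published to the cache before its contents are filled in, so cyclic references resolve to the same in-flight instance instead of recursing forever.

import scala.collection.mutable

final class Node(val name: String) { var deps: List[Node] = Nil } // "info" filled in after registration

val cache = mutable.HashMap.empty[String, Node]

def nodeFor(name: String, depsOf: String => List[String]): Node =
  cache.getOrElse(name, {
    val n = new Node(name)
    cache(name) = n                                   // publish first ...
    n.deps = depsOf(name).map(nodeFor(_, depsOf))     // ... then compute, possibly cyclically
    n
  })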
- val classBType = new ClassBType(internalName) - convertedClasses(classSym) = classBType - setClassInfo(classSym, classBType) - }) - } - - final def mirrorClassBTypeFromSymbol(moduleClassSym: Symbol): ClassBType = { - assert(moduleClassSym.isTopLevelModuleClass, s"not a top-level module class: $moduleClassSym") - val internalName = moduleClassSym.javaBinaryName.stripSuffix(StdNames.str.MODULE_SUFFIX) - val bType = ClassBType(internalName) - bType.info = ClassInfo( - superClass = Some(ObjectRef), - interfaces = Nil, - flags = asm.Opcodes.ACC_SUPER | asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_FINAL, - memberClasses = getMemberClasses(moduleClassSym).map(classBTypeFromSymbol), - nestedInfo = None - ) - bType - } - - private def setClassInfo(classSym: Symbol, classBType: ClassBType): ClassBType = { - val superClassSym: Symbol = { - val t = classSym.asClass.superClass - if (t.exists) t - else if (classSym.is(ModuleClass)) { - // workaround #371 - - println(s"Warning: mocking up superclass for $classSym") - defn.ObjectClass - } - else t - } - assert( - if (classSym == defn.ObjectClass) - superClassSym == NoSymbol - else if (classSym.isInterface) - superClassSym == defn.ObjectClass - else - // A ClassBType for a primitive class (scala.Boolean et al) is only created when compiling these classes. - ((superClassSym != NoSymbol) && !superClassSym.isInterface) || (isCompilingPrimitive && primitiveTypeMap.contains(classSym)), - s"Bad superClass for $classSym: $superClassSym" - ) - val superClass = if (superClassSym == NoSymbol) None - else Some(classBTypeFromSymbol(superClassSym)) - - /** - * All interfaces implemented by a class, except for those inherited through the superclass. - * Redundant interfaces are removed unless there is a super call to them. - */ - extension (sym: Symbol) def superInterfaces: List[Symbol] = { - val directlyInheritedTraits = sym.directlyInheritedTraits - val directlyInheritedTraitsSet = directlyInheritedTraits.toSet - val allBaseClasses = directlyInheritedTraits.iterator.flatMap(_.asClass.baseClasses.drop(1)).toSet - val superCalls = superCallsMap.getOrElse(sym, Set.empty) - val additional = (superCalls -- directlyInheritedTraitsSet).filter(_.is(Trait)) -// if (additional.nonEmpty) -// println(s"$fullName: adding supertraits $additional") - directlyInheritedTraits.filter(t => !allBaseClasses(t) || superCalls(t)) ++ additional - } - - val interfaces = classSym.superInterfaces.map(classBTypeFromSymbol) - - val flags = javaFlags(classSym) - - /* The InnerClass table of a class C must contain all nested classes of C, even if they are only - * declared but not otherwise referenced in C (from the bytecode or a method / field signature). - * We collect them here. - */ - val nestedClassSymbols = { - // The lambdalift phase lifts all nested classes to the enclosing class, so if we collect - // member classes right after lambdalift, we obtain all nested classes, including local and - // anonymous ones. - val nestedClasses = getNestedClasses(classSym) - - // If this is a top-level class, and it has a companion object, the member classes of the - // companion are added as members of the class. For example: - // class C { } - // object C { - // class D - // def f = { class E } - // } - // The class D is added as a member of class C. The reason is that the InnerClass attribute - // for D will containt class "C" and NOT the module class "C$" as the outer class of D. - // This is done by buildNestedInfo, the reason is Java compatibility, see comment in BTypes. 
- // For consistency, the InnerClass entry for D needs to be present in C - to Java it looks - // like D is a member of C, not C$. - val linkedClass = classSym.linkedClass - val companionModuleMembers = { - if (classSym.linkedClass.isTopLevelModuleClass) getMemberClasses(classSym.linkedClass) - else Nil - } - - nestedClasses ++ companionModuleMembers - } - - /** - * For nested java classes, the scala compiler creates both a class and a module (and therefore - * a module class) symbol. For example, in `class A { class B {} }`, the nestedClassSymbols - * for A contain both the class B and the module class B. - * Here we get rid of the module class B, making sure that the class B is present. - */ - val nestedClassSymbolsNoJavaModuleClasses = nestedClassSymbols.filter(s => { - if (s.is(JavaDefined) && s.is(ModuleClass)) { - // We could also search in nestedClassSymbols for s.linkedClassOfClass, but sometimes that - // returns NoSymbol, so it doesn't work. - val nb = nestedClassSymbols.count(mc => mc.name == s.name && mc.owner == s.owner) - // this assertion is specific to how ScalaC works. It doesn't apply to dotty, as n dotty there will be B & B$ - // assert(nb == 2, s"Java member module without member class: $s - $nestedClassSymbols") - false - } else true - }) - - val memberClasses = nestedClassSymbolsNoJavaModuleClasses.map(classBTypeFromSymbol) - - val nestedInfo = buildNestedInfo(classSym) - - classBType.info = ClassInfo(superClass, interfaces, flags, memberClasses, nestedInfo) - classBType - } - - /** For currently compiled classes: All locally defined classes including local classes. - * The empty list for classes that are not currently compiled. - */ - private def getNestedClasses(sym: Symbol): List[Symbol] = definedClasses(sym, flattenPhase) - - /** For currently compiled classes: All classes that are declared as members of this class - * (but not inherited ones). The empty list for classes that are not currently compiled. - */ - private def getMemberClasses(sym: Symbol): List[Symbol] = definedClasses(sym, lambdaLiftPhase) - - private def definedClasses(sym: Symbol, phase: Phase) = - if (sym.isDefinedInCurrentRun) - atPhase(phase) { - toDenot(sym).info.decls.filter(sym => sym.isClass && !sym.isEffectivelyErased) - } - else Nil - - private def buildNestedInfo(innerClassSym: Symbol): Option[NestedInfo] = { - assert(innerClassSym.isClass, s"Cannot build NestedInfo for non-class symbol $innerClassSym") - - val isNested = !innerClassSym.originalOwner.originalLexicallyEnclosingClass.is(PackageClass) - if (!isNested) None - else { - // See comment in BTypes, when is a class marked static in the InnerClass table. - val isStaticNestedClass = innerClassSym.originalOwner.originalLexicallyEnclosingClass.isOriginallyStaticOwner - - // After lambdalift (which is where we are), the rawowoner field contains the enclosing class. 
- val enclosingClassSym = { - if (innerClassSym.isClass) { - atPhase(flattenPhase.prev) { - toDenot(innerClassSym).owner.enclosingClass - } - } - else atPhase(flattenPhase.prev)(innerClassSym.enclosingClass) - } //todo is handled specially for JavaDefined symbols in scalac - - val enclosingClass: ClassBType = classBTypeFromSymbol(enclosingClassSym) - - val outerName: Option[String] = { - if (isAnonymousOrLocalClass(innerClassSym)) { - None - } else { - val outerName = innerClassSym.originalOwner.originalLexicallyEnclosingClass.javaBinaryName - def dropModule(str: String): String = - if (!str.isEmpty && str.last == '$') str.take(str.length - 1) else str - // Java compatibility. See the big comment in BTypes that summarizes the InnerClass spec. - val outerNameModule = - if (innerClassSym.originalOwner.originalLexicallyEnclosingClass.isTopLevelModuleClass) dropModule(outerName) - else outerName - Some(outerNameModule.toString) - } - } - - val innerName: Option[String] = { - if (innerClassSym.isAnonymousClass || innerClassSym.isAnonymousFunction) None - else { - val original = innerClassSym.initial - Some(atPhase(original.validFor.phaseId)(innerClassSym.name).mangledString) // moduleSuffix for module classes - } - } - - Some(NestedInfo(enclosingClass, outerName, innerName, isStaticNestedClass)) - } - } - - /** - * This is basically a re-implementation of sym.isStaticOwner, but using the originalOwner chain. - * - * The problem is that we are interested in a source-level property. Various phases changed the - * symbol's properties in the meantime, mostly lambdalift modified (destructively) the owner. - * Therefore, `sym.isStatic` is not what we want. For example, in - * object T { def f { object U } } - * the owner of U is T, so UModuleClass.isStatic is true. Phase travel does not help here. - */ - extension (sym: Symbol) - private def isOriginallyStaticOwner: Boolean = - sym.is(PackageClass) || sym.is(ModuleClass) && sym.originalOwner.originalLexicallyEnclosingClass.isOriginallyStaticOwner - - /** - * Return the Java modifiers for the given symbol. - * Java modifiers for classes: - * - public, abstract, final, strictfp (not used) - * for interfaces: - * - the same as for classes, without 'final' - * for fields: - * - public, private (*) - * - static, final - * for methods: - * - the same as for fields, plus: - * - abstract, synchronized (not used), strictfp (not used), native (not used) - * for all: - * - deprecated - * - * (*) protected cannot be used, since inner classes 'see' protected members, - * and they would fail verification after lifted. - */ - final def javaFlags(sym: Symbol): Int = { - - // Classes are always emitted as public. This matches the behavior of Scala 2 - // and is necessary for object deserialization to work properly, otherwise - // ModuleSerializationProxy may fail with an accessiblity error (see - // tests/run/serialize.scala and https://github.com/typelevel/cats-effect/pull/2360). 
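The flag assembly just below is written with `GenBCodeOps.addFlagIf`, whose definition is not shown in this file. It is presumably equivalent to the following small extension (a sketch under that assumption, not the actual implementation): OR a flag into the accumulator only when the condition holds.

import scala.tools.asm.Opcodes

extension (flags: Int)
  def addFlagIf(cond: Boolean, flag: Int): Int =
    if cond then flags | flag else flags

// e.g. a public, final, non-abstract member:
val acc = 0.addFlagIf(true, Opcodes.ACC_PUBLIC)
           .addFlagIf(true, Opcodes.ACC_FINAL)
           .addFlagIf(false, Opcodes.ACC_ABSTRACT)
// acc == (Opcodes.ACC_PUBLIC | Opcodes.ACC_FINAL)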
- val privateFlag = !sym.isClass && (sym.is(Private) || (sym.isPrimaryConstructor && sym.owner.isTopLevelModuleClass)) - - val finalFlag = sym.is(Final) && !toDenot(sym).isClassConstructor && !sym.is(Mutable, butNot = Accessor) && !sym.enclosingClass.is(Trait) - - import asm.Opcodes._ - import GenBCodeOps.addFlagIf - 0 .addFlagIf(privateFlag, ACC_PRIVATE) - .addFlagIf(!privateFlag, ACC_PUBLIC) - .addFlagIf(sym.is(Deferred) || sym.isOneOf(AbstractOrTrait), ACC_ABSTRACT) - .addFlagIf(sym.isInterface, ACC_INTERFACE) - .addFlagIf(finalFlag - // Primitives are "abstract final" to prohibit instantiation - // without having to provide any implementations, but that is an - // illegal combination of modifiers at the bytecode level so - // suppress final if abstract if present. - && !sym.isOneOf(AbstractOrTrait) - // Mixin forwarders are bridges and can be final, but final bridges confuse some frameworks - && !sym.is(Bridge), ACC_FINAL) - .addFlagIf(sym.isStaticMember, ACC_STATIC) - .addFlagIf(sym.is(Bridge), ACC_BRIDGE | ACC_SYNTHETIC) - .addFlagIf(sym.is(Artifact), ACC_SYNTHETIC) - .addFlagIf(sym.isClass && !sym.isInterface, ACC_SUPER) - .addFlagIf(sym.isAllOf(JavaEnum), ACC_ENUM) - .addFlagIf(sym.is(JavaVarargs), ACC_VARARGS) - .addFlagIf(sym.is(Synchronized), ACC_SYNCHRONIZED) - .addFlagIf(sym.isDeprecated, ACC_DEPRECATED) - .addFlagIf(sym.is(Enum), ACC_ENUM) - } - - def javaFieldFlags(sym: Symbol) = { - import asm.Opcodes._ - import GenBCodeOps.addFlagIf - javaFlags(sym) - .addFlagIf(sym.hasAnnotation(TransientAttr), ACC_TRANSIENT) - .addFlagIf(sym.hasAnnotation(VolatileAttr), ACC_VOLATILE) - .addFlagIf(!sym.is(Mutable), ACC_FINAL) - } -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/BytecodeWriters.scala b/tests/pos-with-compiler-cc/backend/jvm/BytecodeWriters.scala deleted file mode 100644 index 551d4f8d809e..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/BytecodeWriters.scala +++ /dev/null @@ -1,147 +0,0 @@ -package dotty.tools -package backend -package jvm - -import scala.language.unsafeNulls - -import java.io.{ DataOutputStream, FileOutputStream, IOException, File as JFile } -import java.nio.channels.ClosedByInterruptException -import dotty.tools.io._ -import dotty.tools.dotc.report - - -/** Can't output a file due to the state of the file system. */ -class FileConflictException(msg: String, val file: AbstractFile) extends IOException(msg) - -/** For the last mile: turning generated bytecode in memory into - * something you can use. Has implementations for writing to class - * files, jars, and disassembled/javap output. 
- */ -trait BytecodeWriters { - val int: DottyBackendInterface - import int.{_, given} - - /** - * @param clsName cls.getName - */ - def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = { - def ensureDirectory(dir: AbstractFile): AbstractFile = - if (dir.isDirectory) dir - else throw new FileConflictException(s"${base.path}/$clsName$suffix: ${dir.path} is not a directory", dir) - var dir = base - val pathParts = clsName.split("[./]").toList - for (part <- pathParts.init) dir = ensureDirectory(dir) subdirectoryNamed part - ensureDirectory(dir) fileNamed pathParts.last + suffix - } - def getFile(sym: Symbol, clsName: String, suffix: String): AbstractFile = - getFile(outputDirectory, clsName, suffix) - - def factoryNonJarBytecodeWriter(): BytecodeWriter = { - val emitAsmp = None - val doDump = dumpClasses - (emitAsmp.isDefined, doDump.isDefined) match { - case (false, false) => new ClassBytecodeWriter { } - case (false, true ) => new ClassBytecodeWriter with DumpBytecodeWriter { } - case (true, false) => new ClassBytecodeWriter with AsmpBytecodeWriter - case (true, true ) => new ClassBytecodeWriter with AsmpBytecodeWriter with DumpBytecodeWriter { } - } - } - - trait BytecodeWriter { - def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile): Unit - def close(): Unit = () - } - - class DirectToJarfileWriter(jfile: JFile) extends BytecodeWriter { - val writer = new Jar(jfile).jarWriter() - - def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile): Unit = { - assert(outfile == null, - "The outfile formal param is there just because ClassBytecodeWriter overrides this method and uses it.") - val path = jclassName + ".class" - val out = writer.newOutputStream(path) - - try out.write(jclassBytes, 0, jclassBytes.length) - finally out.flush() - - report.informProgress("added " + label + path + " to jar") - } - override def close() = writer.close() - } - - /* - * The ASM textual representation for bytecode overcomes disadvantages of javap output in three areas: - * (a) pickle dingbats undecipherable to the naked eye; - * (b) two constant pools, while having identical contents, are displayed differently due to physical layout. - * (c) stack maps (classfile version 50 and up) are displayed in encoded form by javap, - * their expansion by ASM is more readable. - * - * */ - trait AsmpBytecodeWriter extends BytecodeWriter { - import scala.tools.asm - - private val baseDir = new Directory(None.get).createDirectory() // FIXME missing directoy - // new needed here since resolution of user-defined `apply` methods is ambiguous, and we want the constructor. 
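`factoryNonJarBytecodeWriter` above composes writers via stackable traits: each `abstract override def writeClass` calls `super.writeClass` first and then adds its own behaviour. A minimal standalone illustration of that pattern follows; the trait names are invented and unrelated to the real writers.

trait Writer { def write(msg: String): Unit }
trait ClassWriterBase extends Writer { def write(msg: String): Unit = println(s"class file: $msg") }
trait AsmpMixin extends Writer { abstract override def write(msg: String): Unit = { super.write(msg); println(s"asmp dump: $msg") } }
trait DumpMixin extends Writer { abstract override def write(msg: String): Unit = { super.write(msg); println(s"class dump: $msg") } }

@main def writerDemo(): Unit = {
  // Linearization runs DumpMixin, which delegates to AsmpMixin, which delegates to the base:
  val w = new ClassWriterBase with AsmpMixin with DumpMixin {}
  w.write("Foo") // prints "class file: Foo", then "asmp dump: Foo", then "class dump: Foo"
}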
- - private def emitAsmp(jclassBytes: Array[Byte], asmpFile: dotty.tools.io.File): Unit = { - val pw = asmpFile.printWriter() - try { - val cnode = new ClassNode1() - val cr = new asm.ClassReader(jclassBytes) - cr.accept(cnode, 0) - val trace = new scala.tools.asm.util.TraceClassVisitor(new java.io.PrintWriter(new java.io.StringWriter())) - cnode.accept(trace) - trace.p.print(pw) - } - finally pw.close() - } - - abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile): Unit = { - super.writeClass(label, jclassName, jclassBytes, outfile) - - val segments = jclassName.split("[./]") - val asmpFile = segments.foldLeft(baseDir: Path)(_ / _).changeExtension("asmp").toFile - - asmpFile.parent.createDirectory() - emitAsmp(jclassBytes, asmpFile) - } - } - - trait ClassBytecodeWriter extends BytecodeWriter { - def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile): Unit = { - assert(outfile != null, - "Precisely this override requires its invoker to hand out a non-null AbstractFile.") - val outstream = new DataOutputStream(outfile.bufferedOutput) - - try outstream.write(jclassBytes, 0, jclassBytes.length) - catch case ex: ClosedByInterruptException => - try - outfile.delete() // don't leave an empty or half-written classfile around after an interrupt - catch - case _: Throwable => - throw ex - finally outstream.close() - report.informProgress("wrote '" + label + "' to " + outfile) - } - } - - trait DumpBytecodeWriter extends BytecodeWriter { - val baseDir = Directory(dumpClasses.get).createDirectory() - - abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile): Unit = { - super.writeClass(label, jclassName, jclassBytes, outfile) - - val pathName = jclassName - val dumpFile = pathName.split("[./]").foldLeft(baseDir: Path) (_ / _).changeExtension("class").toFile - dumpFile.parent.createDirectory() - val outstream = new DataOutputStream(new FileOutputStream(dumpFile.path)) - - try outstream.write(jclassBytes, 0, jclassBytes.length) - finally outstream.close() - } - } - - private def dumpClasses: Option[String] = - if (ctx.settings.Ydumpclasses.isDefault) None - else Some(ctx.settings.Ydumpclasses.value) -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/ClassNode1.java b/tests/pos-with-compiler-cc/backend/jvm/ClassNode1.java deleted file mode 100644 index c5594ae3dea6..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/ClassNode1.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package dotty.tools.backend.jvm; - -import scala.tools.asm.MethodVisitor; -import scala.tools.asm.Opcodes; -import scala.tools.asm.tree.ClassNode; -import scala.tools.asm.tree.MethodNode; - -/** - * A subclass of {@link ClassNode} to customize the representation of - * label nodes with {@link LabelNode1}. 
- */ -public class ClassNode1 extends ClassNode { - public ClassNode1() { - this(Opcodes.ASM6); - } - - public ClassNode1(int api) { - super(api); - } - - @Override - public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) { - MethodNode method = new MethodNode1(access, name, descriptor, signature, exceptions); - methods.add(method); - return method; - } -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/CollectSuperCalls.scala b/tests/pos-with-compiler-cc/backend/jvm/CollectSuperCalls.scala deleted file mode 100644 index 299c1c75d6cf..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/CollectSuperCalls.scala +++ /dev/null @@ -1,48 +0,0 @@ -package dotty.tools.backend.jvm - -import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.core.Phases._ -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.core.Flags.Trait -import dotty.tools.dotc.transform.MegaPhase.MiniPhase - -/** Collect all super calls to trait members. - * - * For each super reference to trait member, register a call from the current class to the - * owner of the referenced member. - * - * This information is used to know if it is safe to remove a redundant mixin class. - * A redundant mixin class is one that is implemented by another mixin class. As the - * methods in a redundant mixin class could be implemented with a default abstract method, - * the redundant mixin class could be required as a parent by the JVM. - */ -class CollectSuperCalls extends MiniPhase { - import tpd._ - - override def phaseName: String = CollectSuperCalls.name - - override def description: String = CollectSuperCalls.description - - override def transformSelect(tree: Select)(using Context): Tree = { - tree.qualifier match { - case sup: Super => - if (tree.symbol.owner.is(Trait)) - registerSuperCall(ctx.owner.enclosingClass.asClass, tree.symbol.owner.asClass) - case _ => - } - tree - } - - private def registerSuperCall(sym: ClassSymbol, calls: ClassSymbol)(using Context) = { - genBCodePhase match { - case genBCodePhase: GenBCode => - genBCodePhase.registerSuperCall(sym, calls) - case _ => - } - } -} - -object CollectSuperCalls: - val name: String = "collectSuperCalls" - val description: String = "find classes that are called with super" diff --git a/tests/pos-with-compiler-cc/backend/jvm/CoreBTypes.scala b/tests/pos-with-compiler-cc/backend/jvm/CoreBTypes.scala deleted file mode 100644 index d5fce3f53627..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/CoreBTypes.scala +++ /dev/null @@ -1,294 +0,0 @@ -package dotty.tools -package backend -package jvm - - -import dotty.tools.dotc.core.Symbols._ -import dotty.tools.dotc.transform.Erasure -import scala.tools.asm.{Handle, Opcodes} -import dotty.tools.dotc.core.StdNames - -/** - * Core BTypes and some other definitions. The initialization of these definitions requies access - * to symbols / types (global). - * - * The symbols used to initialize the ClassBTypes may change from one compiler run to the next. To - * make sure the definitions are consistent with the symbols in the current run, the - * `intializeCoreBTypes` method in BTypesFromSymbols creates a new instance of CoreBTypes in each - * compiler run. - * - * The class BTypesFromSymbols does not directly reference CoreBTypes, but CoreBTypesProxy. The - * reason is that having a `var bTypes: CoreBTypes` would not allow `import bTypes._`. 
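The constraint mentioned above, that a `var bTypes: CoreBTypes` would not allow `import bTypes._`, comes from the language rule that an import prefix must be a stable identifier. A minimal hypothetical sketch of the resulting val-proxy-over-var arrangement (names invented, not the real CoreBTypesProxy):

class Core { lazy val objectRef: String = "java/lang/Object" }   // stands in for the real core BTypes

class CoreProxy {
  private var current: Core = new Core
  def setCore(c: Core): Unit = current = c   // re-assigned for each compiler run
  def objectRef: String = current.objectRef  // forwards to the current instance
}

object Holder {
  val proxy: CoreProxy = new CoreProxy       // a val: `import Holder.proxy.*` compiles
  var unstable: Core = new Core              // a var: `import Holder.unstable.*` would be rejected
}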
Instead, the - * proxy class holds a `CoreBTypes` in a variable field and forwards to this instance. - * - * The definitions in `CoreBTypes` need to be lazy vals to break an initialization cycle. When - * creating a new instance to assign to the proxy, the `classBTypeFromSymbol` invoked in the - * constructor will actucally go through the proxy. The lazy vals make sure the instance is assigned - * in the proxy before the fields are initialized. - * - * Note: if we did not re-create the core BTypes on each compiler run, BType.classBTypeFromInternalNameMap - * could not be a perRunCache anymore: the classes defeined here need to be in that map, they are - * added when the ClassBTypes are created. The per run cache removes them, so they would be missing - * in the second run. - */ -class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTypes: BTFS) { - import bTypes._ - import int.given - import DottyBackendInterface._ - - //import global._ - //import rootMirror.{requiredClass, getClassIfDefined} - //import definitions._ - - /** - * Maps primitive types to their corresponding PrimitiveBType. The map is defined lexically above - * the first use of `classBTypeFromSymbol` because that method looks at the map. - */ - lazy val primitiveTypeMap: Map[Symbol, PrimitiveBType] = Map( - defn.UnitClass -> UNIT, - defn.BooleanClass -> BOOL, - defn.CharClass -> CHAR, - defn.ByteClass -> BYTE, - defn.ShortClass -> SHORT, - defn.IntClass -> INT, - defn.LongClass -> LONG, - defn.FloatClass -> FLOAT, - defn.DoubleClass -> DOUBLE - ) - - private lazy val BOXED_UNIT : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Void]) - private lazy val BOXED_BOOLEAN : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Boolean]) - private lazy val BOXED_BYTE : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Byte]) - private lazy val BOXED_SHORT : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Short]) - private lazy val BOXED_CHAR : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Character]) - private lazy val BOXED_INT : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Integer]) - private lazy val BOXED_LONG : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Long]) - private lazy val BOXED_FLOAT : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Float]) - private lazy val BOXED_DOUBLE : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Double]) - - /** - * Map from primitive types to their boxed class type. Useful when pushing class literals onto the - * operand stack (ldc instruction taking a class literal), see genConstant. - */ - lazy val boxedClassOfPrimitive: Map[PrimitiveBType, ClassBType] = Map( - UNIT -> BOXED_UNIT, - BOOL -> BOXED_BOOLEAN, - BYTE -> BOXED_BYTE, - SHORT -> BOXED_SHORT, - CHAR -> BOXED_CHAR, - INT -> BOXED_INT, - LONG -> BOXED_LONG, - FLOAT -> BOXED_FLOAT, - DOUBLE -> BOXED_DOUBLE - ) - - lazy val boxedClasses: Set[ClassBType] = boxedClassOfPrimitive.values.toSet - - /** - * Maps the method symbol for a box method to the boxed type of the result. For example, the - * method symbol for `Byte.box()` is mapped to the ClassBType `java/lang/Byte`. - */ - lazy val boxResultType: Map[Symbol, ClassBType] = { - val boxMethods = defn.ScalaValueClasses().map{x => // @darkdimius Are you sure this should be a def? 
- (x, Erasure.Boxing.boxMethod(x.asClass)) - }.toMap - for ((valueClassSym, boxMethodSym) <- boxMethods) - yield boxMethodSym -> boxedClassOfPrimitive(primitiveTypeMap(valueClassSym)) - } - - /** - * Maps the method symbol for an unbox method to the primitive type of the result. - * For example, the method symbol for `Byte.unbox()`) is mapped to the PrimitiveBType BYTE. */ - lazy val unboxResultType: Map[Symbol, PrimitiveBType] = { - val unboxMethods: Map[Symbol, Symbol] = - defn.ScalaValueClasses().map(x => (x, Erasure.Boxing.unboxMethod(x.asClass))).toMap - for ((valueClassSym, unboxMethodSym) <- unboxMethods) - yield unboxMethodSym -> primitiveTypeMap(valueClassSym) - } - - /* - * srNothingRef and srNullRef exist at run-time only. They are the bytecode-level manifestation (in - * method signatures only) of what shows up as NothingClass (scala.Nothing) resp. NullClass (scala.Null) in Scala ASTs. - * - * Therefore, when srNothingRef or srNullRef are to be emitted, a mapping is needed: the internal - * names of NothingClass and NullClass can't be emitted as-is. - * TODO @lry Once there's a 2.11.3 starr, use the commented argument list. The current starr crashes on the type literal `scala.runtime.Nothing$` - */ - lazy val srNothingRef : ClassBType = classBTypeFromSymbol(requiredClass("scala.runtime.Nothing$")) // (requiredClass[scala.runtime.Nothing$]) - lazy val srNullRef : ClassBType = classBTypeFromSymbol(requiredClass("scala.runtime.Null$")) // (requiredClass[scala.runtime.Null$]) - - lazy val ObjectRef : ClassBType = classBTypeFromSymbol(defn.ObjectClass) - lazy val StringRef : ClassBType = classBTypeFromSymbol(defn.StringClass) - lazy val jlStringBuilderRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.StringBuilder]) - lazy val jlStringBufferRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.StringBuffer]) - lazy val jlCharSequenceRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.CharSequence]) - lazy val jlClassRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.Class[_]]) - lazy val jlThrowableRef : ClassBType = classBTypeFromSymbol(defn.ThrowableClass) - lazy val jlCloneableRef : ClassBType = classBTypeFromSymbol(defn.JavaCloneableClass) // java/lang/Cloneable - lazy val jioSerializableRef : ClassBType = classBTypeFromSymbol(requiredClass[java.io.Serializable]) // java/io/Serializable - lazy val jlClassCastExceptionRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.ClassCastException]) // java/lang/ClassCastException - lazy val jlIllegalArgExceptionRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.IllegalArgumentException]) - lazy val jliSerializedLambdaRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.SerializedLambda]) - - lazy val srBoxesRunTimeRef: ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.BoxesRunTime]) - - private lazy val jliCallSiteRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.CallSite]) - private lazy val jliLambdaMetafactoryRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.LambdaMetafactory]) - private lazy val jliMethodHandleRef : ClassBType = classBTypeFromSymbol(defn.MethodHandleClass) - private lazy val jliMethodHandlesLookupRef : ClassBType = classBTypeFromSymbol(defn.MethodHandlesLookupClass) - private lazy val jliMethodTypeRef : ClassBType = classBTypeFromSymbol(requiredClass[java.lang.invoke.MethodType]) - private lazy val jliStringConcatFactoryRef : ClassBType = 
classBTypeFromSymbol(requiredClass("java.lang.invoke.StringConcatFactory")) // since JDK 9 - private lazy val srLambdaDeserialize : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.LambdaDeserialize]) - - lazy val jliLambdaMetaFactoryMetafactoryHandle: Handle = new Handle( - Opcodes.H_INVOKESTATIC, - jliLambdaMetafactoryRef.internalName, - "metafactory", - MethodBType( - List(jliMethodHandlesLookupRef, StringRef, jliMethodTypeRef, jliMethodTypeRef, jliMethodHandleRef, jliMethodTypeRef), - jliCallSiteRef - ).descriptor, - /* itf = */ false) - - lazy val jliLambdaMetaFactoryAltMetafactoryHandle: Handle = new Handle( - Opcodes.H_INVOKESTATIC, - jliLambdaMetafactoryRef.internalName, - "altMetafactory", - MethodBType( - List(jliMethodHandlesLookupRef, StringRef, jliMethodTypeRef, ArrayBType(ObjectRef)), - jliCallSiteRef - ).descriptor, - /* itf = */ false) - - lazy val jliLambdaDeserializeBootstrapHandle: Handle = new Handle( - Opcodes.H_INVOKESTATIC, - srLambdaDeserialize.internalName, - "bootstrap", - MethodBType( - List(jliMethodHandlesLookupRef, StringRef, jliMethodTypeRef, ArrayBType(jliMethodHandleRef)), - jliCallSiteRef - ).descriptor, - /* itf = */ false) - - lazy val jliStringConcatFactoryMakeConcatWithConstantsHandle = new Handle( - Opcodes.H_INVOKESTATIC, - jliStringConcatFactoryRef.internalName, - "makeConcatWithConstants", - MethodBType( - List(jliMethodHandlesLookupRef, StringRef, jliMethodTypeRef, StringRef, ArrayBType(ObjectRef)), - jliCallSiteRef - ).descriptor, - /* itf = */ false) - - /** - * Methods in scala.runtime.BoxesRuntime - */ - lazy val asmBoxTo : Map[BType, MethodNameAndType] = Map( - BOOL -> MethodNameAndType("boxToBoolean", MethodBType(List(BOOL), BOXED_BOOLEAN)), - BYTE -> MethodNameAndType("boxToByte", MethodBType(List(BYTE), BOXED_BYTE)), - CHAR -> MethodNameAndType("boxToCharacter", MethodBType(List(CHAR), BOXED_CHAR)), - SHORT -> MethodNameAndType("boxToShort", MethodBType(List(SHORT), BOXED_SHORT)), - INT -> MethodNameAndType("boxToInteger", MethodBType(List(INT), BOXED_INT)), - LONG -> MethodNameAndType("boxToLong", MethodBType(List(LONG), BOXED_LONG)), - FLOAT -> MethodNameAndType("boxToFloat", MethodBType(List(FLOAT), BOXED_FLOAT)), - DOUBLE -> MethodNameAndType("boxToDouble", MethodBType(List(DOUBLE), BOXED_DOUBLE)) - ) - - lazy val asmUnboxTo: Map[BType, MethodNameAndType] = Map( - BOOL -> MethodNameAndType("unboxToBoolean", MethodBType(List(ObjectRef), BOOL)), - BYTE -> MethodNameAndType("unboxToByte", MethodBType(List(ObjectRef), BYTE)), - CHAR -> MethodNameAndType("unboxToChar", MethodBType(List(ObjectRef), CHAR)), - SHORT -> MethodNameAndType("unboxToShort", MethodBType(List(ObjectRef), SHORT)), - INT -> MethodNameAndType("unboxToInt", MethodBType(List(ObjectRef), INT)), - LONG -> MethodNameAndType("unboxToLong", MethodBType(List(ObjectRef), LONG)), - FLOAT -> MethodNameAndType("unboxToFloat", MethodBType(List(ObjectRef), FLOAT)), - DOUBLE -> MethodNameAndType("unboxToDouble", MethodBType(List(ObjectRef), DOUBLE)) - ) - - lazy val typeOfArrayOp: Map[Int, BType] = { - import dotty.tools.backend.ScalaPrimitivesOps._ - Map( - (List(ZARRAY_LENGTH, ZARRAY_GET, ZARRAY_SET) map (_ -> BOOL)) ++ - (List(BARRAY_LENGTH, BARRAY_GET, BARRAY_SET) map (_ -> BYTE)) ++ - (List(SARRAY_LENGTH, SARRAY_GET, SARRAY_SET) map (_ -> SHORT)) ++ - (List(CARRAY_LENGTH, CARRAY_GET, CARRAY_SET) map (_ -> CHAR)) ++ - (List(IARRAY_LENGTH, IARRAY_GET, IARRAY_SET) map (_ -> INT)) ++ - (List(LARRAY_LENGTH, LARRAY_GET, LARRAY_SET) map (_ -> LONG)) ++ - 
(List(FARRAY_LENGTH, FARRAY_GET, FARRAY_SET) map (_ -> FLOAT)) ++ - (List(DARRAY_LENGTH, DARRAY_GET, DARRAY_SET) map (_ -> DOUBLE)) ++ - (List(OARRAY_LENGTH, OARRAY_GET, OARRAY_SET) map (_ -> ObjectRef)) : _* - ) - } -} - -/** - * This trait make some core BTypes availalbe that don't depend on a Global instance. Some core - * BTypes are required to be accessible in the BTypes trait, which does not have access to Global. - * - * BTypes cannot refer to CoreBTypesProxy because some of its members depend on global, for example - * the type Symbol in - * def primitiveTypeMap: Map[Symbol, PrimitiveBType] - */ -trait CoreBTypesProxyGlobalIndependent[BTS <: BTypes] { - val bTypes: BTS - import bTypes._ - - def boxedClasses: Set[ClassBType] - - def srNothingRef : ClassBType - def srNullRef : ClassBType - - def ObjectRef : ClassBType - def jlCloneableRef : ClassBType - def jiSerializableRef : ClassBType -} - -/** - * See comment in class [[CoreBTypes]]. - */ -final class CoreBTypesProxy[BTFS <: BTypesFromSymbols[_ <: DottyBackendInterface]](val bTypes: BTFS) extends CoreBTypesProxyGlobalIndependent[BTFS] { - import bTypes._ - - private var _coreBTypes: CoreBTypes[bTypes.type] = _ - def setBTypes(coreBTypes: CoreBTypes[BTFS]): Unit = { - _coreBTypes = coreBTypes.asInstanceOf[CoreBTypes[bTypes.type]] - } - - def primitiveTypeMap: Map[Symbol, PrimitiveBType] = _coreBTypes.primitiveTypeMap - - def boxedClasses: Set[ClassBType] = _coreBTypes.boxedClasses - - def boxedClassOfPrimitive: Map[PrimitiveBType, ClassBType] = _coreBTypes.boxedClassOfPrimitive - - def boxResultType: Map[Symbol, ClassBType] = _coreBTypes.boxResultType - - def unboxResultType: Map[Symbol, PrimitiveBType] = _coreBTypes.unboxResultType - - def srNothingRef : ClassBType = _coreBTypes.srNothingRef - def srNullRef : ClassBType = _coreBTypes.srNullRef - - def ObjectRef : ClassBType = _coreBTypes.ObjectRef - def StringRef : ClassBType = _coreBTypes.StringRef - def jlStringBuilderRef : ClassBType = _coreBTypes.jlStringBuilderRef - def jlStringBufferRef : ClassBType = _coreBTypes.jlStringBufferRef - def jlCharSequenceRef : ClassBType = _coreBTypes.jlCharSequenceRef - def jlClassRef : ClassBType = _coreBTypes.jlClassRef - def jlThrowableRef : ClassBType = _coreBTypes.jlThrowableRef - def jlCloneableRef : ClassBType = _coreBTypes.jlCloneableRef - def jiSerializableRef : ClassBType = _coreBTypes.jioSerializableRef - def jlClassCastExceptionRef : ClassBType = _coreBTypes.jlClassCastExceptionRef - def jlIllegalArgExceptionRef : ClassBType = _coreBTypes.jlIllegalArgExceptionRef - def jliSerializedLambdaRef : ClassBType = _coreBTypes.jliSerializedLambdaRef - - def srBoxesRuntimeRef: ClassBType = _coreBTypes.srBoxesRunTimeRef - - def jliLambdaMetaFactoryMetafactoryHandle : Handle = _coreBTypes.jliLambdaMetaFactoryMetafactoryHandle - def jliLambdaMetaFactoryAltMetafactoryHandle : Handle = _coreBTypes.jliLambdaMetaFactoryAltMetafactoryHandle - def jliLambdaDeserializeBootstrapHandle : Handle = _coreBTypes.jliLambdaDeserializeBootstrapHandle - def jliStringConcatFactoryMakeConcatWithConstantsHandle: Handle = _coreBTypes.jliStringConcatFactoryMakeConcatWithConstantsHandle - - def asmBoxTo : Map[BType, MethodNameAndType] = _coreBTypes.asmBoxTo - def asmUnboxTo: Map[BType, MethodNameAndType] = _coreBTypes.asmUnboxTo - - def typeOfArrayOp: Map[Int, BType] = _coreBTypes.typeOfArrayOp -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/DottyBackendInterface.scala b/tests/pos-with-compiler-cc/backend/jvm/DottyBackendInterface.scala deleted file mode 
100644 index 6ce434015b8c..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/DottyBackendInterface.scala +++ /dev/null @@ -1,204 +0,0 @@ -package dotty.tools.backend.jvm - -import scala.language.unsafeNulls - -import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.transform.SymUtils._ -import java.io.{File => _} - -import scala.reflect.ClassTag -import dotty.tools.io.AbstractFile -import dotty.tools.dotc.core._ -import Contexts._ -import Types._ -import Symbols._ -import Phases._ -import Decorators.em - -import dotty.tools.dotc.util.ReadOnlyMap -import dotty.tools.dotc.report - -import tpd._ - -import StdNames.nme -import NameKinds.LazyBitMapName -import Names.Name - -class DottyBackendInterface(val outputDirectory: AbstractFile, val superCallsMap: ReadOnlyMap[Symbol, Set[ClassSymbol]])(using val ctx: DetachedContext) { - - private val desugared = new java.util.IdentityHashMap[Type, tpd.Select] - - def cachedDesugarIdent(i: Ident): Option[tpd.Select] = { - var found = desugared.get(i.tpe) - if (found == null) { - tpd.desugarIdent(i) match { - case sel: tpd.Select => - desugared.put(i.tpe, sel) - found = sel - case _ => - } - } - if (found == null) None else Some(found) - } - - object DesugaredSelect extends DeconstructorCommon[tpd.Tree] { - - var desugared: tpd.Select = null - - override def isEmpty: Boolean = - desugared eq null - - def _1: Tree = desugared.qualifier - - def _2: Name = desugared.name - - override def unapply(s: tpd.Tree): this.type = { - s match { - case t: tpd.Select => desugared = t - case t: Ident => - cachedDesugarIdent(t) match { - case Some(t) => desugared = t - case None => desugared = null - } - case _ => desugared = null - } - - this - } - } - - object ArrayValue extends DeconstructorCommon[tpd.JavaSeqLiteral] { - def _1: Type = field.tpe match { - case JavaArrayType(elem) => elem - case _ => - report.error(em"JavaSeqArray with type ${field.tpe} reached backend: $field", ctx.source.atSpan(field.span)) - UnspecifiedErrorType - } - def _2: List[Tree] = field.elems - } - - abstract class DeconstructorCommon[T >: Null <: AnyRef] { - var field: T = null - def get: this.type = this - def isEmpty: Boolean = field eq null - def isDefined = !isEmpty - def unapply(s: T): this.type ={ - field = s - this - } - } - -} - -object DottyBackendInterface { - - private def erasureString(clazz: Class[_]): String = { - if (clazz.isArray) "Array[" + erasureString(clazz.getComponentType) + "]" - else clazz.getName - } - - def requiredClass(str: String)(using Context): ClassSymbol = - Symbols.requiredClass(str) - - def requiredClass[T](using evidence: ClassTag[T], ctx: Context): Symbol = - requiredClass(erasureString(evidence.runtimeClass)) - - def requiredModule(str: String)(using Context): Symbol = - Symbols.requiredModule(str) - - def requiredModule[T](using evidence: ClassTag[T], ctx: Context): Symbol = { - val moduleName = erasureString(evidence.runtimeClass) - val className = if (moduleName.endsWith("$")) moduleName.dropRight(1) else moduleName - requiredModule(className) - } - - given symExtensions: AnyRef with - extension (sym: Symbol) - - def isInterface(using Context): Boolean = (sym.is(PureInterface)) || sym.is(Trait) - - def isStaticConstructor(using Context): Boolean = (sym.isStaticMember && sym.isClassConstructor) || (sym.name eq nme.STATIC_CONSTRUCTOR) - - /** Fields of static modules will be static at backend - * - * Note that lazy val encoding assumes bitmap fields are non-static. 
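Editor's illustration of the rule documented above (not part of the deleted file): fields of a top-level object end up as static fields of its module class at the backend, while the bitmap field of the legacy lazy-val encoding stays non-static, as the comment notes.

// Hypothetical example, assuming the old lazy-val encoding mentioned above.
object Settings {
  val verbose: Boolean = false                    // emitted as a static field of Settings$
  lazy val home: String = sys.props("user.home")  // its bitmap field (old encoding) remains non-static
}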
- * See also `genPlainClass` in `BCodeSkelBuilder.scala`. - * - * TODO: remove the special handing of `LazyBitMapName` once we swtich to - * the new lazy val encoding: https://github.com/scala/scala3/issues/7140 - */ - def isStaticModuleField(using Context): Boolean = - sym.owner.isStaticModuleClass && sym.isField && !sym.name.is(LazyBitMapName) - - def isStaticMember(using Context): Boolean = (sym ne NoSymbol) && - (sym.is(JavaStatic) || sym.isScalaStatic || sym.isStaticModuleField) - // guard against no sumbol cause this code is executed to select which call type(static\dynamic) to use to call array.clone - - /** - * True for module classes of modules that are top-level or owned only by objects. Module classes - * for such objects will get a MODULE$ flag and a corresponding static initializer. - */ - def isStaticModuleClass(using Context): Boolean = - (sym.is(Module)) && { - // scalac uses atPickling here - // this would not work if modules are created after pickling - // for example by specialization - val original = toDenot(sym).initial - val validity = original.validFor - atPhase(validity.phaseId) { - toDenot(sym).isStatic - } - } - - - - def originalLexicallyEnclosingClass(using Context): Symbol = - // used to populate the EnclosingMethod attribute. - // it is very tricky in presence of classes(and annonymous classes) defined inside supper calls. - if (sym.exists) { - val validity = toDenot(sym).initial.validFor - atPhase(validity.phaseId) { - toDenot(sym).lexicallyEnclosingClass - } - } else NoSymbol - - /** - * True for module classes of package level objects. The backend will generate a mirror class for - * such objects. - */ - def isTopLevelModuleClass(using Context): Boolean = - sym.is(ModuleClass) && - atPhase(flattenPhase) { - toDenot(sym).owner.is(PackageClass) - } - - def javaSimpleName(using Context): String = toDenot(sym).name.mangledString - def javaClassName(using Context): String = toDenot(sym).fullName.mangledString - def javaBinaryName(using Context): String = javaClassName.replace('.', '/') - - end extension - - end symExtensions - - private val primitiveCompilationUnits = Set( - "Unit.scala", - "Boolean.scala", - "Char.scala", - "Byte.scala", - "Short.scala", - "Int.scala", - "Float.scala", - "Long.scala", - "Double.scala" - ) - - /** - * True if the current compilation unit is of a primitive class (scala.Boolean et al). - * Used only in assertions. 
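Editor's sketch for the mirror-class comment above (not part of the deleted sources): a package-level object produces both a module class and a mirror class with static forwarders.

// Hypothetical example: for this top-level object the backend emits
//   Greeter$.class -- the module class, holding MODULE$ and the method implementation
//   Greeter.class  -- the mirror class, with a static forwarder greet(String) delegating to MODULE$
object Greeter {
  def greet(name: String): String = s"hello, $name"
}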
- */ - def isCompilingPrimitive(using Context) = { - primitiveCompilationUnits(ctx.compilationUnit.source.file.name) - } - -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala b/tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala deleted file mode 100644 index 1af7e5dd705a..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala +++ /dev/null @@ -1,672 +0,0 @@ -package dotty.tools.backend.jvm - -import scala.language.unsafeNulls - -import dotty.tools.dotc.CompilationUnit -import dotty.tools.dotc.ast.Trees.{PackageDef, ValDef} -import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core.Phases.Phase - -import scala.collection.mutable -import scala.jdk.CollectionConverters._ -import dotty.tools.dotc.transform.SymUtils._ -import dotty.tools.dotc.interfaces -import dotty.tools.dotc.report - -import dotty.tools.dotc.util.SourceFile -import java.util.Optional - -import dotty.tools.dotc.core._ -import dotty.tools.dotc.sbt.ExtractDependencies -import Contexts._ -import Phases._ -import Symbols._ -import Decorators.em - -import java.io.DataOutputStream -import java.nio.channels.ClosedByInterruptException - -import dotty.tools.tasty.{ TastyBuffer, TastyHeaderUnpickler, UnpicklerConfig } -import dotty.tools.tasty.core.TastyUnpickler - -import scala.tools.asm -import scala.tools.asm.Handle -import scala.tools.asm.tree._ -import tpd._ -import StdNames._ -import dotty.tools.io._ -import scala.tools.asm.MethodTooLargeException -import scala.tools.asm.ClassTooLargeException - -class GenBCode extends Phase { - - override def phaseName: String = GenBCode.name - - override def description: String = GenBCode.description - - private val superCallsMap = new MutableSymbolMap[Set[ClassSymbol]] - def registerSuperCall(sym: Symbol, calls: ClassSymbol): Unit = { - val old = superCallsMap.getOrElse(sym, Set.empty) - superCallsMap.update(sym, old + calls) - } - - private val entryPoints = new mutable.HashSet[String]() - def registerEntryPoint(s: String): Unit = entryPoints += s - - private var myOutput: AbstractFile = _ - - private def outputDir(using Context): AbstractFile = { - if (myOutput eq null) - myOutput = ctx.settings.outputDir.value - myOutput - } - - private var myPrimitives: DottyPrimitives = null - - override def run(using Context): Unit = - inDetachedContext: ctx ?=> - if myPrimitives == null then myPrimitives = new DottyPrimitives(ctx) - new GenBCodePipeline( - DottyBackendInterface(outputDir, superCallsMap), - myPrimitives - ).run(ctx.compilationUnit.tpdTree) - - - override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = { - outputDir match - case jar: JarArchive => - updateJarManifestWithMainClass(jar, entryPoints.toList) - case _ => - try super.runOn(units) - finally outputDir match { - case jar: JarArchive => - if (ctx.run.nn.suspendedUnits.nonEmpty) - // If we close the jar the next run will not be able to write on the jar. - // But if we do not close it we cannot use it as part of the macro classpath of the suspended files. - report.error("Can not suspend and output to a jar at the same time. 
See suspension with -Xprint-suspension.") - - jar.close() - case _ => - } - } - - private def updateJarManifestWithMainClass(jarArchive: JarArchive, entryPoints: List[String])(using Context): Unit = - val mainClass = Option.when(!ctx.settings.XmainClass.isDefault)(ctx.settings.XmainClass.value).orElse { - entryPoints match - case List(mainClass) => - Some(mainClass) - case Nil => - report.warning("No Main-Class designated or discovered.") - None - case mcs => - report.warning(s"No Main-Class due to multiple entry points:\n ${mcs.mkString("\n ")}") - None - } - mainClass.map { mc => - val manifest = Jar.WManifest() - manifest.mainClass = mc - val file = jarArchive.subdirectoryNamed("META-INF").fileNamed("MANIFEST.MF") - val os = file.output - manifest.underlying.write(os) - os.close() - } - end updateJarManifestWithMainClass -} - -object GenBCode { - val name: String = "genBCode" - val description: String = "generate JVM bytecode" -} - -class GenBCodePipeline(val int: DottyBackendInterface, val primitives: DottyPrimitives)(using DetachedContext) extends BCodeSyncAndTry { - import DottyBackendInterface.symExtensions - - private var tree: Tree = _ - - private val sourceFile: SourceFile = ctx.compilationUnit.source - - /** Convert a `dotty.tools.io.AbstractFile` into a - * `dotty.tools.dotc.interfaces.AbstractFile`. - */ - private def convertAbstractFile(absfile: dotty.tools.io.AbstractFile): interfaces.AbstractFile = - new interfaces.AbstractFile { - override def name = absfile.name - override def path = absfile.path - override def jfile = Optional.ofNullable(absfile.file) - } - - final class PlainClassBuilder(cunit: CompilationUnit) extends SyncAndTryBuilder(cunit) - -// class BCodePhase() { - - private var bytecodeWriter : BytecodeWriter = null - private var mirrorCodeGen : JMirrorBuilder = null - - /* ---------------- q1 ---------------- */ - - case class Item1(arrivalPos: Int, cd: TypeDef, cunit: CompilationUnit) { - def isPoison: Boolean = { arrivalPos == Int.MaxValue } - } - private val poison1 = Item1(Int.MaxValue, null, ctx.compilationUnit) - private val q1 = new java.util.LinkedList[Item1] - - /* ---------------- q2 ---------------- */ - - case class SubItem2(classNode: asm.tree.ClassNode, - file: dotty.tools.io.AbstractFile) - - case class Item2(arrivalPos: Int, - mirror: SubItem2, - plain: SubItem2) { - def isPoison: Boolean = { arrivalPos == Int.MaxValue } - } - - private val poison2 = Item2(Int.MaxValue, null, null) - private val q2 = new _root_.java.util.LinkedList[Item2] - - /* ---------------- q3 ---------------- */ - - /* - * An item of queue-3 (the last queue before serializing to disk) contains three of these - * (one for each of mirror and plain classes). 
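Editor's aside on the queues above (not part of the deleted file): Item1/Item2/Item3 use a poison-pill sentinel (arrivalPos == Int.MaxValue) to tell each worker when the previous stage is done. A stripped-down sketch of that pattern:

import java.util.LinkedList

// Minimal poison-pill pipeline: the producer enqueues work and finally the sentinel;
// the consumer drains until it sees the sentinel.
final case class Work(arrivalPos: Int) { def isPoison: Boolean = arrivalPos == Int.MaxValue }
val poison = Work(Int.MaxValue)
val queue  = new LinkedList[Work]

def drain(process: Work => Unit): Unit = {
  var item = queue.poll()
  while (item != null && !item.isPoison) {
    process(item)
    item = queue.poll()
  }
}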
- * - * @param jclassName internal name of the class - * @param jclassBytes bytecode emitted for the class SubItem3 represents - */ - case class SubItem3( - jclassName: String, - jclassBytes: Array[Byte], - jclassFile: dotty.tools.io.AbstractFile - ) - - case class Item3(arrivalPos: Int, - mirror: SubItem3, - plain: SubItem3) { - - def isPoison: Boolean = { arrivalPos == Int.MaxValue } - } - private val i3comparator = new java.util.Comparator[Item3] { - override def compare(a: Item3, b: Item3) = { - if (a.arrivalPos < b.arrivalPos) -1 - else if (a.arrivalPos == b.arrivalPos) 0 - else 1 - } - } - private val poison3 = Item3(Int.MaxValue, null, null) - private val q3 = new java.util.PriorityQueue[Item3](1000, i3comparator) - - /* - * Pipeline that takes ClassDefs from queue-1, lowers them into an intermediate form, placing them on queue-2 - */ - class Worker1(needsOutFolder: Boolean) { - - private val lowerCaseNames = mutable.HashMap.empty[String, Symbol] - private def checkForCaseConflict(javaClassName: String, classSymbol: Symbol) = { - val lowerCaseName = javaClassName.toLowerCase - lowerCaseNames.get(lowerCaseName) match { - case None => - lowerCaseNames.put(lowerCaseName, classSymbol) - case Some(dupClassSym) => - // Order is not deterministic so we enforce lexicographic order between the duplicates for error-reporting - val (cl1, cl2) = - if (classSymbol.effectiveName.toString < dupClassSym.effectiveName.toString) (classSymbol, dupClassSym) - else (dupClassSym, classSymbol) - val same = classSymbol.effectiveName.toString == dupClassSym.effectiveName.toString - atPhase(typerPhase) { - if (same) - report.warning( // FIXME: This should really be an error, but then FromTasty tests fail - em"$cl1 and ${cl2.showLocated} produce classes that overwrite one another", cl1.sourcePos) - else - report.warning( - em"""$cl1 differs only in case from ${cl2.showLocated}. - |uch classes will overwrite one another on case-insensitive filesystems.""", cl1.sourcePos) - } - } - } - - def run(): Unit = { - while (true) { - val item = q1.poll - if (item.isPoison) { - q2 add poison2 - return - } - else { - try { /*withCurrentUnit(item.cunit)*/(visit(item)) } - catch { - case ex: InterruptedException => - throw ex - case ex: Throwable => - println(s"Error while emitting ${item.cunit.source.file.name}") - throw ex - } - } - } - } - - /* - * Checks for duplicate internal names case-insensitively, - * builds ASM ClassNodes for mirror and plain classes; - * enqueues them in queue-2. - * - */ - def visit(item: Item1): Boolean = { - val Item1(arrivalPos, cd, cunit) = item - val claszSymbol = cd.symbol - - // -------------- mirror class, if needed -------------- - val mirrorC = - if (claszSymbol.isTopLevelModuleClass) { - if (claszSymbol.companionClass == NoSymbol) { - mirrorCodeGen.genMirrorClass(claszSymbol, cunit) - } else { - report.log(s"No mirror class for module with linked class: ${claszSymbol.showFullName}") - null - } - } else null - - // -------------- "plain" class -------------- - val pcb = new PlainClassBuilder(cunit) - pcb.genPlainClass(cd) - val outF = if (needsOutFolder) getOutFolder(claszSymbol, pcb.thisName) else null; - val plainC = pcb.cnode - - if (claszSymbol.isClass) // @DarkDimius is this test needed here? 
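Editor's illustration of the checkForCaseConflict warning above (not part of the deleted sources): two classes whose binary names differ only in case are written to class files that collide on case-insensitive filesystems.

// Hypothetical example: Box.class and BOX.class overwrite one another on a
// case-insensitive filesystem (e.g. default macOS or Windows volumes), which is
// exactly what the "differs only in case" warning reports.
class Box(val value: Int)
class BOX(val value: Int)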
- for (binary <- ctx.compilationUnit.pickled.get(claszSymbol.asClass)) { - val store = if (mirrorC ne null) mirrorC else plainC - val tasty = - val outTastyFile = getFileForClassfile(outF, store.name, ".tasty") - val outstream = new DataOutputStream(outTastyFile.bufferedOutput) - try outstream.write(binary()) - catch case ex: ClosedByInterruptException => - try - outTastyFile.delete() // don't leave an empty or half-written tastyfile around after an interrupt - catch - case _: Throwable => - throw ex - finally outstream.close() - - val uuid = new TastyHeaderUnpickler(TastyUnpickler.scala3CompilerConfig, binary()).readHeader() - val lo = uuid.getMostSignificantBits - val hi = uuid.getLeastSignificantBits - - // TASTY attribute is created but only the UUID bytes are stored in it. - // A TASTY attribute has length 16 if and only if the .tasty file exists. - val buffer = new TastyBuffer(16) - buffer.writeUncompressedLong(lo) - buffer.writeUncompressedLong(hi) - buffer.bytes - - val dataAttr = createJAttribute(nme.TASTYATTR.mangledString, tasty, 0, tasty.length) - store.visitAttribute(dataAttr) - } - - - // ----------- create files - - val classNodes = List(mirrorC, plainC) - val classFiles = classNodes.map(cls => - if (outF != null && cls != null) { - try { - checkForCaseConflict(cls.name, claszSymbol) - getFileForClassfile(outF, cls.name, ".class") - } catch { - case e: FileConflictException => - report.error(em"error writing ${cls.name}: ${e.getMessage}") - null - } - } else null - ) - - // ----------- compiler and sbt's callbacks - - val (fullClassName, isLocal) = atPhase(sbtExtractDependenciesPhase) { - (ExtractDependencies.classNameAsString(claszSymbol), claszSymbol.isLocal) - } - - for ((cls, clsFile) <- classNodes.zip(classFiles)) { - if (cls != null) { - val className = cls.name.replace('/', '.') - if (ctx.compilerCallback != null) - ctx.compilerCallback.onClassGenerated(sourceFile, convertAbstractFile(clsFile), className) - if (ctx.sbtCallback != null) { - if (isLocal) - ctx.sbtCallback.generatedLocalClass(sourceFile.jfile.orElse(null), clsFile.file) - else { - ctx.sbtCallback.generatedNonLocalClass(sourceFile.jfile.orElse(null), clsFile.file, - className, fullClassName) - } - } - } - } - - // ----------- hand over to pipeline-2 - - val item2 = - Item2(arrivalPos, - SubItem2(mirrorC, classFiles(0)), - SubItem2(plainC, classFiles(1))) - - q2 add item2 // at the very end of this method so that no Worker2 thread starts mutating before we're done. - - } // end of method visit(Item1) - - } // end of class BCodePhase.Worker1 - - /* - * Pipeline that takes ClassNodes from queue-2. 
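Editor's sketch of the TASTY attribute payload assembled in Worker1.visit above (not part of the deleted file): the attribute carries exactly the 16 bytes of the TASTY UUID, written as two longs, effectively the same bytes as the two writeUncompressedLong calls.

import java.util.UUID
import java.nio.ByteBuffer

// Assumes `uuid` comes from TastyHeaderUnpickler.readHeader(), as in the code above.
def tastyAttributePayload(uuid: UUID): Array[Byte] = {
  val buf = ByteBuffer.allocate(16)  // big-endian by default
  buf.putLong(uuid.getMostSignificantBits)
  buf.putLong(uuid.getLeastSignificantBits)
  buf.array()
}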
The unit of work depends on the optimization level: - * - * (a) no optimization involves: - * - converting the plain ClassNode to byte array and placing it on queue-3 - */ - class Worker2 { - import bTypes.ClassBType - import bTypes.coreBTypes.jliLambdaMetaFactoryAltMetafactoryHandle - // lazy val localOpt = new LocalOpt(new Settings()) - - private def localOptimizations(classNode: ClassNode): Unit = { - // BackendStats.timed(BackendStats.methodOptTimer)(localOpt.methodOptimizations(classNode)) - } - - - /* Return an array of all serializable lambdas in this class */ - private def collectSerializableLambdas(classNode: ClassNode): Array[Handle] = { - val indyLambdaBodyMethods = new mutable.ArrayBuffer[Handle] - for (m <- classNode.methods.asScala) { - val iter = m.instructions.iterator - while (iter.hasNext) { - val insn = iter.next() - insn match { - case indy: InvokeDynamicInsnNode - if indy.bsm == jliLambdaMetaFactoryAltMetafactoryHandle => - import java.lang.invoke.LambdaMetafactory.FLAG_SERIALIZABLE - val metafactoryFlags = indy.bsmArgs(3).asInstanceOf[Integer].toInt - val isSerializable = (metafactoryFlags & FLAG_SERIALIZABLE) != 0 - if isSerializable then - val implMethod = indy.bsmArgs(1).asInstanceOf[Handle] - indyLambdaBodyMethods += implMethod - case _ => - } - } - } - indyLambdaBodyMethods.toArray - } - - /* - * Add: - * - * private static Object $deserializeLambda$(SerializedLambda l) { - * try return indy[scala.runtime.LambdaDeserialize.bootstrap, targetMethodGroup$0](l) - * catch { - * case i: IllegalArgumentException => - * try return indy[scala.runtime.LambdaDeserialize.bootstrap, targetMethodGroup$1](l) - * catch { - * case i: IllegalArgumentException => - * ... - * return indy[scala.runtime.LambdaDeserialize.bootstrap, targetMethodGroup${NUM_GROUPS-1}](l) - * } - * - * We use invokedynamic here to enable caching within the deserializer without needing to - * host a static field in the enclosing class. This allows us to add this method to interfaces - * that define lambdas in default methods. - * - * SI-10232 we can't pass arbitrary number of method handles to the final varargs parameter of the bootstrap - * method due to a limitation in the JVM. Instead, we emit a separate invokedynamic bytecode for each group of target - * methods. - */ - private def addLambdaDeserialize(classNode: ClassNode, implMethodsArray: Array[Handle]): Unit = { - import asm.Opcodes._ - import bTypes._ - import coreBTypes._ - - val cw = classNode - - // Make sure to reference the ClassBTypes of all types that are used in the code generated - // here (e.g. java/util/Map) are initialized. Initializing a ClassBType adds it to - // `classBTypeFromInternalNameMap`. When writing the classfile, the asm ClassWriter computes - // stack map frames and invokes the `getCommonSuperClass` method. This method expects all - // ClassBTypes mentioned in the source code to exist in the map. - - val serlamObjDesc = MethodBType(jliSerializedLambdaRef :: Nil, ObjectRef).descriptor - - val mv = cw.visitMethod(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$deserializeLambda$", serlamObjDesc, null, null) - def emitLambdaDeserializeIndy(targetMethods: Seq[Handle]): Unit = { - mv.visitVarInsn(ALOAD, 0) - mv.visitInvokeDynamicInsn("lambdaDeserialize", serlamObjDesc, jliLambdaDeserializeBootstrapHandle, targetMethods: _*) - } - - val targetMethodGroupLimit = 255 - 1 - 3 // JVM limit. 
See See MAX_MH_ARITY in CallSite.java - val groups: Array[Array[Handle]] = implMethodsArray.grouped(targetMethodGroupLimit).toArray - val numGroups = groups.length - - import scala.tools.asm.Label - val initialLabels = Array.fill(numGroups - 1)(new Label()) - val terminalLabel = new Label - def nextLabel(i: Int) = if (i == numGroups - 2) terminalLabel else initialLabels(i + 1) - - for ((label, i) <- initialLabels.iterator.zipWithIndex) { - mv.visitTryCatchBlock(label, nextLabel(i), nextLabel(i), jlIllegalArgExceptionRef.internalName) - } - for ((label, i) <- initialLabels.iterator.zipWithIndex) { - mv.visitLabel(label) - emitLambdaDeserializeIndy(groups(i).toIndexedSeq) - mv.visitInsn(ARETURN) - } - mv.visitLabel(terminalLabel) - emitLambdaDeserializeIndy(groups(numGroups - 1).toIndexedSeq) - mv.visitInsn(ARETURN) - } - - private def setInnerClasses(classNode: ClassNode): Unit = if (classNode != null) { - classNode.innerClasses.clear() - val (declared, referred) = collectNestedClasses(classNode) - addInnerClasses(classNode, declared, referred) - } - - /** - * Visit the class node and collect all referenced nested classes. - */ - private def collectNestedClasses(classNode: ClassNode): (List[ClassBType], List[ClassBType]) = { - // type InternalName = String - val c = new NestedClassesCollector[ClassBType](nestedOnly = true) { - def declaredNestedClasses(internalName: InternalName): List[ClassBType] = - bTypes.classBTypeFromInternalName(internalName).info.memberClasses - - def getClassIfNested(internalName: InternalName): Option[ClassBType] = { - val c = bTypes.classBTypeFromInternalName(internalName) - Option.when(c.isNestedClass)(c) - } - - def raiseError(msg: String, sig: String, e: Option[Throwable]): Unit = { - // don't crash on invalid generic signatures - } - } - c.visit(classNode) - (c.declaredInnerClasses.toList, c.referredInnerClasses.toList) - } - - def run(): Unit = { - while (true) { - val item = q2.poll - if (item.isPoison) { - q3 add poison3 - return - } - else { - try { - val plainNode = item.plain.classNode - localOptimizations(plainNode) - val serializableLambdas = collectSerializableLambdas(plainNode) - if (serializableLambdas.nonEmpty) - addLambdaDeserialize(plainNode, serializableLambdas) - setInnerClasses(plainNode) - setInnerClasses(item.mirror.classNode) - addToQ3(item) - } catch { - case ex: InterruptedException => - throw ex - case ex: Throwable => - println(s"Error while emitting ${item.plain.classNode.name}") - throw ex - } - } - } - } - - private def addToQ3(item: Item2) = { - - def getByteArray(cn: asm.tree.ClassNode): Array[Byte] = { - val cw = new CClassWriter(extraProc) - cn.accept(cw) - cw.toByteArray - } - - val Item2(arrivalPos, SubItem2(mirror, mirrorFile), SubItem2(plain, plainFile)) = item - - val mirrorC = if (mirror == null) null else SubItem3(mirror.name, getByteArray(mirror), mirrorFile) - val plainC = SubItem3(plain.name, getByteArray(plain), plainFile) - - if (AsmUtils.traceSerializedClassEnabled && plain.name.contains(AsmUtils.traceSerializedClassPattern)) { - if (mirrorC != null) AsmUtils.traceClass(mirrorC.jclassBytes) - AsmUtils.traceClass(plainC.jclassBytes) - } - - q3 add Item3(arrivalPos, mirrorC, plainC) - } - - } // end of class BCodePhase.Worker2 - - var arrivalPos: Int = 0 - - /* - * A run of the BCodePhase phase comprises: - * - * (a) set-up steps (most notably supporting maps in `BCodeTypes`, - * but also "the" writer where class files in byte-array form go) - * - * (b) building of ASM ClassNodes, their optimization and serialization. 
- * - * (c) tear down (closing the classfile-writer and clearing maps) - * - */ - def run(t: Tree)(using Context): Unit = { - this.tree = t - - // val bcodeStart = Statistics.startTimer(BackendStats.bcodeTimer) - - // val initStart = Statistics.startTimer(BackendStats.bcodeInitTimer) - arrivalPos = 0 // just in case - // scalaPrimitives.init() - bTypes.intializeCoreBTypes() - // Statistics.stopTimer(BackendStats.bcodeInitTimer, initStart) - - // initBytecodeWriter invokes fullName, thus we have to run it before the typer-dependent thread is activated. - bytecodeWriter = initBytecodeWriter() - mirrorCodeGen = new JMirrorBuilder - - val needsOutfileForSymbol = bytecodeWriter.isInstanceOf[ClassBytecodeWriter] - buildAndSendToDisk(needsOutfileForSymbol) - - // closing output files. - bytecodeWriter.close() - // Statistics.stopTimer(BackendStats.bcodeTimer, bcodeStart) - - if (ctx.compilerCallback != null) - ctx.compilerCallback.onSourceCompiled(sourceFile) - - /* TODO Bytecode can be verified (now that all classfiles have been written to disk) - * - * (1) asm.util.CheckAdapter.verify() - * public static void verify(ClassReader cr, ClassLoader loader, boolean dump, PrintWriter pw) - * passing a custom ClassLoader to verify inter-dependent classes. - * Alternatively, - * - an offline-bytecode verifier could be used (e.g. Maxine brings one as separate tool). - * - -Xverify:all - * - * (2) if requested, check-java-signatures, over and beyond the syntactic checks in `getGenericSignature()` - * - */ - } - - /* - * Sequentially: - * (a) place all ClassDefs in queue-1 - * (b) dequeue one at a time from queue-1, convert it to ASM ClassNode, place in queue-2 - * (c) dequeue one at a time from queue-2, convert it to byte-array, place in queue-3 - * (d) serialize to disk by draining queue-3. - */ - private def buildAndSendToDisk(needsOutFolder: Boolean)(using Context) = { - try - feedPipeline1() - // val genStart = Statistics.startTimer(BackendStats.bcodeGenStat) - (new Worker1(needsOutFolder)).run() - // Statistics.stopTimer(BackendStats.bcodeGenStat, genStart) - - (new Worker2).run() - - // val writeStart = Statistics.startTimer(BackendStats.bcodeWriteTimer) - drainQ3() - // Statistics.stopTimer(BackendStats.bcodeWriteTimer, writeStart) - catch - case e: MethodTooLargeException => - val method = - s"${e.getClassName.replaceAll("/", ".")}.${e.getMethodName}" - val msg = - em"Generated bytecode for method '$method' is too large. Size: ${e.getCodeSize} bytes. Limit is 64KB" - report.error(msg) - case e: ClassTooLargeException => - val msg = - em"Class '${e.getClassName.replaceAll("/", ".")}' is too large. Constant pool size: ${e.getConstantPoolCount}. Limit is 64K entries" - report.error(msg) - - } - - /* Feed pipeline-1: place all ClassDefs on q1, recording their arrival position. */ - private def feedPipeline1() = { - def gen(tree: Tree): Unit = { - tree match { - case EmptyTree => () - case PackageDef(_, stats) => stats foreach gen - case ValDef(name, tpt, rhs) => () // module val not emitted - case cd: TypeDef => - q1 add Item1(arrivalPos, cd, int.ctx.compilationUnit) - arrivalPos += 1 - } - } - gen(tree) - q1 add poison1 - } - - /* Pipeline that writes classfile representations to disk. 
*/ - private def drainQ3() = { - - def sendToDisk(cfr: SubItem3): Unit = { - if (cfr != null){ - val SubItem3(jclassName, jclassBytes, jclassFile) = cfr - bytecodeWriter.writeClass(jclassName, jclassName, jclassBytes, jclassFile) - } - } - - var moreComing = true - // `expected` denotes the arrivalPos whose Item3 should be serialized next - var expected = 0 - - while (moreComing) { - val incoming = q3.poll - moreComing = !incoming.isPoison - if (moreComing) { - val item = incoming - sendToDisk(item.mirror) - sendToDisk(item.plain) - expected += 1 - } - } - - // we're done - assert(q1.isEmpty, s"Some ClassDefs remained in the first queue: $q1") - assert(q2.isEmpty, s"Some classfiles remained in the second queue: $q2") - assert(q3.isEmpty, s"Some classfiles weren't written to disk: $q3") - - } - //} // end of class BCodePhase -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/GenBCodeOps.scala b/tests/pos-with-compiler-cc/backend/jvm/GenBCodeOps.scala deleted file mode 100644 index 210e47566cb9..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/GenBCodeOps.scala +++ /dev/null @@ -1,16 +0,0 @@ -package dotty.tools -package backend -package jvm - -import scala.tools.asm - -object GenBCodeOps extends GenBCodeOps - -class GenBCodeOps { - extension (flags: Int) - def addFlagIf(cond: Boolean, flag: Int): Int = if cond then flags | flag else flags - - final val PublicStatic = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC - final val PublicStaticFinal = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL - final val PrivateStaticFinal = asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/GenericSignatureVisitor.scala b/tests/pos-with-compiler-cc/backend/jvm/GenericSignatureVisitor.scala deleted file mode 100644 index e9e532933290..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/GenericSignatureVisitor.scala +++ /dev/null @@ -1,326 +0,0 @@ -package dotty.tools.backend.jvm - -import scala.language.unsafeNulls - -import scala.tools.asm.{ClassReader, Type, Handle } -import scala.tools.asm.tree._ - -import scala.collection.mutable -import scala.util.control.{NoStackTrace, NonFatal} -import scala.annotation._ -import scala.jdk.CollectionConverters._ - -// Backported from scala/scala, commit sha: 724be0e9425b9ad07c244d25efdad695d75abbcf -// https://github.com/scala/scala/blob/724be0e9425b9ad07c244d25efdad695d75abbcf/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala#L928 -abstract class GenericSignatureVisitor(nestedOnly: Boolean) { - // For performance (`Char => Boolean` is not specialized) - private trait CharBooleanFunction { def apply(c: Char): Boolean } - - final def visitInternalName(internalName: String): Unit = visitInternalName(internalName, 0, if (internalName eq null) 0 else internalName.length) - def visitInternalName(internalName: String, offset: Int, length: Int): Unit - - def raiseError(msg: String, sig: String, e: Option[Throwable] = None): Unit - - def visitClassSignature(sig: String): Unit = if (sig != null) { - val p = new Parser(sig, nestedOnly) - p.safely { p.classSignature() } - } - - def visitMethodSignature(sig: String): Unit = if (sig != null) { - val p = new Parser(sig, nestedOnly) - p.safely { p.methodSignature() } - } - - def visitFieldSignature(sig: String): Unit = if (sig != null) { - val p = new Parser(sig, nestedOnly) - p.safely { p.fieldSignature() } - } - - private final class Parser(sig: String, nestedOnly: Boolean) { - - private var 
index = 0 - private val end = sig.length - - private val Aborted: Throwable = new NoStackTrace { } - private def abort(): Nothing = throw Aborted - - @inline def safely(f: => Unit): Unit = try f catch { - case Aborted => - case NonFatal(e) => raiseError(s"Exception thrown during signature parsing", sig, Some(e)) - } - - private def current = { - if (index >= end) { - raiseError(s"Out of bounds, $index >= $end", sig) - abort() // Don't continue, even if `notifyInvalidSignature` returns - } - sig.charAt(index) - } - - private def accept(c: Char): Unit = { - if (current != c) { - raiseError(s"Expected $c at $index, found $current", sig) - abort() - } - index += 1 - } - - private def skip(): Unit = { index += 1 } - private def getCurrentAndSkip(): Char = { val c = current; skip(); c } - - private def skipUntil(isDelimiter: CharBooleanFunction): Unit = { - while (!isDelimiter(current)) { index += 1 } - } - private def skipUntilDelimiter(delimiter: Char): Unit = { - sig.indexOf(delimiter, index) match { - case -1 => - raiseError(s"Out of bounds", sig) - abort() // Don't continue, even if `notifyInvalidSignature` returns - case i => - index = i - } - } - - private def appendUntil(builder: java.lang.StringBuilder, isDelimiter: CharBooleanFunction): Unit = { - val start = index - skipUntil(isDelimiter) - builder.append(sig, start, index) - } - - def isBaseType(c: Char): Boolean = c match { - case 'B' | 'C' | 'D' | 'F' | 'I' | 'J' | 'S' | 'Z' => true - case _ => false - } - - private val isClassNameEnd: CharBooleanFunction = (c: Char) => c == '<' || c == '.' || c == ';' - - private def typeArguments(): Unit = if (current == '<') { - skip() - while (current != '>') current match { - case '*' | '+' | '-' => - skip() - case _ => - referenceTypeSignature() - } - accept('>') - } - - @tailrec private def referenceTypeSignature(): Unit = getCurrentAndSkip() match { - case 'L' => - var names: java.lang.StringBuilder = null - - val start = index - var seenDollar = false - while (!isClassNameEnd(current)) { - seenDollar ||= current == '$' - index += 1 - } - if ((current == '.' || seenDollar) || !nestedOnly) { - // OPT: avoid allocations when only a top-level class is encountered - names = new java.lang.StringBuilder(32) - names.append(sig, start, index) - visitInternalName(names.toString) - } - typeArguments() - - while (current == '.') { - skip() - names.append('$') - appendUntil(names, isClassNameEnd) - visitInternalName(names.toString) - typeArguments() - } - accept(';') - - case 'T' => - skipUntilDelimiter(';') - skip() - - case '[' => - if (isBaseType(current)) skip() - else referenceTypeSignature() - } - - private def typeParameters(): Unit = if (current == '<') { - skip() - while (current != '>') { - skipUntilDelimiter(':'); skip() - val c = current - // The ClassBound can be missing, but only if there's an InterfaceBound after. 
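Editor's example of an input to the signature parser above (not from the deleted sources): in the class signature below only the nested class java/util/Map$Entry is reported by visitInternalName when nestedOnly is true, because it is the only referenced name containing '$'; the top-level names are skipped.

// Hypothetical signature for: class Foo extends java.util.HashMap[String, java.util.Map.Entry[String, Integer]]
val sig = "Ljava/util/HashMap<Ljava/lang/String;Ljava/util/Map$Entry<Ljava/lang/String;Ljava/lang/Integer;>;>;"
// classSignature() on this string visits "java/util/Map$Entry" and ignores
// "java/util/HashMap", "java/lang/String" and "java/lang/Integer".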
- // This is an assumption that's not in the spec, see https://stackoverflow.com/q/44284928 - if (c != ':' && c != '>') { referenceTypeSignature() } - while (current == ':') { skip(); referenceTypeSignature() } - } - accept('>') - } - - def classSignature(): Unit = { - typeParameters() - while (index < end) referenceTypeSignature() - } - - def methodSignature(): Unit = { - typeParameters() - - accept('(') - while (current != ')') { - if (isBaseType(current)) skip() - else referenceTypeSignature() - } - accept(')') - - if (current == 'V' || isBaseType(current)) skip() - else referenceTypeSignature() - - while (index < end) { - accept('^') - referenceTypeSignature() - } - } - - def fieldSignature(): Unit = if (sig != null) safely { - referenceTypeSignature() - } - } -} - -// Backported from scala/scala, commit sha: 724be0e9425b9ad07c244d25efdad695d75abbcf -// https://github.com/scala/scala/blob/724be0e9425b9ad07c244d25efdad695d75abbcf/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala#L790 -abstract class NestedClassesCollector[T](nestedOnly: Boolean) extends GenericSignatureVisitor(nestedOnly) { - type InternalName = String - - def declaredNestedClasses(internalName: InternalName): List[T] - def getClassIfNested(internalName: InternalName): Option[T] - - val declaredInnerClasses = mutable.Set.empty[T] - val referredInnerClasses = mutable.Set.empty[T] - - def innerClasses: collection.Set[T] = declaredInnerClasses ++ referredInnerClasses - def clear(): Unit = { - declaredInnerClasses.clear() - referredInnerClasses.clear() - } - - def visit(classNode: ClassNode): Unit = { - visitInternalName(classNode.name) - declaredInnerClasses ++= declaredNestedClasses(classNode.name) - - visitInternalName(classNode.superName) - classNode.interfaces.asScala foreach visitInternalName - visitInternalName(classNode.outerClass) - - visitAnnotations(classNode.visibleAnnotations) - visitAnnotations(classNode.visibleTypeAnnotations) - visitAnnotations(classNode.invisibleAnnotations) - visitAnnotations(classNode.invisibleTypeAnnotations) - - visitClassSignature(classNode.signature) - - for (f <- classNode.fields.asScala) { - visitDescriptor(f.desc) - visitAnnotations(f.visibleAnnotations) - visitAnnotations(f.visibleTypeAnnotations) - visitAnnotations(f.invisibleAnnotations) - visitAnnotations(f.invisibleTypeAnnotations) - visitFieldSignature(f.signature) - } - - for (m <- classNode.methods.asScala) { - visitDescriptor(m.desc) - - visitAnnotations(m.visibleAnnotations) - visitAnnotations(m.visibleTypeAnnotations) - visitAnnotations(m.invisibleAnnotations) - visitAnnotations(m.invisibleTypeAnnotations) - visitAnnotationss(m.visibleParameterAnnotations) - visitAnnotationss(m.invisibleParameterAnnotations) - visitAnnotations(m.visibleLocalVariableAnnotations) - visitAnnotations(m.invisibleLocalVariableAnnotations) - - m.exceptions.asScala foreach visitInternalName - for (tcb <- m.tryCatchBlocks.asScala) visitInternalName(tcb.`type`) - - val iter = m.instructions.iterator - while (iter.hasNext) iter.next() match { - case ti: TypeInsnNode => visitInternalNameOrArrayReference(ti.desc) - case fi: FieldInsnNode => visitInternalNameOrArrayReference(fi.owner); visitDescriptor(fi.desc) - case mi: MethodInsnNode => visitInternalNameOrArrayReference(mi.owner); visitDescriptor(mi.desc) - case id: InvokeDynamicInsnNode => visitDescriptor(id.desc); visitHandle(id.bsm); id.bsmArgs foreach visitConstant - case ci: LdcInsnNode => visitConstant(ci.cst) - case ma: MultiANewArrayInsnNode => visitDescriptor(ma.desc) - 
case _ => - } - - visitMethodSignature(m.signature) - } - } - - private def containsChar(s: String, offset: Int, length: Int, char: Char): Boolean = { - val ix = s.indexOf(char, offset) - !(ix == -1 || ix >= offset + length) - } - - def visitInternalName(internalName: String, offset: Int, length: Int): Unit = if (internalName != null && containsChar(internalName, offset, length, '$')) { - for (c <- getClassIfNested(internalName.substring(offset, length))) - if (!declaredInnerClasses.contains(c)) - referredInnerClasses += c - } - - // either an internal/Name or [[Linternal/Name; -- there are certain references in classfiles - // that are either an internal name (without the surrounding `L;`) or an array descriptor - // `[Linternal/Name;`. - def visitInternalNameOrArrayReference(ref: String): Unit = if (ref != null) { - val bracket = ref.lastIndexOf('[') - if (bracket == -1) visitInternalName(ref) - else if (ref.charAt(bracket + 1) == 'L') visitInternalName(ref, bracket + 2, ref.length - 1) - } - - // we are only interested in the class references in the descriptor, so we can skip over - // primitives and the brackets of array descriptors - def visitDescriptor(desc: String): Unit = (desc.charAt(0): @switch) match { - case '(' => - var i = 1 - while (i < desc.length) { - if (desc.charAt(i) == 'L') { - val start = i + 1 // skip the L - var seenDollar = false - while ({val ch = desc.charAt(i); seenDollar ||= (ch == '$'); ch != ';'}) i += 1 - if (seenDollar) - visitInternalName(desc, start, i) - } - // skips over '[', ')', primitives - i += 1 - } - - case 'L' => - visitInternalName(desc, 1, desc.length - 1) - - case '[' => - visitInternalNameOrArrayReference(desc) - - case _ => // skip over primitive types - } - - def visitConstant(const: AnyRef): Unit = const match { - case t: Type => visitDescriptor(t.getDescriptor) - case _ => - } - - // in principle we could references to annotation types, as they only end up as strings in the - // constant pool, not as class references. however, the java compiler still includes nested - // annotation classes in the innerClass table, so we do the same. explained in detail in the - // large comment in class BTypes. - def visitAnnotation(annot: AnnotationNode): Unit = { - visitDescriptor(annot.desc) - if (annot.values != null) annot.values.asScala foreach visitConstant - } - - def visitAnnotations(annots: java.util.List[_ <: AnnotationNode]) = if (annots != null) annots.asScala foreach visitAnnotation - def visitAnnotationss(annotss: Array[java.util.List[AnnotationNode]]) = if (annotss != null) annotss foreach visitAnnotations - - def visitHandle(handle: Handle): Unit = { - visitInternalNameOrArrayReference(handle.getOwner) - visitDescriptor(handle.getDesc) - } -} - diff --git a/tests/pos-with-compiler-cc/backend/jvm/LabelNode1.java b/tests/pos-with-compiler-cc/backend/jvm/LabelNode1.java deleted file mode 100644 index cf91fe619f5d..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/LabelNode1.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package dotty.tools.backend.jvm; - -import scala.tools.asm.Label; -import scala.tools.asm.tree.ClassNode; -import scala.tools.asm.tree.LabelNode; - -/** - * A subclass of {@link LabelNode} to add user-definable flags. 
- */ -public class LabelNode1 extends LabelNode { - public LabelNode1() { - } - - public LabelNode1(Label label) { - super(label); - } - - public int flags; -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/MethodNode1.java b/tests/pos-with-compiler-cc/backend/jvm/MethodNode1.java deleted file mode 100644 index bfa4401830ba..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/MethodNode1.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package dotty.tools.backend.jvm; - -import scala.tools.asm.Label; -import scala.tools.asm.Opcodes; -import scala.tools.asm.tree.LabelNode; -import scala.tools.asm.tree.MethodNode; -/** - * A subclass of {@link MethodNode} to customize the representation of - * label nodes with {@link LabelNode1}. - */ -public class MethodNode1 extends MethodNode { - public MethodNode1(int api, int access, String name, String descriptor, String signature, String[] exceptions) { - super(api, access, name, descriptor, signature, exceptions); - } - - public MethodNode1(int access, String name, String descriptor, String signature, String[] exceptions) { - this(Opcodes.ASM6, access, name, descriptor, signature, exceptions); - } - - public MethodNode1(int api) { - super(api); - } - - public MethodNode1() { - this(Opcodes.ASM6); - } - - @Override - protected LabelNode getLabelNode(Label label) { - if (!(label.info instanceof LabelNode)) { - label.info = new LabelNode1(label); - } - return (LabelNode) label.info; - } -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/Primitives.scala b/tests/pos-with-compiler-cc/backend/jvm/Primitives.scala deleted file mode 100644 index c9ddfeab24e1..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/Primitives.scala +++ /dev/null @@ -1,191 +0,0 @@ -package dotty.tools -package backend -package jvm - -import java.io.PrintWriter - -object Primitives { - /** This class represents a primitive operation. */ - class Primitive { - } - - /** This class represents a test operation. */ - sealed abstract class TestOp { - - /** Returns the negation of this operation. */ - def negate(): TestOp - - /** Returns a string representation of this operation. 
*/ - override def toString(): String - - /** used only from GenASM */ - def opcodeIF(): Int - - /** used only from GenASM */ - def opcodeIFICMP(): Int - - } - - /** An equality test */ - case object EQ extends TestOp { - def negate() = NE - override def toString() = "EQ" - override def opcodeIF() = scala.tools.asm.Opcodes.IFEQ - override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPEQ - } - - /** A non-equality test */ - case object NE extends TestOp { - def negate() = EQ - override def toString() = "NE" - override def opcodeIF() = scala.tools.asm.Opcodes.IFNE - override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPNE - } - - /** A less-than test */ - case object LT extends TestOp { - def negate() = GE - override def toString() = "LT" - override def opcodeIF() = scala.tools.asm.Opcodes.IFLT - override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPLT - } - - /** A greater-than-or-equal test */ - case object GE extends TestOp { - def negate() = LT - override def toString() = "GE" - override def opcodeIF() = scala.tools.asm.Opcodes.IFGE - override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPGE - } - - /** A less-than-or-equal test */ - case object LE extends TestOp { - def negate() = GT - override def toString() = "LE" - override def opcodeIF() = scala.tools.asm.Opcodes.IFLE - override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPLE - } - - /** A greater-than test */ - case object GT extends TestOp { - def negate() = LE - override def toString() = "GT" - override def opcodeIF() = scala.tools.asm.Opcodes.IFGT - override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPGT - } - - /** This class represents an arithmetic operation. */ - class ArithmeticOp { - - /** Returns a string representation of this operation. */ - override def toString(): String = this match { - case ADD => "ADD" - case SUB => "SUB" - case MUL => "MUL" - case DIV => "DIV" - case REM => "REM" - case NOT => "NOT" - case _ => throw new RuntimeException("ArithmeticOp unknown case") - } - } - - /** An arithmetic addition operation */ - case object ADD extends ArithmeticOp - - /** An arithmetic subtraction operation */ - case object SUB extends ArithmeticOp - - /** An arithmetic multiplication operation */ - case object MUL extends ArithmeticOp - - /** An arithmetic division operation */ - case object DIV extends ArithmeticOp - - /** An arithmetic remainder operation */ - case object REM extends ArithmeticOp - - /** Bitwise negation. */ - case object NOT extends ArithmeticOp - - /** This class represents a shift operation. */ - class ShiftOp { - - /** Returns a string representation of this operation. */ - override def toString(): String = this match { - case LSL => "LSL" - case ASR => "ASR" - case LSR => "LSR" - case _ => throw new RuntimeException("ShitOp unknown case") - } - } - - /** A logical shift to the left */ - case object LSL extends ShiftOp - - /** An arithmetic shift to the right */ - case object ASR extends ShiftOp - - /** A logical shift to the right */ - case object LSR extends ShiftOp - - /** This class represents a logical operation. */ - class LogicalOp { - - /** Returns a string representation of this operation. 
*/ - override def toString(): String = this match { - case AND => "AND" - case OR => "OR" - case XOR => "XOR" - case _ => throw new RuntimeException("LogicalOp unknown case") - } - } - - /** A bitwise AND operation */ - case object AND extends LogicalOp - - /** A bitwise OR operation */ - case object OR extends LogicalOp - - /** A bitwise XOR operation */ - case object XOR extends LogicalOp - - /** Signals the beginning of a series of concatenations. - * On the JVM platform, it should create a new StringBuffer - */ - case object StartConcat extends Primitive - - /** - * type: (buf) => STR - * jvm : It should turn the StringBuffer into a String. - */ - case object EndConcat extends Primitive - - /** Pretty printer for primitives */ - class PrimitivePrinter(out: PrintWriter) { - def print(s: String): PrimitivePrinter = { - out.print(s) - this - } - } - - /** This class represents a comparison operation. */ - class ComparisonOp { - - /** Returns a string representation of this operation. */ - override def toString(): String = this match { - case CMPL => "CMPL" - case CMP => "CMP" - case CMPG => "CMPG" - case _ => throw new RuntimeException("ComparisonOp unknown case") - } - } - - /** A comparison operation with -1 default for NaNs */ - case object CMPL extends ComparisonOp - - /** A comparison operation with no default for NaNs */ - case object CMP extends ComparisonOp - - /** A comparison operation with +1 default for NaNs */ - case object CMPG extends ComparisonOp -} diff --git a/tests/pos-with-compiler-cc/backend/jvm/scalaPrimitives.scala b/tests/pos-with-compiler-cc/backend/jvm/scalaPrimitives.scala deleted file mode 100644 index 420ff7b20423..000000000000 --- a/tests/pos-with-compiler-cc/backend/jvm/scalaPrimitives.scala +++ /dev/null @@ -1,412 +0,0 @@ -package dotty.tools -package backend.jvm - -import dotc.ast.Trees.Select -import dotc.ast.tpd._ -import dotc.core._ -import Contexts._ -import Names.TermName, StdNames._ -import Types.{JavaArrayType, UnspecifiedErrorType, Type} -import Symbols.{Symbol, NoSymbol} -import Decorators.em -import dotc.report -import dotc.util.ReadOnlyMap - -import scala.annotation.threadUnsafe - -/** Scala primitive operations are represented as methods in `Any` and - * `AnyVal` subclasses. Here we demultiplex them by providing a mapping - * from their symbols to integers. Different methods exist for - * different value types, but with the same meaning (like plus, minus, - * etc.). They will all be mapped to the same int. - * - * Note: The three equal methods have the following semantics: - * - `"=="` checks for `null`, and if non-null, calls - * `java.lang.Object.equals` - * `(class: Any; modifier: final)`. Primitive: `EQ` - * - `"eq"` usual reference comparison - * `(class: AnyRef; modifier: final)`. Primitive: `ID` - * - `"equals"` user-defined equality (Java semantics) - * `(class: Object; modifier: none)`. Primitive: `EQUALS` - * - * Inspired from the `scalac` compiler. - */ -class DottyPrimitives(ictx: DetachedContext) { - import dotty.tools.backend.ScalaPrimitivesOps._ - - @threadUnsafe private lazy val primitives: ReadOnlyMap[Symbol, Int] = init - - /** Return the code for the given symbol. */ - def getPrimitive(sym: Symbol): Int = { - primitives(sym) - } - - /** - * Return the primitive code of the given operation. If the - * operation is an array get/set, we inspect the type of the receiver - * to demux the operation. - * - * @param fun The method symbol - * @param tpe The type of the receiver object. 
It is used only for array - * operations - */ - def getPrimitive(app: Apply, tpe: Type)(using Context): Int = { - val fun = app.fun.symbol - val defn = ctx.definitions - val code = app.fun match { - case Select(_, nme.primitive.arrayLength) => - LENGTH - case Select(_, nme.primitive.arrayUpdate) => - UPDATE - case Select(_, nme.primitive.arrayApply) => - APPLY - case _ => getPrimitive(fun) - } - - def elementType: Type = tpe.widenDealias match { - case defn.ArrayOf(el) => el - case JavaArrayType(el) => el - case _ => - report.error(em"expected Array $tpe") - UnspecifiedErrorType - } - - code match { - - case APPLY => - defn.scalaClassName(elementType) match { - case tpnme.Boolean => ZARRAY_GET - case tpnme.Byte => BARRAY_GET - case tpnme.Short => SARRAY_GET - case tpnme.Char => CARRAY_GET - case tpnme.Int => IARRAY_GET - case tpnme.Long => LARRAY_GET - case tpnme.Float => FARRAY_GET - case tpnme.Double => DARRAY_GET - case _ => OARRAY_GET - } - - case UPDATE => - defn.scalaClassName(elementType) match { - case tpnme.Boolean => ZARRAY_SET - case tpnme.Byte => BARRAY_SET - case tpnme.Short => SARRAY_SET - case tpnme.Char => CARRAY_SET - case tpnme.Int => IARRAY_SET - case tpnme.Long => LARRAY_SET - case tpnme.Float => FARRAY_SET - case tpnme.Double => DARRAY_SET - case _ => OARRAY_SET - } - - case LENGTH => - defn.scalaClassName(elementType) match { - case tpnme.Boolean => ZARRAY_LENGTH - case tpnme.Byte => BARRAY_LENGTH - case tpnme.Short => SARRAY_LENGTH - case tpnme.Char => CARRAY_LENGTH - case tpnme.Int => IARRAY_LENGTH - case tpnme.Long => LARRAY_LENGTH - case tpnme.Float => FARRAY_LENGTH - case tpnme.Double => DARRAY_LENGTH - case _ => OARRAY_LENGTH - } - - case _ => - code - } - } - - /** Initialize the primitive map */ - private def init: ReadOnlyMap[Symbol, Int] = { - - given Context = ictx - - import Symbols.defn - val primitives = Symbols.MutableSymbolMap[Int](512) - - /** Add a primitive operation to the map */ - def addPrimitive(s: Symbol, code: Int): Unit = { - assert(!(primitives contains s), "Duplicate primitive " + s) - primitives(s) = code - } - - def addPrimitives(cls: Symbol, method: TermName, code: Int)(using Context): Unit = { - val alts = cls.info.member(method).alternatives.map(_.symbol) - if (alts.isEmpty) - report.error(em"Unknown primitive method $cls.$method") - else alts foreach (s => - addPrimitive(s, - s.info.paramInfoss match { - case List(tp :: _) if code == ADD && tp =:= ctx.definitions.StringType => CONCAT - case _ => code - } - ) - ) - } - - // scala.Any - addPrimitive(defn.Any_==, EQ) - addPrimitive(defn.Any_!=, NE) - addPrimitive(defn.Any_isInstanceOf, IS) - addPrimitive(defn.Any_asInstanceOf, AS) - addPrimitive(defn.Any_##, HASH) - - // java.lang.Object - addPrimitive(defn.Object_eq, ID) - addPrimitive(defn.Object_ne, NI) - /* addPrimitive(defn.Any_==, EQ) - addPrimitive(defn.Any_!=, NE)*/ - addPrimitive(defn.Object_synchronized, SYNCHRONIZED) - /*addPrimitive(defn.Any_isInstanceOf, IS) - addPrimitive(defn.Any_asInstanceOf, AS)*/ - - // java.lang.String - addPrimitive(defn.String_+, CONCAT) - - // scala.Array - lazy val ArrayClass = defn.ArrayClass - addPrimitives(ArrayClass, nme.length, LENGTH) - addPrimitives(ArrayClass, nme.apply, APPLY) - addPrimitives(ArrayClass, nme.update, UPDATE) - - // scala.Boolean - lazy val BooleanClass = defn.BooleanClass - addPrimitives(BooleanClass, nme.EQ, EQ) - addPrimitives(BooleanClass, nme.NE, NE) - addPrimitives(BooleanClass, nme.UNARY_!, ZNOT) - addPrimitives(BooleanClass, nme.ZOR, ZOR) - 
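Editor's note on the addPrimitives helper above (not part of the deleted file): the String-parameter guard means the same `+` selector is registered as ADD or CONCAT depending on which overload was resolved.

// Hypothetical examples of that demultiplexing:
val n: Int    = 1 + 2     // Int.+(Int)     -> primitive ADD
val s: String = 1 + "x"   // Int.+(String)  -> remapped to CONCAT by the guard above
val t: String = "x" + 1   // String.+(Any)  -> registered directly as CONCAT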
addPrimitives(BooleanClass, nme.ZAND, ZAND) - addPrimitives(BooleanClass, nme.OR, OR) - addPrimitives(BooleanClass, nme.AND, AND) - addPrimitives(BooleanClass, nme.XOR, XOR) - - // scala.Byte - lazy val ByteClass = defn.ByteClass - addPrimitives(ByteClass, nme.EQ, EQ) - addPrimitives(ByteClass, nme.NE, NE) - addPrimitives(ByteClass, nme.ADD, ADD) - addPrimitives(ByteClass, nme.SUB, SUB) - addPrimitives(ByteClass, nme.MUL, MUL) - addPrimitives(ByteClass, nme.DIV, DIV) - addPrimitives(ByteClass, nme.MOD, MOD) - addPrimitives(ByteClass, nme.LT, LT) - addPrimitives(ByteClass, nme.LE, LE) - addPrimitives(ByteClass, nme.GT, GT) - addPrimitives(ByteClass, nme.GE, GE) - addPrimitives(ByteClass, nme.XOR, XOR) - addPrimitives(ByteClass, nme.OR, OR) - addPrimitives(ByteClass, nme.AND, AND) - addPrimitives(ByteClass, nme.LSL, LSL) - addPrimitives(ByteClass, nme.LSR, LSR) - addPrimitives(ByteClass, nme.ASR, ASR) - // conversions - addPrimitives(ByteClass, nme.toByte, B2B) - addPrimitives(ByteClass, nme.toShort, B2S) - addPrimitives(ByteClass, nme.toChar, B2C) - addPrimitives(ByteClass, nme.toInt, B2I) - addPrimitives(ByteClass, nme.toLong, B2L) - // unary methods - addPrimitives(ByteClass, nme.UNARY_+, POS) - addPrimitives(ByteClass, nme.UNARY_-, NEG) - addPrimitives(ByteClass, nme.UNARY_~, NOT) - - addPrimitives(ByteClass, nme.toFloat, B2F) - addPrimitives(ByteClass, nme.toDouble, B2D) - - // scala.Short - lazy val ShortClass = defn.ShortClass - addPrimitives(ShortClass, nme.EQ, EQ) - addPrimitives(ShortClass, nme.NE, NE) - addPrimitives(ShortClass, nme.ADD, ADD) - addPrimitives(ShortClass, nme.SUB, SUB) - addPrimitives(ShortClass, nme.MUL, MUL) - addPrimitives(ShortClass, nme.DIV, DIV) - addPrimitives(ShortClass, nme.MOD, MOD) - addPrimitives(ShortClass, nme.LT, LT) - addPrimitives(ShortClass, nme.LE, LE) - addPrimitives(ShortClass, nme.GT, GT) - addPrimitives(ShortClass, nme.GE, GE) - addPrimitives(ShortClass, nme.XOR, XOR) - addPrimitives(ShortClass, nme.OR, OR) - addPrimitives(ShortClass, nme.AND, AND) - addPrimitives(ShortClass, nme.LSL, LSL) - addPrimitives(ShortClass, nme.LSR, LSR) - addPrimitives(ShortClass, nme.ASR, ASR) - // conversions - addPrimitives(ShortClass, nme.toByte, S2B) - addPrimitives(ShortClass, nme.toShort, S2S) - addPrimitives(ShortClass, nme.toChar, S2C) - addPrimitives(ShortClass, nme.toInt, S2I) - addPrimitives(ShortClass, nme.toLong, S2L) - // unary methods - addPrimitives(ShortClass, nme.UNARY_+, POS) - addPrimitives(ShortClass, nme.UNARY_-, NEG) - addPrimitives(ShortClass, nme.UNARY_~, NOT) - - addPrimitives(ShortClass, nme.toFloat, S2F) - addPrimitives(ShortClass, nme.toDouble, S2D) - - // scala.Char - lazy val CharClass = defn.CharClass - addPrimitives(CharClass, nme.EQ, EQ) - addPrimitives(CharClass, nme.NE, NE) - addPrimitives(CharClass, nme.ADD, ADD) - addPrimitives(CharClass, nme.SUB, SUB) - addPrimitives(CharClass, nme.MUL, MUL) - addPrimitives(CharClass, nme.DIV, DIV) - addPrimitives(CharClass, nme.MOD, MOD) - addPrimitives(CharClass, nme.LT, LT) - addPrimitives(CharClass, nme.LE, LE) - addPrimitives(CharClass, nme.GT, GT) - addPrimitives(CharClass, nme.GE, GE) - addPrimitives(CharClass, nme.XOR, XOR) - addPrimitives(CharClass, nme.OR, OR) - addPrimitives(CharClass, nme.AND, AND) - addPrimitives(CharClass, nme.LSL, LSL) - addPrimitives(CharClass, nme.LSR, LSR) - addPrimitives(CharClass, nme.ASR, ASR) - // conversions - addPrimitives(CharClass, nme.toByte, C2B) - addPrimitives(CharClass, nme.toShort, C2S) - addPrimitives(CharClass, nme.toChar, C2C) - 
addPrimitives(CharClass, nme.toInt, C2I) - addPrimitives(CharClass, nme.toLong, C2L) - // unary methods - addPrimitives(CharClass, nme.UNARY_+, POS) - addPrimitives(CharClass, nme.UNARY_-, NEG) - addPrimitives(CharClass, nme.UNARY_~, NOT) - addPrimitives(CharClass, nme.toFloat, C2F) - addPrimitives(CharClass, nme.toDouble, C2D) - - // scala.Int - lazy val IntClass = defn.IntClass - addPrimitives(IntClass, nme.EQ, EQ) - addPrimitives(IntClass, nme.NE, NE) - addPrimitives(IntClass, nme.ADD, ADD) - addPrimitives(IntClass, nme.SUB, SUB) - addPrimitives(IntClass, nme.MUL, MUL) - addPrimitives(IntClass, nme.DIV, DIV) - addPrimitives(IntClass, nme.MOD, MOD) - addPrimitives(IntClass, nme.LT, LT) - addPrimitives(IntClass, nme.LE, LE) - addPrimitives(IntClass, nme.GT, GT) - addPrimitives(IntClass, nme.GE, GE) - addPrimitives(IntClass, nme.XOR, XOR) - addPrimitives(IntClass, nme.OR, OR) - addPrimitives(IntClass, nme.AND, AND) - addPrimitives(IntClass, nme.LSL, LSL) - addPrimitives(IntClass, nme.LSR, LSR) - addPrimitives(IntClass, nme.ASR, ASR) - // conversions - addPrimitives(IntClass, nme.toByte, I2B) - addPrimitives(IntClass, nme.toShort, I2S) - addPrimitives(IntClass, nme.toChar, I2C) - addPrimitives(IntClass, nme.toInt, I2I) - addPrimitives(IntClass, nme.toLong, I2L) - // unary methods - addPrimitives(IntClass, nme.UNARY_+, POS) - addPrimitives(IntClass, nme.UNARY_-, NEG) - addPrimitives(IntClass, nme.UNARY_~, NOT) - addPrimitives(IntClass, nme.toFloat, I2F) - addPrimitives(IntClass, nme.toDouble, I2D) - - // scala.Long - lazy val LongClass = defn.LongClass - addPrimitives(LongClass, nme.EQ, EQ) - addPrimitives(LongClass, nme.NE, NE) - addPrimitives(LongClass, nme.ADD, ADD) - addPrimitives(LongClass, nme.SUB, SUB) - addPrimitives(LongClass, nme.MUL, MUL) - addPrimitives(LongClass, nme.DIV, DIV) - addPrimitives(LongClass, nme.MOD, MOD) - addPrimitives(LongClass, nme.LT, LT) - addPrimitives(LongClass, nme.LE, LE) - addPrimitives(LongClass, nme.GT, GT) - addPrimitives(LongClass, nme.GE, GE) - addPrimitives(LongClass, nme.XOR, XOR) - addPrimitives(LongClass, nme.OR, OR) - addPrimitives(LongClass, nme.AND, AND) - addPrimitives(LongClass, nme.LSL, LSL) - addPrimitives(LongClass, nme.LSR, LSR) - addPrimitives(LongClass, nme.ASR, ASR) - // conversions - addPrimitives(LongClass, nme.toByte, L2B) - addPrimitives(LongClass, nme.toShort, L2S) - addPrimitives(LongClass, nme.toChar, L2C) - addPrimitives(LongClass, nme.toInt, L2I) - addPrimitives(LongClass, nme.toLong, L2L) - // unary methods - addPrimitives(LongClass, nme.UNARY_+, POS) - addPrimitives(LongClass, nme.UNARY_-, NEG) - addPrimitives(LongClass, nme.UNARY_~, NOT) - addPrimitives(LongClass, nme.toFloat, L2F) - addPrimitives(LongClass, nme.toDouble, L2D) - - // scala.Float - lazy val FloatClass = defn.FloatClass - addPrimitives(FloatClass, nme.EQ, EQ) - addPrimitives(FloatClass, nme.NE, NE) - addPrimitives(FloatClass, nme.ADD, ADD) - addPrimitives(FloatClass, nme.SUB, SUB) - addPrimitives(FloatClass, nme.MUL, MUL) - addPrimitives(FloatClass, nme.DIV, DIV) - addPrimitives(FloatClass, nme.MOD, MOD) - addPrimitives(FloatClass, nme.LT, LT) - addPrimitives(FloatClass, nme.LE, LE) - addPrimitives(FloatClass, nme.GT, GT) - addPrimitives(FloatClass, nme.GE, GE) - // conversions - addPrimitives(FloatClass, nme.toByte, F2B) - addPrimitives(FloatClass, nme.toShort, F2S) - addPrimitives(FloatClass, nme.toChar, F2C) - addPrimitives(FloatClass, nme.toInt, F2I) - addPrimitives(FloatClass, nme.toLong, F2L) - addPrimitives(FloatClass, nme.toFloat, F2F) - 
addPrimitives(FloatClass, nme.toDouble, F2D) - // unary methods - addPrimitives(FloatClass, nme.UNARY_+, POS) - addPrimitives(FloatClass, nme.UNARY_-, NEG) - - // scala.Double - lazy val DoubleClass = defn.DoubleClass - addPrimitives(DoubleClass, nme.EQ, EQ) - addPrimitives(DoubleClass, nme.NE, NE) - addPrimitives(DoubleClass, nme.ADD, ADD) - addPrimitives(DoubleClass, nme.SUB, SUB) - addPrimitives(DoubleClass, nme.MUL, MUL) - addPrimitives(DoubleClass, nme.DIV, DIV) - addPrimitives(DoubleClass, nme.MOD, MOD) - addPrimitives(DoubleClass, nme.LT, LT) - addPrimitives(DoubleClass, nme.LE, LE) - addPrimitives(DoubleClass, nme.GT, GT) - addPrimitives(DoubleClass, nme.GE, GE) - // conversions - addPrimitives(DoubleClass, nme.toByte, D2B) - addPrimitives(DoubleClass, nme.toShort, D2S) - addPrimitives(DoubleClass, nme.toChar, D2C) - addPrimitives(DoubleClass, nme.toInt, D2I) - addPrimitives(DoubleClass, nme.toLong, D2L) - addPrimitives(DoubleClass, nme.toFloat, D2F) - addPrimitives(DoubleClass, nme.toDouble, D2D) - // unary methods - addPrimitives(DoubleClass, nme.UNARY_+, POS) - addPrimitives(DoubleClass, nme.UNARY_-, NEG) - - - primitives - } - - def isPrimitive(sym: Symbol): Boolean = - primitives.contains(sym) - - def isPrimitive(fun: Tree): Boolean = - given Context = ictx - primitives.contains(fun.symbol) - || (fun.symbol == NoSymbol // the only trees that do not have a symbol assigned are array.{update,select,length,clone}} - && { - fun match - case Select(_, StdNames.nme.clone_) => false // but array.clone is NOT a primitive op. - case _ => true - }) -} diff --git a/tests/pos-with-compiler-cc/backend/sjs/GenSJSIR.scala b/tests/pos-with-compiler-cc/backend/sjs/GenSJSIR.scala deleted file mode 100644 index 1579b4577933..000000000000 --- a/tests/pos-with-compiler-cc/backend/sjs/GenSJSIR.scala +++ /dev/null @@ -1,23 +0,0 @@ -package dotty.tools.backend.sjs - -import dotty.tools.dotc.core._ -import Contexts._ -import Phases._ - -/** Generates Scala.js IR files for the compilation unit. 
*/ -class GenSJSIR extends Phase { - - override def phaseName: String = GenSJSIR.name - - override def description: String = GenSJSIR.description - - override def isRunnable(using Context): Boolean = - super.isRunnable && ctx.settings.scalajs.value - - def run(using Context): Unit = - new JSCodeGen().run() -} - -object GenSJSIR: - val name: String = "genSJSIR" - val description: String = "generate .sjsir files for Scala.js" diff --git a/tests/pos-with-compiler-cc/backend/sjs/JSCodeGen.scala b/tests/pos-with-compiler-cc/backend/sjs/JSCodeGen.scala deleted file mode 100644 index c670b2de97b1..000000000000 --- a/tests/pos-with-compiler-cc/backend/sjs/JSCodeGen.scala +++ /dev/null @@ -1,4897 +0,0 @@ -package dotty.tools.backend.sjs - -import scala.language.unsafeNulls - -import scala.annotation.switch -import scala.collection.mutable - -import dotty.tools.FatalError -import dotty.tools.dotc.CompilationUnit -import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.core._ -import Contexts._ -import Decorators._ -import Flags._ -import Names._ -import NameKinds.DefaultGetterName -import Types._ -import Symbols._ -import Phases._ -import StdNames._ -import TypeErasure.ErasedValueType - -import dotty.tools.dotc.transform.{Erasure, ValueClasses} -import dotty.tools.dotc.transform.SymUtils._ -import dotty.tools.dotc.util.SourcePosition -import dotty.tools.dotc.report - -import org.scalajs.ir -import org.scalajs.ir.{ClassKind, Position, Names => jsNames, Trees => js, Types => jstpe} -import org.scalajs.ir.Names.{ClassName, MethodName, SimpleMethodName} -import org.scalajs.ir.OriginalName -import org.scalajs.ir.OriginalName.NoOriginalName -import org.scalajs.ir.Trees.OptimizerHints - -import dotty.tools.dotc.transform.sjs.JSSymUtils._ - -import JSEncoding._ -import ScopedVar.withScopedVars -import annotation.retains - -/** Main codegen for Scala.js IR. - * - * [[GenSJSIR]] creates one instance of `JSCodeGen` per compilation unit. - * The `run()` method processes the whole compilation unit and generates - * `.sjsir` files for it. - * - * There are 4 main levels of translation: - * - * - `genCompilationUnit()` iterates through all the type definitions in the - * compilation unit. Each generated `js.ClassDef` is serialized to an - * `.sjsir` file. - * - `genScalaClass()` and other similar methods generate the skeleton of - * classes. - * - `genMethod()` and similar methods generate the declarations of methods. - * - `genStatOrExpr()` and everything else generate the bodies of methods. 
- */ -class JSCodeGen()(using genCtx: DetachedContext) { - import JSCodeGen._ - import tpd._ - - val sjsPlatform = dotty.tools.dotc.config.SJSPlatform.sjsPlatform - val jsdefn = JSDefinitions.jsdefn - private val primitives = new JSPrimitives(genCtx) - - val positionConversions = new JSPositions()(using genCtx) - import positionConversions._ - - private val jsExportsGen = new JSExportsGen(this) - - // Some state -------------------------------------------------------------- - - private val lazilyGeneratedAnonClasses = new MutableSymbolMap[TypeDef] - private val generatedClasses = mutable.ListBuffer.empty[js.ClassDef] - private val generatedStaticForwarderClasses = mutable.ListBuffer.empty[(Symbol, js.ClassDef)] - - val currentClassSym: ScopedVar[Symbol] = new ScopedVar[Symbol] - private val currentMethodSym = new ScopedVar[Symbol] - private val localNames = new ScopedVar[LocalNameGenerator] - private val thisLocalVarIdent = new ScopedVar[Option[js.LocalIdent]] - private val isModuleInitialized = new ScopedVar[ScopedVar.VarBox[Boolean]] - private val undefinedDefaultParams = new ScopedVar[mutable.Set[Symbol]] - - /* Contextual JS class value for some operations of nested JS classes that need one. */ - private val contextualJSClassValue = new ScopedVar[Option[js.Tree]](None) - - /** Resets all of the scoped state in the context of `body`. */ - private def resetAllScopedVars[T](body: => T): T = { - withScopedVars( - currentClassSym := null, - currentMethodSym := null, - localNames := null, - thisLocalVarIdent := null, - isModuleInitialized := null, - undefinedDefaultParams := null - ) { - body - } - } - - private def withPerMethodBodyState[A](methodSym: Symbol)(body: => A): A = { - withScopedVars( - currentMethodSym := methodSym, - thisLocalVarIdent := None, - isModuleInitialized := new ScopedVar.VarBox(false), - undefinedDefaultParams := mutable.Set.empty, - ) { - body - } - } - - private def acquireContextualJSClassValue[A](f: Option[js.Tree] => A): A = { - val jsClassValue = contextualJSClassValue.get - withScopedVars( - contextualJSClassValue := None - ) { - f(jsClassValue) - } - } - - def withNewLocalNameScope[A](body: => A): A = { - withScopedVars(localNames := new LocalNameGenerator) { - body - } - } - - /** Implicitly materializes the current local name generator. */ - implicit def implicitLocalNames: LocalNameGenerator = localNames.get - - def currentThisType: jstpe.Type = { - encodeClassType(currentClassSym) match { - case tpe @ jstpe.ClassType(cls) => - jstpe.BoxedClassToPrimType.getOrElse(cls, tpe) - case tpe => - tpe - } - } - - /** Returns a new fresh local identifier. */ - private def freshLocalIdent()(implicit pos: Position): js.LocalIdent = - localNames.get.freshLocalIdent() - - /** Returns a new fresh local identifier. */ - def freshLocalIdent(base: String)(implicit pos: Position): js.LocalIdent = - localNames.get.freshLocalIdent(base) - - /** Returns a new fresh local identifier. 
*/ - private def freshLocalIdent(base: TermName)(implicit pos: Position): js.LocalIdent = - localNames.get.freshLocalIdent(base) - - private def consumeLazilyGeneratedAnonClass(sym: Symbol): TypeDef = { - val typeDef = lazilyGeneratedAnonClasses.remove(sym) - if (typeDef == null) { - throw new FatalError( - i"Could not find tree for lazily generated anonymous class ${sym.fullName} at ${sym.sourcePos}") - } else { - typeDef - } - } - - // Compilation unit -------------------------------------------------------- - - def run(): Unit = { - try { - genCompilationUnit(ctx.compilationUnit) - } finally { - generatedClasses.clear() - generatedStaticForwarderClasses.clear() - } - } - - /** Generates the Scala.js IR for a compilation unit - * This method iterates over all the class and interface definitions - * found in the compilation unit and emits their IR (.sjsir). - * - * Some classes are never actually emitted: - * - Classes representing primitive types - * - The scala.Array class - * - * TODO Some classes representing anonymous functions are not actually emitted. - * Instead, a temporary representation of their `apply` method is built - * and recorded, so that it can be inlined as a JavaScript anonymous - * function in the method that instantiates it. - * - * Other ClassDefs are emitted according to their nature: - * * Non-native JS class -> `genNonNativeJSClass()` - * * Other JS type (<: js.Any) -> `genRawJSClassData()` - * * Interface -> `genInterface()` - * * Normal class -> `genClass()` - */ - private def genCompilationUnit(cunit: CompilationUnit): Unit = { - def collectTypeDefs(tree: Tree): List[TypeDef] = { - tree match { - case EmptyTree => Nil - case PackageDef(_, stats) => stats.flatMap(collectTypeDefs) - case cd: TypeDef => cd :: Nil - case _: ValDef => Nil // module instance - } - } - val allTypeDefs = collectTypeDefs(cunit.tpdTree) - - /* #13221 Set JavaStatic on all the Module fields of static module classes. - * This is necessary for `desugarIdent` not to crash in some obscure - * scenarios. - * - * !!! Part of this logic is duplicated in BCodeSkelBuilder.genPlainClass - * - * However, here we only do this for Module fields, not all fields. - */ - for (typeDef <- allTypeDefs) { - if (typeDef.symbol.is(ModuleClass)) { - typeDef.symbol.info.decls.foreach { f => - if (f.isField && f.is(Module)) - f.setFlag(JavaStatic) - } - } - } - - val (anonJSClassTypeDefs, otherTypeDefs) = - allTypeDefs.partition(td => td.symbol.isAnonymousClass && td.symbol.isJSType) - - // Record the TypeDefs of anonymous JS classes to be lazily generated - for (td <- anonJSClassTypeDefs) - lazilyGeneratedAnonClasses(td.symbol) = td - - /* Finally, we emit true code for the remaining class defs. */ - for (td <- otherTypeDefs) { - val sym = td.symbol - implicit val pos: Position = sym.span - - /* Do not actually emit code for primitive types nor scala.Array. */ - val isPrimitive = - sym.isPrimitiveValueClass || sym == defn.ArrayClass - - if (!isPrimitive) { - withScopedVars( - currentClassSym := sym - ) { - val tree = if (sym.isJSType) { - if (!sym.is(Trait) && sym.isNonNativeJSClass) - genNonNativeJSClass(td) - else - genRawJSClassData(td) - } else if (sym.is(Trait)) { - genInterface(td) - } else { - genScalaClass(td) - } - - generatedClasses += tree - } - } - } - - for (tree <- generatedClasses) - genIRFile(cunit, tree) - - if (generatedStaticForwarderClasses.nonEmpty) { - /* #4148 Add generated static forwarder classes, except those that - * would collide with regular classes on case insensitive file systems. 
- */ - - /* I could not find any reference anywhere about what locale is used - * by case insensitive file systems to compare case-insensitively. - * In doubt, force the English locale, which is probably going to do - * the right thing in virtually all cases (especially if users stick - * to ASCII class names), and it has the merit of being deterministic, - * as opposed to using the OS' default locale. - * The JVM backend performs a similar test to emit a warning for - * conflicting top-level classes. However, it uses `toLowerCase()` - * without argument, which is not deterministic. - */ - def caseInsensitiveNameOf(classDef: js.ClassDef): String = - classDef.name.name.nameString.toLowerCase(java.util.Locale.ENGLISH) - - val generatedCaseInsensitiveNames = - generatedClasses.map(caseInsensitiveNameOf).toSet - - for ((site, classDef) <- generatedStaticForwarderClasses) { - if (!generatedCaseInsensitiveNames.contains(caseInsensitiveNameOf(classDef))) { - genIRFile(cunit, classDef) - } else { - report.warning( - s"Not generating the static forwarders of ${classDef.name.name.nameString} " + - "because its name differs only in case from the name of another class or trait in this compilation unit.", - site.srcPos) - } - } - } - } - - private def genIRFile(cunit: CompilationUnit, tree: ir.Trees.ClassDef): Unit = { - val outfile = getFileFor(cunit, tree.name.name, ".sjsir") - val output = outfile.bufferedOutput - try { - ir.Serializers.serialize(output, tree) - } finally { - output.close() - } - } - - private def getFileFor(cunit: CompilationUnit, className: ClassName, - suffix: String): dotty.tools.io.AbstractFile = { - val outputDirectory = ctx.settings.outputDir.value - val pathParts = className.nameString.split('.') - val dir = pathParts.init.foldLeft(outputDirectory)(_.subdirectoryNamed(_)) - val filename = pathParts.last - dir.fileNamed(filename + suffix) - } - - // Generate a class -------------------------------------------------------- - - /** Gen the IR ClassDef for a Scala class definition (maybe a module class). 
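// An illustrative sketch, not part of the compiler sources: why caseInsensitiveNameOf
// above lowercases with an explicit Locale. With the JVM default locale set to Turkish,
// toLowerCase() maps 'I' to the dotless 'ı', so the same class name could compare
// differently from machine to machine; toLowerCase(Locale.ENGLISH) is deterministic.
import java.util.Locale

object CaseFoldingSketch {
  def deterministicKey(className: String): String =
    className.toLowerCase(Locale.ENGLISH)

  def main(args: Array[String]): Unit = {
    Locale.setDefault(Locale.forLanguageTag("tr-TR"))
    println("TITLE".toLowerCase)        // "tıtle" under a Turkish default locale
    println(deterministicKey("TITLE"))  // always "title"
  }
}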
- */ - private def genScalaClass(td: TypeDef): js.ClassDef = { - val sym = td.symbol.asClass - implicit val pos: SourcePosition = sym.sourcePos - - assert(!sym.is(Trait), - "genScalaClass() must be called only for normal classes: "+sym) - assert(sym.superClass != NoSymbol, sym) - - if (hasDefaultCtorArgsAndJSModule(sym)) { - report.error( - "Implementation restriction: " + - "constructors of Scala classes cannot have default parameters if their companion module is JS native.", - td) - } - - val classIdent = encodeClassNameIdent(sym) - val originalName = originalNameOfClass(sym) - val isHijacked = false //isHijackedBoxedClass(sym) - - // Optimizer hints - - val isDynamicImportThunk = sym.isSubClass(jsdefn.DynamicImportThunkClass) - - def isStdLibClassWithAdHocInlineAnnot(sym: Symbol): Boolean = { - val fullName = sym.fullName.toString - (fullName.startsWith("scala.Tuple") && !fullName.endsWith("$")) || - (fullName.startsWith("scala.collection.mutable.ArrayOps$of")) - } - - val shouldMarkInline = ( - isDynamicImportThunk || - sym.hasAnnotation(jsdefn.InlineAnnot) || - (sym.isAnonymousFunction && !sym.isSubClass(defn.PartialFunctionClass)) || - isStdLibClassWithAdHocInlineAnnot(sym)) - - val optimizerHints = { - OptimizerHints.empty - .withInline(shouldMarkInline) - .withNoinline(sym.hasAnnotation(jsdefn.NoinlineAnnot)) - } - - // Generate members (constructor + methods) - - val generatedNonFieldMembers = new mutable.ListBuffer[js.MemberDef] - - val tpl = td.rhs.asInstanceOf[Template] - for (tree <- tpl.constr :: tpl.body) { - tree match { - case EmptyTree => () - - case vd: ValDef => - // fields are added via genClassFields(), but we need to generate the JS native members - val sym = vd.symbol - if (!sym.is(Module) && sym.hasAnnotation(jsdefn.JSNativeAnnot)) - generatedNonFieldMembers += genJSNativeMemberDef(vd) - - case dd: DefDef => - val sym = dd.symbol - if sym.hasAnnotation(jsdefn.JSNativeAnnot) then - if !sym.is(Accessor) then - generatedNonFieldMembers += genJSNativeMemberDef(dd) - else - generatedNonFieldMembers ++= genMethod(dd) - - case _ => - throw new FatalError("Illegal tree in body of genScalaClass(): " + tree) - } - } - - // Generate fields and add to methods + ctors - val generatedMembers = genClassFields(td) ++ generatedNonFieldMembers.toList - - // Generate member exports - val memberExports = jsExportsGen.genMemberExports(sym) - - // Generate top-level export definitions - val topLevelExportDefs = jsExportsGen.genTopLevelExports(sym) - - // Static initializer - val optStaticInitializer = { - // Initialization of reflection data, if required - val reflectInit = { - val enableReflectiveInstantiation = { - sym.baseClasses.exists { ancestor => - ancestor.hasAnnotation(jsdefn.EnableReflectiveInstantiationAnnot) - } - } - if (enableReflectiveInstantiation) - genRegisterReflectiveInstantiation(sym).toList - else - Nil - } - - // Initialization of the module because of field exports - val needsStaticModuleInit = - topLevelExportDefs.exists(_.isInstanceOf[js.TopLevelFieldExportDef]) - val staticModuleInit = - if (!needsStaticModuleInit) Nil - else List(genLoadModule(sym)) - - val staticInitializerStats = reflectInit ::: staticModuleInit - if (staticInitializerStats.nonEmpty) - List(genStaticConstructorWithStats(ir.Names.StaticInitializerName, js.Block(staticInitializerStats))) - else - Nil - } - - val optDynamicImportForwarder = - if (isDynamicImportThunk) List(genDynamicImportForwarder(sym)) - else Nil - - val allMemberDefsExceptStaticForwarders = - generatedMembers ::: 
memberExports ::: optStaticInitializer ::: optDynamicImportForwarder - - // Add static forwarders - val allMemberDefs = if (!isCandidateForForwarders(sym)) { - allMemberDefsExceptStaticForwarders - } else { - if (isStaticModule(sym)) { - /* If the module class has no linked class, we must create one to - * hold the static forwarders. Otherwise, this is going to be handled - * when generating the companion class. - */ - if (!sym.linkedClass.exists) { - val forwarders = genStaticForwardersFromModuleClass(Nil, sym) - if (forwarders.nonEmpty) { - val forwardersClassDef = js.ClassDef( - js.ClassIdent(ClassName(classIdent.name.nameString.stripSuffix("$"))), - originalName, - ClassKind.Class, - None, - Some(js.ClassIdent(ir.Names.ObjectClass)), - Nil, - None, - None, - forwarders, - Nil - )(js.OptimizerHints.empty) - generatedStaticForwarderClasses += sym -> forwardersClassDef - } - } - allMemberDefsExceptStaticForwarders - } else { - val forwarders = genStaticForwardersForClassOrInterface( - allMemberDefsExceptStaticForwarders, sym) - allMemberDefsExceptStaticForwarders ::: forwarders - } - } - - // Hashed definitions of the class - val hashedDefs = ir.Hashers.hashMemberDefs(allMemberDefs) - - // The complete class definition - val kind = - if (isStaticModule(sym)) ClassKind.ModuleClass - else if (isHijacked) ClassKind.HijackedClass - else ClassKind.Class - - val classDefinition = js.ClassDef( - classIdent, - originalName, - kind, - None, - Some(encodeClassNameIdent(sym.superClass)), - genClassInterfaces(sym, forJSClass = false), - None, - None, - hashedDefs, - topLevelExportDefs)( - optimizerHints) - - classDefinition - } - - /** Gen the IR ClassDef for a Scala.js-defined JS class. */ - private def genNonNativeJSClass(td: TypeDef): js.ClassDef = { - val sym = td.symbol.asClass - implicit val pos: SourcePosition = sym.sourcePos - - assert(sym.isNonNativeJSClass, - i"genNonNativeJSClass() must be called only for non-native JS classes: $sym") - assert(sym.superClass != NoSymbol, sym) - - if (hasDefaultCtorArgsAndJSModule(sym)) { - report.error( - "Implementation restriction: " + - "constructors of non-native JS classes cannot have default parameters if their companion module is JS native.", - td) - } - - val classIdent = encodeClassNameIdent(sym) - val originalName = originalNameOfClass(sym) - - // Generate members (constructor + methods) - - val constructorTrees = new mutable.ListBuffer[DefDef] - val generatedMethods = new mutable.ListBuffer[js.MethodDef] - val dispatchMethodNames = new mutable.ListBuffer[JSName] - - val tpl = td.rhs.asInstanceOf[Template] - for (tree <- tpl.constr :: tpl.body) { - tree match { - case EmptyTree => () - - case _: ValDef => - () // fields are added via genClassFields() - - case dd: DefDef => - val sym = dd.symbol - val exposed = sym.isJSExposed - - if (sym.isClassConstructor) { - constructorTrees += dd - } else if (exposed && sym.is(Accessor, butNot = Lazy)) { - // Exposed accessors must not be emitted, since the field they access is enough. - } else if (sym.hasAnnotation(jsdefn.JSOptionalAnnot)) { - // Optional methods must not be emitted - } else { - generatedMethods ++= genMethod(dd) - - // Collect the names of the dispatchers we have to create - if (exposed && !sym.is(Deferred)) { - /* We add symbols that we have to expose here. This way we also - * get inherited stuff that is implemented in this class. 
- */ - dispatchMethodNames += sym.jsName - } - } - - case _ => - throw new FatalError("Illegal tree in gen of genNonNativeJSClass(): " + tree) - } - } - - // Static members (exported from the companion object) - val staticMembers = { - val module = sym.companionModule - if (!module.exists) { - Nil - } else { - val companionModuleClass = module.moduleClass - val exports = withScopedVars(currentClassSym := companionModuleClass) { - jsExportsGen.genStaticExports(companionModuleClass) - } - if (exports.exists(_.isInstanceOf[js.JSFieldDef])) { - val classInitializer = - genStaticConstructorWithStats(ir.Names.ClassInitializerName, genLoadModule(companionModuleClass)) - exports :+ classInitializer - } else { - exports - } - } - } - - val topLevelExports = jsExportsGen.genTopLevelExports(sym) - - val (generatedConstructor, jsClassCaptures) = withNewLocalNameScope { - val isNested = sym.isNestedJSClass - - if (isNested) - localNames.reserveLocalName(JSSuperClassParamName) - - val (captures, ctor) = genJSClassCapturesAndConstructor(constructorTrees.toList) - - val jsClassCaptures = if (isNested) { - val superParam = js.ParamDef(js.LocalIdent(JSSuperClassParamName), - NoOriginalName, jstpe.AnyType, mutable = false) - Some(superParam :: captures) - } else { - assert(captures.isEmpty, s"found non nested JS class with captures $captures at $pos") - None - } - - (ctor, jsClassCaptures) - } - - // Generate fields (and add to methods + ctors) - val generatedMembers = { - genClassFields(td) ::: - generatedConstructor :: - jsExportsGen.genJSClassDispatchers(sym, dispatchMethodNames.result().distinct) ::: - generatedMethods.toList ::: - staticMembers - } - - // Hashed definitions of the class - val hashedMemberDefs = ir.Hashers.hashMemberDefs(generatedMembers) - - // The complete class definition - val kind = - if (isStaticModule(sym)) ClassKind.JSModuleClass - else ClassKind.JSClass - - val classDefinition = js.ClassDef( - classIdent, - originalNameOfClass(sym), - kind, - jsClassCaptures, - Some(encodeClassNameIdent(sym.superClass)), - genClassInterfaces(sym, forJSClass = true), - jsSuperClass = jsClassCaptures.map(_.head.ref), - None, - hashedMemberDefs, - topLevelExports)( - OptimizerHints.empty) - - classDefinition - } - - /** Gen the IR ClassDef for a raw JS class or trait. - */ - private def genRawJSClassData(td: TypeDef): js.ClassDef = { - val sym = td.symbol.asClass - implicit val pos: Position = sym.span - - val classIdent = encodeClassNameIdent(sym) - val kind = { - if (sym.is(Trait)) ClassKind.AbstractJSType - else if (sym.is(ModuleClass)) ClassKind.NativeJSModuleClass - else ClassKind.NativeJSClass - } - val superClass = - if (sym.is(Trait)) None - else Some(encodeClassNameIdent(sym.superClass)) - val jsNativeLoadSpec = computeJSNativeLoadSpecOfClass(sym) - - js.ClassDef( - classIdent, - originalNameOfClass(sym), - kind, - None, - superClass, - genClassInterfaces(sym, forJSClass = false), - None, - jsNativeLoadSpec, - Nil, - Nil)( - OptimizerHints.empty) - } - - /** Gen the IR ClassDef for an interface definition. 
- */ - private def genInterface(td: TypeDef): js.ClassDef = { - val sym = td.symbol.asClass - implicit val pos: SourcePosition = sym.sourcePos - - val classIdent = encodeClassNameIdent(sym) - - val generatedMethods = new mutable.ListBuffer[js.MethodDef] - - val tpl = td.rhs.asInstanceOf[Template] - for (tree <- tpl.constr :: tpl.body) { - tree match { - case EmptyTree => () - case dd: DefDef => generatedMethods ++= genMethod(dd) - case _ => - throw new FatalError( - i"""Illegal tree in gen of genInterface(): $tree - |class = $td - |in ${ctx.compilationUnit}""") - } - } - - val superInterfaces = genClassInterfaces(sym, forJSClass = false) - - val genMethodsList = generatedMethods.toList - val allMemberDefs = - if (!isCandidateForForwarders(sym)) genMethodsList - else genMethodsList ::: genStaticForwardersForClassOrInterface(genMethodsList, sym) - - // Hashed definitions of the interface - val hashedDefs = ir.Hashers.hashMemberDefs(allMemberDefs) - - js.ClassDef( - classIdent, - originalNameOfClass(sym), - ClassKind.Interface, - None, - None, - superInterfaces, - None, - None, - hashedDefs, - Nil)( - OptimizerHints.empty) - } - - private def genClassInterfaces(sym: ClassSymbol, forJSClass: Boolean)( - implicit pos: Position): List[js.ClassIdent] = { - for { - intf <- sym.directlyInheritedTraits - if !(forJSClass && intf == defn.DynamicClass) - } yield { - encodeClassNameIdent(intf) - } - } - - // Static forwarders ------------------------------------------------------- - - /* This mimics the logic in BCodeHelpers.addForwarders and the code that - * calls it, except that we never have collisions with existing methods in - * the companion class. This is because in the IR, only methods with the - * same `MethodName` (including signature) and that are also - * `PublicStatic` would collide. There should never be an actual collision - * because the only `PublicStatic` methods that are otherwise generated are - * the bodies of SAMs, which have mangled names. If that assumption is - * broken, an error message is emitted asking the user to report a bug. - * - * It is important that we always emit forwarders, because some Java APIs - * actually have a public static method and a public instance method with - * the same name. For example the class `Integer` has a - * `def hashCode(): Int` and a `static def hashCode(Int): Int`. The JVM - * back-end considers them as colliding because they have the same name, - * but we must not. - * - * By default, we only emit forwarders for top-level objects, like the JVM - * back-end. However, if requested via a compiler option, we enable them - * for all static objects. This is important so we can implement static - * methods of nested static classes of JDK APIs (see scala-js/#3950). - */ - - /** Is the given Scala class, interface or module class a candidate for - * static forwarders? - * - * - the flag `-XnoForwarders` is not set to true, and - * - the symbol is static, and - * - either of both of the following is true: - * - the flag `-scalajsGenStaticForwardersForNonTopLevelObjects` is set to true, or - * - the symbol was originally at the package level - * - * Other than the Scala.js-specific flag, and the fact that we also consider - * interfaces, this performs the same tests as the JVM back-end. 
- */ - def isCandidateForForwarders(sym: Symbol): Boolean = { - !ctx.settings.XnoForwarders.value && sym.isStatic && { - ctx.settings.scalajsGenStaticForwardersForNonTopLevelObjects.value || { - atPhase(flattenPhase) { - toDenot(sym).owner.is(PackageClass) - } - } - } - } - - /** Gen the static forwarders to the members of a class or interface for - * methods of its companion object. - * - * This is only done if there exists a companion object and it is not a JS - * type. - * - * Precondition: `isCandidateForForwarders(sym)` is true - */ - def genStaticForwardersForClassOrInterface( - existingMembers: List[js.MemberDef], sym: Symbol)( - implicit pos: SourcePosition): List[js.MemberDef] = { - val module = sym.companionModule - if (!module.exists) { - Nil - } else { - val moduleClass = module.moduleClass - if (!moduleClass.isJSType) - genStaticForwardersFromModuleClass(existingMembers, moduleClass) - else - Nil - } - } - - /** Gen the static forwarders for the methods of a module class. - * - * Precondition: `isCandidateForForwarders(moduleClass)` is true - */ - def genStaticForwardersFromModuleClass(existingMembers: List[js.MemberDef], - moduleClass: Symbol)( - implicit pos: SourcePosition): List[js.MemberDef] = { - - assert(moduleClass.is(ModuleClass), moduleClass) - - val existingPublicStaticMethodNames = existingMembers.collect { - case js.MethodDef(flags, name, _, _, _, _) - if flags.namespace == js.MemberNamespace.PublicStatic => - name.name - }.toSet - - val members = { - moduleClass.info.membersBasedOnFlags(required = Flags.Method, - excluded = Flags.ExcludedForwarder).map(_.symbol) - } - - def isExcluded(m: Symbol): Boolean = { - def hasAccessBoundary = m.accessBoundary(defn.RootClass) ne defn.RootClass - - def isOfJLObject: Boolean = m.owner eq defn.ObjectClass - - def isDefaultParamOfJSNativeDef: Boolean = { - m.name.is(DefaultGetterName) && { - val info = new DefaultParamInfo(m) - !info.isForConstructor && info.attachedMethod.hasAnnotation(jsdefn.JSNativeAnnot) - } - } - - m.is(Deferred) - || m.isConstructor - || hasAccessBoundary - || isOfJLObject - || m.hasAnnotation(jsdefn.JSNativeAnnot) || isDefaultParamOfJSNativeDef // #4557 - } - - val forwarders = for { - m <- members - if !isExcluded(m) - } yield { - withNewLocalNameScope { - val flags = js.MemberFlags.empty.withNamespace(js.MemberNamespace.PublicStatic) - val methodIdent = encodeMethodSym(m) - val originalName = originalNameOfMethod(m) - val jsParams = for { - (paramName, paramInfo) <- m.info.paramNamess.flatten.zip(m.info.paramInfoss.flatten) - } yield { - js.ParamDef(freshLocalIdent(paramName), NoOriginalName, - toIRType(paramInfo), mutable = false) - } - val resultType = toIRType(m.info.resultType) - - if (existingPublicStaticMethodNames.contains(methodIdent.name)) { - report.error( - "Unexpected situation: found existing public static method " + - s"${methodIdent.name.nameString} in the companion class of " + - s"${moduleClass.fullName}; cannot generate a static forwarder " + - "the method of the same name in the object." + - "Please report this as a bug in the Scala.js support in dotty.", - pos) - } - - js.MethodDef(flags, methodIdent, originalName, jsParams, resultType, Some { - genApplyMethod(genLoadModule(moduleClass), m, jsParams.map(_.ref)) - })(OptimizerHints.empty, None) - } - } - - forwarders.toList - } - - // Generate the fields of a class ------------------------------------------ - - /** Gen definitions for the fields of a class. 
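// An illustrative sketch, not part of the compiler sources: the kind of Java API the
// comment above refers to. java.lang.Integer has both an instance hashCode() and a
// static hashCode(int). They share a simple name, but in the IR only methods with the
// same full MethodName (which includes the signature) and the same namespace can
// collide, so emitting the static forwarder next to the instance method is safe.
object StaticForwarderShapeSketch {
  def main(args: Array[String]): Unit = {
    val viaInstance = Integer.valueOf(42).hashCode() // instance method
    val viaStatic   = Integer.hashCode(42)           // static method with the same name
    assert(viaInstance == viaStatic)
    println(viaInstance)
  }
}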
*/ - private def genClassFields(td: TypeDef): List[js.MemberDef] = { - val classSym = td.symbol.asClass - assert(currentClassSym.get == classSym, - "genClassFields called with a ClassDef other than the current one") - - val isJSClass = classSym.isNonNativeJSClass - - // Term members that are neither methods nor modules are fields - classSym.info.decls.filter { f => - !f.isOneOf(MethodOrModule) && f.isTerm - && !f.hasAnnotation(jsdefn.JSNativeAnnot) - && !f.hasAnnotation(jsdefn.JSOptionalAnnot) - && !f.hasAnnotation(jsdefn.JSExportStaticAnnot) - }.flatMap({ f => - implicit val pos = f.span - - val isTopLevelExport = f.hasAnnotation(jsdefn.JSExportTopLevelAnnot) - val isJavaStatic = f.is(JavaStatic) - assert(!(isTopLevelExport && isJavaStatic), - em"found ${f.fullName} which is both a top-level export and a Java static") - val isStaticField = isTopLevelExport || isJavaStatic - - val namespace = if isStaticField then js.MemberNamespace.PublicStatic else js.MemberNamespace.Public - val mutable = isStaticField || f.is(Mutable) - - val flags = js.MemberFlags.empty.withMutable(mutable).withNamespace(namespace) - - val irTpe0 = - if (isJSClass) genExposedFieldIRType(f) - else if (isTopLevelExport) jstpe.AnyType - else toIRType(f.info) - - // scala-js/#4370 Fields cannot have type NothingType - val irTpe = - if (irTpe0 == jstpe.NothingType) encodeClassType(defn.NothingClass) - else irTpe0 - - if (isJSClass && f.isJSExposed) - js.JSFieldDef(flags, genExpr(f.jsName)(f.sourcePos), irTpe) :: Nil - else - val fieldIdent = encodeFieldSym(f) - val originalName = originalNameOfField(f) - val fieldDef = js.FieldDef(flags, fieldIdent, originalName, irTpe) - val optionalStaticFieldGetter = - if isJavaStatic then - // Here we are generating a public static getter for the static field, - // this is its API for other units. This is necessary for singleton - // enum values, which are backed by static fields. - val className = encodeClassName(classSym) - val body = js.Block( - js.LoadModule(className), - js.SelectStatic(className, fieldIdent)(irTpe)) - js.MethodDef(js.MemberFlags.empty.withNamespace(js.MemberNamespace.PublicStatic), - encodeStaticMemberSym(f), originalName, Nil, irTpe, - Some(body))( - OptimizerHints.empty, None) :: Nil - else - Nil - fieldDef :: optionalStaticFieldGetter - }).toList - } - - def genExposedFieldIRType(f: Symbol): jstpe.Type = { - val tpeEnteringPosterasure = atPhase(elimErasedValueTypePhase)(f.info) - tpeEnteringPosterasure match { - case tpe: ErasedValueType => - /* Here, we must store the field as the boxed representation of - * the value class. The default value of that field, as - * initialized at the time the instance is created, will - * therefore be null. This will not match the behavior we would - * get in a Scala class. To match the behavior, we would need to - * initialized to an instance of the boxed representation, with - * an underlying value set to the zero of its type. However we - * cannot implement that, so we live with the discrepancy. - * - * In dotc this is usually not an issue, because it unboxes `null` to - * the zero of the underlying type, unlike scalac which throws an NPE. - */ - jstpe.ClassType(encodeClassName(tpe.tycon.typeSymbol)) - - case _ => - // Other types are not boxed, so we can initialized them to their true zero. 
- toIRType(f.info) - } - } - - // Static initializers ----------------------------------------------------- - - private def genStaticConstructorWithStats(name: MethodName, stats: js.Tree)( - implicit pos: Position): js.MethodDef = { - js.MethodDef( - js.MemberFlags.empty.withNamespace(js.MemberNamespace.StaticConstructor), - js.MethodIdent(name), - NoOriginalName, - Nil, - jstpe.NoType, - Some(stats))( - OptimizerHints.empty, None) - } - - private def genRegisterReflectiveInstantiation(sym: Symbol)( - implicit pos: SourcePosition): Option[js.Tree] = { - if (isStaticModule(sym)) - genRegisterReflectiveInstantiationForModuleClass(sym) - else if (sym.is(ModuleClass)) - None // scala-js#3228 - else if (sym.is(Lifted) && !sym.originalOwner.isClass) - None // scala-js#3227 - else - genRegisterReflectiveInstantiationForNormalClass(sym) - } - - private def genRegisterReflectiveInstantiationForModuleClass(sym: Symbol)( - implicit pos: SourcePosition): Option[js.Tree] = { - val fqcnArg = js.StringLiteral(sym.fullName.toString) - val runtimeClassArg = js.ClassOf(toTypeRef(sym.info)) - val loadModuleFunArg = - js.Closure(arrow = true, Nil, Nil, None, genLoadModule(sym), Nil) - - val stat = genApplyMethod( - genLoadModule(jsdefn.ReflectModule), - jsdefn.Reflect_registerLoadableModuleClass, - List(fqcnArg, runtimeClassArg, loadModuleFunArg)) - - Some(stat) - } - - private def genRegisterReflectiveInstantiationForNormalClass(sym: Symbol)( - implicit pos: SourcePosition): Option[js.Tree] = { - val ctors = - if (sym.is(Abstract)) Nil - else sym.info.member(nme.CONSTRUCTOR).alternatives.map(_.symbol).filter(m => !m.isOneOf(Private | Protected)) - - if (ctors.isEmpty) { - None - } else { - val constructorsInfos = for { - ctor <- ctors - } yield { - withNewLocalNameScope { - val (parameterTypes, formalParams, actualParams) = (for { - (paramName, paramInfo) <- ctor.info.paramNamess.flatten.zip(ctor.info.paramInfoss.flatten) - } yield { - val paramType = js.ClassOf(toTypeRef(paramInfo)) - val paramDef = js.ParamDef(freshLocalIdent(paramName), - NoOriginalName, jstpe.AnyType, mutable = false) - val actualParam = unbox(paramDef.ref, paramInfo) - (paramType, paramDef, actualParam) - }).unzip3 - - val paramTypesArray = js.JSArrayConstr(parameterTypes) - - val newInstanceFun = js.Closure(arrow = true, Nil, formalParams, None, { - js.New(encodeClassName(sym), encodeMethodSym(ctor), actualParams) - }, Nil) - - js.JSArrayConstr(List(paramTypesArray, newInstanceFun)) - } - } - - val fqcnArg = js.StringLiteral(sym.fullName.toString) - val runtimeClassArg = js.ClassOf(toTypeRef(sym.info)) - val ctorsInfosArg = js.JSArrayConstr(constructorsInfos) - - val stat = genApplyMethod( - genLoadModule(jsdefn.ReflectModule), - jsdefn.Reflect_registerInstantiatableClass, - List(fqcnArg, runtimeClassArg, ctorsInfosArg)) - - Some(stat) - } - } - - // Constructor of a non-native JS class ------------------------------------ - - def genJSClassCapturesAndConstructor(constructorTrees: List[DefDef])( - implicit pos: SourcePosition): (List[js.ParamDef], js.JSConstructorDef) = { - /* We need to merge all Scala constructors into a single one because the - * IR, like JavaScript, only allows a single one. - * - * We do this by applying: - * 1. Applying runtime type based dispatch, just like exports. - * 2. Splitting secondary ctors into parts before and after the `this` call. - * 3. Topo-sorting all constructor statements and including/excluding - * them based on the overload that was chosen. 
- */ - - val (primaryTree :: Nil, secondaryTrees) = - constructorTrees.partition(_.symbol.isPrimaryConstructor): @unchecked - - val primaryCtor = genPrimaryJSClassCtor(primaryTree) - val secondaryCtors = secondaryTrees.map(genSecondaryJSClassCtor(_)) - - // VarDefs for the parameters of all constructors. - val paramVarDefs = for { - vparam <- constructorTrees.flatMap(_.paramss.flatten) - } yield { - val sym = vparam.symbol - val tpe = toIRType(sym.info) - js.VarDef(encodeLocalSym(sym), originalNameOfLocal(sym), tpe, mutable = true, jstpe.zeroOf(tpe))(vparam.span) - } - - /* organize constructors in a called-by tree - * (the implicit root is the primary constructor) - */ - val ctorTree = { - val ctorToChildren = secondaryCtors - .groupBy(_.targetCtor) - .withDefaultValue(Nil) - - /* when constructing the call-by tree, we use pre-order traversal to - * assign overload numbers. - * this puts all descendants of a ctor in a range of overloads numbers. - * - * this property is useful, later, when we need to make statements - * conditional based on the chosen overload. - */ - var nextOverloadNum = 0 - def subTree[T <: JSCtor](ctor: T): ConstructorTree[T] = { - val overloadNum = nextOverloadNum - nextOverloadNum += 1 - val subtrees = ctorToChildren(ctor.sym).map(subTree(_)) - new ConstructorTree(overloadNum, ctor, subtrees) - } - - subTree(primaryCtor) - } - - /* prepare overload dispatch for all constructors. - * as a side-product, we retrieve the capture parameters. - */ - val (exports, jsClassCaptures) = { - val exports = List.newBuilder[jsExportsGen.Exported] - val jsClassCaptures = List.newBuilder[js.ParamDef] - - def add(tree: ConstructorTree[_ <: JSCtor]): Unit = { - val (e, c) = genJSClassCtorDispatch(tree.ctor.sym, - tree.ctor.paramsAndInfo, tree.overloadNum) - exports += e - jsClassCaptures ++= c - tree.subCtors.foreach(add(_)) - } - - add(ctorTree) - - (exports.result(), jsClassCaptures.result()) - } - - // The name 'constructor' is used for error reporting here - val (formalArgs, restParam, overloadDispatchBody) = - jsExportsGen.genOverloadDispatch(JSName.Literal("constructor"), exports, jstpe.IntType) - - val overloadVar = js.VarDef(freshLocalIdent("overload"), NoOriginalName, - jstpe.IntType, mutable = false, overloadDispatchBody) - - val constructorBody = wrapJSCtorBody( - paramVarDefs :+ overloadVar, - genJSClassCtorBody(overloadVar.ref, ctorTree), - js.Undefined() :: Nil - ) - - val constructorDef = js.JSConstructorDef( - js.MemberFlags.empty.withNamespace(js.MemberNamespace.Constructor), - formalArgs, restParam, constructorBody)(OptimizerHints.empty, None) - - (jsClassCaptures, constructorDef) - } - - private def genPrimaryJSClassCtor(dd: DefDef): PrimaryJSCtor = { - val sym = dd.symbol - val Block(stats, _) = dd.rhs: @unchecked - assert(sym.isPrimaryConstructor, s"called with non-primary ctor: $sym") - - var jsSuperCall: Option[js.JSSuperConstructorCall] = None - val jsStats = List.newBuilder[js.Tree] - - /* Move all statements after the super constructor call since JS - * cannot access `this` before the super constructor call. - * - * dotc inserts statements before the super constructor call for param - * accessor initializers (including val's and var's declared in the - * params). We move those after the super constructor call, and are - * therefore executed later than for a Scala class. 
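// An illustrative sketch, not part of the compiler sources: the property the pre-order
// numbering above relies on. Numbering a constructor-call tree in pre-order gives every
// node together with all of its descendants a contiguous range of overload numbers
// [lo, hi], so "was one of these overloads chosen?" reduces to a single range check
// (the ifOverload test further below).
object OverloadNumberingSketch {
  final case class Node(children: List[Node])
  final case class Numbered(lo: Int, hi: Int, children: List[Numbered])

  def number(node: Node, next: Int = 0): Numbered = {
    val lo = next
    var cursor = next + 1
    val numberedChildren = node.children.map { child =>
      val numbered = number(child, cursor)
      cursor = numbered.hi + 1
      numbered
    }
    Numbered(lo, cursor - 1, numberedChildren)
  }

  def main(args: Array[String]): Unit = {
    val tree = Node(List(Node(Nil), Node(List(Node(Nil)))))
    println(number(tree)) // Numbered(0,3,List(Numbered(1,1,Nil), Numbered(2,3,List(Numbered(3,3,Nil)))))
  }
}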
- */ - withPerMethodBodyState(sym) { - stats.foreach { - case tree @ Apply(fun @ Select(Super(This(_), _), _), args) - if fun.symbol.isClassConstructor => - assert(jsSuperCall.isEmpty, s"Found 2 JS Super calls at ${dd.sourcePos}") - implicit val pos: Position = tree.span - jsSuperCall = Some(js.JSSuperConstructorCall(genActualJSArgs(fun.symbol, args))) - - case stat => - val jsStat = genStat(stat) - assert(jsSuperCall.isDefined || !jsStat.isInstanceOf[js.VarDef], - "Trying to move a local VarDef after the super constructor call of a non-native JS class at " + - dd.sourcePos) - jsStats += jsStat - } - } - - assert(jsSuperCall.isDefined, - s"Did not find Super call in primary JS construtor at ${dd.sourcePos}") - - new PrimaryJSCtor(sym, genParamsAndInfo(sym, dd.paramss), - js.JSConstructorBody(Nil, jsSuperCall.get, jsStats.result())(dd.span)) - } - - private def genSecondaryJSClassCtor(dd: DefDef): SplitSecondaryJSCtor = { - val sym = dd.symbol - assert(!sym.isPrimaryConstructor, s"called with primary ctor $sym") - - def flattenBlocks(t: Tree): List[Tree] = t match { - case Block(stats, expr) => (stats :+ expr).flatMap(flattenBlocks) - case _ => t :: Nil - } - val stats = flattenBlocks(dd.rhs) - - val beforeThisCall = List.newBuilder[js.Tree] - var thisCall: Option[(Symbol, List[js.Tree])] = None - val afterThisCall = List.newBuilder[js.Tree] - - withPerMethodBodyState(sym) { - stats.foreach { - case tree @ Apply(fun @ Select(This(_), _), args) - if fun.symbol.isClassConstructor => - assert(thisCall.isEmpty, - s"duplicate this() call in secondary JS constructor at ${dd.sourcePos}") - - implicit val pos: Position = tree.span - val sym = fun.symbol - thisCall = Some((sym, genActualArgs(sym, args))) - - case stat => - val jsStat = genStat(stat) - if (thisCall.isEmpty) - beforeThisCall += jsStat - else - afterThisCall += jsStat - } - } - - assert(thisCall.isDefined, - i"could not find the this() call in secondary JS constructor at ${dd.sourcePos}:\n${stats.map(_.show).mkString("\n")}") - val Some((targetCtor, ctorArgs)) = thisCall: @unchecked - - new SplitSecondaryJSCtor(sym, genParamsAndInfo(sym, dd.paramss), - beforeThisCall.result(), targetCtor, ctorArgs, afterThisCall.result()) - } - - private def genParamsAndInfo(ctorSym: Symbol, - vparamss: List[ParamClause]): List[(Symbol, JSParamInfo)] = { - implicit val pos: SourcePosition = ctorSym.sourcePos - - val paramSyms = if (vparamss.isEmpty) Nil else vparamss.head.map(_.symbol) - paramSyms.zip(ctorSym.jsParamInfos) - } - - private def genJSClassCtorDispatch(ctorSym: Symbol, - allParamsAndInfos: List[(Symbol, JSParamInfo)], - overloadNum: Int): (jsExportsGen.Exported, List[js.ParamDef]) = { - - implicit val pos: SourcePosition = ctorSym.sourcePos - - /* `allParams` are the parameters as seen from inside the constructor body, - * i.e., the ones generated by the trees in the constructor body. - */ - val (captureParamsAndInfos, normalParamsAndInfos) = - allParamsAndInfos.partition(_._2.capture) - - /* For class captures, we need to generate different names than the ones - * used by the constructor body. This is necessary so that we can forward - * captures properly between constructor delegation calls. 
- */ - val (jsClassCaptures, captureAssigns) = (for { - (param, info) <- captureParamsAndInfos - } yield { - val ident = freshLocalIdent(param.name.toTermName) - val jsClassCapture = - js.ParamDef(ident, originalNameOfLocal(param), toIRType(info.info), mutable = false) - val captureAssign = - js.Assign(genVarRef(param), jsClassCapture.ref) - (jsClassCapture, captureAssign) - }).unzip - - val normalInfos = normalParamsAndInfos.map(_._2).toIndexedSeq - - val jsExport = new jsExportsGen.Exported(ctorSym, normalInfos) { - def genBody(formalArgsRegistry: jsExportsGen.FormalArgsRegistry): js.Tree = { - val paramAssigns = for { - ((param, info), i) <- normalParamsAndInfos.zipWithIndex - } yield { - val rhs = jsExportsGen.genScalaArg(this, i, formalArgsRegistry, info, static = true, - captures = captureParamsAndInfos.map(pi => genVarRef(pi._1)))( - prevArgsCount => normalParamsAndInfos.take(prevArgsCount).map(pi => genVarRef(pi._1))) - - js.Assign(genVarRef(param), rhs) - } - - js.Block(captureAssigns ::: paramAssigns, js.IntLiteral(overloadNum)) - } - } - - (jsExport, jsClassCaptures) - } - - /** Generates a JS constructor body based on a constructor tree. */ - private def genJSClassCtorBody(overloadVar: js.VarRef, - ctorTree: ConstructorTree[PrimaryJSCtor])(implicit pos: Position): js.JSConstructorBody = { - - /* generates a statement that conditionally executes body iff the chosen - * overload is any of the descendants of `tree` (including itself). - * - * here we use the property from building the trees, that a set of - * descendants always has a range of overload numbers. - */ - def ifOverload(tree: ConstructorTree[_], body: js.Tree): js.Tree = body match { - case js.Skip() => js.Skip() - - case body => - val x = overloadVar - val cond = { - import tree.{lo, hi} - - if (lo == hi) { - js.BinaryOp(js.BinaryOp.Int_==, js.IntLiteral(lo), x) - } else { - val lhs = js.BinaryOp(js.BinaryOp.Int_<=, js.IntLiteral(lo), x) - val rhs = js.BinaryOp(js.BinaryOp.Int_<=, x, js.IntLiteral(hi)) - js.If(lhs, rhs, js.BooleanLiteral(false))(jstpe.BooleanType) - } - } - - js.If(cond, body, js.Skip())(jstpe.NoType) - } - - /* preStats / postStats use pre/post order traversal respectively to - * generate a topo-sorted sequence of statements. - */ - - def preStats(tree: ConstructorTree[SplitSecondaryJSCtor], - nextParamsAndInfo: List[(Symbol, JSParamInfo)]): js.Tree = { - val inner = tree.subCtors.map(preStats(_, tree.ctor.paramsAndInfo)) - - assert(tree.ctor.ctorArgs.size == nextParamsAndInfo.size, "param count mismatch") - val paramsInfosAndArgs = nextParamsAndInfo.zip(tree.ctor.ctorArgs) - - val (captureParamsInfosAndArgs, normalParamsInfosAndArgs) = - paramsInfosAndArgs.partition(_._1._2.capture) - - val captureAssigns = for { - ((param, _), arg) <- captureParamsInfosAndArgs - } yield { - js.Assign(genVarRef(param), arg) - } - - val normalAssigns = for { - (((param, info), arg), i) <- normalParamsInfosAndArgs.zipWithIndex - } yield { - val newArg = arg match { - case js.Transient(UndefinedParam) => - /* Go full circle: We have ignored the default param getter for - * this, we'll create it again. - * - * This seems not optimal: We could simply not ignore the calls to - * default param getters in the first place. - * - * However, this proves to be difficult: Because of translations in - * earlier phases, calls to default param getters may be assigned - * to temporary variables first (see the undefinedDefaultParams - * ScopedVar). 
If this happens, it becomes increasingly difficult - * to distinguish a default param getter call for a constructor - * call of *this* instance (in which case we would want to keep - * the default param getter call) from one for a *different* - * instance (in which case we would want to discard the default - * param getter call) - * - * Because of this, it ends up being easier to just re-create the - * default param getter call if necessary. - */ - implicit val pos: SourcePosition = tree.ctor.sym.sourcePos - jsExportsGen.genCallDefaultGetter(tree.ctor.sym, i, static = false, - captures = captureParamsInfosAndArgs.map(p => genVarRef(p._1._1)))( - prevArgsCount => normalParamsInfosAndArgs.take(prevArgsCount).map(p => genVarRef(p._1._1))) - - case arg => arg - } - - js.Assign(genVarRef(param), newArg) - } - - ifOverload(tree, js.Block( - inner ++ tree.ctor.beforeCall ++ captureAssigns ++ normalAssigns)) - } - - def postStats(tree: ConstructorTree[SplitSecondaryJSCtor]): js.Tree = { - val inner = tree.subCtors.map(postStats(_)) - ifOverload(tree, js.Block(tree.ctor.afterCall ++ inner)) - } - - val primaryCtor = ctorTree.ctor - val secondaryCtorTrees = ctorTree.subCtors - - wrapJSCtorBody( - secondaryCtorTrees.map(preStats(_, primaryCtor.paramsAndInfo)), - primaryCtor.body, - secondaryCtorTrees.map(postStats(_)) - ) - } - - private def wrapJSCtorBody(before: List[js.Tree], body: js.JSConstructorBody, - after: List[js.Tree]): js.JSConstructorBody = { - js.JSConstructorBody(before ::: body.beforeSuper, body.superCall, - body.afterSuper ::: after)(body.pos) - } - - private sealed trait JSCtor { - val sym: Symbol - val paramsAndInfo: List[(Symbol, JSParamInfo)] - } - - private class PrimaryJSCtor(val sym: Symbol, - val paramsAndInfo: List[(Symbol, JSParamInfo)], - val body: js.JSConstructorBody) extends JSCtor - - private class SplitSecondaryJSCtor(val sym: Symbol, - val paramsAndInfo: List[(Symbol, JSParamInfo)], - val beforeCall: List[js.Tree], - val targetCtor: Symbol, val ctorArgs: List[js.Tree], - val afterCall: List[js.Tree]) extends JSCtor - - private class ConstructorTree[Ctor <: JSCtor]( - val overloadNum: Int, val ctor: Ctor, - val subCtors: List[ConstructorTree[SplitSecondaryJSCtor]]) { - val lo: Int = overloadNum - val hi: Int = subCtors.lastOption.fold(lo)(_.hi) - - assert(lo <= hi, "bad overload range") - } - - // Generate a method ------------------------------------------------------- - - /** Generates the JSNativeMemberDef. */ - def genJSNativeMemberDef(tree: ValOrDefDef): js.JSNativeMemberDef = { - implicit val pos = tree.span - - val sym = tree.symbol - val flags = js.MemberFlags.empty.withNamespace(js.MemberNamespace.PublicStatic) - val methodName = encodeJSNativeMemberSym(sym) - val jsNativeLoadSpec = computeJSNativeLoadSpecOfValDef(sym) - js.JSNativeMemberDef(flags, methodName, jsNativeLoadSpec) - } - - private def genMethod(dd: DefDef): Option[js.MethodDef] = { - withScopedVars( - localNames := new LocalNameGenerator - ) { - genMethodWithCurrentLocalNameScope(dd) - } - } - - /** Gen JS code for a method definition in a class or in an impl class. - * On the JS side, method names are mangled to encode the full signature - * of the Scala method, as described in `JSEncoding`, to support - * overloading. - * - * Some methods are not emitted at all: - * - Primitives, since they are never actually called - * - Constructors of hijacked classes - * - * Constructors are emitted by generating their body as a statement. - * - * Other (normal) methods are emitted with `genMethodBody()`. 
- */ - private def genMethodWithCurrentLocalNameScope(dd: DefDef): Option[js.MethodDef] = { - implicit val pos = dd.span - val sym = dd.symbol - val vparamss = dd.termParamss - val rhs = dd.rhs - - /* Is this method a default accessor that should be ignored? - * - * This is the case iff one of the following applies: - * - It is a constructor default accessor and the linked class is a - * native JS class. - * - It is a default accessor for a native JS def, but with the caveat - * that its rhs must be `js.native` because of #4553. - * - * Both of those conditions can only happen if the default accessor is in - * a module class, so we use that as a fast way out. (But omitting that - * condition would not change the result.) - * - * This is different than `isJSDefaultParam` in `genApply`: we do not - * ignore default accessors of *non-native* JS types. Neither for - * constructor default accessor nor regular default accessors. We also - * do not need to worry about non-constructor members of native JS types, - * since for those, the entire member list is ignored in `genJSClassData`. - */ - def isIgnorableDefaultParam: Boolean = { - sym.name.is(DefaultGetterName) && sym.owner.is(ModuleClass) && { - val info = new DefaultParamInfo(sym) - if (info.isForConstructor) { - /* This is a default accessor for a constructor parameter. Check - * whether the attached constructor is a native JS constructor, - * which is the case iff the linked class is a native JS type. - */ - info.constructorOwner.hasAnnotation(jsdefn.JSNativeAnnot) - } else { - /* #4553 We need to ignore default accessors for JS native defs. - * However, because Scala.js <= 1.7.0 actually emitted code calling - * those accessors, we must keep default accessors that would - * compile. The only accessors we can actually get rid of are those - * that are `= js.native`. - */ - !sym.owner.isJSType && - info.attachedMethod.hasAnnotation(jsdefn.JSNativeAnnot) && { - dd.rhs match { - case MaybeAsInstanceOf(Apply(fun, _)) => - fun.symbol == jsdefn.JSPackage_native - case _ => - false - } - } - } - } - } - - withPerMethodBodyState(sym) { - assert(vparamss.isEmpty || vparamss.tail.isEmpty, - "Malformed parameter list: " + vparamss) - val params = if (vparamss.isEmpty) Nil else vparamss.head.map(_.symbol) - - val methodName = encodeMethodSym(sym) - val originalName = originalNameOfMethod(sym) - - def jsParams = params.map(genParamDef(_)) - - if (primitives.isPrimitive(sym)) { - None - } else if (sym.is(Deferred) && currentClassSym.isNonNativeJSClass) { - // scala-js/#4409: Do not emit abstract methods in non-native JS classes - None - } else if (sym.is(Deferred)) { - Some(js.MethodDef(js.MemberFlags.empty, methodName, originalName, - jsParams, toIRType(patchedResultType(sym)), None)( - OptimizerHints.empty, None)) - } else if (isIgnorableDefaultParam) { - // #11592 - None - } else if (sym.is(Bridge) && sym.name.is(DefaultGetterName) && currentClassSym.isNonNativeJSClass) { - /* #12572 Bridges for default accessors in non-native JS classes must not be emitted, - * because they call another default accessor, making their entire body an - * that cannot be eliminated. - * Such methods are never called anyway, because they are filtered out in - * JSExportsGen.defaultGetterDenot(). 
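// An illustrative sketch, not part of the compiler sources (facade names are invented,
// and it assumes the Scala.js library on the classpath): the user-level shape whose
// generated default accessor isIgnorableDefaultParam can drop. For a native JS def,
// a default parameter value written literally as `= js.native` produces a default
// getter whose body is js.native, and that getter is never needed at run time.
import scala.scalajs.js
import scala.scalajs.js.annotation.JSGlobal

@js.native
@JSGlobal("Lib")
object Lib extends js.Object {
  def greet(name: String = js.native): String = js.native
}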
- */ - None - } else /*if (sym.isClassConstructor && isHijackedBoxedClass(sym.owner)) { - None - } else*/ { - /*def isTraitImplForwarder = dd.rhs match { - case app: Apply => foreignIsImplClass(app.symbol.owner) - case _ => false - }*/ - - val shouldMarkInline = { - sym.hasAnnotation(jsdefn.InlineAnnot) || - sym.isAnonymousFunction - } - - val shouldMarkNoinline = { - sym.hasAnnotation(jsdefn.NoinlineAnnot) /*&& - !isTraitImplForwarder*/ - } - - val optimizerHints = { - OptimizerHints.empty - .withInline(shouldMarkInline) - .withNoinline(shouldMarkNoinline) - } - - val methodDef = { - if (sym.isClassConstructor) { - val namespace = js.MemberNamespace.Constructor - js.MethodDef(js.MemberFlags.empty.withNamespace(namespace), - methodName, originalName, jsParams, jstpe.NoType, Some(genStat(rhs)))( - optimizerHints, None) - } else { - val namespace = if (isMethodStaticInIR(sym)) { - if (sym.isPrivate) js.MemberNamespace.PrivateStatic - else js.MemberNamespace.PublicStatic - } else { - if (sym.isPrivate) js.MemberNamespace.Private - else js.MemberNamespace.Public - } - val resultIRType = toIRType(patchedResultType(sym)) - genMethodDef(namespace, methodName, originalName, - params, resultIRType, rhs, optimizerHints) - } - } - - Some(methodDef) - } - } - } - - /** Generates the MethodDef of a (non-constructor) method - * - * Most normal methods are emitted straightforwardly. If the result - * type is Unit, then the body is emitted as a statement. Otherwise, it is - * emitted as an expression. - * - * Instance methods in non-native JS classes are compiled as static methods - * taking an explicit parameter for their `this` value. Static methods in - * non-native JS classes are compiled as is, like methods in Scala classes. - */ - private def genMethodDef(namespace: js.MemberNamespace, methodName: js.MethodIdent, - originalName: OriginalName, paramsSyms: List[Symbol], resultIRType: jstpe.Type, - tree: Tree, optimizerHints: OptimizerHints): js.MethodDef = { - implicit val pos = tree.span - - val jsParams = paramsSyms.map(genParamDef(_)) - - def genBody() = localNames.makeLabeledIfRequiresEnclosingReturn(resultIRType) { - if (resultIRType == jstpe.NoType) genStat(tree) - else genExpr(tree) - } - - if (namespace.isStatic || !currentClassSym.isNonNativeJSClass) { - val flags = js.MemberFlags.empty.withNamespace(namespace) - js.MethodDef(flags, methodName, originalName, jsParams, resultIRType, Some(genBody()))( - optimizerHints, None) - } else { - val thisLocalIdent = freshLocalIdent("this") - withScopedVars( - thisLocalVarIdent := Some(thisLocalIdent) - ) { - val staticNamespace = - if (namespace.isPrivate) js.MemberNamespace.PrivateStatic - else js.MemberNamespace.PublicStatic - val flags = - js.MemberFlags.empty.withNamespace(staticNamespace) - val thisParamDef = js.ParamDef(thisLocalIdent, thisOriginalName, - jstpe.AnyType, mutable = false) - - js.MethodDef(flags, methodName, originalName, - thisParamDef :: jsParams, resultIRType, Some(genBody()))( - optimizerHints, None) - } - } - } - - // ParamDefs --------------------------------------------------------------- - - def genParamDef(sym: Symbol): js.ParamDef = - genParamDef(sym, toIRType(sym.info)) - - private def genParamDef(sym: Symbol, ptpe: jstpe.Type): js.ParamDef = - genParamDef(sym, ptpe, sym.span) - - private def genParamDef(sym: Symbol, pos: Position): js.ParamDef = - genParamDef(sym, toIRType(sym.info), pos) - - private def genParamDef(sym: Symbol, ptpe: jstpe.Type, pos: Position): js.ParamDef = { - 
js.ParamDef(encodeLocalSym(sym)(implicitly, pos, implicitly), - originalNameOfLocal(sym), ptpe, mutable = false)(pos) - } - - // Generate statements and expressions ------------------------------------- - - /** Gen JS code for a tree in statement position (in the IR). - */ - private def genStat(tree: Tree): js.Tree = { - exprToStat(genStatOrExpr(tree, isStat = true)) - } - - /** Turn a JavaScript expression of type Unit into a statement */ - private def exprToStat(tree: js.Tree): js.Tree = { - /* Any JavaScript expression is also a statement, but at least we get rid - * of some pure expressions that come from our own codegen. - */ - implicit val pos = tree.pos - tree match { - case js.Block(stats :+ expr) => - js.Block(stats :+ exprToStat(expr)) - case _:js.Literal | _:js.This | _:js.VarRef => - js.Skip() - case _ => - tree - } - } - - /** Gen JS code for a tree in expression position (in the IR). - */ - private def genExpr(tree: Tree): js.Tree = { - val result = genStatOrExpr(tree, isStat = false) - assert(result.tpe != jstpe.NoType, - s"genExpr($tree) returned a tree with type NoType at pos ${tree.span}") - result - } - - def genExpr(name: JSName)(implicit pos: SourcePosition): js.Tree = name match { - case JSName.Literal(name) => js.StringLiteral(name) - case JSName.Computed(sym) => genComputedJSName(sym) - } - - private def genComputedJSName(sym: Symbol)(implicit pos: SourcePosition): js.Tree = { - /* By construction (i.e. restriction in PrepJSInterop), we know that sym - * must be a static method. - * Therefore, at this point, we can invoke it by loading its owner and - * calling it. - */ - def moduleOrGlobalScope = genLoadModuleOrGlobalScope(sym.owner) - def module = genLoadModule(sym.owner) - - if (sym.owner.isJSType) { - if (!sym.owner.isNonNativeJSClass || sym.isJSExposed) - genApplyJSMethodGeneric(sym, moduleOrGlobalScope, args = Nil, isStat = false) - else - genApplyJSClassMethod(module, sym, arguments = Nil) - } else { - genApplyMethod(module, sym, arguments = Nil) - } - } - - /** Gen JS code for a tree in expression position (in the IR) or the - * global scope. - */ - def genExprOrGlobalScope(tree: Tree): MaybeGlobalScope = { - implicit def pos: SourcePosition = tree.sourcePos - - tree match { - case _: This => - val sym = tree.symbol - if (sym != currentClassSym.get && sym.is(Module)) - genLoadModuleOrGlobalScope(sym) - else - MaybeGlobalScope.NotGlobalScope(genExpr(tree)) - - case _:Ident | _:Select => - val sym = tree.symbol - if (sym.is(Module)) { - assert(!sym.is(PackageClass), "Cannot use package as value: " + tree) - genLoadModuleOrGlobalScope(sym) - } else { - MaybeGlobalScope.NotGlobalScope(genExpr(tree)) - } - - case Apply(fun, _) => - if (fun.symbol == jsdefn.JSDynamic_global) - MaybeGlobalScope.GlobalScope(pos) - else - MaybeGlobalScope.NotGlobalScope(genExpr(tree)) - - case _ => - MaybeGlobalScope.NotGlobalScope(genExpr(tree)) - } - } - - /** Gen JS code for a tree in statement or expression position (in the IR). - * - * This is the main transformation method. Each node of the Scala AST - * is transformed into an equivalent portion of the JS AST. 
- */ - private def genStatOrExpr(tree: Tree, isStat: Boolean): js.Tree = { - implicit val pos: SourcePosition = tree.sourcePos - - report.debuglog(" " + tree) - report.debuglog("") - - tree match { - /** Local val or var declaration */ - case tree @ ValDef(name, _, _) => - val sym = tree.symbol - val rhs = tree.rhs - val rhsTree = genExpr(rhs) - - rhsTree match { - case js.Transient(UndefinedParam) => - /* This is an intermediate assignment for default params on a - * js.Any. Add the symbol to the corresponding set to inform - * the Ident resolver how to replace it and don't emit the symbol. - */ - undefinedDefaultParams += sym - js.Skip() - case _ => - js.VarDef(encodeLocalSym(sym), originalNameOfLocal(sym), - toIRType(sym.info), sym.is(Mutable), rhsTree) - } - - case If(cond, thenp, elsep) => - val tpe = - if (isStat) jstpe.NoType - else toIRType(tree.tpe) - - js.If(genExpr(cond), genStatOrExpr(thenp, isStat), - genStatOrExpr(elsep, isStat))(tpe) - - case Labeled(bind, expr) => - js.Labeled(encodeLabelSym(bind.symbol), toIRType(tree.tpe), genStatOrExpr(expr, isStat)) - - case Return(expr, from) => - val fromSym = from.symbol - val label = - if (fromSym.is(Label)) encodeLabelSym(fromSym) - else localNames.get.getEnclosingReturnLabel() - js.Return(toIRType(expr.tpe) match { - case jstpe.NoType => js.Block(genStat(expr), js.Undefined()) - case _ => genExpr(expr) - }, label) - - case WhileDo(cond, body) => - val genCond = - if (cond == EmptyTree) js.BooleanLiteral(true) - else genExpr(cond) - js.While(genCond, genStat(body)) - - case t: Try => - genTry(t, isStat) - - case app: Apply => - genApply(app, isStat) - - case app: TypeApply => - genTypeApply(app) - - /*case app: ApplyDynamic => - genApplyDynamic(app)*/ - - case tree: This => - val currentClass = currentClassSym.get - val symIsModuleClass = tree.symbol.is(ModuleClass) - assert(tree.symbol == currentClass || symIsModuleClass, - s"Trying to access the this of another class: tree.symbol = ${tree.symbol}, class symbol = $currentClass") - if (symIsModuleClass && tree.symbol != currentClass) - genLoadModule(tree.symbol) - else - genThis() - - case Select(qualifier, _) => - val sym = tree.symbol - if (sym.is(Module)) { - assert(!sym.is(Package), "Cannot use package as value: " + tree) - genLoadModule(sym) - } else if (sym.is(JavaStatic)) { - genLoadStaticField(sym) - } else if (sym.hasAnnotation(jsdefn.JSNativeAnnot)) { - genJSNativeMemberSelect(tree) - } else { - val (field, boxed) = genAssignableField(sym, qualifier) - if (boxed) unbox(field, atPhase(elimErasedValueTypePhase)(sym.info)) - else field - } - - case tree: Ident => - desugarIdent(tree).fold[js.Tree] { - val sym = tree.symbol - assert(!sym.is(Package), "Cannot use package as value: " + tree) - if (sym.is(Module)) { - genLoadModule(sym) - } else if (undefinedDefaultParams.contains(sym)) { - /* This is a default parameter whose assignment was moved to - * a local variable. Put an undefined param instead. 
- */ - js.Transient(UndefinedParam) - } else { - genVarRef(sym) - } - } { select => - genStatOrExpr(select, isStat) - } - - case Literal(value) => - import Constants._ - value.tag match { - case UnitTag => - js.Skip() - case BooleanTag => - js.BooleanLiteral(value.booleanValue) - case ByteTag => - js.ByteLiteral(value.byteValue) - case ShortTag => - js.ShortLiteral(value.shortValue) - case CharTag => - js.CharLiteral(value.charValue) - case IntTag => - js.IntLiteral(value.intValue) - case LongTag => - js.LongLiteral(value.longValue) - case FloatTag => - js.FloatLiteral(value.floatValue) - case DoubleTag => - js.DoubleLiteral(value.doubleValue) - case StringTag => - js.StringLiteral(value.stringValue) - case NullTag => - js.Null() - case ClazzTag => - genClassConstant(value.typeValue) - } - - case Block(stats, expr) => - // #15419 Collapse { ; BoxedUnit } to - val genStatsAndExpr0 = stats.map(genStat(_)) :+ genStatOrExpr(expr, isStat) - val genStatsAndExpr = genStatsAndExpr0 match { - case (undefParam @ js.Transient(UndefinedParam)) :: js.Undefined() :: Nil => - undefParam :: Nil - case _ => - genStatsAndExpr0 - } - js.Block(genStatsAndExpr) - - case Typed(expr, _) => - expr match { - case _: Super => genThis() - case _ => genExpr(expr) - } - - case Assign(lhs0, rhs) => - val sym = lhs0.symbol - if (sym.is(JavaStaticTerm) && sym.source != ctx.compilationUnit.source) - throw new FatalError(s"Assignment to static member ${sym.fullName} not supported") - def genRhs = genExpr(rhs) - val lhs = lhs0 match { - case lhs: Ident => desugarIdent(lhs).getOrElse(lhs) - case lhs => lhs - } - lhs match { - case lhs: Select => - val qualifier = lhs.qualifier - - def ctorAssignment = ( - currentMethodSym.get.name == nme.CONSTRUCTOR && - currentMethodSym.get.owner == qualifier.symbol && - qualifier.isInstanceOf[This] - ) - // TODO This fails for OFFSET$x fields. Re-enable when we can. - /*if (!sym.is(Mutable) && !ctorAssignment) - throw new FatalError(s"Assigning to immutable field ${sym.fullName} at $pos")*/ - - if (sym.hasAnnotation(jsdefn.JSNativeAnnot)) { - /* This is an assignment to a @js.native field. Since we reject - * `@js.native var`s as compile errors, this can only happen in - * the constructor of the enclosing object. - * We simply ignore the assignment, since the field will not be - * emitted at all. - */ - js.Skip() - } else { - val (field, boxed) = genAssignableField(sym, qualifier) - if (boxed) { - val genBoxedRhs = box(genRhs, atPhase(elimErasedValueTypePhase)(sym.info)) - js.Assign(field, genBoxedRhs) - } else { - js.Assign(field, genRhs) - } - } - - case _ => - js.Assign(genVarRef(sym), genRhs) - } - - /** Array constructor */ - case javaSeqLiteral: JavaSeqLiteral => - genJavaSeqLiteral(javaSeqLiteral) - - /** A Match reaching the backend is supposed to be optimized as a switch */ - case mtch: Match => - genMatch(mtch, isStat) - - case tree: Closure => - genClosure(tree) - - case EmptyTree => - js.Skip() - - case _ => - throw new FatalError("Unexpected tree in genExpr: " + - tree + "/" + tree.getClass + " at: " + (tree.span: Position)) - } - } // end of genStatOrExpr() - - private def qualifierOf(fun: Tree): Tree = fun match { - case fun: Ident => - fun.tpe match { - case TermRef(prefix: TermRef, _) => tpd.ref(prefix) - case TermRef(prefix: ThisType, _) => tpd.This(prefix.cls) - } - case Select(qualifier, _) => - qualifier - case TypeApply(fun, _) => - qualifierOf(fun) - } - - /** Gen JS this of the current class. - * Normally encoded straightforwardly as a JS this. 
- * But must be replaced by the `thisLocalVarIdent` local variable if there - * is one. - */ - private def genThis()(implicit pos: Position): js.Tree = { - /*if (tryingToGenMethodAsJSFunction) { - throw new CancelGenMethodAsJSFunction( - "Trying to generate `this` inside the body") - }*/ - - thisLocalVarIdent.fold[js.Tree] { - js.This()(currentThisType) - } { thisLocalIdent => - js.VarRef(thisLocalIdent)(currentThisType) - } - } - - /** Gen IR code for a `try..catch` or `try..finally` block. - * - * `try..finally` blocks are compiled straightforwardly to `try..finally` - * blocks of the IR. - * - * `try..catch` blocks are a bit more subtle, as the IR does not have - * type-based selection of exceptions to catch. We thus encode explicitly - * the type tests, like in: - * - * ``` - * try { ... } - * catch (e) { - * if (e.isInstanceOf[IOException]) { ... } - * else if (e.isInstanceOf[Exception]) { ... } - * else { - * throw e; // default, re-throw - * } - * } - * ``` - * - * In addition, there are provisions to handle catching JavaScript - * exceptions (which do not extend `Throwable`) as wrapped in a - * `js.JavaScriptException`. - */ - private def genTry(tree: Try, isStat: Boolean): js.Tree = { - implicit val pos: SourcePosition = tree.sourcePos - val Try(block, catches, finalizer) = tree - - val blockAST = genStatOrExpr(block, isStat) - - val resultType = - if (isStat) jstpe.NoType - else toIRType(tree.tpe) - - val handled = - if (catches.isEmpty) blockAST - else genTryCatch(blockAST, catches, resultType, isStat) - - genStat(finalizer) match { - case js.Skip() => handled - case ast => js.TryFinally(handled, ast) - } - } - - private def genTryCatch(body: js.Tree, catches: List[CaseDef], - resultType: jstpe.Type, - isStat: Boolean)(implicit pos: SourcePosition): js.Tree = { - val exceptIdent = freshLocalIdent("e") - val origExceptVar = js.VarRef(exceptIdent)(jstpe.AnyType) - - val mightCatchJavaScriptException = catches.exists { caseDef => - caseDef.pat match { - case Typed(Ident(nme.WILDCARD), tpt) => - isMaybeJavaScriptException(tpt.tpe) - case Ident(nme.WILDCARD) => - true - case pat @ Bind(_, _) => - isMaybeJavaScriptException(pat.symbol.info) - } - } - - val (exceptValDef, exceptVar) = if (mightCatchJavaScriptException) { - val valDef = js.VarDef(freshLocalIdent("e"), NoOriginalName, - encodeClassType(defn.ThrowableClass), mutable = false, js.WrapAsThrowable(origExceptVar)) - (valDef, valDef.ref) - } else { - (js.Skip(), origExceptVar) - } - - val elseHandler: js.Tree = js.Throw(origExceptVar) - - val handler = catches.foldRight(elseHandler) { (caseDef, elsep) => - implicit val pos: SourcePosition = caseDef.sourcePos - val CaseDef(pat, _, body) = caseDef - - // Extract exception type and variable - val (tpe, boundVar) = (pat match { - case Typed(Ident(nme.WILDCARD), tpt) => - (tpt.tpe, None) - case Ident(nme.WILDCARD) => - (defn.ThrowableType, None) - case Bind(_, _) => - val ident = encodeLocalSym(pat.symbol) - val origName = originalNameOfLocal(pat.symbol) - (pat.symbol.info, Some(ident, origName)) - }) - - // Generate the body that must be executed if the exception matches - val bodyWithBoundVar = (boundVar match { - case None => - genStatOrExpr(body, isStat) - case Some((boundVarIdent, boundVarOriginalName)) => - val castException = genAsInstanceOf(exceptVar, tpe) - js.Block( - js.VarDef(boundVarIdent, boundVarOriginalName, toIRType(tpe), - mutable = false, castException), - genStatOrExpr(body, isStat)) - }) - - // Generate the test - if (tpe =:= defn.ThrowableType) { - 
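        // Catching `Throwable` matches any exception that can reach this handler,
        // so no type test is needed for this case.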
bodyWithBoundVar - } else { - val cond = genIsInstanceOf(exceptVar, tpe) - js.If(cond, bodyWithBoundVar, elsep)(resultType) - } - } - - js.TryCatch(body, exceptIdent, NoOriginalName, - js.Block(exceptValDef, handler))(resultType) - } - - /** Gen JS code for an Apply node (method call) - * - * There's a whole bunch of varieties of Apply nodes: regular method - * calls, super calls, constructor calls, isInstanceOf/asInstanceOf, - * primitives, JS calls, etc. They are further dispatched in here. - */ - private def genApply(tree: Apply, isStat: Boolean): js.Tree = { - implicit val pos = tree.span - val args = tree.args - val sym = tree.fun.symbol - - /* Is the method a JS default accessor, which should become an - * `UndefinedParam` rather than being compiled normally. - * - * This is true iff one of the following conditions apply: - * - It is a constructor default param for the constructor of a JS class. - * - It is a default param of an instance method of a native JS type. - * - It is a default param of an instance method of a non-native JS type - * and the attached method is exposed. - * - It is a default param for a native JS def. - * - * This is different than `isIgnorableDefaultParam` in - * `genMethodWithCurrentLocalNameScope`: we include here the default - * accessors of *non-native* JS types (unless the corresponding methods are - * not exposed). We also need to handle non-constructor members of native - * JS types. - */ - def isJSDefaultParam: Boolean = { - sym.name.is(DefaultGetterName) && { - val info = new DefaultParamInfo(sym) - if (info.isForConstructor) { - /* This is a default accessor for a constructor parameter. Check - * whether the attached constructor is a JS constructor, which is - * the case iff the linked class is a JS type. - */ - info.constructorOwner.isJSType - } else { - if (sym.owner.isJSType) { - /* The default accessor is in a JS type. It is a JS default - * param iff the enclosing class is native or the attached method - * is exposed. - */ - !sym.owner.isNonNativeJSClass || info.attachedMethod.isJSExposed - } else { - /* The default accessor is in a Scala type. It is a JS default - * param iff the attached method is a native JS def. This can - * only happen if the owner is a module class, which we test - * first as a fast way out. - */ - sym.owner.is(ModuleClass) && info.attachedMethod.hasAnnotation(jsdefn.JSNativeAnnot) - } - } - } - } - - tree.fun match { - case _ if isJSDefaultParam => - js.Transient(UndefinedParam) - - case Select(Super(_, _), _) => - genSuperCall(tree, isStat) - - case Select(New(_), nme.CONSTRUCTOR) => - genApplyNew(tree) - - case _ => - if (primitives.isPrimitive(tree)) { - genPrimitiveOp(tree, isStat) - } else if (Erasure.Boxing.isBox(sym)) { - // Box a primitive value (cannot be Unit) - val arg = args.head - makePrimitiveBox(genExpr(arg), arg.tpe) - } else if (Erasure.Boxing.isUnbox(sym)) { - // Unbox a primitive value (cannot be Unit) - val arg = args.head - makePrimitiveUnbox(genExpr(arg), tree.tpe) - } else { - genNormalApply(tree, isStat) - } - } - } - - /** Gen JS code for a super call, of the form Class.super[mix].fun(args). - * - * This does not include calls defined in mixin traits, as these are - * already desugared by the 'mixin' phase. Only calls to super classes - * remain. - * - * Since a class has exactly one direct superclass, and calling a method - * two classes above the current one is invalid in Scala, the `mix` item is - * irrelevant. 
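   * For instance (illustrative),
   * {{{
   *   class A { def f(): Int = 1 }
   *   class B extends A { override def f(): Int = super.f() + 1 }
   * }}}
   * the `super.f()` call in `B` is what reaches this method; super calls into
   * mixin traits were already desugared by the `mixin` phase.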
- */ - private def genSuperCall(tree: Apply, isStat: Boolean): js.Tree = { - implicit val pos = tree.span - val Apply(fun @ Select(sup @ Super(qual, _), _), args) = tree: @unchecked - val sym = fun.symbol - - if (sym == defn.Any_getClass) { - // The only primitive that is also callable as super call - js.GetClass(genThis()) - } else if (currentClassSym.isNonNativeJSClass) { - genJSSuperCall(tree, isStat) - } else { - /* #3013 `qual` can be `this.$outer()` in some cases since Scala 2.12, - * so we call `genExpr(qual)`, not just `genThis()`. - */ - val superCall = genApplyMethodStatically( - genExpr(qual), sym, genActualArgs(sym, args)) - - // Initialize the module instance just after the super constructor call. - if (isStaticModule(currentClassSym) && !isModuleInitialized.get.value && - currentMethodSym.get.isClassConstructor) { - isModuleInitialized.get.value = true - val className = encodeClassName(currentClassSym) - val thisType = jstpe.ClassType(className) - val initModule = js.StoreModule(className, js.This()(thisType)) - js.Block(superCall, initModule) - } else { - superCall - } - } - } - - /** Gen JS code for a constructor call (new). - * Further refined into: - * * new String(...) - * * new of a hijacked boxed class - * * new of an anonymous function class that was recorded as JS function - * * new of a raw JS class - * * new Array - * * regular new - */ - private def genApplyNew(tree: Apply): js.Tree = { - implicit val pos: SourcePosition = tree.sourcePos - - val Apply(fun @ Select(New(tpt), nme.CONSTRUCTOR), args) = tree: @unchecked - val ctor = fun.symbol - val tpe = tpt.tpe - - assert(ctor.isClassConstructor, - "'new' call to non-constructor: " + ctor.name) - - val clsSym = tpe.typeSymbol - - if (isHijackedClass(clsSym)) { - genNewHijackedClass(clsSym, ctor, args.map(genExpr)) - } else /*if (translatedAnonFunctions contains tpe.typeSymbol) { - val functionMaker = translatedAnonFunctions(tpe.typeSymbol) - functionMaker(args map genExpr) - } else*/ if (clsSym.isJSType) { - genNewJSClass(tree) - } else { - toTypeRef(tpe) match { - case jstpe.ClassRef(className) => - js.New(className, encodeMethodSym(ctor), genActualArgs(ctor, args)) - - case other => - throw new FatalError(s"Non ClassRef cannot be instantiated: $other") - } - } - } - - /** Gen JS code for a call to a constructor of a hijacked class. - * Reroute them to the `new` method with the same signature in the - * companion object. - */ - private def genNewHijackedClass(clazz: Symbol, ctor: Symbol, - args: List[js.Tree])(implicit pos: SourcePosition): js.Tree = { - - val className = encodeClassName(clazz) - val initName = encodeMethodSym(ctor).name - val newName = MethodName(newSimpleMethodName, initName.paramTypeRefs, - jstpe.ClassRef(className)) - val newMethodIdent = js.MethodIdent(newName) - - js.ApplyStatic(js.ApplyFlags.empty, className, newMethodIdent, args)( - jstpe.ClassType(className)) - } - - /** Gen JS code for a new of a JS class (subclass of `js.Any`). 
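   * For example (illustrative), `new js.Object()` and `new js.Array()` with no
   * arguments are special-cased to empty object/array literals; other static JS
   * classes load the JS constructor and emit a `JSNew`, while nested JS classes
   * use the captured JS class value instead.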
*/ - private def genNewJSClass(tree: Apply): js.Tree = { - acquireContextualJSClassValue { jsClassValue => - implicit val pos: Position = tree.span - - val Apply(fun @ Select(New(tpt), _), args) = tree: @unchecked - val cls = tpt.tpe.typeSymbol - val ctor = fun.symbol - - val nestedJSClass = cls.isNestedJSClass - assert(jsClassValue.isDefined == nestedJSClass, - s"$cls at $pos: jsClassValue.isDefined = ${jsClassValue.isDefined} " + - s"but isInnerNonNativeJSClass = $nestedJSClass") - - def genArgs: List[js.TreeOrJSSpread] = genActualJSArgs(ctor, args) - def genArgsAsClassCaptures: List[js.Tree] = args.map(genExpr) - - jsClassValue.fold { - // Static JS class (by construction, it cannot be a module class, as their News do not reach the back-end) - if (cls == jsdefn.JSObjectClass && args.isEmpty) - js.JSObjectConstr(Nil) - else if (cls == jsdefn.JSArrayClass && args.isEmpty) - js.JSArrayConstr(Nil) - else - js.JSNew(genLoadJSConstructor(cls), genArgs) - } { jsClassVal => - // Nested JS class - if (cls.isAnonymousClass) - genNewAnonJSClass(cls, jsClassVal, genArgsAsClassCaptures)(fun.span) - else if (atPhase(erasurePhase)(cls.is(ModuleClass))) // LambdaLift removes the ModuleClass flag of lifted classes - js.JSNew(js.CreateJSClass(encodeClassName(cls), jsClassVal :: genArgsAsClassCaptures), Nil) - else - js.JSNew(jsClassVal, genArgs) - } - } - } - - /** Generate an instance of an anonymous (non-lambda) JS class inline - * - * @param sym Class to generate the instance of - * @param jsSuperClassValue JS class value of the super class - * @param args Arguments to the Scala constructor, which map to JS class captures - * @param pos Position of the original New tree - */ - private def genNewAnonJSClass(sym: Symbol, jsSuperClassValue: js.Tree, args: List[js.Tree])( - implicit pos: Position): js.Tree = { - assert(sym.isAnonymousClass, - s"Generating AnonJSClassNew of non anonymous JS class ${sym.fullName}") - - // Find the TypeDef for this anonymous class and generate it - val typeDef = consumeLazilyGeneratedAnonClass(sym) - val originalClassDef = resetAllScopedVars { - withScopedVars( - currentClassSym := sym - ) { - genNonNativeJSClass(typeDef) - } - } - - // Partition class members. 
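    // Private Scala fields go to a hidden per-instance object, static methods stay
    // on the synthesized class, and everything else is installed on each instance
    // by the inlined constructor below.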
- val privateFieldDefs = mutable.ListBuffer.empty[js.FieldDef] - val classDefMembers = mutable.ListBuffer.empty[js.MemberDef] - val instanceMembers = mutable.ListBuffer.empty[js.MemberDef] - var constructor: Option[js.JSConstructorDef] = None - - originalClassDef.memberDefs.foreach { - case fdef: js.FieldDef => - privateFieldDefs += fdef - - case fdef: js.JSFieldDef => - instanceMembers += fdef - - case mdef: js.MethodDef => - assert(mdef.flags.namespace.isStatic, - "Non-static, unexported method in non-native JS class") - classDefMembers += mdef - - case cdef: js.JSConstructorDef => - assert(constructor.isEmpty, "two ctors in class") - constructor = Some(cdef) - - case mdef: js.JSMethodDef => - assert(!mdef.flags.namespace.isStatic, "Exported static method") - instanceMembers += mdef - - case property: js.JSPropertyDef => - instanceMembers += property - - case nativeMemberDef: js.JSNativeMemberDef => - throw new FatalError("illegal native JS member in JS class at " + nativeMemberDef.pos) - } - - assert(originalClassDef.topLevelExportDefs.isEmpty, - "Found top-level exports in anonymous JS class at " + pos) - - // Make new class def with static members - val newClassDef = { - implicit val pos = originalClassDef.pos - val parent = js.ClassIdent(jsNames.ObjectClass) - js.ClassDef(originalClassDef.name, originalClassDef.originalName, - ClassKind.AbstractJSType, None, Some(parent), interfaces = Nil, - jsSuperClass = None, jsNativeLoadSpec = None, - classDefMembers.toList, Nil)( - originalClassDef.optimizerHints) - } - - generatedClasses += newClassDef - - // Construct inline class definition - - val jsClassCaptures = originalClassDef.jsClassCaptures.getOrElse { - throw new AssertionError(s"no class captures for anonymous JS class at $pos") - } - val js.JSConstructorDef(_, ctorParams, ctorRestParam, ctorBody) = constructor.getOrElse { - throw new AssertionError("No ctor found") - } - assert(ctorParams.isEmpty && ctorRestParam.isEmpty, - s"non-empty constructor params for anonymous JS class at $pos") - - /* The first class capture is always a reference to the super class. - * This is enforced by genJSClassCapturesAndConstructor. - */ - def jsSuperClassRef(implicit pos: ir.Position): js.VarRef = - jsClassCaptures.head.ref - - /* The `this` reference. - * FIXME This could clash with a local variable of the constructor or a JS - * class capture. It seems Scala 2 has the same vulnerability. How do we - * avoid this? 
- */ - val selfName = freshLocalIdent("this")(pos) - def selfRef(implicit pos: ir.Position) = - js.VarRef(selfName)(jstpe.AnyType) - - def memberLambda(params: List[js.ParamDef], restParam: Option[js.ParamDef], body: js.Tree)(implicit pos: ir.Position): js.Closure = - js.Closure(arrow = false, captureParams = Nil, params, restParam, body, captureValues = Nil) - - val memberDefinitions0 = instanceMembers.toList.map { - case fdef: js.FieldDef => - throw new AssertionError("unexpected FieldDef") - - case fdef: js.JSFieldDef => - implicit val pos = fdef.pos - js.Assign(js.JSSelect(selfRef, fdef.name), jstpe.zeroOf(fdef.ftpe)) - - case mdef: js.MethodDef => - throw new AssertionError("unexpected MethodDef") - - case cdef: js.JSConstructorDef => - throw new AssertionError("unexpected JSConstructorDef") - - case mdef: js.JSMethodDef => - implicit val pos = mdef.pos - val impl = memberLambda(mdef.args, mdef.restParam, mdef.body) - js.Assign(js.JSSelect(selfRef, mdef.name), impl) - - case pdef: js.JSPropertyDef => - implicit val pos = pdef.pos - val optGetter = pdef.getterBody.map { body => - js.StringLiteral("get") -> memberLambda(params = Nil, restParam = None, body) - } - val optSetter = pdef.setterArgAndBody.map { case (arg, body) => - js.StringLiteral("set") -> memberLambda(params = arg :: Nil, restParam = None, body) - } - val descriptor = js.JSObjectConstr( - optGetter.toList ::: - optSetter.toList ::: - List(js.StringLiteral("configurable") -> js.BooleanLiteral(true)) - ) - js.JSMethodApply(js.JSGlobalRef("Object"), - js.StringLiteral("defineProperty"), - List(selfRef, pdef.name, descriptor)) - - case nativeMemberDef: js.JSNativeMemberDef => - throw new FatalError("illegal native JS member in JS class at " + nativeMemberDef.pos) - } - - val memberDefinitions = if (privateFieldDefs.isEmpty) { - memberDefinitions0 - } else { - /* Private fields, declared in FieldDefs, are stored in a separate - * object, itself stored as a non-enumerable field of the `selfRef`. - * The name of that field is retrieved at - * `scala.scalajs.runtime.privateFieldsSymbol()`, and is a Symbol if - * supported, or a randomly generated string that has the same enthropy - * as a UUID (i.e., 128 random bits). - * - * This encoding solves two issues: - * - * - Hide private fields in anonymous JS classes from `JSON.stringify` - * and other cursory inspections in JS (#2748). - * - Get around the fact that abstract JS types cannot declare - * FieldDefs (#3777). - */ - val fieldsObjValue = { - js.JSObjectConstr(privateFieldDefs.toList.map { fdef => - implicit val pos = fdef.pos - js.StringLiteral(fdef.name.name.nameString) -> jstpe.zeroOf(fdef.ftpe) - }) - } - val definePrivateFieldsObj = { - /* Object.defineProperty(selfRef, privateFieldsSymbol, { - * value: fieldsObjValue - * }); - * - * `writable`, `configurable` and `enumerable` are false by default. - */ - js.JSMethodApply( - js.JSGlobalRef("Object"), - js.StringLiteral("defineProperty"), - List( - selfRef, - genPrivateFieldsSymbol()(using sym.sourcePos), - js.JSObjectConstr(List( - js.StringLiteral("value") -> fieldsObjValue - )) - ) - ) - } - definePrivateFieldsObj :: memberDefinitions0 - } - - // Transform the constructor body. 
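    // The resulting closure body runs the statements before the super call, binds
    // `selfRef` to `new <superclass>(...)` (or an empty object literal when the
    // superclass is `js.Object`), installs the instance members, then runs the
    // statements after the super call with `this` rebound to `selfRef`.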
- val inlinedCtorStats: List[js.Tree] = { - val beforeSuper = ctorBody.beforeSuper - - val superCall = { - implicit val pos = ctorBody.superCall.pos - val js.JSSuperConstructorCall(args) = ctorBody.superCall - - val newTree = { - val ident = originalClassDef.superClass.getOrElse(throw new FatalError("No superclass")) - if (args.isEmpty && ident.name == JSObjectClassName) - js.JSObjectConstr(Nil) - else - js.JSNew(jsSuperClassRef, args) - } - - val selfVarDef = js.VarDef(selfName, thisOriginalName, jstpe.AnyType, mutable = false, newTree) - selfVarDef :: memberDefinitions - } - - // After the super call, substitute `selfRef` for `This()` - val afterSuper = new ir.Transformers.Transformer { - override def transform(tree: js.Tree, isStat: Boolean): js.Tree = tree match { - case js.This() => - selfRef(tree.pos) - - // Don't traverse closure boundaries - case closure: js.Closure => - val newCaptureValues = closure.captureValues.map(transformExpr) - closure.copy(captureValues = newCaptureValues)(closure.pos) - - case tree => - super.transform(tree, isStat) - } - }.transformStats(ctorBody.afterSuper) - - beforeSuper ::: superCall ::: afterSuper - } - - val closure = js.Closure(arrow = true, jsClassCaptures, Nil, None, - js.Block(inlinedCtorStats, selfRef), jsSuperClassValue :: args) - js.JSFunctionApply(closure, Nil) - } - - /** Gen JS code for a primitive method call. */ - private def genPrimitiveOp(tree: Apply, isStat: Boolean): js.Tree = { - import dotty.tools.backend.ScalaPrimitivesOps._ - - implicit val pos = tree.span - - val Apply(fun, args) = tree - val receiver = qualifierOf(fun) - - val code = primitives.getPrimitive(tree, receiver.tpe) - - if (isArithmeticOp(code) || isLogicalOp(code) || isComparisonOp(code)) - genSimpleOp(tree, receiver :: args, code) - else if (code == CONCAT) - genStringConcat(tree, receiver, args) - else if (code == HASH) - genScalaHash(tree, receiver) - else if (isArrayOp(code)) - genArrayOp(tree, code) - else if (code == SYNCHRONIZED) - genSynchronized(tree, isStat) - else if (isCoercion(code)) - genCoercion(tree, receiver, code) - else if (code == JSPrimitives.THROW) - genThrow(tree, args) - else if (JSPrimitives.isJSPrimitive(code)) - genJSPrimitive(tree, args, code, isStat) - else - throw new FatalError(s"Unknown primitive: ${tree.symbol.fullName} at: $pos") - } - - /** Gen JS code for a simple operation (arithmetic, logical, or comparison) */ - private def genSimpleOp(tree: Apply, args: List[Tree], code: Int): js.Tree = { - args match { - case List(arg) => genSimpleUnaryOp(tree, arg, code) - case List(lhs, rhs) => genSimpleBinaryOp(tree, lhs, rhs, code) - case _ => throw new FatalError("Incorrect arity for primitive") - } - } - - /** Gen JS code for a simple unary operation. 
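   * For example (illustrative), `-x` on an `Int` is emitted as `0 - x`, and `~x`
   * as `-1 ^ x`, matching the cases below.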
*/ - private def genSimpleUnaryOp(tree: Apply, arg: Tree, code: Int): js.Tree = { - import dotty.tools.backend.ScalaPrimitivesOps._ - - implicit val pos = tree.span - - val resultIRType = toIRType(tree.tpe) - val genArg = adaptPrimitive(genExpr(arg), resultIRType) - - (code: @switch) match { - case POS => - genArg - - case NEG => - (resultIRType: @unchecked) match { - case jstpe.IntType => - js.BinaryOp(js.BinaryOp.Int_-, js.IntLiteral(0), genArg) - case jstpe.LongType => - js.BinaryOp(js.BinaryOp.Long_-, js.LongLiteral(0), genArg) - case jstpe.FloatType => - js.BinaryOp(js.BinaryOp.Float_*, js.FloatLiteral(-1.0f), genArg) - case jstpe.DoubleType => - js.BinaryOp(js.BinaryOp.Double_*, js.DoubleLiteral(-1.0), genArg) - } - - case NOT => - (resultIRType: @unchecked) match { - case jstpe.IntType => - js.BinaryOp(js.BinaryOp.Int_^, js.IntLiteral(-1), genArg) - case jstpe.LongType => - js.BinaryOp(js.BinaryOp.Long_^, js.LongLiteral(-1), genArg) - } - - case ZNOT => - js.UnaryOp(js.UnaryOp.Boolean_!, genArg) - - case _ => - throw new FatalError("Unknown unary operation code: " + code) - } - } - - /** Gen JS code for a simple binary operation. */ - private def genSimpleBinaryOp(tree: Apply, lhs: Tree, rhs: Tree, code: Int): js.Tree = { - import dotty.tools.backend.ScalaPrimitivesOps._ - - implicit val pos: SourcePosition = tree.sourcePos - - val lhsIRType = toIRType(lhs.tpe) - val rhsIRType = toIRType(rhs.tpe) - - val isShift = isShiftOp(code) - - val opType = { - if (isShift) { - if (lhsIRType == jstpe.LongType) jstpe.LongType - else jstpe.IntType - } else { - (lhsIRType, rhsIRType) match { - case (jstpe.DoubleType, _) | (_, jstpe.DoubleType) => jstpe.DoubleType - case (jstpe.FloatType, _) | (_, jstpe.FloatType) => jstpe.FloatType - case (jstpe.LongType, _) | (_, jstpe.LongType) => jstpe.LongType - case (jstpe.IntType | jstpe.ByteType | jstpe.ShortType | jstpe.CharType, _) => jstpe.IntType - case (_, jstpe.IntType | jstpe.ByteType | jstpe.ShortType | jstpe.CharType) => jstpe.IntType - case (jstpe.BooleanType, _) | (_, jstpe.BooleanType) => jstpe.BooleanType - case _ => jstpe.AnyType - } - } - } - - val lsrc = - if (opType == jstpe.AnyType) genExpr(lhs) - else adaptPrimitive(genExpr(lhs), opType) - val rsrc = - if (opType == jstpe.AnyType) genExpr(rhs) - else adaptPrimitive(genExpr(rhs), if (isShift) jstpe.IntType else opType) - - if (opType == jstpe.AnyType && isUniversalEqualityOp(code)) { - genUniversalEqualityOp(lhs.tpe, rhs.tpe, lsrc, rsrc, code) - } else if (code == ZOR) { - js.If(lsrc, js.BooleanLiteral(true), rsrc)(jstpe.BooleanType) - } else if (code == ZAND) { - js.If(lsrc, rsrc, js.BooleanLiteral(false))(jstpe.BooleanType) - } else { - import js.BinaryOp._ - - (opType: @unchecked) match { - case jstpe.IntType => - val op = (code: @switch) match { - case ADD => Int_+ - case SUB => Int_- - case MUL => Int_* - case DIV => Int_/ - case MOD => Int_% - case OR => Int_| - case AND => Int_& - case XOR => Int_^ - case LSL => Int_<< - case LSR => Int_>>> - case ASR => Int_>> - - case EQ => Int_== - case NE => Int_!= - case LT => Int_< - case LE => Int_<= - case GT => Int_> - case GE => Int_>= - } - js.BinaryOp(op, lsrc, rsrc) - - case jstpe.FloatType => - def withFloats(op: Int): js.Tree = - js.BinaryOp(op, lsrc, rsrc) - - def toDouble(value: js.Tree): js.Tree = - js.UnaryOp(js.UnaryOp.FloatToDouble, value) - - def withDoubles(op: Int): js.Tree = - js.BinaryOp(op, toDouble(lsrc), toDouble(rsrc)) - - (code: @switch) match { - case ADD => withFloats(Float_+) - case SUB => withFloats(Float_-) - 
case MUL => withFloats(Float_*) - case DIV => withFloats(Float_/) - case MOD => withFloats(Float_%) - - case EQ => withDoubles(Double_==) - case NE => withDoubles(Double_!=) - case LT => withDoubles(Double_<) - case LE => withDoubles(Double_<=) - case GT => withDoubles(Double_>) - case GE => withDoubles(Double_>=) - } - - case jstpe.DoubleType => - val op = (code: @switch) match { - case ADD => Double_+ - case SUB => Double_- - case MUL => Double_* - case DIV => Double_/ - case MOD => Double_% - - case EQ => Double_== - case NE => Double_!= - case LT => Double_< - case LE => Double_<= - case GT => Double_> - case GE => Double_>= - } - js.BinaryOp(op, lsrc, rsrc) - - case jstpe.LongType => - val op = (code: @switch) match { - case ADD => Long_+ - case SUB => Long_- - case MUL => Long_* - case DIV => Long_/ - case MOD => Long_% - case OR => Long_| - case XOR => Long_^ - case AND => Long_& - case LSL => Long_<< - case LSR => Long_>>> - case ASR => Long_>> - - case EQ => Long_== - case NE => Long_!= - case LT => Long_< - case LE => Long_<= - case GT => Long_> - case GE => Long_>= - } - js.BinaryOp(op, lsrc, rsrc) - - case jstpe.BooleanType => - val op = (code: @switch) match { - case EQ => Boolean_== - case NE => Boolean_!= - case OR => Boolean_| - case AND => Boolean_& - case XOR => Boolean_!= - } - js.BinaryOp(op, lsrc, rsrc) - - case jstpe.AnyType => - val op = code match { - case ID => === - case NI => !== - } - js.BinaryOp(op, lsrc, rsrc) - } - } - } - - private def adaptPrimitive(value: js.Tree, to: jstpe.Type)( - implicit pos: Position): js.Tree = { - genConversion(value.tpe, to, value) - } - - /* This method corresponds to the method of the same name in - * BCodeBodyBuilder of the JVM back-end. It ends up calling the method - * BCodeIdiomatic.emitT2T, whose logic we replicate here. - */ - private def genConversion(from: jstpe.Type, to: jstpe.Type, value: js.Tree)( - implicit pos: Position): js.Tree = { - import js.UnaryOp._ - - if (from == to || from == jstpe.NothingType) { - value - } else if (from == jstpe.BooleanType || to == jstpe.BooleanType) { - throw new AssertionError(s"Invalid genConversion from $from to $to") - } else { - def intValue = (from: @unchecked) match { - case jstpe.IntType => value - case jstpe.CharType => js.UnaryOp(CharToInt, value) - case jstpe.ByteType => js.UnaryOp(ByteToInt, value) - case jstpe.ShortType => js.UnaryOp(ShortToInt, value) - case jstpe.LongType => js.UnaryOp(LongToInt, value) - case jstpe.FloatType => js.UnaryOp(DoubleToInt, js.UnaryOp(FloatToDouble, value)) - case jstpe.DoubleType => js.UnaryOp(DoubleToInt, value) - } - - def doubleValue = from match { - case jstpe.DoubleType => value - case jstpe.FloatType => js.UnaryOp(FloatToDouble, value) - case jstpe.LongType => js.UnaryOp(LongToDouble, value) - case _ => js.UnaryOp(IntToDouble, intValue) - } - - (to: @unchecked) match { - case jstpe.CharType => - js.UnaryOp(IntToChar, intValue) - case jstpe.ByteType => - js.UnaryOp(IntToByte, intValue) - case jstpe.ShortType => - js.UnaryOp(IntToShort, intValue) - case jstpe.IntType => - intValue - case jstpe.LongType => - from match { - case jstpe.FloatType | jstpe.DoubleType => - js.UnaryOp(DoubleToLong, doubleValue) - case _ => - js.UnaryOp(IntToLong, intValue) - } - case jstpe.FloatType => - if (from == jstpe.LongType) - js.UnaryOp(js.UnaryOp.LongToFloat, value) - else - js.UnaryOp(js.UnaryOp.DoubleToFloat, doubleValue) - case jstpe.DoubleType => - doubleValue - } - } - } - - /** Gen JS code for a universal equality test. 
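   * That is, `==`/`!=` on operands whose static types require run-time dispatch:
   * a literal `null` on either side short-circuits to a reference comparison,
   * otherwise the comparison goes through `genEqEqPrimitive` below.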
*/ - private def genUniversalEqualityOp(ltpe: Type, rtpe: Type, lhs: js.Tree, rhs: js.Tree, code: Int)( - implicit pos: SourcePosition): js.Tree = { - - import dotty.tools.backend.ScalaPrimitivesOps._ - - val bypassEqEq = { - // Do not call equals if we have a literal null at either side. - lhs.isInstanceOf[js.Null] || - rhs.isInstanceOf[js.Null] - } - - if (bypassEqEq) { - js.BinaryOp( - if (code == EQ) js.BinaryOp.=== else js.BinaryOp.!==, - lhs, rhs) - } else { - val body = genEqEqPrimitive(ltpe, rtpe, lhs, rhs) - if (code == EQ) body - else js.UnaryOp(js.UnaryOp.Boolean_!, body) - } - } - - private lazy val externalEqualsNumNum: Symbol = - defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumNum) - private lazy val externalEqualsNumChar: Symbol = - defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumChar) - private lazy val externalEqualsNumObject: Symbol = - defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumObject) - private lazy val externalEquals: Symbol = - defn.BoxesRunTimeModule.info.decl(nme.equals_).suchThat(toDenot(_).info.firstParamTypes.size == 2).symbol - - /** Gen JS code for a call to Any.== */ - private def genEqEqPrimitive(ltpe: Type, rtpe: Type, lsrc: js.Tree, rsrc: js.Tree)( - implicit pos: SourcePosition): js.Tree = { - report.debuglog(s"$ltpe == $rtpe") - val lsym = ltpe.typeSymbol.asClass - val rsym = rtpe.typeSymbol.asClass - - /* True if the equality comparison is between values that require the - * use of the rich equality comparator - * (scala.runtime.BoxesRunTime.equals). - * This is the case when either side of the comparison might have a - * run-time type subtype of java.lang.Number or java.lang.Character, - * **which includes when either is a JS type**. - * When it is statically known that both sides are equal and subtypes of - * Number or Character, not using the rich equality is possible (their - * own equals method will do ok), except for java.lang.Float and - * java.lang.Double: their `equals` have different behavior around `NaN` - * and `-0.0`, see Javadoc (scala-dev#329, scala-js#2799). - */ - val mustUseAnyComparator: Boolean = { - lsym.isJSType || rsym.isJSType || { - val p = ctx.platform - p.isMaybeBoxed(lsym) && p.isMaybeBoxed(rsym) && { - val areSameFinals = lsym.is(Final) && rsym.is(Final) && (ltpe =:= rtpe) - !areSameFinals || lsym == defn.BoxedFloatClass || lsym == defn.BoxedDoubleClass - } - } - } - - if (mustUseAnyComparator) { - val equalsMethod: Symbol = { - val ptfm = ctx.platform - if (lsym.derivesFrom(defn.BoxedNumberClass)) { - if (rsym.derivesFrom(defn.BoxedNumberClass)) externalEqualsNumNum - else if (rsym.derivesFrom(defn.BoxedCharClass)) externalEqualsNumChar - else externalEqualsNumObject - } else externalEquals - } - genApplyStatic(equalsMethod, List(lsrc, rsrc)) - } else { - // if (lsrc eq null) rsrc eq null else lsrc.equals(rsrc) - if (lsym == defn.StringClass) { - // String.equals(that) === (this eq that) - js.BinaryOp(js.BinaryOp.===, lsrc, rsrc) - } else { - /* This requires to evaluate both operands in local values first. - * The optimizer will eliminate them if possible. 
- */ - val ltemp = js.VarDef(freshLocalIdent(), NoOriginalName, lsrc.tpe, mutable = false, lsrc) - val rtemp = js.VarDef(freshLocalIdent(), NoOriginalName, rsrc.tpe, mutable = false, rsrc) - js.Block( - ltemp, - rtemp, - js.If(js.BinaryOp(js.BinaryOp.===, ltemp.ref, js.Null()), - js.BinaryOp(js.BinaryOp.===, rtemp.ref, js.Null()), - genApplyMethod(ltemp.ref, defn.Any_equals, List(rtemp.ref)))( - jstpe.BooleanType)) - } - } - } - - /** Gen JS code for string concatenation. - */ - private def genStringConcat(tree: Apply, receiver: Tree, - args: List[Tree]): js.Tree = { - implicit val pos = tree.span - - js.BinaryOp(js.BinaryOp.String_+, genExpr(receiver), genExpr(args.head)) - } - - /** Gen JS code for a call to Any.## */ - private def genScalaHash(tree: Apply, receiver: Tree): js.Tree = { - implicit val pos: SourcePosition = tree.sourcePos - - genModuleApplyMethod(defn.ScalaRuntimeModule.requiredMethod(nme.hash_), - List(genExpr(receiver))) - } - - /** Gen JS code for an array operation (get, set or length) */ - private def genArrayOp(tree: Tree, code: Int): js.Tree = { - import dotty.tools.backend.ScalaPrimitivesOps._ - - implicit val pos = tree.span - - val Apply(fun, args) = tree: @unchecked - val arrayObj = qualifierOf(fun) - - val genArray = genExpr(arrayObj) - val genArgs = args.map(genExpr) - - def elementType: Type = arrayObj.tpe.widenDealias match { - case defn.ArrayOf(el) => el - case JavaArrayType(el) => el - case tpe => - val msg = em"expected Array $tpe" - report.error(msg) - ErrorType(msg) - } - - def genSelect(): js.AssignLhs = - js.ArraySelect(genArray, genArgs(0))(toIRType(elementType)) - - if (isArrayGet(code)) { - // get an item of the array - assert(args.length == 1, - s"Array get requires 1 argument, found ${args.length} in $tree") - genSelect() - } else if (isArraySet(code)) { - // set an item of the array - assert(args.length == 2, - s"Array set requires 2 arguments, found ${args.length} in $tree") - js.Assign(genSelect(), genArgs(1)) - } else { - // length of the array - js.ArrayLength(genArray) - } - } - - /** Gen JS code for a call to AnyRef.synchronized */ - private def genSynchronized(tree: Apply, isStat: Boolean): js.Tree = { - /* JavaScript is single-threaded, so we can drop the - * synchronization altogether. - */ - val Apply(fun, List(arg)) = tree - val receiver = qualifierOf(fun) - - val genReceiver = genExpr(receiver) - val genArg = genStatOrExpr(arg, isStat) - - genReceiver match { - case js.This() => - // common case for which there is no side-effect nor NPE - genArg - case _ => - implicit val pos = tree.span - js.Block( - js.If(js.BinaryOp(js.BinaryOp.===, genReceiver, js.Null()), - js.Throw(js.New(NullPointerExceptionClass, js.MethodIdent(jsNames.NoArgConstructorName), Nil)), - js.Skip())(jstpe.NoType), - genArg) - } - } - - /** Gen JS code for a coercion */ - private def genCoercion(tree: Apply, receiver: Tree, code: Int): js.Tree = { - implicit val pos = tree.span - - val source = genExpr(receiver) - val resultType = toIRType(tree.tpe) - adaptPrimitive(source, resultType) - } - - /** Gen a call to the special `throw` method. 
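   * For example (illustrative), `throw new IllegalArgumentException()` is known
   * not to be a `js.JavaScriptException`, so it is thrown as-is; any other thrown
   * expression is first passed through `UnwrapFromThrowable`.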
*/ - private def genThrow(tree: Apply, args: List[Tree]): js.Tree = { - implicit val pos: SourcePosition = tree.sourcePos - val exception = args.head - val genException = genExpr(exception) - genException match { - case js.New(cls, _, _) if cls != JavaScriptExceptionClassName => - // Common case where ex is neither null nor a js.JavaScriptException - js.Throw(genException) - case _ => - js.Throw(js.UnwrapFromThrowable(genException)) - } - } - - /** Gen a "normal" apply (to a true method). - * - * But even these are further refined into: - * * Methods of java.lang.String, which are redirected to the - * RuntimeString trait implementation. - * * Calls to methods of raw JS types (Scala.js -> JS interop) - * * Calls to methods in impl classes of Scala2 traits. - * * Regular method call - */ - private def genNormalApply(tree: Apply, isStat: Boolean): js.Tree = { - implicit val pos = tree.span - - val fun = tree.fun match { - case fun: Ident => desugarIdent(fun).get - case fun: Select => fun - } - val receiver = fun.qualifier - val args = tree.args - val sym = fun.symbol - - def isStringMethodFromObject: Boolean = sym.name match { - case nme.toString_ | nme.equals_ | nme.hashCode_ => true - case _ => false - } - - if (isMethodStaticInIR(sym)) { - genApplyStatic(sym, genActualArgs(sym, args)) - } else if (sym.owner.isJSType) { - if (!sym.owner.isNonNativeJSClass || sym.isJSExposed) - genApplyJSMethodGeneric(sym, genExprOrGlobalScope(receiver), genActualJSArgs(sym, args), isStat)(tree.sourcePos) - else - genApplyJSClassMethod(genExpr(receiver), sym, genActualArgs(sym, args)) - } else if (sym.hasAnnotation(jsdefn.JSNativeAnnot)) { - genJSNativeMemberCall(tree) - } else { - genApplyMethodMaybeStatically(genExpr(receiver), sym, genActualArgs(sym, args)) - } - } - - /** Gen JS code for a call to a JS method (of a subclass of `js.Any`). - * - * Basically it boils down to calling the method as a `JSBracketSelect`, - * without name mangling. 
But other aspects come into play: - * - * - Operator methods are translated to JS operators (not method calls) - * - `apply` is translated as a function call, i.e., `o()` instead of `o.apply()` - * - Scala varargs are turned into JS varargs (see `genPrimitiveJSArgs()`) - * - Getters and parameterless methods are translated as `JSBracketSelect` - * - Setters are translated to `Assign` to `JSBracketSelect` - */ - private def genApplyJSMethodGeneric(sym: Symbol, - receiver: MaybeGlobalScope, args: List[js.TreeOrJSSpread], isStat: Boolean, - jsSuperClassValue: Option[js.Tree] = None)( - implicit pos: SourcePosition): js.Tree = { - - def argsNoSpread: List[js.Tree] = { - assert(!args.exists(_.isInstanceOf[js.JSSpread]), s"Unexpected spread at $pos") - args.asInstanceOf[List[js.Tree]] - } - - val argc = args.size // meaningful only for methods that don't have varargs - - def requireNotSuper(): Unit = { - if (jsSuperClassValue.isDefined) - report.error("Illegal super call in Scala.js-defined JS class", pos) - } - - def requireNotSpread(arg: js.TreeOrJSSpread): js.Tree = - arg.asInstanceOf[js.Tree] - - def genSuperReference(propName: js.Tree): js.AssignLhs = { - jsSuperClassValue.fold[js.AssignLhs] { - genJSSelectOrGlobalRef(receiver, propName) - } { superClassValue => - js.JSSuperSelect(superClassValue, ruleOutGlobalScope(receiver), propName) - } - } - - def genSelectGet(propName: js.Tree): js.Tree = - genSuperReference(propName) - - def genSelectSet(propName: js.Tree, value: js.Tree): js.Tree = - js.Assign(genSuperReference(propName), value) - - def genCall(methodName: js.Tree, args: List[js.TreeOrJSSpread]): js.Tree = { - jsSuperClassValue.fold[js.Tree] { - genJSMethodApplyOrGlobalRefApply(receiver, methodName, args) - } { superClassValue => - js.JSSuperMethodCall(superClassValue, ruleOutGlobalScope(receiver), methodName, args) - } - } - - val boxedResult = sym.jsCallingConvention match { - case JSCallingConvention.UnaryOp(code) => - requireNotSuper() - assert(argc == 0, s"bad argument count ($argc) for unary op at $pos") - js.JSUnaryOp(code, ruleOutGlobalScope(receiver)) - - case JSCallingConvention.BinaryOp(code) => - requireNotSuper() - assert(argc == 1, s"bad argument count ($argc) for binary op at $pos") - js.JSBinaryOp(code, ruleOutGlobalScope(receiver), requireNotSpread(args.head)) - - case JSCallingConvention.Call => - requireNotSuper() - if (sym.owner.isSubClass(jsdefn.JSThisFunctionClass)) - js.JSMethodApply(ruleOutGlobalScope(receiver), js.StringLiteral("call"), args) - else - js.JSFunctionApply(ruleOutGlobalScope(receiver), args) - - case JSCallingConvention.Property(jsName) => - argsNoSpread match { - case Nil => - genSelectGet(genExpr(jsName)) - case value :: Nil => - genSelectSet(genExpr(jsName), value) - case _ => - throw new AssertionError(s"property methods should have 0 or 1 non-varargs arguments at $pos") - } - - case JSCallingConvention.BracketAccess => - argsNoSpread match { - case keyArg :: Nil => - genSelectGet(keyArg) - case keyArg :: valueArg :: Nil => - genSelectSet(keyArg, valueArg) - case _ => - throw new AssertionError(s"@JSBracketAccess methods should have 1 or 2 non-varargs arguments at $pos") - } - - case JSCallingConvention.BracketCall => - val (methodName, actualArgs) = extractFirstArg(args) - genCall(methodName, actualArgs) - - case JSCallingConvention.Method(jsName) => - genCall(genExpr(jsName), args) - } - - if (isStat) { - boxedResult - } else { - val tpe = atPhase(elimErasedValueTypePhase) { - sym.info.finalResultType - } - if 
(tpe.isRef(defn.BoxedUnitClass) && sym.isGetter) { - /* Work around to reclaim Scala 2 erasure behavior, assumed by the test - * NonNativeJSTypeTest.defaultValuesForFields. - * Scala 2 erases getters of `Unit`-typed fields as returning `Unit` - * (not `BoxedUnit`). Therefore, when called in expression position, - * the call site introduces an explicit `BoxedUnit.UNIT`. Even if the - * field has not been initialized at all (with `= _`), this results in - * an actual `()` value. - * In Scala 3, the same pattern returns `null`, as a `BoxedUnit`, so we - * introduce here an explicit `()` value. - * TODO We should remove this branch if the upstream test is updated - * not to assume such a strict interpretation of erasure. - */ - js.Block(boxedResult, js.Undefined()) - } else { - unbox(boxedResult, tpe) - } - } - } - - /** Extract the first argument in a list of actual arguments. - * - * This is nothing else than decomposing into head and tail, except that - * we assert that the first element is not a JSSpread. - */ - private def extractFirstArg(args: List[js.TreeOrJSSpread]): (js.Tree, List[js.TreeOrJSSpread]) = { - assert(args.nonEmpty, - "Trying to extract the first argument of an empty argument list") - val firstArg = args.head - assert(!firstArg.isInstanceOf[js.JSSpread], - "Trying to extract the first argument of an argument list starting " + - "with a Spread argument: " + firstArg) - (firstArg.asInstanceOf[js.Tree], args.tail) - } - - /** Gen JS code for a call to a native JS def or val. */ - private def genJSNativeMemberSelect(tree: Tree): js.Tree = - genJSNativeMemberSelectOrCall(tree, Nil) - - /** Gen JS code for a call to a native JS def or val. */ - private def genJSNativeMemberCall(tree: Apply): js.Tree = - genJSNativeMemberSelectOrCall(tree, tree.args) - - /** Gen JS code for a call to a native JS def or val. 
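   * A `@js.native` val (or getter) is simply selected; a `@js.native` def becomes
   * a JS function application of the selected member, with its result unboxed to
   * the Scala result type.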
*/ - private def genJSNativeMemberSelectOrCall(tree: Tree, args: List[Tree]): js.Tree = { - val sym = tree.symbol - - implicit val pos = tree.span - - val jsNativeMemberValue = - js.SelectJSNativeMember(encodeClassName(sym.owner), encodeJSNativeMemberSym(sym)) - - val boxedResult = - if (sym.isJSGetter) jsNativeMemberValue - else js.JSFunctionApply(jsNativeMemberValue, genActualJSArgs(sym, args)) - - unbox(boxedResult, atPhase(elimErasedValueTypePhase) { - sym.info.resultType - }) - } - - private def genJSSuperCall(tree: Apply, isStat: Boolean): js.Tree = { - acquireContextualJSClassValue { explicitJSSuperClassValue => - implicit val pos = tree.span - val Apply(fun @ Select(sup @ Super(qual, _), _), args) = tree: @unchecked - val sym = fun.symbol - - val genReceiver = genExpr(qual) - def genScalaArgs = genActualArgs(sym, args) - def genJSArgs = genActualJSArgs(sym, args) - - if (sym.owner == defn.ObjectClass) { - // Normal call anyway - assert(!sym.isClassConstructor, - s"Trying to call the super constructor of Object in a non-native JS class at $pos") - genApplyMethod(genReceiver, sym, genScalaArgs) - } else if (sym.isClassConstructor) { - throw new AssertionError( - s"calling a JS super constructor should have happened in genPrimaryJSClassCtor at $pos") - } else if (sym.owner.isNonNativeJSClass && !sym.isJSExposed) { - // Reroute to the static method - genApplyJSClassMethod(genReceiver, sym, genScalaArgs) - } else { - val jsSuperClassValue = explicitJSSuperClassValue.orElse { - Some(genLoadJSConstructor(currentClassSym.get.asClass.superClass)) - } - genApplyJSMethodGeneric(sym, MaybeGlobalScope.NotGlobalScope(genReceiver), - genJSArgs, isStat, jsSuperClassValue)(tree.sourcePos) - } - } - } - - /** Gen JS code for a call to a polymorphic method. - * - * The only methods that reach the back-end as polymorphic are - * `isInstanceOf` and `asInstanceOf`. - * - * (Well, in fact `DottyRunTime.newRefArray` too, but it is handled as a - * primitive instead.) - */ - private def genTypeApply(tree: TypeApply): js.Tree = { - implicit val pos: SourcePosition = tree.sourcePos - - val TypeApply(fun, targs) = tree - - val sym = fun.symbol - val receiver = qualifierOf(fun) - - val to = targs.head.tpe - - assert(!isPrimitiveValueType(receiver.tpe), - s"Found receiver of type test with primitive type ${receiver.tpe} at $pos") - assert(!isPrimitiveValueType(to), - s"Found target type of type test with primitive type ${receiver.tpe} at $pos") - - val genReceiver = genExpr(receiver) - - if (sym == defn.Any_asInstanceOf) { - genAsInstanceOf(genReceiver, to) - } else if (sym == defn.Any_isInstanceOf) { - genIsInstanceOf(genReceiver, to) - } else { - throw new FatalError( - s"Unexpected type application $fun with symbol ${sym.fullName}") - } - } - - /** Gen JS code for a Java Seq literal. */ - private def genJavaSeqLiteral(tree: JavaSeqLiteral): js.Tree = { - implicit val pos = tree.span - - val genElems = tree.elems.map(genExpr) - val arrayTypeRef = toTypeRef(tree.tpe).asInstanceOf[jstpe.ArrayTypeRef] - js.ArrayValue(arrayTypeRef, genElems) - } - - /** Gen JS code for a switch-`Match`, which is translated into an IR `js.Match`. 
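   * For instance (illustrative), a pattern match on `Int` or `String` literals such as
   * {{{
   *   x match {
   *     case 1 | 2 => "small"
   *     case 3     => "three"
   *     case _     => "other"
   *   }
   * }}}
   * reaches the back-end in switch-like form and is emitted as a `js.Match`, or
   * simplified to a `js.If` when only one case with one alternative remains.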
*/ - def genMatch(tree: Tree, isStat: Boolean): js.Tree = { - implicit val pos = tree.span - val Match(selector, cases) = tree: @unchecked - - def abortMatch(msg: String): Nothing = - throw new FatalError(s"$msg in switch-like pattern match at ${tree.span}: $tree") - - val genSelector = genExpr(selector) - - // Sanity check: we can handle Ints and Strings (including `null`s), but nothing else - genSelector.tpe match { - case jstpe.IntType | jstpe.ClassType(jsNames.BoxedStringClass) | jstpe.NullType | jstpe.NothingType => - // ok - case _ => - abortMatch(s"Invalid selector type ${genSelector.tpe}") - } - - val resultType = toIRType(tree.tpe) match { - case jstpe.NothingType => jstpe.NothingType // must take priority over NoType below - case _ if isStat => jstpe.NoType - case resType => resType - } - - var clauses: List[(List[js.MatchableLiteral], js.Tree)] = Nil - var optDefaultClause: Option[js.Tree] = None - - for (caze @ CaseDef(pat, guard, body) <- cases) { - if (guard != EmptyTree) - abortMatch("Found a case guard") - - val genBody = genStatOrExpr(body, isStat) - - def invalidCase(): Nothing = - abortMatch("Invalid case") - - def genMatchableLiteral(tree: Literal): js.MatchableLiteral = { - genExpr(tree) match { - case matchableLiteral: js.MatchableLiteral => matchableLiteral - case otherExpr => invalidCase() - } - } - - pat match { - case lit: Literal => - clauses = (List(genMatchableLiteral(lit)), genBody) :: clauses - case Ident(nme.WILDCARD) => - optDefaultClause = Some(genBody) - case Alternative(alts) => - val genAlts = alts.map { - case lit: Literal => genMatchableLiteral(lit) - case _ => invalidCase() - } - clauses = (genAlts, genBody) :: clauses - case _ => - invalidCase() - } - } - - clauses = clauses.reverse - val defaultClause = optDefaultClause.getOrElse { - throw new AssertionError("No elseClause in pattern match") - } - - /* Builds a `js.Match`, but simplifies it to a `js.If` if there is only - * one case with one alternative, and to a `js.Block` if there is no case - * at all. This happens in practice in the standard library. Having no - * case is a typical product of `match`es that are full of - * `case n if ... =>`, which are used instead of `if` chains for - * convenience and/or readability. - */ - def isInt(tree: js.Tree): Boolean = tree.tpe == jstpe.IntType - - clauses match { - case Nil => - // Completely remove the Match. Preserve the side-effects of `genSelector`. - js.Block(exprToStat(genSelector), defaultClause) - - case (uniqueAlt :: Nil, caseRhs) :: Nil => - /* Simplify the `match` as an `if`, so that the optimizer has less - * work to do, and we emit less code at the end of the day. - * Use `Int_==` instead of `===` if possible, since it is a common case. - */ - val op = - if (isInt(genSelector) && isInt(uniqueAlt)) js.BinaryOp.Int_== - else js.BinaryOp.=== - js.If(js.BinaryOp(op, genSelector, uniqueAlt), caseRhs, defaultClause)(resultType) - - case _ => - // We have more than one case: use a js.Match - js.Match(genSelector, clauses, defaultClause)(resultType) - } - } - - /** Gen JS code for a closure. - * - * Input: a `Closure` tree of the form - * {{{ - * Closure(env, call, functionalInterface) - * }}} - * representing the pseudo-syntax - * {{{ - * { (p1, ..., pm) => call(env1, ..., envn, p1, ..., pm) }: functionInterface - * }}} - * where `envi` are identifiers in the local scope. The qualifier of `call` - * is also implicitly captured. 
- * - * Output: a `js.Closure` tree of the form - * {{{ - * js.Closure(formalCaptures, formalParams, body, actualCaptures) - * }}} - * representing the pseudo-syntax - * {{{ - * lambda( - * formalParam1, ..., formalParamM) = body - * }}} - * where the `actualCaptures` and `body` are, in general, arbitrary - * expressions. But in this case, `actualCaptures` will be identifiers from - * `env`, and the `body` will be of the form - * {{{ - * call(formalCapture1.ref, ..., formalCaptureN.ref, - * formalParam1.ref, ...formalParamM.ref) - * }}} - * - * When the `js.Closure` node is evaluated, i.e., when the closure value is - * created, the expressions of the `actualCaptures` are evaluated, and the - * results of those evaluations is "stored" in the environment of the - * closure as the corresponding `formalCapture`. - * - * When we later *call* the closure, the `formalCaptures` already have their - * values from the environment, and they are available in the `body`. The - * `formalParams` of the created closure receive their values from the - * actual arguments at the call-site of the closure, and they are also - * available in the `body`. - */ - private def genClosure(tree: Closure): js.Tree = { - implicit val pos = tree.span - val Closure(env, call, functionalInterface) = tree - - val envSize = env.size - - val (fun, args) = call match { - // case Apply(fun, args) => (fun, args) // Conjectured not to happen - case t @ Select(_, _) => (t, Nil) - case t @ Ident(_) => (t, Nil) - } - val sym = fun.symbol - val isStaticCall = isMethodStaticInIR(sym) - - val qualifier = qualifierOf(fun) - val allCaptureValues = - if (isStaticCall) env - else qualifier :: env - - val formalAndActualCaptures = allCaptureValues.map { value => - implicit val pos = value.span - val (formalIdent, originalName) = value match { - case Ident(name) => (freshLocalIdent(name.toTermName), OriginalName(name.toString)) - case This(_) => (freshLocalIdent("this"), thisOriginalName) - case _ => (freshLocalIdent(), NoOriginalName) - } - val formalCapture = js.ParamDef(formalIdent, originalName, - toIRType(value.tpe), mutable = false) - val actualCapture = genExpr(value) - (formalCapture, actualCapture) - } - val (formalCaptures, actualCaptures) = formalAndActualCaptures.unzip - - val funInterfaceSym = functionalInterface.tpe.typeSymbol - val hasRepeatedParam = { - funInterfaceSym.exists && { - val Seq(samMethodDenot) = funInterfaceSym.info.possibleSamMethods - val samMethod = samMethodDenot.symbol - atPhase(elimRepeatedPhase)(samMethod.info.paramInfoss.flatten.exists(_.isRepeatedParam)) - } - } - - val formalParamNames = sym.info.paramNamess.flatten.drop(envSize) - val formalParamTypes = sym.info.paramInfoss.flatten.drop(envSize) - val formalParamRepeateds = - if (hasRepeatedParam) (0 until (formalParamTypes.size - 1)).map(_ => false) :+ true - else (0 until formalParamTypes.size).map(_ => false) - - val formalAndActualParams = formalParamNames.lazyZip(formalParamTypes).lazyZip(formalParamRepeateds).map { - (name, tpe, repeated) => - val formalParam = js.ParamDef(freshLocalIdent(name), - OriginalName(name.toString), jstpe.AnyType, mutable = false) - val actualParam = - if (repeated) genJSArrayToVarArgs(formalParam.ref)(tree.sourcePos) - else unbox(formalParam.ref, tpe) - (formalParam, actualParam) - } - val (formalAndRestParams, actualParams) = formalAndActualParams.unzip - - val (formalParams, restParam) = - if (hasRepeatedParam) (formalAndRestParams.init, Some(formalAndRestParams.last)) - else (formalAndRestParams, None) - - val 
genBody = { - val call = if (isStaticCall) { - genApplyStatic(sym, formalCaptures.map(_.ref) ::: actualParams) - } else { - val thisCaptureRef :: argCaptureRefs = formalCaptures.map(_.ref): @unchecked - if (!sym.owner.isNonNativeJSClass || sym.isJSExposed) - genApplyMethodMaybeStatically(thisCaptureRef, sym, argCaptureRefs ::: actualParams) - else - genApplyJSClassMethod(thisCaptureRef, sym, argCaptureRefs ::: actualParams) - } - box(call, sym.info.finalResultType) - } - - val isThisFunction = funInterfaceSym.isSubClass(jsdefn.JSThisFunctionClass) && { - val ok = formalParams.nonEmpty - if (!ok) - report.error("The SAM or apply method for a js.ThisFunction must have a leading non-varargs parameter", tree) - ok - } - - if (isThisFunction) { - val thisParam :: otherParams = formalParams: @unchecked - js.Closure( - arrow = false, - formalCaptures, - otherParams, - restParam, - js.Block( - js.VarDef(thisParam.name, thisParam.originalName, - thisParam.ptpe, mutable = false, - js.This()(thisParam.ptpe)(thisParam.pos))(thisParam.pos), - genBody), - actualCaptures) - } else { - val closure = js.Closure(arrow = true, formalCaptures, formalParams, restParam, genBody, actualCaptures) - - if (!funInterfaceSym.exists || defn.isFunctionClass(funInterfaceSym)) { - assert(!funInterfaceSym.exists || defn.isFunctionClass(funInterfaceSym), - s"Invalid functional interface $funInterfaceSym reached the back-end") - val formalCount = formalParams.size - val cls = ClassName("scala.scalajs.runtime.AnonFunction" + formalCount) - val ctorName = MethodName.constructor( - jstpe.ClassRef(ClassName("scala.scalajs.js.Function" + formalCount)) :: Nil) - js.New(cls, js.MethodIdent(ctorName), List(closure)) - } else { - assert(funInterfaceSym.isJSType, - s"Invalid functional interface $funInterfaceSym reached the back-end") - closure - } - } - } - - /** Generates a static method instantiating and calling this - * DynamicImportThunk's `apply`: - * - * {{{ - * static def dynamicImport$;;Ljava.lang.Object(): any = { - * new .;:V().apply;Ljava.lang.Object() - * } - * }}} - */ - private def genDynamicImportForwarder(clsSym: Symbol)(using Position): js.MethodDef = { - withNewLocalNameScope { - val ctor = clsSym.primaryConstructor - val paramSyms = ctor.paramSymss.flatten - val paramDefs = paramSyms.map(genParamDef(_)) - - val body = { - val inst = js.New(encodeClassName(clsSym), encodeMethodSym(ctor), paramDefs.map(_.ref)) - genApplyMethod(inst, jsdefn.DynamicImportThunkClass_apply, Nil) - } - - js.MethodDef( - js.MemberFlags.empty.withNamespace(js.MemberNamespace.PublicStatic), - encodeDynamicImportForwarderIdent(paramSyms), - NoOriginalName, - paramDefs, - jstpe.AnyType, - Some(body))(OptimizerHints.empty, None) - } - } - - /** Boxes a value of the given type before `elimErasedValueType`. - * - * This should be used when sending values to a JavaScript context, which - * is erased/boxed at the IR level, although it is not erased at the - * dotty/JVM level. - * - * @param expr Tree to be boxed if needed. - * @param tpeEnteringElimErasedValueType The type of `expr` as it was - * entering the `elimErasedValueType` phase. 
- */ - def box(expr: js.Tree, tpeEnteringElimErasedValueType: Type)(implicit pos: Position): js.Tree = { - tpeEnteringElimErasedValueType match { - case tpe if isPrimitiveValueType(tpe) => - makePrimitiveBox(expr, tpe) - - case tpe: ErasedValueType => - val boxedClass = tpe.tycon.typeSymbol - val ctor = boxedClass.primaryConstructor - js.New(encodeClassName(boxedClass), encodeMethodSym(ctor), List(expr)) - - case _ => - expr - } - } - - /** Unboxes a value typed as Any to the given type before `elimErasedValueType`. - * - * This should be used when receiving values from a JavaScript context, - * which is erased/boxed at the IR level, although it is not erased at the - * dotty/JVM level. - * - * @param expr Tree to be extracted. - * @param tpeEnteringElimErasedValueType The type of `expr` as it was - * entering the `elimErasedValueType` phase. - */ - def unbox(expr: js.Tree, tpeEnteringElimErasedValueType: Type)(implicit pos: Position): js.Tree = { - tpeEnteringElimErasedValueType match { - case tpe if isPrimitiveValueType(tpe) => - makePrimitiveUnbox(expr, tpe) - - case tpe: ErasedValueType => - val boxedClass = tpe.tycon.typeSymbol.asClass - val unboxMethod = ValueClasses.valueClassUnbox(boxedClass) - val content = genApplyMethod( - js.AsInstanceOf(expr, encodeClassType(boxedClass)), unboxMethod, Nil) - if (unboxMethod.info.resultType <:< tpe.erasedUnderlying) - content - else - unbox(content, tpe.erasedUnderlying) - - case tpe => - genAsInstanceOf(expr, tpe) - } - } - - /** Gen JS code for an asInstanceOf cast (for reference types only) */ - private def genAsInstanceOf(value: js.Tree, to: Type)(implicit pos: Position): js.Tree = - genAsInstanceOf(value, toIRType(to)) - - /** Gen JS code for an asInstanceOf cast (for reference types only) */ - private def genAsInstanceOf(value: js.Tree, to: jstpe.Type)(implicit pos: Position): js.Tree = { - to match { - case jstpe.AnyType => - value - case jstpe.NullType => - js.If( - js.BinaryOp(js.BinaryOp.===, value, js.Null()), - js.Null(), - genThrowClassCastException())( - jstpe.NullType) - case jstpe.NothingType => - js.Block(value, genThrowClassCastException()) - case _ => - js.AsInstanceOf(value, to) - } - } - - private def genThrowClassCastException()(implicit pos: Position): js.Tree = { - js.Throw(js.New(jsNames.ClassCastExceptionClass, - js.MethodIdent(jsNames.NoArgConstructorName), Nil)) - } - - /** Gen JS code for an isInstanceOf test (for reference types only) */ - def genIsInstanceOf(value: js.Tree, to: Type)( - implicit pos: SourcePosition): js.Tree = { - val sym = to.typeSymbol - - if (sym == defn.ObjectClass) { - js.BinaryOp(js.BinaryOp.!==, value, js.Null()) - } else if (sym.isJSType) { - if (sym.is(Trait)) { - report.error( - em"isInstanceOf[${sym.fullName}] not supported because it is a JS trait", - pos) - js.BooleanLiteral(true) - } else { - js.AsInstanceOf(js.JSBinaryOp( - js.JSBinaryOp.instanceof, value, genLoadJSConstructor(sym)), - jstpe.BooleanType) - } - } else { - // The Scala type system prevents x.isInstanceOf[Null] and ...[Nothing] - assert(sym != defn.NullClass && sym != defn.NothingClass, - s"Found a .isInstanceOf[$sym] at $pos") - js.IsInstanceOf(value, toIRType(to)) - } - } - - /** Gen a statically linked call to an instance method. 
*/ - def genApplyMethodMaybeStatically(receiver: js.Tree, method: Symbol, - arguments: List[js.Tree])(implicit pos: Position): js.Tree = { - if (method.isPrivate || method.isClassConstructor) - genApplyMethodStatically(receiver, method, arguments) - else - genApplyMethod(receiver, method, arguments) - } - - /** Gen a dynamically linked call to a Scala method. */ - def genApplyMethod(receiver: js.Tree, method: Symbol, arguments: List[js.Tree])( - implicit pos: Position): js.Tree = { - assert(!method.isPrivate, - s"Cannot generate a dynamic call to private method $method at $pos") - js.Apply(js.ApplyFlags.empty, receiver, encodeMethodSym(method), arguments)( - toIRType(patchedResultType(method))) - } - - /** Gen a statically linked call to an instance method. */ - def genApplyMethodStatically(receiver: js.Tree, method: Symbol, arguments: List[js.Tree])( - implicit pos: Position): js.Tree = { - val flags = js.ApplyFlags.empty - .withPrivate(method.isPrivate && !method.isClassConstructor) - .withConstructor(method.isClassConstructor) - js.ApplyStatically(flags, receiver, encodeClassName(method.owner), - encodeMethodSym(method), arguments)( - toIRType(patchedResultType(method))) - } - - /** Gen a call to a static method. */ - private def genApplyStatic(method: Symbol, arguments: List[js.Tree])( - implicit pos: Position): js.Tree = { - js.ApplyStatic(js.ApplyFlags.empty.withPrivate(method.isPrivate), - encodeClassName(method.owner), encodeMethodSym(method), arguments)( - toIRType(patchedResultType(method))) - } - - /** Gen a call to a non-exposed method of a non-native JS class. */ - def genApplyJSClassMethod(receiver: js.Tree, method: Symbol, arguments: List[js.Tree])( - implicit pos: Position): js.Tree = { - genApplyStatic(method, receiver :: arguments) - } - - /** Gen a call to a method of a Scala top-level module. 
*/ - private def genModuleApplyMethod(methodSym: Symbol, arguments: List[js.Tree])( - implicit pos: SourcePosition): js.Tree = { - genApplyMethod(genLoadModule(methodSym.owner), methodSym, arguments) - } - - /** Gen a boxing operation (tpe is the primitive type) */ - private def makePrimitiveBox(expr: js.Tree, tpe: Type)( - implicit pos: Position): js.Tree = { - toIRType(tpe) match { - case jstpe.NoType => // for JS interop cases - js.Block(expr, js.Undefined()) - case jstpe.BooleanType | jstpe.CharType | jstpe.ByteType | - jstpe.ShortType | jstpe.IntType | jstpe.LongType | jstpe.FloatType | - jstpe.DoubleType => - expr // box is identity for all those primitive types - case typeRef => - throw new FatalError( - s"makePrimitiveBox requires a primitive type, found $typeRef for $tpe at $pos") - } - } - - /** Gen an unboxing operation (tpe is the primitive type) */ - private def makePrimitiveUnbox(expr: js.Tree, tpe: Type)( - implicit pos: Position): js.Tree = { - toIRType(tpe) match { - case jstpe.NoType => expr // for JS interop cases - case irTpe => js.AsInstanceOf(expr, irTpe) - } - } - - /** Gen JS code for a Scala.js-specific primitive method */ - private def genJSPrimitive(tree: Apply, args: List[Tree], code: Int, - isStat: Boolean): js.Tree = { - - import JSPrimitives._ - - implicit val pos = tree.span - - def genArgs1: js.Tree = { - assert(args.size == 1, - s"Expected exactly 1 argument for JS primitive $code but got " + - s"${args.size} at $pos") - genExpr(args.head) - } - - def genArgs2: (js.Tree, js.Tree) = { - assert(args.size == 2, - s"Expected exactly 2 arguments for JS primitive $code but got " + - s"${args.size} at $pos") - (genExpr(args.head), genExpr(args.tail.head)) - } - - def genArgsVarLength: List[js.TreeOrJSSpread] = - genActualJSArgs(tree.symbol, args) - - def resolveReifiedJSClassSym(arg: Tree): Symbol = { - def fail(): Symbol = { - report.error( - tree.symbol.name.toString + " must be called with a constant " + - "classOf[T] representing a class extending js.Any " + - "(not a trait nor an object)", - tree.sourcePos) - NoSymbol - } - arg match { - case Literal(value) if value.tag == Constants.ClazzTag => - val classSym = value.typeValue.typeSymbol - if (classSym.isJSType && !classSym.is(Trait) && !classSym.is(ModuleClass)) - classSym - else - fail() - case _ => - fail() - } - } - - (code: @switch) match { - case DYNNEW => - // js.Dynamic.newInstance(clazz)(actualArgs: _*) - val (jsClass, actualArgs) = extractFirstArg(genArgsVarLength) - js.JSNew(jsClass, actualArgs) - - case ARR_CREATE => - // js.Array(elements: _*) - js.JSArrayConstr(genArgsVarLength) - - case CONSTRUCTOROF => - // runtime.constructorOf(clazz) - val classSym = resolveReifiedJSClassSym(args.head) - if (classSym == NoSymbol) - js.Undefined() // compile error emitted by resolveReifiedJSClassSym - else - genLoadJSConstructor(classSym) - - case CREATE_INNER_JS_CLASS | CREATE_LOCAL_JS_CLASS => - // runtime.createInnerJSClass(clazz, superClass) - // runtime.createLocalJSClass(clazz, superClass, fakeNewInstances) - val classSym = resolveReifiedJSClassSym(args(0)) - val superClassValue = genExpr(args(1)) - if (classSym == NoSymbol) { - js.Undefined() // compile error emitted by resolveReifiedJSClassSym - } else { - val captureValues = { - if (code == CREATE_INNER_JS_CLASS) { - /* Private inner classes that do not actually access their outer - * pointer do not receive an outer argument. 
We therefore count - * the number of constructors that have non-empty param list to - * know how many times we need to pass `this`. - */ - val requiredThisParams = - classSym.info.decls.lookupAll(nme.CONSTRUCTOR).count(_.info.paramInfoss.head.nonEmpty) - val outer = genThis() - List.fill(requiredThisParams)(outer) - } else { - val fakeNewInstances = args(2).asInstanceOf[JavaSeqLiteral].elems - fakeNewInstances.flatMap(genCaptureValuesFromFakeNewInstance(_)) - } - } - js.CreateJSClass(encodeClassName(classSym), superClassValue :: captureValues) - } - - case WITH_CONTEXTUAL_JS_CLASS_VALUE => - // withContextualJSClassValue(jsclass, inner) - val jsClassValue = genExpr(args(0)) - withScopedVars( - contextualJSClassValue := Some(jsClassValue) - ) { - genStatOrExpr(args(1), isStat) - } - - case LINKING_INFO => - // runtime.linkingInfo - js.JSLinkingInfo() - - case DEBUGGER => - // js.special.debugger() - js.Debugger() - - case UNITVAL => - // BoxedUnit.UNIT, which is the boxed version of () - js.Undefined() - - case JS_NEW_TARGET => - // js.new.target - val valid = currentMethodSym.get.isClassConstructor && currentClassSym.isNonNativeJSClass - if (!valid) { - report.error( - "Illegal use of js.`new`.target.\n" + - "It can only be used in the constructor of a JS class, " + - "as a statement or in the rhs of a val or var.\n" + - "It cannot be used inside a lambda or by-name parameter, nor in any other location.", - tree.sourcePos) - } - js.JSNewTarget() - - case JS_IMPORT => - // js.import(arg) - val arg = genArgs1 - js.JSImportCall(arg) - - case JS_IMPORT_META => - // js.import.meta - js.JSImportMeta() - - case DYNAMIC_IMPORT => - // runtime.dynamicImport - assert(args.size == 1, - s"Expected exactly 1 argument for JS primitive $code but got " + - s"${args.size} at $pos") - - args.head match { - case Block(stats, expr @ Typed(Apply(fun @ Select(New(tpt), _), args), _)) => - /* stats is always empty if no other compiler plugin is present. - * However, code instrumentation (notably scoverage) might add - * statements here. If this is the case, the thunk anonymous class - * has already been created when the other plugin runs (i.e. the - * plugin ran after jsinterop). - * - * Therefore, it is OK to leave the statements on our side of the - * dynamic loading boundary. 
- */ - - val clsSym = tpt.symbol - val ctor = fun.symbol - - assert(clsSym.isSubClass(jsdefn.DynamicImportThunkClass), - s"expected subclass of DynamicImportThunk, got: $clsSym at: ${expr.sourcePos}") - assert(ctor.isPrimaryConstructor, - s"expected primary constructor, got: $ctor at: ${expr.sourcePos}") - - js.Block( - stats.map(genStat(_)), - js.ApplyDynamicImport( - js.ApplyFlags.empty, - encodeClassName(clsSym), - encodeDynamicImportForwarderIdent(ctor.paramSymss.flatten), - genActualArgs(ctor, args)) - ) - - case tree => - throw new FatalError( - s"Unexpected argument tree in dynamicImport: $tree/${tree.getClass} at: $pos") - } - - case JS_NATIVE => - // js.native - report.error( - "js.native may only be used as stub implementation in facade types", - tree.sourcePos) - js.Undefined() - - case TYPEOF => - // js.typeOf(arg) - val arg = genArgs1 - val typeofExpr = arg match { - case arg: js.JSGlobalRef => js.JSTypeOfGlobalRef(arg) - case _ => js.JSUnaryOp(js.JSUnaryOp.typeof, arg) - } - js.AsInstanceOf(typeofExpr, jstpe.ClassType(jsNames.BoxedStringClass)) - - case STRICT_EQ => - // js.special.strictEquals(arg1, arg2) - val (arg1, arg2) = genArgs2 - js.JSBinaryOp(js.JSBinaryOp.===, arg1, arg2) - - case IN => - // js.special.in(arg1, arg2) - val (arg1, arg2) = genArgs2 - js.AsInstanceOf(js.JSBinaryOp(js.JSBinaryOp.in, arg1, arg2), - jstpe.BooleanType) - - case INSTANCEOF => - // js.special.instanceof(arg1, arg2) - val (arg1, arg2) = genArgs2 - js.AsInstanceOf(js.JSBinaryOp(js.JSBinaryOp.instanceof, arg1, arg2), - jstpe.BooleanType) - - case DELETE => - // js.special.delete(arg1, arg2) - val (arg1, arg2) = genArgs2 - js.JSDelete(arg1, arg2) - - case FORIN => - /* js.special.forin(arg1, arg2) - * - * We must generate: - * - * val obj = arg1 - * val f = arg2 - * for (val key in obj) { - * f(key) - * } - * - * with temporary vals, because `arg2` must be evaluated only - * once, and after `arg1`. - */ - val (arg1, arg2) = genArgs2 - val objVarDef = js.VarDef(freshLocalIdent("obj"), NoOriginalName, - jstpe.AnyType, mutable = false, arg1) - val fVarDef = js.VarDef(freshLocalIdent("f"), NoOriginalName, - jstpe.AnyType, mutable = false, arg2) - val keyVarIdent = freshLocalIdent("key") - val keyVarRef = js.VarRef(keyVarIdent)(jstpe.AnyType) - js.Block( - objVarDef, - fVarDef, - js.ForIn(objVarDef.ref, keyVarIdent, NoOriginalName, { - js.JSFunctionApply(fVarDef.ref, List(keyVarRef)) - })) - - case JS_THROW => - // js.special.throw(arg) - js.Throw(genArgs1) - - case JS_TRY_CATCH => - /* js.special.tryCatch(arg1, arg2) - * - * We must generate: - * - * val body = arg1 - * val handler = arg2 - * try { - * body() - * } catch (e) { - * handler(e) - * } - * - * with temporary vals, because `arg2` must be evaluated before - * `body` executes. Moreover, exceptions thrown while evaluating - * the function values `arg1` and `arg2` must not be caught. 
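 *
 * A concrete consequence, written in the same pseudo-syntax as above
 * (an illustrative sketch; `recover` and `makeBodyOrThrow` are hypothetical):
 *
 *   js.special.tryCatch({ () => throw new Exception("boom") }, { e => recover(e) })
 *     // thrown by body(), inside the generated try: recover(e) runs
 *
 *   js.special.tryCatch(makeBodyOrThrow(), { e => recover(e) })
 *     // thrown while evaluating arg1 itself, before the try: not caught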
- */ - val (arg1, arg2) = genArgs2 - val bodyVarDef = js.VarDef(freshLocalIdent("body"), NoOriginalName, - jstpe.AnyType, mutable = false, arg1) - val handlerVarDef = js.VarDef(freshLocalIdent("handler"), NoOriginalName, - jstpe.AnyType, mutable = false, arg2) - val exceptionVarIdent = freshLocalIdent("e") - val exceptionVarRef = js.VarRef(exceptionVarIdent)(jstpe.AnyType) - js.Block( - bodyVarDef, - handlerVarDef, - js.TryCatch( - js.JSFunctionApply(bodyVarDef.ref, Nil), - exceptionVarIdent, - NoOriginalName, - js.JSFunctionApply(handlerVarDef.ref, List(exceptionVarRef)) - )(jstpe.AnyType) - ) - - case WRAP_AS_THROWABLE => - // js.special.wrapAsThrowable(arg) - js.WrapAsThrowable(genArgs1) - - case UNWRAP_FROM_THROWABLE => - // js.special.unwrapFromThrowable(arg) - js.UnwrapFromThrowable(genArgs1) - - case UNION_FROM | UNION_FROM_TYPE_CONSTRUCTOR => - /* js.|.from and js.|.fromTypeConstructor - * We should not have to deal with those. They have a perfectly valid - * user-space implementation. However, the Dotty type checker inserts - * way too many of those, even when they are completely unnecessary. - * That still wouldn't be an issue ... if only it did not insert them - * around the default getters to their parameters! But even there it - * does it (although the types are, by construction, *equivalent*!), - * and that kills our `UndefinedParam` treatment. So we have to handle - * those two methods as primitives to completely eliminate them. - * - * Hopefully this will become unnecessary when/if we manage to - * reinterpret js.| as a true Dotty union type. - */ - genArgs2._1 - - case REFLECT_SELECTABLE_SELECTDYN => - // scala.reflect.Selectable.selectDynamic - genReflectiveCall(tree, isSelectDynamic = true) - case REFLECT_SELECTABLE_APPLYDYN => - // scala.reflect.Selectable.applyDynamic - genReflectiveCall(tree, isSelectDynamic = false) - } - } - - /** Gen the SJSIR for a reflective call. - * - * Reflective calls are calls to a structural type field or method that - * involve a reflective Selectable. They look like the following in source - * code: - * {{{ - * import scala.reflect.Selectable.reflectiveSelectable - * - * type Structural = { - * val foo: Int - * def bar(x: Int, y: String): String - * } - * - * val structural: Structural = new { - * val foo: Int = 5 - * def bar(x: Int, y: String): String = x.toString + y - * } - * - * structural.foo - * structural.bar(6, "hello") - * }}} - * - * After expansion by the Scala 3 rules for structural member selections and - * calls, they look like - * - * {{{ - * reflectiveSelectable(structural).selectDynamic("foo") - * reflectiveSelectable(structural).applyDynamic("bar", - * classOf[Int], classOf[String] - * )( - * 6, "hello" - * ) - * }}} - * - * When the original `structural` value is already of a subtype of - * `scala.reflect.Selectable`, there is no conversion involved. There could - * also be any other arbitrary conversion, such as the deprecated bridge for - * Scala 2's `import scala.language.reflectiveCalls`. 
In general, the shape - * is therefore the following, for some `selectable: reflect.Selectable`: - * - * {{{ - * selectable.selectDynamic("foo") - * selectable.applyDynamic("bar", - * classOf[Int], classOf[String] - * )( - * 6, "hello" - * ) - * }}} - * - * and eventually reaches the back-end as - * - * {{{ - * selectable.selectDynamic("foo") // same as above - * selectable.applyDynamic("bar", - * wrapRefArray([ classOf[Int], classOf[String] : jl.Class ] - * )( - * genericWrapArray([ Int.box(6), "hello" : Object ]) - * ) - * }}} - * - * In SJSIR, they must be encoded as follows: - * - * {{{ - * selectable.selectedValue;O().foo;R() - * selectable.selectedValue;O().bar;I;Ljava.lang.String;R( - * Int.box(6).asInstanceOf[int], - * "hello".asInstanceOf[java.lang.String] - * ) - * }}} - * - * where `selectedValue;O()` is declared in `scala.reflect.Selectable` and - * holds the actual instance on which to perform the reflective operations. - * For the typical use case from the first snippet, it returns `structural`. - * - * This means that we must deconstruct the elaborated calls to recover: - * - * - the method name as a compile-time string `foo` or `bar` - * - the `tp: Type`s that have been wrapped in `classOf[tp]`, as a - * compile-time List[Type], from which we'll derive `jstpe.Type`s for the - * `asInstanceOf`s and `jstpe.TypeRef`s for the `MethodName.reflectiveProxy` - * - the actual arguments as a compile-time `List[Tree]` - * - * Virtually all of the code in `genReflectiveCall` deals with recovering - * those elements. Constructing the IR Tree is the easy part after that. - */ - private def genReflectiveCall(tree: Apply, isSelectDynamic: Boolean): js.Tree = { - implicit val pos = tree.span - val Apply(fun @ Select(receiver, _), args) = tree: @unchecked - - val selectedValueTree = js.Apply(js.ApplyFlags.empty, genExpr(receiver), - js.MethodIdent(selectedValueMethodName), Nil)(jstpe.AnyType) - - // Extract the method name as a String - val methodNameStr = args.head match { - case Literal(Constants.Constant(name: String)) => - name - case _ => - report.error( - "The method name given to Selectable.selectDynamic or Selectable.applyDynamic " + - "must be a literal string. " + - "Other uses are not supported in Scala.js.", - args.head.sourcePos) - "erroneous" - } - - val (formalParamTypeRefs, actualArgs) = if (isSelectDynamic) { - (Nil, Nil) - } else { - // Extract the param type refs and actual args from the 2nd and 3rd argument to applyDynamic - args.tail match { - case WrapArray(classOfsArray: JavaSeqLiteral) :: WrapArray(actualArgsAnyArray: JavaSeqLiteral) :: Nil => - // Extract jstpe.Type's and jstpe.TypeRef's from the classOf[_] trees - val formalParamTypesAndTypeRefs = classOfsArray.elems.map { - // classOf[tp] -> tp - case Literal(const) if const.tag == Constants.ClazzTag => - toIRTypeAndTypeRef(const.typeValue) - // Anything else is invalid - case otherTree => - report.error( - "The java.lang.Class[_] arguments passed to Selectable.applyDynamic must be " + - "literal classOf[T] expressions (typically compiler-generated). 
" + - "Other uses are not supported in Scala.js.", - otherTree.sourcePos) - (jstpe.AnyType, jstpe.ClassRef(jsNames.ObjectClass)) - } - - // Gen the actual args, downcasting them to the formal param types - val actualArgs = actualArgsAnyArray.elems.zip(formalParamTypesAndTypeRefs).map { - (actualArgAny, formalParamTypeAndTypeRef) => - val genActualArgAny = genExpr(actualArgAny) - genAsInstanceOf(genActualArgAny, formalParamTypeAndTypeRef._1)(genActualArgAny.pos) - } - - (formalParamTypesAndTypeRefs.map(pair => toParamOrResultTypeRef(pair._2)), actualArgs) - - case _ => - report.error( - "Passing the varargs of Selectable.applyDynamic with `: _*` " + - "is not supported in Scala.js.", - tree.sourcePos) - (Nil, Nil) - } - } - - val methodName = MethodName.reflectiveProxy(methodNameStr, formalParamTypeRefs) - - js.Apply(js.ApplyFlags.empty, selectedValueTree, js.MethodIdent(methodName), actualArgs)(jstpe.AnyType) - } - - /** Gen actual actual arguments to Scala method call. - * Returns a list of the transformed arguments. - * - * This tries to optimize repeated arguments (varargs) by turning them - * into js.WrappedArray instead of Scala wrapped arrays. - */ - private def genActualArgs(sym: Symbol, args: List[Tree])( - implicit pos: Position): List[js.Tree] = { - args.map(genExpr) - /*val wereRepeated = exitingPhase(currentRun.typerPhase) { - sym.tpe.params.map(p => isScalaRepeatedParamType(p.tpe)) - } - - if (wereRepeated.size > args.size) { - // Should not happen, but let's not crash - args.map(genExpr) - } else { - /* Arguments that are in excess compared to the type signature after - * erasure are lambda-lifted arguments. They cannot be repeated, hence - * the extension to `false`. - */ - for ((arg, wasRepeated) <- args.zipAll(wereRepeated, EmptyTree, false)) yield { - if (wasRepeated) { - tryGenRepeatedParamAsJSArray(arg, handleNil = false).fold { - genExpr(arg) - } { genArgs => - genNew(WrappedArrayClass, WrappedArray_ctor, - List(js.JSArrayConstr(genArgs))) - } - } else { - genExpr(arg) - } - } - }*/ - } - - /** Gen actual actual arguments to a JS method call. - * Returns a list of the transformed arguments. - * - * - TODO Repeated arguments (varargs) are expanded - * - Default arguments are omitted or replaced by undefined - * - All arguments are boxed - * - * Repeated arguments that cannot be expanded at compile time (i.e., if a - * Seq is passed to a varargs parameter with the syntax `seq: _*`) will be - * wrapped in a [[js.JSSpread]] node to be expanded at runtime. - */ - private def genActualJSArgs(sym: Symbol, args: List[Tree])( - implicit pos: Position): List[js.TreeOrJSSpread] = { - - var reversedArgs: List[js.TreeOrJSSpread] = Nil - - for ((arg, info) <- args.zip(sym.jsParamInfos)) { - if (info.repeated) { - reversedArgs = genJSRepeatedParam(arg) reverse_::: reversedArgs - } else if (info.capture) { - // Ignore captures - assert(sym.isClassConstructor, - i"Found a capture param in method ${sym.fullName}, which is not a class constructor, at $pos") - } else { - val unboxedArg = genExpr(arg) - val boxedArg = unboxedArg match { - case js.Transient(UndefinedParam) => - unboxedArg - case _ => - box(unboxedArg, info.info) - } - reversedArgs ::= boxedArg - } - } - - /* Remove all consecutive UndefinedParam's at the end of the argument - * list. No check is performed whether they may be there, since they will - * only be placed where default arguments can be anyway. 
- */ - reversedArgs = reversedArgs.dropWhile(_.isInstanceOf[js.Transient]) - - /* Find remaining UndefinedParam and replace by js.Undefined. This can - * happen with named arguments or with multiple argument lists. - */ - reversedArgs = reversedArgs map { - case js.Transient(UndefinedParam) => js.Undefined() - case arg => arg - } - - reversedArgs.reverse - } - - /** Gen JS code for a repeated param of a JS method. - * - * In this case `arg` has type `Seq[T]` for some `T`, but the result should - * be an expanded list of the elements in the sequence. So this method - * takes care of the conversion. - * - * It is specialized for the shapes of tree generated by the desugaring - * of repeated params in Scala, so that these are actually expanded at - * compile-time. - * - * Otherwise, it returns a `JSSpread` with the `Seq` converted to a - * `js.Array`. - */ - private def genJSRepeatedParam(arg: Tree): List[js.TreeOrJSSpread] = { - tryGenRepeatedParamAsJSArray(arg, handleNil = true).getOrElse { - /* Fall back to calling runtime.genTraversableOnce2jsArray - * to perform the conversion to js.Array, then wrap in a Spread - * operator. - */ - implicit val pos: SourcePosition = arg.sourcePos - val jsArrayArg = genModuleApplyMethod( - jsdefn.Runtime_toJSVarArgs, - List(genExpr(arg))) - List(js.JSSpread(jsArrayArg)) - } - } - - /** Try and expand an actual argument to a repeated param `(xs: T*)`. - * - * This method recognizes the shapes of tree generated by the desugaring - * of repeated params in Scala, and expands them. - * If `arg` does not have the shape of a generated repeated param, this - * method returns `None`. - */ - private def tryGenRepeatedParamAsJSArray(arg: Tree, - handleNil: Boolean): Option[List[js.Tree]] = { - implicit val pos = arg.span - - // Given a method `def foo(args: T*)` - arg match { - // foo(arg1, arg2, ..., argN) where N > 0 - case MaybeAsInstanceOf(WrapArray(MaybeAsInstanceOf(array: JavaSeqLiteral))) => - /* Value classes in arrays are already boxed, so no need to use - * the type before erasure. - * TODO Is this true in dotty? - */ - Some(array.elems.map(e => box(genExpr(e), e.tpe))) - - // foo() - case Ident(_) if handleNil && arg.symbol == defn.NilModule => - Some(Nil) - - // foo(argSeq: _*) - cannot be optimized - case _ => - None - } - } - - private object MaybeAsInstanceOf { - def unapply(tree: Tree): Some[Tree] = tree match { - case TypeApply(asInstanceOf_? @ Select(base, _), _) - if asInstanceOf_?.symbol == defn.Any_asInstanceOf => - Some(base) - case _ => - Some(tree) - } - } - - private object WrapArray { - lazy val isWrapArray: Set[Symbol] = { - val names0 = defn.ScalaValueClasses().map(sym => nme.wrapXArray(sym.name)) - val names1 = names0 ++ Set(nme.wrapRefArray, nme.genericWrapArray) - val symsInPredef = names1.map(defn.ScalaPredefModule.requiredMethod(_)) - val symsInScalaRunTime = names1.map(defn.ScalaRuntimeModule.requiredMethod(_)) - (symsInPredef ++ symsInScalaRunTime).toSet - } - - def unapply(tree: Apply): Option[Tree] = tree match { - case Apply(wrapArray_?, List(wrapped)) if isWrapArray(wrapArray_?.symbol) => - Some(wrapped) - case _ => - None - } - } - - /** Wraps a `js.Array` to use as varargs. */ - def genJSArrayToVarArgs(arrayRef: js.Tree)(implicit pos: SourcePosition): js.Tree = - genModuleApplyMethod(jsdefn.Runtime_toScalaVarArgs, List(arrayRef)) - - /** Gen the actual capture values for a JS constructor based on its fake `new` invocation. 
*/ - private def genCaptureValuesFromFakeNewInstance(tree: Tree): List[js.Tree] = { - implicit val pos: Position = tree.span - - val Apply(fun @ Select(New(_), _), args) = tree: @unchecked - val sym = fun.symbol - - /* We use the same strategy as genActualJSArgs to detect which parameters were - * introduced by explicitouter or lambdalift (but reversed, of course). - */ - - val existedBeforeUncurry = atPhase(elimRepeatedPhase) { - sym.info.paramNamess.flatten.toSet - } - - for { - (arg, paramName) <- args.zip(sym.info.paramNamess.flatten) - if !existedBeforeUncurry(paramName) - } yield { - genExpr(arg) - } - } - - private def genVarRef(sym: Symbol)(implicit pos: Position): js.VarRef = - js.VarRef(encodeLocalSym(sym))(toIRType(sym.info)) - - private def genAssignableField(sym: Symbol, qualifier: Tree)(implicit pos: SourcePosition): (js.AssignLhs, Boolean) = { - def qual = genExpr(qualifier) - - if (sym.owner.isNonNativeJSClass) { - val f = if (sym.isJSExposed) { - js.JSSelect(qual, genExpr(sym.jsName)) - } else if (sym.owner.isAnonymousClass) { - js.JSSelect( - js.JSSelect(qual, genPrivateFieldsSymbol()), - encodeFieldSymAsStringLiteral(sym)) - } else { - js.JSPrivateSelect(qual, encodeClassName(sym.owner), - encodeFieldSym(sym)) - } - - (f, true) - } else if (sym.hasAnnotation(jsdefn.JSExportTopLevelAnnot)) { - val f = js.SelectStatic(encodeClassName(sym.owner), encodeFieldSym(sym))(jstpe.AnyType) - (f, true) - } else if (sym.hasAnnotation(jsdefn.JSExportStaticAnnot)) { - val jsName = sym.getAnnotation(jsdefn.JSExportStaticAnnot).get.argumentConstantString(0).getOrElse { - sym.defaultJSName - } - val companionClass = sym.owner.linkedClass - val f = js.JSSelect(genLoadJSConstructor(companionClass), js.StringLiteral(jsName)) - (f, true) - } else { - val className = encodeClassName(sym.owner) - val fieldIdent = encodeFieldSym(sym) - - /* #4370 Fields cannot have type NothingType, so we box them as - * scala.runtime.Nothing$ instead. They will be initialized with - * `null`, and any attempt to access them will throw a - * `ClassCastException` (generated in the unboxing code). - */ - val (irType, boxed) = toIRType(sym.info) match - case jstpe.NothingType => - (encodeClassType(defn.NothingClass), true) - case ftpe => - (ftpe, false) - - val f = - if sym.is(JavaStatic) then - js.SelectStatic(className, fieldIdent)(irType) - else - js.Select(qual, className, fieldIdent)(irType) - - (f, boxed) - } - } - - /** Gen JS code for loading a Java static field. - */ - private def genLoadStaticField(sym: Symbol)(implicit pos: SourcePosition): js.Tree = { - /* Actually, there is no static member in Scala.js. If we come here, that - * is because we found the symbol in a Java-emitted .class in the - * classpath. But the corresponding implementation in Scala.js will - * actually be a val in the companion module. - */ - - if (sym == defn.BoxedUnit_UNIT) { - js.Undefined() - } else if (sym == defn.BoxedUnit_TYPE) { - js.ClassOf(jstpe.VoidRef) - } else { - val className = encodeClassName(sym.owner) - val method = encodeStaticMemberSym(sym) - js.ApplyStatic(js.ApplyFlags.empty, className, method, Nil)(toIRType(sym.info)) - } - } - - /** Generates a call to `runtime.privateFieldsSymbol()` */ - private def genPrivateFieldsSymbol()(implicit pos: SourcePosition): js.Tree = - genModuleApplyMethod(jsdefn.Runtime_privateFieldsSymbol, Nil) - - /** Generate loading of a module value. - * - * Can be given either the module symbol or its module class symbol. 
- * - * If the module we load refers to the global scope (i.e., it is - * annotated with `@JSGlobalScope`), report a compile error specifying - * that a global scope object should only be used as the qualifier of a - * `.`-selection. - */ - def genLoadModule(sym: Symbol)(implicit pos: SourcePosition): js.Tree = - ruleOutGlobalScope(genLoadModuleOrGlobalScope(sym)) - - /** Generate loading of a module value or the global scope. - * - * Can be given either the module symbol of its module class symbol. - * - * Unlike `genLoadModule`, this method does not fail if the module we load - * refers to the global scope. - */ - def genLoadModuleOrGlobalScope(sym0: Symbol)( - implicit pos: SourcePosition): MaybeGlobalScope = { - - require(sym0.is(Module), - "genLoadModule called with non-module symbol: " + sym0) - val sym = if (sym0.isTerm) sym0.moduleClass else sym0 - - // Does that module refer to the global scope? - if (sym.hasAnnotation(jsdefn.JSGlobalScopeAnnot)) { - MaybeGlobalScope.GlobalScope(pos) - } else { - val cls = encodeClassName(sym) - val tree = - if (sym.isJSType) js.LoadJSModule(cls) - else js.LoadModule(cls) - MaybeGlobalScope.NotGlobalScope(tree) - } - } - - /** Gen JS code representing the constructor of a JS class. */ - private def genLoadJSConstructor(sym: Symbol)( - implicit pos: Position): js.Tree = { - assert(!isStaticModule(sym) && !sym.is(Trait), - s"genLoadJSConstructor called with non-class $sym") - js.LoadJSConstructor(encodeClassName(sym)) - } - - private inline val GenericGlobalObjectInformationMsg = { - "\n " + - "See https://www.scala-js.org/doc/interoperability/global-scope.html " + - "for further information." - } - - /** Rule out the `GlobalScope` case of a `MaybeGlobalScope` and extract the - * value tree. - * - * If `tree` represents the global scope, report a compile error. - */ - private def ruleOutGlobalScope(tree: MaybeGlobalScope): js.Tree = { - tree match { - case MaybeGlobalScope.NotGlobalScope(t) => - t - case MaybeGlobalScope.GlobalScope(pos) => - reportErrorLoadGlobalScope()(pos) - } - } - - /** Report a compile error specifying that the global scope cannot be - * loaded as a value. - */ - private def reportErrorLoadGlobalScope()(implicit pos: SourcePosition): js.Tree = { - report.error( - "Loading the global scope as a value (anywhere but as the " + - "left-hand-side of a `.`-selection) is not allowed." + - GenericGlobalObjectInformationMsg, - pos) - js.Undefined() - } - - /** Gen a JS bracket select or a `JSGlobalRef`. - * - * If the receiver is a normal value, i.e., not the global scope, then - * emit a `JSSelect`. - * - * Otherwise, if the `item` is a constant string that is a valid - * JavaScript identifier, emit a `JSGlobalRef`. - * - * Otherwise, report a compile error. - */ - private def genJSSelectOrGlobalRef(qual: MaybeGlobalScope, item: js.Tree)( - implicit pos: SourcePosition): js.AssignLhs = { - qual match { - case MaybeGlobalScope.NotGlobalScope(qualTree) => - js.JSSelect(qualTree, item) - - case MaybeGlobalScope.GlobalScope(_) => - item match { - case js.StringLiteral(value) => - if (js.JSGlobalRef.isValidJSGlobalRefName(value)) { - js.JSGlobalRef(value) - } else if (js.JSGlobalRef.ReservedJSIdentifierNames.contains(value)) { - report.error( - "Invalid selection in the global scope of the reserved " + - s"identifier name `$value`." 
+ - GenericGlobalObjectInformationMsg, - pos) - js.JSGlobalRef("erroneous") - } else { - report.error( - "Selecting a field of the global scope whose name is " + - "not a valid JavaScript identifier is not allowed." + - GenericGlobalObjectInformationMsg, - pos) - js.JSGlobalRef("erroneous") - } - - case _ => - report.error( - "Selecting a field of the global scope with a dynamic " + - "name is not allowed." + - GenericGlobalObjectInformationMsg, - pos) - js.JSGlobalRef("erroneous") - } - } - } - - /** Gen a JS bracket method apply or an apply of a `GlobalRef`. - * - * If the receiver is a normal value, i.e., not the global scope, then - * emit a `JSMethodApply`. - * - * Otherwise, if the `method` is a constant string that is a valid - * JavaScript identifier, emit a `JSFunctionApply(JSGlobalRef(...), ...)`. - * - * Otherwise, report a compile error. - */ - private def genJSMethodApplyOrGlobalRefApply( - receiver: MaybeGlobalScope, method: js.Tree, args: List[js.TreeOrJSSpread])( - implicit pos: SourcePosition): js.Tree = { - receiver match { - case MaybeGlobalScope.NotGlobalScope(receiverTree) => - js.JSMethodApply(receiverTree, method, args) - - case MaybeGlobalScope.GlobalScope(_) => - method match { - case js.StringLiteral(value) => - if (js.JSGlobalRef.isValidJSGlobalRefName(value)) { - js.JSFunctionApply(js.JSGlobalRef(value), args) - } else if (js.JSGlobalRef.ReservedJSIdentifierNames.contains(value)) { - report.error( - "Invalid call in the global scope of the reserved " + - s"identifier name `$value`." + - GenericGlobalObjectInformationMsg, - pos) - js.Undefined() - } else { - report.error( - "Calling a method of the global scope whose name is not " + - "a valid JavaScript identifier is not allowed." + - GenericGlobalObjectInformationMsg, - pos) - js.Undefined() - } - - case _ => - report.error( - "Calling a method of the global scope with a dynamic " + - "name is not allowed." + - GenericGlobalObjectInformationMsg, - pos) - js.Undefined() - } - } - } - - private def computeJSNativeLoadSpecOfValDef(sym: Symbol): js.JSNativeLoadSpec = { - atPhaseBeforeTransforms { - computeJSNativeLoadSpecOfInPhase(sym) - } - } - - private def computeJSNativeLoadSpecOfClass(sym: Symbol): Option[js.JSNativeLoadSpec] = { - if (sym.is(Trait) || sym.hasAnnotation(jsdefn.JSGlobalScopeAnnot)) { - None - } else { - atPhaseBeforeTransforms { - if (sym.owner.isStaticOwner) - Some(computeJSNativeLoadSpecOfInPhase(sym)) - else - None - } - } - } - - private def computeJSNativeLoadSpecOfInPhase(sym: Symbol)(using Context): js.JSNativeLoadSpec = { - import js.JSNativeLoadSpec._ - - val symOwner = sym.owner - - // Marks a code path as unexpected because it should have been reported as an error in `PrepJSInterop`. 
- def unexpected(msg: String): Nothing = - throw new FatalError(i"$msg for ${sym.fullName} at ${sym.srcPos}") - - if (symOwner.hasAnnotation(jsdefn.JSNativeAnnot)) { - val jsName = sym.jsName match { - case JSName.Literal(jsName) => jsName - case JSName.Computed(_) => unexpected("could not read the simple JS name as a string literal") - } - - if (symOwner.hasAnnotation(jsdefn.JSGlobalScopeAnnot)) { - Global(jsName, Nil) - } else { - val ownerLoadSpec = computeJSNativeLoadSpecOfInPhase(symOwner) - ownerLoadSpec match { - case Global(globalRef, path) => - Global(globalRef, path :+ jsName) - case Import(module, path) => - Import(module, path :+ jsName) - case ImportWithGlobalFallback(Import(module, modulePath), Global(globalRef, globalPath)) => - ImportWithGlobalFallback( - Import(module, modulePath :+ jsName), - Global(globalRef, globalPath :+ jsName)) - } - } - } else { - def parsePath(pathName: String): List[String] = - pathName.split('.').toList - - def parseGlobalPath(pathName: String): Global = { - val globalRef :: path = parsePath(pathName): @unchecked - Global(globalRef, path) - } - - val annot = sym.annotations.find { annot => - annot.symbol == jsdefn.JSGlobalAnnot || annot.symbol == jsdefn.JSImportAnnot - }.getOrElse { - unexpected("could not find the JS native load spec annotation") - } - - if (annot.symbol == jsdefn.JSGlobalAnnot) { - val pathName = annot.argumentConstantString(0).getOrElse { - sym.defaultJSName - } - parseGlobalPath(pathName) - } else { // annot.symbol == jsdefn.JSImportAnnot - val module = annot.argumentConstantString(0).getOrElse { - unexpected("could not read the module argument as a string literal") - } - val path = annot.argumentConstantString(1).fold { - if (annot.arguments.sizeIs < 2) - parsePath(sym.defaultJSName) - else - Nil - } { pathName => - parsePath(pathName) - } - val importSpec = Import(module, path) - annot.argumentConstantString(2).fold[js.JSNativeLoadSpec] { - importSpec - } { globalPathName => - ImportWithGlobalFallback(importSpec, parseGlobalPath(globalPathName)) - } - } - } - } - - private def isMethodStaticInIR(sym: Symbol): Boolean = - sym.is(JavaStatic) - - /** Generate a Class[_] value (e.g. coming from classOf[T]) */ - private def genClassConstant(tpe: Type)(implicit pos: Position): js.Tree = - js.ClassOf(toTypeRef(tpe)) - - private def isStaticModule(sym: Symbol): Boolean = - sym.is(Module) && sym.isStatic - - private def isPrimitiveValueType(tpe: Type): Boolean = { - tpe.widenDealias match { - case JavaArrayType(_) => false - case _: ErasedValueType => false - case t => t.typeSymbol.asClass.isPrimitiveValueClass - } - } - - protected lazy val isHijackedClass: Set[Symbol] = { - /* This list is a duplicate of ir.Definitions.HijackedClasses, but - * with global.Symbol's instead of IR encoded names as Strings. - * We also add java.lang.Void, which BoxedUnit "erases" to. 
- */ - Set[Symbol]( - defn.BoxedUnitClass, defn.BoxedBooleanClass, defn.BoxedCharClass, defn.BoxedByteClass, - defn.BoxedShortClass, defn.BoxedIntClass, defn.BoxedLongClass, defn.BoxedFloatClass, - defn.BoxedDoubleClass, defn.StringClass, jsdefn.JavaLangVoidClass - ) - } - - private def isMaybeJavaScriptException(tpe: Type): Boolean = - jsdefn.JavaScriptExceptionClass.isSubClass(tpe.typeSymbol) - - private def hasDefaultCtorArgsAndJSModule(classSym: Symbol): Boolean = { - def hasNativeCompanion = - classSym.companionModule.moduleClass.hasAnnotation(jsdefn.JSNativeAnnot) - def hasDefaultParameters = - classSym.info.decls.exists(sym => sym.isClassConstructor && sym.hasDefaultParams) - - hasNativeCompanion && hasDefaultParameters - } - - // Copied from DottyBackendInterface - - private val desugared = new java.util.IdentityHashMap[Type, tpd.Select] - - def desugarIdent(i: Ident): Option[tpd.Select] = { - var found = desugared.get(i.tpe) - if (found == null) { - tpd.desugarIdent(i) match { - case sel: tpd.Select => - desugared.put(i.tpe, sel) - found = sel - case _ => - } - } - if (found == null) None else Some(found) - } -} - -object JSCodeGen { - - private val NullPointerExceptionClass = ClassName("java.lang.NullPointerException") - private val JSObjectClassName = ClassName("scala.scalajs.js.Object") - private val JavaScriptExceptionClassName = ClassName("scala.scalajs.js.JavaScriptException") - - private val ObjectClassRef = jstpe.ClassRef(ir.Names.ObjectClass) - - private val newSimpleMethodName = SimpleMethodName("new") - - private val selectedValueMethodName = MethodName("selectedValue", Nil, ObjectClassRef) - - private val ObjectArgConstructorName = MethodName.constructor(List(ObjectClassRef)) - - private val thisOriginalName = OriginalName("this") - - sealed abstract class MaybeGlobalScope - - object MaybeGlobalScope { - final case class NotGlobalScope(tree: js.Tree) extends MaybeGlobalScope - - final case class GlobalScope(pos: SourcePosition) extends MaybeGlobalScope - } - - /** Marker object for undefined parameters in JavaScript semantic calls. - * - * To be used inside a `js.Transient` node. - */ - case object UndefinedParam extends js.Transient.Value { - val tpe: jstpe.Type = jstpe.UndefType - - def traverse(traverser: ir.Traversers.Traverser): Unit = () - - def transform(transformer: ir.Transformers.Transformer, isStat: Boolean)( - implicit pos: ir.Position): js.Tree = { - js.Transient(this) - } - - def printIR(out: ir.Printers.IRTreePrinter): Unit = - out.print("") - } - - /** Info about a default param accessor. - * - * The method must have a default getter name for this class to make sense. - */ - private class DefaultParamInfo(sym: Symbol)(using Context) { - private val methodName = sym.name.exclude(DefaultGetterName) - - def isForConstructor: Boolean = methodName == nme.CONSTRUCTOR - - /** When `isForConstructor` is true, returns the owner of the attached - * constructor. - */ - def constructorOwner: Symbol = sym.owner.linkedClass - - /** When `isForConstructor` is false, returns the method attached to the - * specified default accessor. - */ - def attachedMethod: Symbol = { - // If there are overloads, we need to find the one that has default params. 
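    // A minimal illustration (not from this file) of the situation handled by
    // the lookup just below:
    //
    //   class C {
    //     def f(x: Int): Int = x                   // overload without defaults
    //     def f(x: Int, y: Int = 1): Int = x + y   // the default getter f$default$2
    //   }                                          // attaches to this overload
    //
    // `decl(methodName)` is then overloaded, so we keep the alternative that
    // carries HasDefaultParams (and is not a Bridge).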
- val overloads = sym.owner.info.decl(methodName) - if (!overloads.isOverloaded) - overloads.symbol - else - overloads.suchThat(_.is(HasDefaultParams, butNot = Bridge)).symbol - } - } - -} diff --git a/tests/pos-with-compiler-cc/backend/sjs/JSDefinitions.scala b/tests/pos-with-compiler-cc/backend/sjs/JSDefinitions.scala deleted file mode 100644 index 964811c69e19..000000000000 --- a/tests/pos-with-compiler-cc/backend/sjs/JSDefinitions.scala +++ /dev/null @@ -1,340 +0,0 @@ -package dotty.tools.backend.sjs - -import scala.language.unsafeNulls - -import scala.annotation.threadUnsafe - -import dotty.tools.dotc.core._ -import Names._ -import Types._ -import Contexts._ -import Symbols._ -import StdNames._ - -import dotty.tools.dotc.config.SJSPlatform - -object JSDefinitions { - /** The Scala.js-specific definitions for the current context. */ - def jsdefn(using Context): JSDefinitions = - ctx.platform.asInstanceOf[SJSPlatform].jsDefinitions -} - -final class JSDefinitions()(using DetachedContext) { - - @threadUnsafe lazy val InlineAnnotType: TypeRef = requiredClassRef("scala.inline") - def InlineAnnot(using Context) = InlineAnnotType.symbol.asClass - @threadUnsafe lazy val NoinlineAnnotType: TypeRef = requiredClassRef("scala.noinline") - def NoinlineAnnot(using Context) = NoinlineAnnotType.symbol.asClass - - @threadUnsafe lazy val JavaLangVoidType: TypeRef = requiredClassRef("java.lang.Void") - def JavaLangVoidClass(using Context) = JavaLangVoidType.symbol.asClass - - @threadUnsafe lazy val ScalaJSJSPackageVal = requiredPackage("scala.scalajs.js") - @threadUnsafe lazy val ScalaJSJSPackageClass = ScalaJSJSPackageVal.moduleClass.asClass - @threadUnsafe lazy val JSPackage_typeOfR = ScalaJSJSPackageClass.requiredMethodRef("typeOf") - def JSPackage_typeOf(using Context) = JSPackage_typeOfR.symbol - @threadUnsafe lazy val JSPackage_constructorOfR = ScalaJSJSPackageClass.requiredMethodRef("constructorOf") - def JSPackage_constructorOf(using Context) = JSPackage_constructorOfR.symbol - @threadUnsafe lazy val JSPackage_nativeR = ScalaJSJSPackageClass.requiredMethodRef("native") - def JSPackage_native(using Context) = JSPackage_nativeR.symbol - @threadUnsafe lazy val JSPackage_undefinedR = ScalaJSJSPackageClass.requiredMethodRef("undefined") - def JSPackage_undefined(using Context) = JSPackage_undefinedR.symbol - @threadUnsafe lazy val JSPackage_dynamicImportR = ScalaJSJSPackageClass.requiredMethodRef("dynamicImport") - def JSPackage_dynamicImport(using Context) = JSPackage_dynamicImportR.symbol - - @threadUnsafe lazy val JSNativeAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.native") - def JSNativeAnnot(using Context) = JSNativeAnnotType.symbol.asClass - - @threadUnsafe lazy val JSAnyType: TypeRef = requiredClassRef("scala.scalajs.js.Any") - def JSAnyClass(using Context) = JSAnyType.symbol.asClass - @threadUnsafe lazy val JSObjectType: TypeRef = requiredClassRef("scala.scalajs.js.Object") - def JSObjectClass(using Context) = JSObjectType.symbol.asClass - @threadUnsafe lazy val JSFunctionType: TypeRef = requiredClassRef("scala.scalajs.js.Function") - def JSFunctionClass(using Context) = JSFunctionType.symbol.asClass - @threadUnsafe lazy val JSThisFunctionType: TypeRef = requiredClassRef("scala.scalajs.js.ThisFunction") - def JSThisFunctionClass(using Context) = JSThisFunctionType.symbol.asClass - - @threadUnsafe lazy val PseudoUnionType: TypeRef = requiredClassRef("scala.scalajs.js.|") - def PseudoUnionClass(using Context) = PseudoUnionType.symbol.asClass - - @threadUnsafe lazy val 
PseudoUnionModuleRef = requiredModuleRef("scala.scalajs.js.|") - def PseudoUnionModule(using Context) = PseudoUnionModuleRef.symbol - @threadUnsafe lazy val PseudoUnion_fromR = PseudoUnionModule.requiredMethodRef("from") - def PseudoUnion_from(using Context) = PseudoUnion_fromR.symbol - @threadUnsafe lazy val PseudoUnion_fromTypeConstructorR = PseudoUnionModule.requiredMethodRef("fromTypeConstructor") - def PseudoUnion_fromTypeConstructor(using Context) = PseudoUnion_fromTypeConstructorR.symbol - - @threadUnsafe lazy val UnionOpsModuleRef = requiredModuleRef("scala.scalajs.js.internal.UnitOps") - - @threadUnsafe lazy val JSArrayType: TypeRef = requiredClassRef("scala.scalajs.js.Array") - def JSArrayClass(using Context) = JSArrayType.symbol.asClass - @threadUnsafe lazy val JSDynamicType: TypeRef = requiredClassRef("scala.scalajs.js.Dynamic") - def JSDynamicClass(using Context) = JSDynamicType.symbol.asClass - - @threadUnsafe lazy val RuntimeExceptionType: TypeRef = requiredClassRef("java.lang.RuntimeException") - def RuntimeExceptionClass(using Context) = RuntimeExceptionType.symbol.asClass - @threadUnsafe lazy val JavaScriptExceptionType: TypeRef = requiredClassRef("scala.scalajs.js.JavaScriptException") - def JavaScriptExceptionClass(using Context) = JavaScriptExceptionType.symbol.asClass - - @threadUnsafe lazy val JSGlobalAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSGlobal") - def JSGlobalAnnot(using Context) = JSGlobalAnnotType.symbol.asClass - @threadUnsafe lazy val JSImportAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSImport") - def JSImportAnnot(using Context) = JSImportAnnotType.symbol.asClass - @threadUnsafe lazy val JSGlobalScopeAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSGlobalScope") - def JSGlobalScopeAnnot(using Context) = JSGlobalScopeAnnotType.symbol.asClass - @threadUnsafe lazy val JSNameAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSName") - def JSNameAnnot(using Context) = JSNameAnnotType.symbol.asClass - @threadUnsafe lazy val JSFullNameAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSFullName") - def JSFullNameAnnot(using Context) = JSFullNameAnnotType.symbol.asClass - @threadUnsafe lazy val JSBracketAccessAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSBracketAccess") - def JSBracketAccessAnnot(using Context) = JSBracketAccessAnnotType.symbol.asClass - @threadUnsafe lazy val JSBracketCallAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSBracketCall") - def JSBracketCallAnnot(using Context) = JSBracketCallAnnotType.symbol.asClass - @threadUnsafe lazy val JSExportTopLevelAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSExportTopLevel") - def JSExportTopLevelAnnot(using Context) = JSExportTopLevelAnnotType.symbol.asClass - @threadUnsafe lazy val JSExportAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSExport") - def JSExportAnnot(using Context) = JSExportAnnotType.symbol.asClass - @threadUnsafe lazy val JSExportStaticAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSExportStatic") - def JSExportStaticAnnot(using Context) = JSExportStaticAnnotType.symbol.asClass - @threadUnsafe lazy val JSExportAllAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSExportAll") - def JSExportAllAnnot(using Context) = JSExportAllAnnotType.symbol.asClass - - def JSAnnotPackage(using Context) = JSGlobalAnnot.owner.asClass - - @threadUnsafe lazy val JSTypeAnnotType: 
TypeRef = requiredClassRef("scala.scalajs.js.annotation.internal.JSType") - def JSTypeAnnot(using Context) = JSTypeAnnotType.symbol.asClass - @threadUnsafe lazy val JSOptionalAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.internal.JSOptional") - def JSOptionalAnnot(using Context) = JSOptionalAnnotType.symbol.asClass - @threadUnsafe lazy val ExposedJSMemberAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.internal.ExposedJSMember") - def ExposedJSMemberAnnot(using Context) = ExposedJSMemberAnnotType.symbol.asClass - - @threadUnsafe lazy val JSImportNamespaceModuleRef = requiredModuleRef("scala.scalajs.js.annotation.JSImport.Namespace") - def JSImportNamespaceModule(using Context) = JSImportNamespaceModuleRef.symbol - - @threadUnsafe lazy val JSAnyModuleRef = requiredModuleRef("scala.scalajs.js.Any") - def JSAnyModule(using Context) = JSAnyModuleRef.symbol - @threadUnsafe lazy val JSAny_fromFunctionR = (0 to 22).map(n => JSAnyModule.requiredMethodRef("fromFunction" + n)).toArray - def JSAny_fromFunction(n: Int)(using Context) = JSAny_fromFunctionR(n).symbol - - @threadUnsafe lazy val JSDynamicModuleRef = requiredModuleRef("scala.scalajs.js.Dynamic") - def JSDynamicModule(using Context) = JSDynamicModuleRef.symbol - @threadUnsafe lazy val JSDynamic_globalR = JSDynamicModule.requiredMethodRef("global") - def JSDynamic_global(using Context) = JSDynamic_globalR.symbol - @threadUnsafe lazy val JSDynamic_newInstanceR = JSDynamicModule.requiredMethodRef("newInstance") - def JSDynamic_newInstance(using Context) = JSDynamic_newInstanceR.symbol - - @threadUnsafe lazy val JSDynamicLiteralModuleRef = JSDynamicModule.moduleClass.requiredValueRef("literal") - def JSDynamicLiteralModule(using Context) = JSDynamicLiteralModuleRef.symbol - @threadUnsafe lazy val JSDynamicLiteral_applyDynamicNamedR = JSDynamicLiteralModule.requiredMethodRef("applyDynamicNamed") - def JSDynamicLiteral_applyDynamicNamed(using Context) = JSDynamicLiteral_applyDynamicNamedR.symbol - @threadUnsafe lazy val JSDynamicLiteral_applyDynamicR = JSDynamicLiteralModule.requiredMethodRef("applyDynamic") - def JSDynamicLiteral_applyDynamic(using Context) = JSDynamicLiteral_applyDynamicR.symbol - - @threadUnsafe lazy val JSObjectModuleRef = requiredModuleRef("scala.scalajs.js.Object") - def JSObjectModule(using Context) = JSObjectModuleRef.symbol - - @threadUnsafe lazy val JSArrayModuleRef = requiredModuleRef("scala.scalajs.js.Array") - def JSArrayModule(using Context) = JSArrayModuleRef.symbol - @threadUnsafe lazy val JSArray_applyR = JSArrayModule.requiredMethodRef(nme.apply) - def JSArray_apply(using Context) = JSArray_applyR.symbol - - @threadUnsafe lazy val JSThisFunctionModuleRef = requiredModuleRef("scala.scalajs.js.ThisFunction") - def JSThisFunctionModule(using Context) = JSThisFunctionModuleRef.symbol - @threadUnsafe lazy val JSThisFunction_fromFunctionR = (1 to 22).map(n => JSThisFunctionModule.requiredMethodRef("fromFunction" + n)).toArray - def JSThisFunction_fromFunction(n: Int)(using Context) = JSThisFunction_fromFunctionR(n - 1).symbol - - @threadUnsafe lazy val JSConstructorTagModuleRef = requiredModuleRef("scala.scalajs.js.ConstructorTag") - def JSConstructorTagModule(using Context) = JSConstructorTagModuleRef.symbol - @threadUnsafe lazy val JSConstructorTag_materializeR = JSConstructorTagModule.requiredMethodRef("materialize") - def JSConstructorTag_materialize(using Context) = JSConstructorTag_materializeR.symbol - - @threadUnsafe lazy val JSNewModuleRef = 
requiredModuleRef("scala.scalajs.js.new") - def JSNewModule(using Context) = JSNewModuleRef.symbol - @threadUnsafe lazy val JSNew_targetR = JSNewModule.requiredMethodRef("target") - def JSNew_target(using Context) = JSNew_targetR.symbol - - @threadUnsafe lazy val JSImportModuleRef = requiredModuleRef("scala.scalajs.js.import") - def JSImportModule(using Context) = JSImportModuleRef.symbol - @threadUnsafe lazy val JSImport_applyR = JSImportModule.requiredMethodRef(nme.apply) - def JSImport_apply(using Context) = JSImport_applyR.symbol - @threadUnsafe lazy val JSImport_metaR = JSImportModule.requiredMethodRef("meta") - def JSImport_meta(using Context) = JSImport_metaR.symbol - - @threadUnsafe lazy val RuntimePackageVal = requiredPackage("scala.scalajs.runtime") - @threadUnsafe lazy val RuntimePackageClass = RuntimePackageVal.moduleClass.asClass - @threadUnsafe lazy val Runtime_toScalaVarArgsR = RuntimePackageClass.requiredMethodRef("toScalaVarArgs") - def Runtime_toScalaVarArgs(using Context) = Runtime_toScalaVarArgsR.symbol - @threadUnsafe lazy val Runtime_toJSVarArgsR = RuntimePackageClass.requiredMethodRef("toJSVarArgs") - def Runtime_toJSVarArgs(using Context) = Runtime_toJSVarArgsR.symbol - @threadUnsafe lazy val Runtime_privateFieldsSymbolR = RuntimePackageClass.requiredMethodRef("privateFieldsSymbol") - def Runtime_privateFieldsSymbol(using Context) = Runtime_privateFieldsSymbolR.symbol - @threadUnsafe lazy val Runtime_constructorOfR = RuntimePackageClass.requiredMethodRef("constructorOf") - def Runtime_constructorOf(using Context) = Runtime_constructorOfR.symbol - @threadUnsafe lazy val Runtime_newConstructorTagR = RuntimePackageClass.requiredMethodRef("newConstructorTag") - def Runtime_newConstructorTag(using Context) = Runtime_newConstructorTagR.symbol - @threadUnsafe lazy val Runtime_createInnerJSClassR = RuntimePackageClass.requiredMethodRef("createInnerJSClass") - def Runtime_createInnerJSClass(using Context) = Runtime_createInnerJSClassR.symbol - @threadUnsafe lazy val Runtime_createLocalJSClassR = RuntimePackageClass.requiredMethodRef("createLocalJSClass") - def Runtime_createLocalJSClass(using Context) = Runtime_createLocalJSClassR.symbol - @threadUnsafe lazy val Runtime_withContextualJSClassValueR = RuntimePackageClass.requiredMethodRef("withContextualJSClassValue") - def Runtime_withContextualJSClassValue(using Context) = Runtime_withContextualJSClassValueR.symbol - @threadUnsafe lazy val Runtime_linkingInfoR = RuntimePackageClass.requiredMethodRef("linkingInfo") - def Runtime_linkingInfo(using Context) = Runtime_linkingInfoR.symbol - @threadUnsafe lazy val Runtime_dynamicImportR = RuntimePackageClass.requiredMethodRef("dynamicImport") - def Runtime_dynamicImport(using Context) = Runtime_dynamicImportR.symbol - - @threadUnsafe lazy val DynamicImportThunkType: TypeRef = requiredClassRef("scala.scalajs.runtime.DynamicImportThunk") - def DynamicImportThunkClass(using Context) = DynamicImportThunkType.symbol.asClass - @threadUnsafe lazy val DynamicImportThunkClass_applyR = DynamicImportThunkClass.requiredMethodRef(nme.apply) - def DynamicImportThunkClass_apply(using Context) = DynamicImportThunkClass_applyR.symbol - - @threadUnsafe lazy val SpecialPackageVal = requiredPackage("scala.scalajs.js.special") - @threadUnsafe lazy val SpecialPackageClass = SpecialPackageVal.moduleClass.asClass - @threadUnsafe lazy val Special_debuggerR = SpecialPackageClass.requiredMethodRef("debugger") - def Special_debugger(using Context) = Special_debuggerR.symbol - @threadUnsafe lazy val 
Special_deleteR = SpecialPackageClass.requiredMethodRef("delete") - def Special_delete(using Context) = Special_deleteR.symbol - @threadUnsafe lazy val Special_forinR = SpecialPackageClass.requiredMethodRef("forin") - def Special_forin(using Context) = Special_forinR.symbol - @threadUnsafe lazy val Special_inR = SpecialPackageClass.requiredMethodRef("in") - def Special_in(using Context) = Special_inR.symbol - @threadUnsafe lazy val Special_instanceofR = SpecialPackageClass.requiredMethodRef("instanceof") - def Special_instanceof(using Context) = Special_instanceofR.symbol - @threadUnsafe lazy val Special_strictEqualsR = SpecialPackageClass.requiredMethodRef("strictEquals") - def Special_strictEquals(using Context) = Special_strictEqualsR.symbol - @threadUnsafe lazy val Special_throwR = SpecialPackageClass.requiredMethodRef("throw") - def Special_throw(using Context) = Special_throwR.symbol - @threadUnsafe lazy val Special_tryCatchR = SpecialPackageClass.requiredMethodRef("tryCatch") - def Special_tryCatch(using Context) = Special_tryCatchR.symbol - @threadUnsafe lazy val Special_wrapAsThrowableR = SpecialPackageClass.requiredMethodRef("wrapAsThrowable") - def Special_wrapAsThrowable(using Context) = Special_wrapAsThrowableR.symbol - @threadUnsafe lazy val Special_unwrapFromThrowableR = SpecialPackageClass.requiredMethodRef("unwrapFromThrowable") - def Special_unwrapFromThrowable(using Context) = Special_unwrapFromThrowableR.symbol - - @threadUnsafe lazy val WrappedArrayType: TypeRef = requiredClassRef("scala.scalajs.js.WrappedArray") - def WrappedArrayClass(using Context) = WrappedArrayType.symbol.asClass - - @threadUnsafe lazy val ScalaRunTime_isArrayR = defn.ScalaRuntimeModule.requiredMethodRef("isArray", List(???, ???)) - def ScalaRunTime_isArray(using Context): Symbol = ScalaRunTime_isArrayR.symbol - - @threadUnsafe lazy val BoxesRunTime_boxToCharacterR = defn.BoxesRunTimeModule.requiredMethodRef("boxToCharacter") - def BoxesRunTime_boxToCharacter(using Context): Symbol = BoxesRunTime_boxToCharacterR.symbol - @threadUnsafe lazy val BoxesRunTime_unboxToCharR = defn.BoxesRunTimeModule.requiredMethodRef("unboxToChar") - def BoxesRunTime_unboxToChar(using Context): Symbol = BoxesRunTime_unboxToCharR.symbol - - @threadUnsafe lazy val EnableReflectiveInstantiationAnnotType: TypeRef = requiredClassRef("scala.scalajs.reflect.annotation.EnableReflectiveInstantiation") - def EnableReflectiveInstantiationAnnot(using Context) = EnableReflectiveInstantiationAnnotType.symbol.asClass - - @threadUnsafe lazy val ReflectModuleRef = requiredModuleRef("scala.scalajs.reflect.Reflect") - def ReflectModule(using Context) = ReflectModuleRef.symbol - @threadUnsafe lazy val Reflect_registerLoadableModuleClassR = ReflectModule.requiredMethodRef("registerLoadableModuleClass") - def Reflect_registerLoadableModuleClass(using Context) = Reflect_registerLoadableModuleClassR.symbol - @threadUnsafe lazy val Reflect_registerInstantiatableClassR = ReflectModule.requiredMethodRef("registerInstantiatableClass") - def Reflect_registerInstantiatableClass(using Context) = Reflect_registerInstantiatableClassR.symbol - - @threadUnsafe lazy val ReflectSelectableType: TypeRef = requiredClassRef("scala.reflect.Selectable") - def ReflectSelectableClass(using Context) = ReflectSelectableType.symbol.asClass - @threadUnsafe lazy val ReflectSelectable_selectDynamicR = ReflectSelectableClass.requiredMethodRef("selectDynamic") - def ReflectSelectable_selectDynamic(using Context) = ReflectSelectable_selectDynamicR.symbol - @threadUnsafe 
lazy val ReflectSelectable_applyDynamicR = ReflectSelectableClass.requiredMethodRef("applyDynamic") - def ReflectSelectable_applyDynamic(using Context) = ReflectSelectable_applyDynamicR.symbol - - @threadUnsafe lazy val ReflectSelectableModuleRef = requiredModuleRef("scala.reflect.Selectable") - def ReflectSelectableModule(using Context) = ReflectSelectableModuleRef.symbol - @threadUnsafe lazy val ReflectSelectable_reflectiveSelectableR = ReflectSelectableModule.requiredMethodRef("reflectiveSelectable") - def ReflectSelectable_reflectiveSelectable(using Context) = ReflectSelectable_reflectiveSelectableR.symbol - - @threadUnsafe lazy val SelectableModuleRef = requiredModuleRef("scala.Selectable") - def SelectableModule(using Context) = SelectableModuleRef.symbol - @threadUnsafe lazy val Selectable_reflectiveSelectableFromLangReflectiveCallsR = SelectableModule.requiredMethodRef("reflectiveSelectableFromLangReflectiveCalls") - def Selectable_reflectiveSelectableFromLangReflectiveCalls(using Context) = Selectable_reflectiveSelectableFromLangReflectiveCallsR.symbol - - private var allRefClassesCache: Set[Symbol] = _ - def allRefClasses(using Context): Set[Symbol] = { - if (allRefClassesCache == null) { - val baseNames = List("Object", "Boolean", "Character", "Byte", "Short", - "Int", "Long", "Float", "Double") - val fullNames = baseNames.flatMap { base => - List(s"scala.runtime.${base}Ref", s"scala.runtime.Volatile${base}Ref") - } - allRefClassesCache = fullNames.map(name => requiredClass(name)).toSet - } - allRefClassesCache - } - - /** Definitions related to scala.Enumeration. */ - object scalaEnumeration { - val nmeValue = termName("Value") - val nmeVal = termName("Val") - val hasNext = termName("hasNext") - val next = termName("next") - - @threadUnsafe lazy val EnumerationClass = requiredClass("scala.Enumeration") - @threadUnsafe lazy val Enumeration_Value_NoArg = EnumerationClass.requiredValue(nmeValue) - @threadUnsafe lazy val Enumeration_Value_IntArg = EnumerationClass.requiredMethod(nmeValue, List(defn.IntType)) - @threadUnsafe lazy val Enumeration_Value_StringArg = EnumerationClass.requiredMethod(nmeValue, List(defn.StringType)) - @threadUnsafe lazy val Enumeration_Value_IntStringArg = EnumerationClass.requiredMethod(nmeValue, List(defn.IntType, defn.StringType)) - @threadUnsafe lazy val Enumeration_nextName = EnumerationClass.requiredMethod(termName("nextName")) - - @threadUnsafe lazy val EnumerationValClass = EnumerationClass.requiredClass("Val") - @threadUnsafe lazy val Enumeration_Val_NoArg = EnumerationValClass.requiredMethod(nme.CONSTRUCTOR, Nil) - @threadUnsafe lazy val Enumeration_Val_IntArg = EnumerationValClass.requiredMethod(nme.CONSTRUCTOR, List(defn.IntType)) - @threadUnsafe lazy val Enumeration_Val_StringArg = EnumerationValClass.requiredMethod(nme.CONSTRUCTOR, List(defn.StringType)) - @threadUnsafe lazy val Enumeration_Val_IntStringArg = EnumerationValClass.requiredMethod(nme.CONSTRUCTOR, List(defn.IntType, defn.StringType)) - - def isValueMethod(sym: Symbol)(using Context): Boolean = - sym.name == nmeValue && sym.owner == EnumerationClass - - def isValueMethodNoName(sym: Symbol)(using Context): Boolean = - isValueMethod(sym) && (sym == Enumeration_Value_NoArg || sym == Enumeration_Value_IntArg) - - def isValueMethodName(sym: Symbol)(using Context): Boolean = - isValueMethod(sym) && (sym == Enumeration_Value_StringArg || sym == Enumeration_Value_IntStringArg) - - def isValCtor(sym: Symbol)(using Context): Boolean = - sym.isClassConstructor && sym.owner == 
EnumerationValClass - - def isValCtorNoName(sym: Symbol)(using Context): Boolean = - isValCtor(sym) && (sym == Enumeration_Val_NoArg || sym == Enumeration_Val_IntArg) - - def isValCtorName(sym: Symbol)(using Context): Boolean = - isValCtor(sym) && (sym == Enumeration_Val_StringArg || sym == Enumeration_Val_IntStringArg) - } - - /** Definitions related to the treatment of JUnit bootstrappers. */ - object junit { - @threadUnsafe lazy val TestAnnotType: TypeRef = requiredClassRef("org.junit.Test") - def TestAnnotClass(using Context): ClassSymbol = TestAnnotType.symbol.asClass - - @threadUnsafe lazy val BeforeAnnotType: TypeRef = requiredClassRef("org.junit.Before") - def BeforeAnnotClass(using Context): ClassSymbol = BeforeAnnotType.symbol.asClass - - @threadUnsafe lazy val AfterAnnotType: TypeRef = requiredClassRef("org.junit.After") - def AfterAnnotClass(using Context): ClassSymbol = AfterAnnotType.symbol.asClass - - @threadUnsafe lazy val BeforeClassAnnotType: TypeRef = requiredClassRef("org.junit.BeforeClass") - def BeforeClassAnnotClass(using Context): ClassSymbol = BeforeClassAnnotType.symbol.asClass - - @threadUnsafe lazy val AfterClassAnnotType: TypeRef = requiredClassRef("org.junit.AfterClass") - def AfterClassAnnotClass(using Context): ClassSymbol = AfterClassAnnotType.symbol.asClass - - @threadUnsafe lazy val IgnoreAnnotType: TypeRef = requiredClassRef("org.junit.Ignore") - def IgnoreAnnotClass(using Context): ClassSymbol = IgnoreAnnotType.symbol.asClass - - @threadUnsafe lazy val BootstrapperType: TypeRef = requiredClassRef("org.scalajs.junit.Bootstrapper") - - @threadUnsafe lazy val TestMetadataType: TypeRef = requiredClassRef("org.scalajs.junit.TestMetadata") - - @threadUnsafe lazy val NoSuchMethodExceptionType: TypeRef = requiredClassRef("java.lang.NoSuchMethodException") - - @threadUnsafe lazy val FutureType: TypeRef = requiredClassRef("scala.concurrent.Future") - def FutureClass(using Context): ClassSymbol = FutureType.symbol.asClass - - @threadUnsafe private lazy val FutureModule_successfulR = requiredModule("scala.concurrent.Future").requiredMethodRef("successful") - def FutureModule_successful(using Context): Symbol = FutureModule_successfulR.symbol - - @threadUnsafe private lazy val SuccessModule_applyR = requiredModule("scala.util.Success").requiredMethodRef(nme.apply) - def SuccessModule_apply(using Context): Symbol = SuccessModule_applyR.symbol - } - -} diff --git a/tests/pos-with-compiler-cc/backend/sjs/JSEncoding.scala b/tests/pos-with-compiler-cc/backend/sjs/JSEncoding.scala deleted file mode 100644 index 73a150c60290..000000000000 --- a/tests/pos-with-compiler-cc/backend/sjs/JSEncoding.scala +++ /dev/null @@ -1,428 +0,0 @@ -package dotty.tools.backend.sjs - -import scala.language.unsafeNulls - -import scala.collection.mutable - -import dotty.tools.dotc.core._ -import Contexts._ -import Flags._ -import Types._ -import Symbols._ -import NameOps._ -import Names._ -import StdNames._ - -import dotty.tools.dotc.transform.sjs.JSSymUtils._ - -import org.scalajs.ir -import org.scalajs.ir.{Trees => js, Types => jstpe} -import org.scalajs.ir.Names.{LocalName, LabelName, FieldName, SimpleMethodName, MethodName, ClassName} -import org.scalajs.ir.OriginalName -import org.scalajs.ir.OriginalName.NoOriginalName -import org.scalajs.ir.UTF8String - -import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions - -import JSDefinitions.jsdefn - -/** Encoding of symbol names for JavaScript - * - * Some issues that this encoding solves: - * * Overloading: encode the full signature 
in the JS name - * * Same scope for fields and methods of a class - * * Global access to classes and modules (by their full name) - * - * @author Sébastien Doeraene - */ -object JSEncoding { - - /** Name of the capture param storing the JS super class. - * - * This is used by the dispatchers of exposed JS methods and properties of - * nested JS classes when they need to perform a super call. Other super - * calls (in the actual bodies of the methods, not in the dispatchers) do - * not use this value, since they are implemented as static methods that do - * not have access to it. Instead, they get the JS super class value through - * the magic method inserted by `ExplicitLocalJS`, leveraging `lambdalift` - * to ensure that it is properly captured. - * - * Using this identifier is only allowed if it was reserved in the current - * local name scope using [[reserveLocalName]]. Otherwise, this name can - * clash with another local identifier. - */ - final val JSSuperClassParamName = LocalName("superClass$") - - private val ScalaRuntimeNothingClassName = ClassName("scala.runtime.Nothing$") - private val ScalaRuntimeNullClassName = ClassName("scala.runtime.Null$") - - private val dynamicImportForwarderSimpleName = SimpleMethodName("dynamicImport$") - - // Fresh local name generator ---------------------------------------------- - - class LocalNameGenerator { - import LocalNameGenerator._ - - private val usedLocalNames = mutable.Set.empty[LocalName] - private val localSymbolNames = mutable.Map.empty[Symbol, LocalName] - private val usedLabelNames = mutable.Set.empty[LabelName] - private val labelSymbolNames = mutable.Map.empty[Symbol, LabelName] - private var returnLabelName: Option[LabelName] = None - - def reserveLocalName(name: LocalName): Unit = { - require(usedLocalNames.isEmpty, - s"Trying to reserve the name '$name' but names have already been allocated") - usedLocalNames += name - } - - private def freshNameGeneric[N <: ir.Names.Name](base: N, usedNamesSet: mutable.Set[N])( - withSuffix: (N, String) => N): N = { - - var suffix = 1 - var result = base - while (usedNamesSet(result)) { - suffix += 1 - result = withSuffix(base, "$" + suffix) - } - usedNamesSet += result - result - } - - def freshName(base: LocalName): LocalName = - freshNameGeneric(base, usedLocalNames)(_.withSuffix(_)) - - def freshName(base: String): LocalName = - freshName(LocalName(base)) - - def freshLocalIdent()(implicit pos: ir.Position): js.LocalIdent = - js.LocalIdent(freshName(xLocalName)) - - def freshLocalIdent(base: LocalName)(implicit pos: ir.Position): js.LocalIdent = - js.LocalIdent(freshName(base)) - - def freshLocalIdent(base: String)(implicit pos: ir.Position): js.LocalIdent = - freshLocalIdent(LocalName(base)) - - def freshLocalIdent(base: TermName)(implicit pos: ir.Position): js.LocalIdent = - freshLocalIdent(base.mangledString) - - def localSymbolName(sym: Symbol)(using Context): LocalName = { - localSymbolNames.getOrElseUpdate(sym, { - /* The emitter does not like local variables that start with a '$', - * because it needs to encode them not to clash with emitter-generated - * names. There are two common cases, caused by scalac-generated names: - * - the `$this` parameter of tailrec methods and "extension" methods of - * AnyVals, which scalac knows as `nme.SELF`, and - * - the `$outer` parameter of inner class constructors, which scalac - * knows as `nme.OUTER`. 
- * We choose different base names for those two cases instead, so that - * the avoidance mechanism of the emitter doesn't happen as a common - * case. It can still happen for user-defined variables, but in that case - * the emitter will deal with it. - */ - val base = sym.name match { - case nme.SELF => "this$" // instead of $this - case nme.OUTER => "outer" // instead of $outer - case name => name.mangledString - } - freshName(base) - }) - } - - def freshLabelName(base: LabelName): LabelName = - freshNameGeneric(base, usedLabelNames)(_.withSuffix(_)) - - def freshLabelName(base: String): LabelName = - freshLabelName(LabelName(base)) - - def freshLabelIdent(base: String)(implicit pos: ir.Position): js.LabelIdent = - js.LabelIdent(freshLabelName(base)) - - def labelSymbolName(sym: Symbol)(using Context): LabelName = - labelSymbolNames.getOrElseUpdate(sym, freshLabelName(sym.javaSimpleName)) - - def getEnclosingReturnLabel()(implicit pos: ir.Position): js.LabelIdent = { - if (returnLabelName.isEmpty) - returnLabelName = Some(freshLabelName("_return")) - js.LabelIdent(returnLabelName.get) - } - - /* If this `LocalNameGenerator` has a `returnLabelName` (often added in the - * construction of the `body` argument), wrap the resulting js.Tree to use that label. - */ - def makeLabeledIfRequiresEnclosingReturn(tpe: jstpe.Type)(body: js.Tree)(implicit pos: ir.Position): js.Tree = { - returnLabelName match { - case None => - body - case Some(labelName) => - js.Labeled(js.LabelIdent(labelName), tpe, body) - } - } - } - - private object LocalNameGenerator { - private val xLocalName = LocalName("x") - } - - // Encoding methods ---------------------------------------------------------- - - def encodeLabelSym(sym: Symbol)( - implicit ctx: Context, pos: ir.Position, localNames: LocalNameGenerator): js.LabelIdent = { - require(sym.is(Flags.Label), "encodeLabelSym called with non-label symbol: " + sym) - js.LabelIdent(localNames.labelSymbolName(sym)) - } - - def encodeFieldSym(sym: Symbol)(implicit ctx: Context, pos: ir.Position): js.FieldIdent = - js.FieldIdent(FieldName(encodeFieldSymAsString(sym))) - - def encodeFieldSymAsStringLiteral(sym: Symbol)(implicit ctx: Context, pos: ir.Position): js.StringLiteral = - js.StringLiteral(encodeFieldSymAsString(sym)) - - private def encodeFieldSymAsString(sym: Symbol)(using Context): String = { - require(sym.owner.isClass && sym.isTerm && !sym.isOneOf(MethodOrModule), - "encodeFieldSym called with non-field symbol: " + sym) - - val name0 = sym.javaSimpleName - if (name0.charAt(name0.length() - 1) != ' ') name0 - else name0.substring(0, name0.length() - 1) - } - - def encodeMethodSym(sym: Symbol, reflProxy: Boolean = false)( - implicit ctx: Context, pos: ir.Position): js.MethodIdent = { - require(sym.is(Flags.Method), "encodeMethodSym called with non-method symbol: " + sym) - - val tpe = sym.info - - val paramTypeRefs0 = tpe.firstParamTypes.map(paramOrResultTypeRef(_)) - - val hasExplicitThisParameter = !sym.is(JavaStatic) && sym.owner.isNonNativeJSClass - val paramTypeRefs = - if (!hasExplicitThisParameter) paramTypeRefs0 - else encodeClassRef(sym.owner) :: paramTypeRefs0 - - val name = sym.name - val simpleName = SimpleMethodName(name.mangledString) - - val methodName = { - if (sym.isClassConstructor) - MethodName.constructor(paramTypeRefs) - else if (reflProxy) - MethodName.reflectiveProxy(simpleName, paramTypeRefs) - else - MethodName(simpleName, paramTypeRefs, paramOrResultTypeRef(patchedResultType(sym))) - } - - js.MethodIdent(methodName) - } - - def 
encodeJSNativeMemberSym(sym: Symbol)(using Context, ir.Position): js.MethodIdent = { - require(sym.hasAnnotation(jsdefn.JSNativeAnnot), - "encodeJSNativeMemberSym called with non-native symbol: " + sym) - if (sym.is(Method)) - encodeMethodSym(sym) - else - encodeFieldSymAsMethod(sym) - } - - def encodeStaticMemberSym(sym: Symbol)(using Context, ir.Position): js.MethodIdent = { - require(sym.is(Flags.JavaStaticTerm), - "encodeStaticMemberSym called with non-static symbol: " + sym) - encodeFieldSymAsMethod(sym) - } - - private def encodeFieldSymAsMethod(sym: Symbol)(using Context, ir.Position): js.MethodIdent = { - val name = sym.name - val resultTypeRef = paramOrResultTypeRef(sym.info) - val methodName = MethodName(name.mangledString, Nil, resultTypeRef) - js.MethodIdent(methodName) - } - - def encodeDynamicImportForwarderIdent(params: List[Symbol])(using Context, ir.Position): js.MethodIdent = { - val paramTypeRefs = params.map(sym => paramOrResultTypeRef(sym.info)) - val resultTypeRef = jstpe.ClassRef(ir.Names.ObjectClass) - val methodName = MethodName(dynamicImportForwarderSimpleName, paramTypeRefs, resultTypeRef) - js.MethodIdent(methodName) - } - - /** Computes the type ref for a type, to be used in a method signature. */ - private def paramOrResultTypeRef(tpe: Type)(using Context): jstpe.TypeRef = - toParamOrResultTypeRef(toTypeRef(tpe)) - - def encodeLocalSym(sym: Symbol)( - implicit ctx: Context, pos: ir.Position, localNames: LocalNameGenerator): js.LocalIdent = { - require(!sym.owner.isClass && sym.isTerm && !sym.is(Flags.Method) && !sym.is(Flags.Module), - "encodeLocalSym called with non-local symbol: " + sym) - js.LocalIdent(localNames.localSymbolName(sym)) - } - - def encodeClassType(sym: Symbol)(using Context): jstpe.Type = { - if (sym == defn.ObjectClass) jstpe.AnyType - else if (sym.isJSType) jstpe.AnyType - else { - assert(sym != defn.ArrayClass, - "encodeClassType() cannot be called with ArrayClass") - jstpe.ClassType(encodeClassName(sym)) - } - } - - def encodeClassRef(sym: Symbol)(using Context): jstpe.ClassRef = - jstpe.ClassRef(encodeClassName(sym)) - - def encodeClassNameIdent(sym: Symbol)( - implicit ctx: Context, pos: ir.Position): js.ClassIdent = - js.ClassIdent(encodeClassName(sym)) - - def encodeClassName(sym: Symbol)(using Context): ClassName = { - val sym1 = - if (sym.isAllOf(ModuleClass | JavaDefined)) sym.linkedClass - else sym - - /* Some rewirings: - * - scala.runtime.BoxedUnit to java.lang.Void, as the IR expects. - * BoxedUnit$ is a JVM artifact. - * - scala.Nothing to scala.runtime.Nothing$. - * - scala.Null to scala.runtime.Null$. - */ - if (sym1 == defn.BoxedUnitClass) - ir.Names.BoxedUnitClass - else if (sym1 == defn.NothingClass) - ScalaRuntimeNothingClassName - else if (sym1 == defn.NullClass) - ScalaRuntimeNullClassName - else - ClassName(sym1.javaClassName) - } - - /** Converts a general TypeRef to a TypeRef to be used in a method signature. 
*/ - def toParamOrResultTypeRef(typeRef: jstpe.TypeRef): jstpe.TypeRef = { - typeRef match { - case jstpe.ClassRef(ScalaRuntimeNullClassName) => jstpe.NullRef - case jstpe.ClassRef(ScalaRuntimeNothingClassName) => jstpe.NothingRef - case _ => typeRef - } - } - - def toIRTypeAndTypeRef(tp: Type)(using Context): (jstpe.Type, jstpe.TypeRef) = { - val typeRefInternal = toTypeRefInternal(tp) - (toIRTypeInternal(typeRefInternal), typeRefInternal._1) - } - - def toIRType(tp: Type)(using Context): jstpe.Type = - toIRTypeInternal(toTypeRefInternal(tp)) - - private def toIRTypeInternal(typeRefInternal: (jstpe.TypeRef, Symbol))(using Context): jstpe.Type = { - typeRefInternal._1 match { - case jstpe.PrimRef(irTpe) => - irTpe - - case typeRef: jstpe.ClassRef => - val sym = typeRefInternal._2 - if (sym == defn.ObjectClass || sym.isJSType) - jstpe.AnyType - else if (sym == defn.NothingClass) - jstpe.NothingType - else if (sym == defn.NullClass) - jstpe.NullType - else - jstpe.ClassType(typeRef.className) - - case typeRef: jstpe.ArrayTypeRef => - jstpe.ArrayType(typeRef) - } - } - - def toTypeRef(tp: Type)(using Context): jstpe.TypeRef = - toTypeRefInternal(tp)._1 - - private def toTypeRefInternal(tp: Type)(using Context): (jstpe.TypeRef, Symbol) = { - def primitiveOrClassToTypeRef(sym: Symbol): (jstpe.TypeRef, Symbol) = { - assert(sym.isClass, sym) - //assert(sym != defn.ArrayClass || isCompilingArray, sym) - val typeRef = if (sym.isPrimitiveValueClass) { - if (sym == defn.UnitClass) jstpe.VoidRef - else if (sym == defn.BooleanClass) jstpe.BooleanRef - else if (sym == defn.CharClass) jstpe.CharRef - else if (sym == defn.ByteClass) jstpe.ByteRef - else if (sym == defn.ShortClass) jstpe.ShortRef - else if (sym == defn.IntClass) jstpe.IntRef - else if (sym == defn.LongClass) jstpe.LongRef - else if (sym == defn.FloatClass) jstpe.FloatRef - else if (sym == defn.DoubleClass) jstpe.DoubleRef - else throw new Exception(s"unknown primitive value class $sym") - } else { - encodeClassRef(sym) - } - (typeRef, sym) - } - - /** - * When compiling Array.scala, the type parameter T is not erased and shows up in method - * signatures, e.g. `def apply(i: Int): T`. A TyperRef to T is replaced by ObjectReference. - */ - def nonClassTypeRefToTypeRef(sym: Symbol): (jstpe.TypeRef, Symbol) = { - //assert(sym.isType && isCompilingArray, sym) - (jstpe.ClassRef(ir.Names.ObjectClass), defn.ObjectClass) - } - - tp.widenDealias match { - // Array type such as Array[Int] (kept by erasure) - case JavaArrayType(el) => - val elTypeRef = toTypeRefInternal(el) - (jstpe.ArrayTypeRef.of(elTypeRef._1), elTypeRef._2) - - case t: TypeRef => - if (!t.symbol.isClass) nonClassTypeRefToTypeRef(t.symbol) // See comment on nonClassTypeRefToBType - else primitiveOrClassToTypeRef(t.symbol) // Common reference to a type such as scala.Int or java.lang.String - - case Types.ClassInfo(_, sym, _, _, _) => - /* We get here, for example, for genLoadModule, which invokes - * toTypeKind(moduleClassSymbol.info) - */ - primitiveOrClassToTypeRef(sym) - - /* AnnotatedType should (probably) be eliminated by erasure. However we know it happens for - * meta-annotated annotations (@(ann @getter) val x = 0), so we don't emit a warning. - * The type in the AnnotationInfo is an AnnotatedTpe. Tested in jvm/annotations.scala. - */ - case a @ AnnotatedType(t, _) => - //debuglog(s"typeKind of annotated type $a") - toTypeRefInternal(t) - } - } - - /** Patches the result type of a method symbol to sanitize it. 
- * - * For some reason, dotc thinks that the `info.resultType`of an - * `isConstructor` method (for classes or traits) is the enclosing class - * or trait, but the bodies and usages act as if the result type was `Unit`. - * - * This method returns `UnitType` for constructor methods, and otherwise - * `sym.info.resultType`. - */ - def patchedResultType(sym: Symbol)(using Context): Type = - if (sym.isConstructor) defn.UnitType - else sym.info.resultType - - def originalNameOfLocal(sym: Symbol)( - implicit ctx: Context, localNames: LocalNameGenerator): OriginalName = { - val irName = localNames.localSymbolName(sym) - val originalName = UTF8String(sym.name.unexpandedName.toString) - if (UTF8String.equals(originalName, irName.encoded)) NoOriginalName - else OriginalName(originalName) - } - - def originalNameOfField(sym: Symbol)(using Context): OriginalName = - originalNameOf(sym.name) - - def originalNameOfMethod(sym: Symbol)(using Context): OriginalName = - originalNameOf(sym.name) - - def originalNameOfClass(sym: Symbol)(using Context): OriginalName = - originalNameOf(sym.fullName) - - private def originalNameOf(name: Name): OriginalName = { - val originalName = name.unexpandedName.toString - if (originalName == name.mangledString) NoOriginalName - else OriginalName(originalName) - } -} diff --git a/tests/pos-with-compiler-cc/backend/sjs/JSExportsGen.scala b/tests/pos-with-compiler-cc/backend/sjs/JSExportsGen.scala deleted file mode 100644 index 78412999bb34..000000000000 --- a/tests/pos-with-compiler-cc/backend/sjs/JSExportsGen.scala +++ /dev/null @@ -1,1025 +0,0 @@ -package dotty.tools.backend.sjs - -import scala.language.unsafeNulls - -import scala.annotation.tailrec -import scala.collection.mutable - -import dotty.tools.dotc.core._ - -import Contexts._ -import Decorators._ -import Denotations._ -import Flags._ -import Names._ -import NameKinds.DefaultGetterName -import NameOps._ -import Phases._ -import Symbols._ -import Types._ -import TypeErasure.ErasedValueType - -import dotty.tools.dotc.util.{SourcePosition, SrcPos} -import dotty.tools.dotc.report - -import org.scalajs.ir.{Position, Names => jsNames, Trees => js, Types => jstpe} -import org.scalajs.ir.Names.DefaultModuleID -import org.scalajs.ir.OriginalName.NoOriginalName -import org.scalajs.ir.Position.NoPosition -import org.scalajs.ir.Trees.OptimizerHints - -import dotty.tools.dotc.transform.sjs.JSExportUtils._ -import dotty.tools.dotc.transform.sjs.JSSymUtils._ - -import JSEncoding._ - -final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { - import jsCodeGen._ - import positionConversions._ - - /** Info for a non-member export. 
*/ - sealed trait ExportInfo { - val pos: SourcePosition - } - - final case class TopLevelExportInfo(moduleID: String, jsName: String)(val pos: SourcePosition) extends ExportInfo - final case class StaticExportInfo(jsName: String)(val pos: SourcePosition) extends ExportInfo - - private sealed trait ExportKind - - private object ExportKind { - case object Module extends ExportKind - case object JSClass extends ExportKind - case object Constructor extends ExportKind - case object Method extends ExportKind - case object Property extends ExportKind - case object Field extends ExportKind - - def apply(sym: Symbol): ExportKind = { - if (sym.is(Flags.Module) && sym.isStatic) Module - else if (sym.isClass) JSClass - else if (sym.isConstructor) Constructor - else if (!sym.is(Flags.Method)) Field - else if (sym.isJSProperty) Property - else Method - } - } - - private def topLevelExportsOf(sym: Symbol): List[TopLevelExportInfo] = { - def isScalaClass(sym: Symbol): Boolean = - sym.isClass && !sym.isOneOf(Module | Trait) && !sym.isJSType - - if (isScalaClass(sym)) { - // Scala classes are never exported; their constructors are - Nil - } else if (sym.is(Accessor) || sym.is(Module, butNot = ModuleClass)) { - /* - Accessors receive the `@JSExportTopLevel` annotation of their associated field, - * but only the field is really exported. - * - Module values are not exported; their module class takes care of the export. - */ - Nil - } else { - val symForAnnot = - if (sym.isConstructor && isScalaClass(sym.owner)) sym.owner - else sym - - symForAnnot.annotations.collect { - case annot if annot.symbol == jsdefn.JSExportTopLevelAnnot => - val jsName = annot.argumentConstantString(0).get - val moduleID = annot.argumentConstantString(1).getOrElse(DefaultModuleID) - TopLevelExportInfo(moduleID, jsName)(annot.tree.sourcePos) - } - } - } - - private def staticExportsOf(sym: Symbol): List[StaticExportInfo] = { - if (sym.is(Accessor)) { - Nil - } else { - sym.annotations.collect { - case annot if annot.symbol == jsdefn.JSExportStaticAnnot => - val jsName = annot.argumentConstantString(0).getOrElse { - sym.defaultJSName - } - StaticExportInfo(jsName)(annot.tree.sourcePos) - } - } - } - - private def checkSameKind(tups: List[(ExportInfo, Symbol)]): Option[ExportKind] = { - assert(tups.nonEmpty, "must have at least one export") - - val firstSym = tups.head._2 - val overallKind = ExportKind(firstSym) - var bad = false - - for ((info, sym) <- tups.tail) { - val kind = ExportKind(sym) - - if (kind != overallKind) { - bad = true - report.error( - em"export overload conflicts with export of $firstSym: they are of different types (${kind.tryToShow} / ${overallKind.tryToShow})", - info.pos) - } - } - - if (bad) None - else Some(overallKind) - } - - private def checkSingleField(tups: List[(ExportInfo, Symbol)]): Symbol = { - assert(tups.nonEmpty, "must have at least one export") - - val firstSym = tups.head._2 - - for ((info, _) <- tups.tail) { - report.error( - em"export overload conflicts with export of $firstSym: a field may not share its exported name with another export", - info.pos) - } - - firstSym - } - - def genTopLevelExports(classSym: ClassSymbol): List[js.TopLevelExportDef] = { - val exports = for { - sym <- classSym :: classSym.info.decls.toList - info <- topLevelExportsOf(sym) - } yield { - (info, sym) - } - - (for { - (info, tups) <- exports.groupBy(_._1) - kind <- checkSameKind(tups) - } yield { - import ExportKind._ - - implicit val pos = info.pos - - kind match { - case Module => - 
js.TopLevelModuleExportDef(info.moduleID, info.jsName) - - case JSClass => - assert(classSym.isNonNativeJSClass, "found export on non-JS class") - js.TopLevelJSClassExportDef(info.moduleID, info.jsName) - - case Constructor | Method => - val exported = tups.map(_._2) - - val methodDef = withNewLocalNameScope { - genExportMethod(exported, JSName.Literal(info.jsName), static = true) - } - - js.TopLevelMethodExportDef(info.moduleID, methodDef) - - case Property => - throw new AssertionError("found top-level exported property") - - case Field => - val sym = checkSingleField(tups) - js.TopLevelFieldExportDef(info.moduleID, info.jsName, encodeFieldSym(sym)) - } - }).toList - } - - def genStaticExports(classSym: Symbol): List[js.MemberDef] = { - val exports = for { - sym <- classSym.info.decls.toList - info <- staticExportsOf(sym) - } yield { - (info, sym) - } - - (for { - (info, tups) <- exports.groupBy(_._1) - kind <- checkSameKind(tups) - } yield { - def alts = tups.map(_._2) - - implicit val pos = info.pos - - import ExportKind._ - - kind match { - case Method => - genMemberExportOrDispatcher(JSName.Literal(info.jsName), isProp = false, alts, static = true) - - case Property => - genMemberExportOrDispatcher(JSName.Literal(info.jsName), isProp = true, alts, static = true) - - case Field => - val sym = checkSingleField(tups) - - // static fields must always be mutable - val flags = js.MemberFlags.empty - .withNamespace(js.MemberNamespace.PublicStatic) - .withMutable(true) - val name = js.StringLiteral(info.jsName) - val irTpe = genExposedFieldIRType(sym) - js.JSFieldDef(flags, name, irTpe) - - case kind => - throw new AssertionError(s"unexpected static export kind: $kind") - } - }).toList - } - - /** Generates exported methods and properties for a class. - * - * @param classSym symbol of the class we export for - */ - def genMemberExports(classSym: ClassSymbol): List[js.MemberDef] = { - val classInfo = classSym.info - val allExports = classInfo.memberDenots(takeAllFilter, { (name, buf) => - if (isExportName(name)) - buf ++= classInfo.member(name).alternatives - }) - - val newlyDeclaredExports = if (classSym.superClass == NoSymbol) { - allExports - } else { - allExports.filterNot { denot => - classSym.superClass.info.member(denot.name).hasAltWith(_.info =:= denot.info) - } - } - - val newlyDeclaredExportNames = newlyDeclaredExports.map(_.name.toTermName).toList.distinct - - newlyDeclaredExportNames.map(genMemberExport(classSym, _)) - } - - private def genMemberExport(classSym: ClassSymbol, name: TermName): js.MemberDef = { - /* This used to be `.member(name)`, but it caused #3538, since we were - * sometimes selecting mixin forwarders, whose type history does not go - * far enough back in time to see varargs. We now explicitly exclude - * mixed-in members in addition to bridge methods (the latter are always - * excluded by `.member(name)`). - */ - val alts = classSym - .findMemberNoShadowingBasedOnFlags(name, classSym.appliedRef, required = Method, excluded = Bridge | MixedIn) - .alternatives - - assert(!alts.isEmpty, - em"""Ended up with no alternatives for ${classSym.fullName}::$name. 
- |Original set was ${alts} with types ${alts.map(_.info)}""") - - val (jsName, isProp) = exportNameInfo(name) - - // Check if we have a conflicting export of the other kind - val conflicting = classSym.info.member(makeExportName(jsName, !isProp)) - - if (conflicting.exists) { - val kind = if (isProp) "property" else "method" - val conflictingMember = conflicting.alternatives.head.symbol.fullName - val errorPos: SrcPos = alts.map(_.symbol).filter(_.owner == classSym) match { - case Nil => classSym - case altsInClass => altsInClass.minBy(_.span.point) - } - report.error(em"Exported $kind $jsName conflicts with $conflictingMember", errorPos) - } - - genMemberExportOrDispatcher(JSName.Literal(jsName), isProp, alts.map(_.symbol), static = false) - } - - def genJSClassDispatchers(classSym: Symbol, dispatchMethodsNames: List[JSName]): List[js.MemberDef] = { - dispatchMethodsNames.map(genJSClassDispatcher(classSym, _)) - } - - private def genJSClassDispatcher(classSym: Symbol, name: JSName): js.MemberDef = { - val alts = classSym.info.membersBasedOnFlags(required = Method, excluded = Bridge) - .map(_.symbol) - .filter { sym => - /* scala-js#3939: Object is not a "real" superclass of JS types. - * as such, its methods do not participate in overload resolution. - * An exception is toString, which is handled specially in genExportMethod. - */ - sym.owner != defn.ObjectClass && sym.jsName == name - } - .toList - - assert(!alts.isEmpty, s"Ended up with no alternatives for ${classSym.fullName}::$name.") - - val (propSyms, methodSyms) = alts.partition(_.isJSProperty) - val isProp = propSyms.nonEmpty - - if (isProp && methodSyms.nonEmpty) { - val firstAlt = alts.head - report.error( - em"Conflicting properties and methods for ${classSym.fullName}::$name.", - firstAlt.srcPos) - implicit val pos = firstAlt.span - js.JSPropertyDef(js.MemberFlags.empty, genExpr(name)(firstAlt.sourcePos), None, None) - } else { - genMemberExportOrDispatcher(name, isProp, alts, static = false) - } - } - - private def genMemberExportOrDispatcher(jsName: JSName, isProp: Boolean, - alts: List[Symbol], static: Boolean): js.MemberDef = { - withNewLocalNameScope { - if (isProp) - genExportProperty(alts, jsName, static) - else - genExportMethod(alts, jsName, static) - } - } - - private def genExportProperty(alts: List[Symbol], jsName: JSName, static: Boolean): js.JSPropertyDef = { - assert(!alts.isEmpty, s"genExportProperty with empty alternatives for $jsName") - - implicit val pos: Position = alts.head.span - - val namespace = - if (static) js.MemberNamespace.PublicStatic - else js.MemberNamespace.Public - val flags = js.MemberFlags.empty.withNamespace(namespace) - - /* Separate getters and setters. Since we only have getters and setters, we - * simply test the param list size, which is faster than using the full isJSGetter. 
- */ - val (getter, setters) = alts.partition(_.info.paramInfoss.head.isEmpty) - - // We can have at most one getter - if (getter.sizeIs > 1) - reportCannotDisambiguateError(jsName, alts) - - val getterBody = getter.headOption.map { getterSym => - genApplyForSingleExported(new FormalArgsRegistry(0, false), new ExportedSymbol(getterSym, static), static) - } - - val setterArgAndBody = { - if (setters.isEmpty) { - None - } else { - val formalArgsRegistry = new FormalArgsRegistry(1, false) - val (List(arg), None) = formalArgsRegistry.genFormalArgs(): @unchecked - val body = genOverloadDispatchSameArgc(jsName, formalArgsRegistry, - setters.map(new ExportedSymbol(_, static)), jstpe.AnyType, None) - Some((arg, body)) - } - } - - js.JSPropertyDef(flags, genExpr(jsName)(alts.head.sourcePos), getterBody, setterArgAndBody) - } - - private def genExportMethod(alts0: List[Symbol], jsName: JSName, static: Boolean)(using Context): js.JSMethodDef = { - assert(alts0.nonEmpty, "need at least one alternative to generate exporter method") - - implicit val pos: SourcePosition = alts0.head.sourcePos - - val namespace = - if (static) js.MemberNamespace.PublicStatic - else js.MemberNamespace.Public - val flags = js.MemberFlags.empty.withNamespace(namespace) - - // toString() is always exported. We might need to add it here to get correct overloading. - val alts = jsName match { - case JSName.Literal("toString") if alts0.forall(_.info.paramInfoss.exists(_.nonEmpty)) => - defn.Any_toString :: alts0 - case _ => - alts0 - } - - val overloads = alts.map(new ExportedSymbol(_, static)) - - val (formalArgs, restParam, body) = - genOverloadDispatch(jsName, overloads, jstpe.AnyType) - - js.JSMethodDef(flags, genExpr(jsName), formalArgs, restParam, body)( - OptimizerHints.empty, None) - } - - def genOverloadDispatch(jsName: JSName, alts: List[Exported], tpe: jstpe.Type)( - using pos: SourcePosition): (List[js.ParamDef], Option[js.ParamDef], js.Tree) = { - - // Create the formal args registry - val hasVarArg = alts.exists(_.hasRepeatedParam) - val minArgc = alts.map(_.minArgc).min - val maxNonRepeatedArgc = alts.map(_.maxNonRepeatedArgc).max - val needsRestParam = maxNonRepeatedArgc != minArgc || hasVarArg - val formalArgsRegistry = new FormalArgsRegistry(minArgc, needsRestParam) - - // Generate the list of formal parameters - val (formalArgs, restParam) = formalArgsRegistry.genFormalArgs() - - /* Generate the body - * We have a fast-path for methods that are not overloaded. In addition to - * being a fast path, it does a better job than `genExportMethodMultiAlts` - * when the only alternative has default parameters, because it avoids a - * spurious dispatch. - * In scalac, the spurious dispatch was avoided by a more elaborate case - * generation in `genExportMethod`, which was very convoluted and was not - * ported to dotc. 
- */ - val body = - if (alts.tail.isEmpty) alts.head.genBody(formalArgsRegistry) - else genExportMethodMultiAlts(formalArgsRegistry, maxNonRepeatedArgc, alts, tpe, jsName) - - (formalArgs, restParam, body) - } - - private def genExportMethodMultiAlts(formalArgsRegistry: FormalArgsRegistry, - maxNonRepeatedArgc: Int, alts: List[Exported], tpe: jstpe.Type, jsName: JSName)( - implicit pos: SourcePosition): js.Tree = { - - // Generate tuples (argc, method) - val methodArgCounts = for { - alt <- alts - argc <- alt.minArgc to (if (alt.hasRepeatedParam) maxNonRepeatedArgc else alt.maxNonRepeatedArgc) - } yield { - (argc, alt) - } - - // Create a list of (argCount -> methods), sorted by argCount (methods may appear multiple times) - val methodsByArgCount: List[(Int, List[Exported])] = - methodArgCounts.groupMap(_._1)(_._2).toList.sortBy(_._1) // sort for determinism - - val altsWithVarArgs = alts.filter(_.hasRepeatedParam) - - // Generate a case block for each (argCount, methods) tuple - // TODO? We could optimize this a bit by putting together all the `argCount`s that have the same methods - // (Scala.js for scalac does that, but the code is very convoluted and it's not clear that it is worth it). - val cases = for { - (argc, methods) <- methodsByArgCount - if methods != altsWithVarArgs // exclude default case we're generating anyways for varargs - } yield { - // body of case to disambiguates methods with current count - val caseBody = genOverloadDispatchSameArgc(jsName, formalArgsRegistry, methods, tpe, Some(argc)) - List(js.IntLiteral(argc - formalArgsRegistry.minArgc)) -> caseBody - } - - def defaultCase = { - if (altsWithVarArgs.isEmpty) - genThrowTypeError() - else - genOverloadDispatchSameArgc(jsName, formalArgsRegistry, altsWithVarArgs, tpe, None) - } - - val body = { - if (cases.isEmpty) { - defaultCase - } else if (cases.tail.isEmpty && altsWithVarArgs.isEmpty) { - cases.head._2 - } else { - val restArgRef = formalArgsRegistry.genRestArgRef() - js.Match( - js.AsInstanceOf(js.JSSelect(restArgRef, js.StringLiteral("length")), jstpe.IntType), - cases, - defaultCase)( - tpe) - } - } - - body - } - - /** Resolves method calls to [[alts]] while assuming they have the same parameter count. - * - * @param jsName - * The JS name of the method, for error reporting - * @param formalArgsRegistry - * The registry of all the formal arguments - * @param alts - * Alternative methods - * @param tpe - * Result type - * @param maxArgc - * Maximum number of arguments to use for disambiguation - */ - private def genOverloadDispatchSameArgc(jsName: JSName, formalArgsRegistry: FormalArgsRegistry, - alts: List[Exported], tpe: jstpe.Type, maxArgc: Option[Int]): js.Tree = { - genOverloadDispatchSameArgcRec(jsName, formalArgsRegistry, alts, tpe, paramIndex = 0, maxArgc) - } - - /** Resolves method calls to [[alts]] while assuming they have the same parameter count. 
- * - * @param jsName - * The JS name of the method, for error reporting - * @param formalArgsRegistry - * The registry of all the formal arguments - * @param alts - * Alternative methods - * @param tpe - * Result type - * @param paramIndex - * Index where to start disambiguation (starts at 0, increases through recursion) - * @param maxArgc - * Maximum number of arguments to use for disambiguation - */ - private def genOverloadDispatchSameArgcRec(jsName: JSName, formalArgsRegistry: FormalArgsRegistry, - alts: List[Exported], tpe: jstpe.Type, paramIndex: Int, maxArgc: Option[Int]): js.Tree = { - - implicit val pos = alts.head.pos - - if (alts.sizeIs == 1) { - alts.head.genBody(formalArgsRegistry) - } else if (maxArgc.exists(_ <= paramIndex) || !alts.exists(_.params.size > paramIndex)) { - // We reach here in three cases: - // 1. The parameter list has been exhausted - // 2. The optional argument count restriction has triggered - // 3. We only have (more than once) repeated parameters left - // Therefore, we should fail - reportCannotDisambiguateError(jsName, alts.map(_.sym)) - js.Undefined() - } else { - val altsByTypeTest = groupByWithoutHashCode(alts) { exported => - typeTestForTpe(exported.exportArgTypeAt(paramIndex)) - } - - if (altsByTypeTest.size == 1) { - // Testing this parameter is not doing any us good - genOverloadDispatchSameArgcRec(jsName, formalArgsRegistry, alts, tpe, paramIndex + 1, maxArgc) - } else { - // Sort them so that, e.g., isInstanceOf[String] comes before isInstanceOf[Object] - val sortedAltsByTypeTest = topoSortDistinctsWith(altsByTypeTest) { (lhs, rhs) => - (lhs._1, rhs._1) match { - // NoTypeTest is always last - case (_, NoTypeTest) => true - case (NoTypeTest, _) => false - - case (PrimitiveTypeTest(_, rank1), PrimitiveTypeTest(_, rank2)) => - rank1 <= rank2 - - case (InstanceOfTypeTest(t1), InstanceOfTypeTest(t2)) => - t1 <:< t2 - - case (_: PrimitiveTypeTest, _: InstanceOfTypeTest) => true - case (_: InstanceOfTypeTest, _: PrimitiveTypeTest) => false - } - } - - val defaultCase = genThrowTypeError() - - sortedAltsByTypeTest.foldRight[js.Tree](defaultCase) { (elem, elsep) => - val (typeTest, subAlts) = elem - implicit val pos = subAlts.head.pos - - val paramRef = formalArgsRegistry.genArgRef(paramIndex) - val genSubAlts = genOverloadDispatchSameArgcRec(jsName, formalArgsRegistry, - subAlts, tpe, paramIndex + 1, maxArgc) - - def hasDefaultParam = subAlts.exists(_.hasDefaultAt(paramIndex)) - - val optCond = typeTest match { - case PrimitiveTypeTest(tpe, _) => Some(js.IsInstanceOf(paramRef, tpe)) - case InstanceOfTypeTest(tpe) => Some(genIsInstanceOf(paramRef, tpe)) - case NoTypeTest => None - } - - optCond.fold[js.Tree] { - genSubAlts // note: elsep is discarded, obviously - } { cond => - val condOrUndef = if (!hasDefaultParam) cond else { - js.If(cond, js.BooleanLiteral(true), - js.BinaryOp(js.BinaryOp.===, paramRef, js.Undefined()))( - jstpe.BooleanType) - } - js.If(condOrUndef, genSubAlts, elsep)(tpe) - } - } - } - } - } - - private def reportCannotDisambiguateError(jsName: JSName, alts: List[Symbol]): Unit = { - val currentClass = currentClassSym.get - - /* Find a position that is in the current class for decent error reporting. - * If there are more than one, always use the "highest" one (i.e., the - * one coming last in the source text) so that we reliably display the - * same error in all compilers. 
- */ - val validPositions = alts.collect { - case alt if alt.owner == currentClass => alt.sourcePos - } - val pos: SourcePosition = - if (validPositions.isEmpty) currentClass.sourcePos - else validPositions.maxBy(_.point) - - val kind = - if (alts.head.isJSGetter) "getter" - else if (alts.head.isJSSetter) "setter" - else "method" - - val fullKind = - if (currentClass.isJSType) kind - else "exported " + kind - - val displayName = jsName.displayName - val altsTypesInfo = alts.map(_.info.show).sorted.mkString("\n ") - - report.error( - em"Cannot disambiguate overloads for $fullKind $displayName with types\n $altsTypesInfo", - pos) - } - - /** Generates a call to the method represented by the given `exported` while using the formalArguments - * and potentially the argument array. - * - * Also inserts default parameters if required. - */ - private def genApplyForSingleExported(formalArgsRegistry: FormalArgsRegistry, - exported: Exported, static: Boolean): js.Tree = { - if (currentClassSym.isJSType && exported.sym.owner != currentClassSym.get) { - assert(!static, s"nonsensical JS super call in static export of ${exported.sym}") - genApplyForSingleExportedJSSuperCall(formalArgsRegistry, exported) - } else { - genApplyForSingleExportedNonJSSuperCall(formalArgsRegistry, exported, static) - } - } - - private def genApplyForSingleExportedJSSuperCall( - formalArgsRegistry: FormalArgsRegistry, exported: Exported): js.Tree = { - implicit val pos = exported.pos - - val sym = exported.sym - assert(!sym.isClassConstructor, - s"Trying to genApplyForSingleExportedJSSuperCall for the constructor ${sym.fullName}") - - val allArgs = formalArgsRegistry.genAllArgsRefsForForwarder() - - val superClass = { - val superClassSym = currentClassSym.asClass.superClass - if (superClassSym.isNestedJSClass) - js.VarRef(js.LocalIdent(JSSuperClassParamName))(jstpe.AnyType) - else - js.LoadJSConstructor(encodeClassName(superClassSym)) - } - - val receiver = js.This()(currentThisType) - val nameTree = genExpr(sym.jsName) - - if (sym.isJSGetter) { - assert(allArgs.isEmpty, - s"getter symbol $sym does not have a getter signature") - js.JSSuperSelect(superClass, receiver, nameTree) - } else if (sym.isJSSetter) { - assert(allArgs.size == 1 && allArgs.head.isInstanceOf[js.Tree], - s"setter symbol $sym does not have a setter signature") - js.Assign(js.JSSuperSelect(superClass, receiver, nameTree), - allArgs.head.asInstanceOf[js.Tree]) - } else { - js.JSSuperMethodCall(superClass, receiver, nameTree, allArgs) - } - } - - private def genApplyForSingleExportedNonJSSuperCall( - formalArgsRegistry: FormalArgsRegistry, exported: Exported, static: Boolean): js.Tree = { - - implicit val pos = exported.pos - - val varDefs = new mutable.ListBuffer[js.VarDef] - - for ((param, i) <- exported.params.zipWithIndex) { - val rhs = genScalaArg(exported, i, formalArgsRegistry, param, static, captures = Nil)( - prevArgsCount => varDefs.take(prevArgsCount).toList.map(_.ref)) - - varDefs += js.VarDef(freshLocalIdent("prep" + i), NoOriginalName, rhs.tpe, mutable = false, rhs) - } - - val builtVarDefs = varDefs.result() - - val jsResult = genResult(exported, builtVarDefs.map(_.ref), static) - - js.Block(builtVarDefs :+ jsResult) - } - - /** Generates a Scala argument from dispatched JavaScript arguments - * (unboxing and default parameter handling). 
- */ - def genScalaArg(exported: Exported, paramIndex: Int, formalArgsRegistry: FormalArgsRegistry, - param: JSParamInfo, static: Boolean, captures: List[js.Tree])( - previousArgsValues: Int => List[js.Tree])( - implicit pos: SourcePosition): js.Tree = { - - if (param.repeated) { - genJSArrayToVarArgs(formalArgsRegistry.genVarargRef(paramIndex)) - } else { - val jsArg = formalArgsRegistry.genArgRef(paramIndex) - - // Unboxed argument (if it is defined) - val unboxedArg = unbox(jsArg, param.info) - - if (exported.hasDefaultAt(paramIndex)) { - // If argument is undefined and there is a default getter, call it - js.If(js.BinaryOp(js.BinaryOp.===, jsArg, js.Undefined()), { - genCallDefaultGetter(exported.sym, paramIndex, static, captures)(previousArgsValues) - }, { - unboxedArg - })(unboxedArg.tpe) - } else { - // Otherwise, it is always the unboxed argument - unboxedArg - } - } - } - - def genCallDefaultGetter(sym: Symbol, paramIndex: Int, - static: Boolean, captures: List[js.Tree])( - previousArgsValues: Int => List[js.Tree])( - implicit pos: SourcePosition): js.Tree = { - - val targetSym = targetSymForDefaultGetter(sym) - val defaultGetterDenot = this.defaultGetterDenot(targetSym, sym, paramIndex) - - assert(defaultGetterDenot.exists, s"need default getter for method ${sym.fullName}") - assert(!defaultGetterDenot.isOverloaded, i"found overloaded default getter $defaultGetterDenot") - val defaultGetter = defaultGetterDenot.symbol - - val targetTree = { - if (sym.isClassConstructor || static) { - if (targetSym.isStatic) { - assert(captures.isEmpty, i"expected empty captures for ${targetSym.fullName} at $pos") - genLoadModule(targetSym) - } else { - assert(captures.sizeIs == 1, "expected exactly one capture") - - // Find the module accessor. We cannot use memberBasedOnFlags because of scala-js/scala-js#4526. - val outer = targetSym.originalOwner - val name = atPhase(typerPhase)(targetSym.name.unexpandedName).sourceModuleName - val modAccessor = outer.info.allMembers.find { denot => - denot.symbol.is(Module) && denot.name.unexpandedName == name - }.getOrElse { - throw new AssertionError(i"could not find module accessor for ${targetSym.fullName} at $pos") - }.symbol - - val receiver = captures.head - if (outer.isJSType) - genApplyJSClassMethod(receiver, modAccessor, Nil) - else - genApplyMethodMaybeStatically(receiver, modAccessor, Nil) - } - } else { - js.This()(currentThisType) - } - } - - // Pass previous arguments to defaultGetter - val defaultGetterArgs = previousArgsValues(defaultGetter.info.paramInfoss.head.size) - - val callGetter = if (targetSym.isJSType) { - if (defaultGetter.owner.isNonNativeJSClass) { - if (defaultGetter.hasAnnotation(jsdefn.JSOptionalAnnot)) - js.Undefined() - else - genApplyJSClassMethod(targetTree, defaultGetter, defaultGetterArgs) - } else if (defaultGetter.owner == targetSym) { - /* We get here if a non-native constructor has a native companion. - * This is reported on a per-class level. 
- */ - assert(sym.isClassConstructor, - s"got non-constructor method $sym with default method in JS native companion") - js.Undefined() - } else { - report.error( - "When overriding a native method with default arguments, " + - "the overriding method must explicitly repeat the default arguments.", - sym.srcPos) - js.Undefined() - } - } else { - genApplyMethod(targetTree, defaultGetter, defaultGetterArgs) - } - - // #15419 If the getter returns void, we must "box" it by returning undefined - if (callGetter.tpe == jstpe.NoType) - js.Block(callGetter, js.Undefined()) - else - callGetter - } - - private def targetSymForDefaultGetter(sym: Symbol): Symbol = - if (sym.isClassConstructor) sym.owner.companionModule.moduleClass - else sym.owner - - private def defaultGetterDenot(targetSym: Symbol, sym: Symbol, paramIndex: Int): Denotation = - targetSym.info.memberBasedOnFlags(DefaultGetterName(sym.name.asTermName, paramIndex), excluded = Bridge) - - private def defaultGetterDenot(sym: Symbol, paramIndex: Int): Denotation = - defaultGetterDenot(targetSymForDefaultGetter(sym), sym, paramIndex) - - /** Generate the final forwarding call to the exported method. */ - private def genResult(exported: Exported, args: List[js.Tree], static: Boolean)( - implicit pos: SourcePosition): js.Tree = { - - val sym = exported.sym - val currentClass = currentClassSym.get - - def receiver = - if (static) genLoadModule(sym.owner) - else js.This()(currentThisType) - - def boxIfNeeded(call: js.Tree): js.Tree = - box(call, atPhase(elimErasedValueTypePhase)(sym.info.resultType)) - - if (currentClass.isNonNativeJSClass) { - assert(sym.owner == currentClass, sym.fullName) - boxIfNeeded(genApplyJSClassMethod(receiver, sym, args)) - } else { - if (sym.isClassConstructor) - js.New(encodeClassName(currentClass), encodeMethodSym(sym), args) - else if (sym.isPrivate) - boxIfNeeded(genApplyMethodStatically(receiver, sym, args)) - else - boxIfNeeded(genApplyMethod(receiver, sym, args)) - } - } - - private def genThrowTypeError(msg: String = "No matching overload")(implicit pos: Position): js.Tree = - js.Throw(js.JSNew(js.JSGlobalRef("TypeError"), js.StringLiteral(msg) :: Nil)) - - abstract class Exported( - val sym: Symbol, - // Parameters participating in overload resolution. 
- val params: scala.collection.immutable.IndexedSeq[JSParamInfo] - ) { - assert(!params.exists(_.capture), "illegal capture params in Exported") - - private val paramsHasDefault = { - if (!atPhase(elimRepeatedPhase)(sym.hasDefaultParams)) { - Vector.empty - } else { - val targetSym = targetSymForDefaultGetter(sym) - params.indices.map(i => defaultGetterDenot(targetSym, sym, i).exists) - } - } - - def hasDefaultAt(paramIndex: Int): Boolean = - paramIndex < paramsHasDefault.size && paramsHasDefault(paramIndex) - - val hasRepeatedParam = params.nonEmpty && params.last.repeated - - val minArgc = { - // Find the first default param or repeated param - params - .indices - .find(i => hasDefaultAt(i) || params(i).repeated) - .getOrElse(params.size) - } - - val maxNonRepeatedArgc = if (hasRepeatedParam) params.size - 1 else params.size - - def pos: SourcePosition = sym.sourcePos - - def exportArgTypeAt(paramIndex: Int): Type = { - if (paramIndex < params.length) { - params(paramIndex).info - } else { - assert(hasRepeatedParam, i"$sym does not have varargs nor enough params for $paramIndex") - params.last.info - } - } - - def typeInfo: String = sym.info.toString - - def genBody(formalArgsRegistry: FormalArgsRegistry): js.Tree - } - - private class ExportedSymbol(sym: Symbol, static: Boolean) - extends Exported(sym, sym.jsParamInfos.toIndexedSeq) { - - def genBody(formalArgsRegistry: FormalArgsRegistry): js.Tree = - genApplyForSingleExported(formalArgsRegistry, this, static) - } - - // !!! Hash codes of RTTypeTest are meaningless because of InstanceOfTypeTest - private sealed abstract class RTTypeTest - - private case class PrimitiveTypeTest(tpe: jstpe.Type, rank: Int) extends RTTypeTest - - // !!! This class does not have a meaningful hash code - private case class InstanceOfTypeTest(tpe: Type) extends RTTypeTest { - override def equals(that: Any): Boolean = { - that match { - case InstanceOfTypeTest(thatTpe) => tpe =:= thatTpe - case _ => false - } - } - } - - private case object NoTypeTest extends RTTypeTest - - /** Very simple O(n²) topological sort for elements assumed to be distinct. 
*/ - private def topoSortDistinctsWith[A <: AnyRef](coll: List[A])(lteq: (A, A) => Boolean): List[A] = { - @tailrec - def loop(coll: List[A], acc: List[A]): List[A] = { - if (coll.isEmpty) acc - else if (coll.tail.isEmpty) coll.head :: acc - else { - val (lhs, rhs) = coll.span(x => !coll.forall(y => (x eq y) || !lteq(x, y))) - assert(!rhs.isEmpty, s"cycle while ordering $coll") - loop(lhs ::: rhs.tail, rhs.head :: acc) - } - } - - loop(coll, Nil) - } - - private def typeTestForTpe(tpe: Type): RTTypeTest = { - tpe match { - case tpe: ErasedValueType => - InstanceOfTypeTest(tpe.tycon.typeSymbol.typeRef) - - case _ => - import org.scalajs.ir.Names - - (toIRType(tpe): @unchecked) match { - case jstpe.AnyType => NoTypeTest - - case jstpe.NoType => PrimitiveTypeTest(jstpe.UndefType, 0) - case jstpe.BooleanType => PrimitiveTypeTest(jstpe.BooleanType, 1) - case jstpe.CharType => PrimitiveTypeTest(jstpe.CharType, 2) - case jstpe.ByteType => PrimitiveTypeTest(jstpe.ByteType, 3) - case jstpe.ShortType => PrimitiveTypeTest(jstpe.ShortType, 4) - case jstpe.IntType => PrimitiveTypeTest(jstpe.IntType, 5) - case jstpe.LongType => PrimitiveTypeTest(jstpe.LongType, 6) - case jstpe.FloatType => PrimitiveTypeTest(jstpe.FloatType, 7) - case jstpe.DoubleType => PrimitiveTypeTest(jstpe.DoubleType, 8) - - case jstpe.ClassType(Names.BoxedUnitClass) => PrimitiveTypeTest(jstpe.UndefType, 0) - case jstpe.ClassType(Names.BoxedStringClass) => PrimitiveTypeTest(jstpe.StringType, 9) - case jstpe.ClassType(_) => InstanceOfTypeTest(tpe) - - case jstpe.ArrayType(_) => InstanceOfTypeTest(tpe) - } - } - } - - // Group-by that does not rely on hashCode(), only equals() - O(n²) - private def groupByWithoutHashCode[A, B](coll: List[A])(f: A => B): List[(B, List[A])] = { - val m = new mutable.ArrayBuffer[(B, List[A])] - m.sizeHint(coll.length) - - for (elem <- coll) { - val key = f(elem) - val index = m.indexWhere(_._1 == key) - if (index < 0) - m += ((key, List(elem))) - else - m(index) = (key, elem :: m(index)._2) - } - - m.toList - } - - class FormalArgsRegistry(val minArgc: Int, needsRestParam: Boolean) { - private val fixedParamNames: scala.collection.immutable.IndexedSeq[jsNames.LocalName] = - (0 until minArgc).toIndexedSeq.map(_ => freshLocalIdent("arg")(NoPosition).name) - - private val restParamName: jsNames.LocalName = - if (needsRestParam) freshLocalIdent("rest")(NoPosition).name - else null - - def genFormalArgs()(implicit pos: Position): (List[js.ParamDef], Option[js.ParamDef]) = { - val fixedParamDefs = fixedParamNames.toList.map { paramName => - js.ParamDef(js.LocalIdent(paramName), NoOriginalName, jstpe.AnyType, mutable = false) - } - - val restParam = { - if (needsRestParam) - Some(js.ParamDef(js.LocalIdent(restParamName), NoOriginalName, jstpe.AnyType, mutable = false)) - else - None - } - - (fixedParamDefs, restParam) - } - - def genArgRef(index: Int)(implicit pos: Position): js.Tree = { - if (index < minArgc) - js.VarRef(js.LocalIdent(fixedParamNames(index)))(jstpe.AnyType) - else - js.JSSelect(genRestArgRef(), js.IntLiteral(index - minArgc)) - } - - def genVarargRef(fixedParamCount: Int)(implicit pos: Position): js.Tree = { - assert(fixedParamCount >= minArgc, s"genVarargRef($fixedParamCount) with minArgc = $minArgc at $pos") - val restParam = genRestArgRef() - if (fixedParamCount == minArgc) - restParam - else - js.JSMethodApply(restParam, js.StringLiteral("slice"), List(js.IntLiteral(fixedParamCount - minArgc))) - } - - def genRestArgRef()(implicit pos: Position): js.Tree = { - assert(needsRestParam, 
s"trying to generate a reference to non-existent rest param at $pos") - js.VarRef(js.LocalIdent(restParamName))(jstpe.AnyType) - } - - def genAllArgsRefsForForwarder()(implicit pos: Position): List[js.TreeOrJSSpread] = { - val fixedArgRefs = fixedParamNames.toList.map { paramName => - js.VarRef(js.LocalIdent(paramName))(jstpe.AnyType) - } - - if (needsRestParam) { - val restArgRef = js.VarRef(js.LocalIdent(restParamName))(jstpe.AnyType) - fixedArgRefs :+ js.JSSpread(restArgRef) - } else { - fixedArgRefs - } - } - } -} diff --git a/tests/pos-with-compiler-cc/backend/sjs/JSPositions.scala b/tests/pos-with-compiler-cc/backend/sjs/JSPositions.scala deleted file mode 100644 index 2fd007165952..000000000000 --- a/tests/pos-with-compiler-cc/backend/sjs/JSPositions.scala +++ /dev/null @@ -1,102 +0,0 @@ -package dotty.tools.backend.sjs - -import scala.language.unsafeNulls - -import java.net.{URI, URISyntaxException} - -import dotty.tools.dotc.core._ -import Contexts._ -import Decorators.em - -import dotty.tools.dotc.report - -import dotty.tools.dotc.util.{SourceFile, SourcePosition} -import dotty.tools.dotc.util.Spans.Span - -import org.scalajs.ir - -/** Conversion utilities from dotty Positions to IR Positions. */ -class JSPositions()(using Context) { - import JSPositions._ - - private val sourceURIMaps: List[URIMap] = { - ctx.settings.scalajsMapSourceURI.value.flatMap { option => - val uris = option.split("->") - if (uris.length != 1 && uris.length != 2) { - report.error("-scalajs-mapSourceURI needs one or two URIs as argument (separated by '->').") - Nil - } else { - try { - val from = new URI(uris.head) - val to = uris.lift(1).map(str => new URI(str)) - URIMap(from, to) :: Nil - } catch { - case e: URISyntaxException => - report.error(em"${e.getInput} is not a valid URI") - Nil - } - } - } - } - - private def sourceAndSpan2irPos(source: SourceFile, span: Span): ir.Position = { - if (!span.exists) ir.Position.NoPosition - else { - // dotty positions and IR positions are both 0-based - val irSource = span2irPosCache.toIRSource(source) - val point = span.point - val line = source.offsetToLine(point) - val column = source.column(point) - ir.Position(irSource, line, column) - } - } - - /** Implicit conversion from dotty Span to ir.Position. */ - implicit def span2irPos(span: Span): ir.Position = - sourceAndSpan2irPos(ctx.compilationUnit.source, span) - - /** Implicitly materializes an ir.Position from an implicit dotty Span. */ - implicit def implicitSpan2irPos(implicit span: Span): ir.Position = - span2irPos(span) - - /** Implicitly materializes an ir.Position from an implicit dotty SourcePosition. 
*/ - implicit def implicitSourcePos2irPos(implicit sourcePos: SourcePosition): ir.Position = - sourceAndSpan2irPos(sourcePos.source, sourcePos.span) - - private object span2irPosCache { - import dotty.tools.dotc.util._ - - private var lastDotcSource: SourceFile = null - private var lastIRSource: ir.Position.SourceFile = null - - def toIRSource(dotcSource: SourceFile): ir.Position.SourceFile = { - if (dotcSource != lastDotcSource) { - lastIRSource = convert(dotcSource) - lastDotcSource = dotcSource - } - lastIRSource - } - - private def convert(dotcSource: SourceFile): ir.Position.SourceFile = { - dotcSource.file.file match { - case null => - new java.net.URI( - "virtualfile", // Pseudo-Scheme - dotcSource.file.path, // Scheme specific part - null // Fragment - ) - case file => - val srcURI = file.toURI - sourceURIMaps.collectFirst { - case URIMap(from, to) if from.relativize(srcURI) != srcURI => - val relURI = from.relativize(srcURI) - to.fold(relURI)(_.resolve(relURI)) - }.getOrElse(srcURI) - } - } - } -} - -object JSPositions { - final case class URIMap(from: URI, to: Option[URI]) -} diff --git a/tests/pos-with-compiler-cc/backend/sjs/JSPrimitives.scala b/tests/pos-with-compiler-cc/backend/sjs/JSPrimitives.scala deleted file mode 100644 index ce83f5e9e83b..000000000000 --- a/tests/pos-with-compiler-cc/backend/sjs/JSPrimitives.scala +++ /dev/null @@ -1,150 +0,0 @@ -package dotty.tools.backend.sjs - -import dotty.tools.dotc.core._ -import Names.TermName -import Types._ -import Contexts._ -import Symbols._ -import Decorators.em - -import dotty.tools.dotc.ast.tpd._ -import dotty.tools.backend.jvm.DottyPrimitives -import dotty.tools.dotc.report -import dotty.tools.dotc.util.ReadOnlyMap - -object JSPrimitives { - - inline val FirstJSPrimitiveCode = 300 - - inline val DYNNEW = FirstJSPrimitiveCode + 1 // Instantiate a new JavaScript object - - inline val ARR_CREATE = DYNNEW + 1 // js.Array.apply (array literal syntax) - - inline val TYPEOF = ARR_CREATE + 1 // typeof x - inline val JS_NATIVE = TYPEOF + 1 // js.native. Marker method. Fails if tried to be emitted. 
- - inline val UNITVAL = JS_NATIVE + 1 // () value, which is undefined - - inline val JS_NEW_TARGET = UNITVAL + 1 // js.new.target - - inline val JS_IMPORT = JS_NEW_TARGET + 1 // js.import.apply(specifier) - inline val JS_IMPORT_META = JS_IMPORT + 1 // js.import.meta - - inline val CONSTRUCTOROF = JS_IMPORT_META + 1 // runtime.constructorOf(clazz) - inline val CREATE_INNER_JS_CLASS = CONSTRUCTOROF + 1 // runtime.createInnerJSClass - inline val CREATE_LOCAL_JS_CLASS = CREATE_INNER_JS_CLASS + 1 // runtime.createLocalJSClass - inline val WITH_CONTEXTUAL_JS_CLASS_VALUE = CREATE_LOCAL_JS_CLASS + 1 // runtime.withContextualJSClassValue - inline val LINKING_INFO = WITH_CONTEXTUAL_JS_CLASS_VALUE + 1 // runtime.linkingInfo - inline val DYNAMIC_IMPORT = LINKING_INFO + 1 // runtime.dynamicImport - - inline val STRICT_EQ = DYNAMIC_IMPORT + 1 // js.special.strictEquals - inline val IN = STRICT_EQ + 1 // js.special.in - inline val INSTANCEOF = IN + 1 // js.special.instanceof - inline val DELETE = INSTANCEOF + 1 // js.special.delete - inline val FORIN = DELETE + 1 // js.special.forin - inline val JS_THROW = FORIN + 1 // js.special.throw - inline val JS_TRY_CATCH = JS_THROW + 1 // js.special.tryCatch - inline val WRAP_AS_THROWABLE = JS_TRY_CATCH + 1 // js.special.wrapAsThrowable - inline val UNWRAP_FROM_THROWABLE = WRAP_AS_THROWABLE + 1 // js.special.unwrapFromThrowable - inline val DEBUGGER = UNWRAP_FROM_THROWABLE + 1 // js.special.debugger - - inline val THROW = DEBUGGER + 1 - - inline val UNION_FROM = THROW + 1 // js.|.from - inline val UNION_FROM_TYPE_CONSTRUCTOR = UNION_FROM + 1 // js.|.fromTypeConstructor - - inline val REFLECT_SELECTABLE_SELECTDYN = UNION_FROM_TYPE_CONSTRUCTOR + 1 // scala.reflect.Selectable.selectDynamic - inline val REFLECT_SELECTABLE_APPLYDYN = REFLECT_SELECTABLE_SELECTDYN + 1 // scala.reflect.Selectable.applyDynamic - - inline val LastJSPrimitiveCode = REFLECT_SELECTABLE_APPLYDYN - - def isJSPrimitive(code: Int): Boolean = - code >= FirstJSPrimitiveCode && code <= LastJSPrimitiveCode - -} - -class JSPrimitives(ictx: DetachedContext) extends DottyPrimitives(ictx) { - import JSPrimitives._ - - private lazy val jsPrimitives: ReadOnlyMap[Symbol, Int] = initJSPrimitives(using ictx) - - override def getPrimitive(sym: Symbol): Int = - jsPrimitives.getOrElse(sym, super.getPrimitive(sym)) - - override def getPrimitive(app: Apply, tpe: Type)(using Context): Int = - jsPrimitives.getOrElse(app.fun.symbol, super.getPrimitive(app, tpe)) - - override def isPrimitive(sym: Symbol): Boolean = - jsPrimitives.contains(sym) || super.isPrimitive(sym) - - override def isPrimitive(fun: Tree): Boolean = - jsPrimitives.contains(fun.symbol(using ictx)) || super.isPrimitive(fun) - - /** Initialize the primitive map */ - private def initJSPrimitives(using Context): ReadOnlyMap[Symbol, Int] = { - - val primitives = MutableSymbolMap[Int]() - - // !!! 
Code duplicate with DottyPrimitives - /** Add a primitive operation to the map */ - def addPrimitive(s: Symbol, code: Int): Unit = { - assert(!(primitives contains s), "Duplicate primitive " + s) - primitives(s) = code - } - - def addPrimitives(cls: Symbol, method: TermName, code: Int)(using Context): Unit = { - val alts = cls.info.member(method).alternatives.map(_.symbol) - if (alts.isEmpty) { - report.error(em"Unknown primitive method $cls.$method") - } else { - for (s <- alts) - addPrimitive(s, code) - } - } - - val jsdefn = JSDefinitions.jsdefn - - addPrimitive(jsdefn.JSDynamic_newInstance, DYNNEW) - - addPrimitive(jsdefn.JSArray_apply, ARR_CREATE) - - addPrimitive(jsdefn.JSPackage_typeOf, TYPEOF) - addPrimitive(jsdefn.JSPackage_native, JS_NATIVE) - - addPrimitive(defn.BoxedUnit_UNIT, UNITVAL) - - addPrimitive(jsdefn.JSNew_target, JS_NEW_TARGET) - - addPrimitive(jsdefn.JSImport_apply, JS_IMPORT) - addPrimitive(jsdefn.JSImport_meta, JS_IMPORT_META) - - addPrimitive(jsdefn.Runtime_constructorOf, CONSTRUCTOROF) - addPrimitive(jsdefn.Runtime_createInnerJSClass, CREATE_INNER_JS_CLASS) - addPrimitive(jsdefn.Runtime_createLocalJSClass, CREATE_LOCAL_JS_CLASS) - addPrimitive(jsdefn.Runtime_withContextualJSClassValue, WITH_CONTEXTUAL_JS_CLASS_VALUE) - addPrimitive(jsdefn.Runtime_linkingInfo, LINKING_INFO) - addPrimitive(jsdefn.Runtime_dynamicImport, DYNAMIC_IMPORT) - - addPrimitive(jsdefn.Special_strictEquals, STRICT_EQ) - addPrimitive(jsdefn.Special_in, IN) - addPrimitive(jsdefn.Special_instanceof, INSTANCEOF) - addPrimitive(jsdefn.Special_delete, DELETE) - addPrimitive(jsdefn.Special_forin, FORIN) - addPrimitive(jsdefn.Special_throw, JS_THROW) - addPrimitive(jsdefn.Special_tryCatch, JS_TRY_CATCH) - addPrimitive(jsdefn.Special_wrapAsThrowable, WRAP_AS_THROWABLE) - addPrimitive(jsdefn.Special_unwrapFromThrowable, UNWRAP_FROM_THROWABLE) - addPrimitive(jsdefn.Special_debugger, DEBUGGER) - - addPrimitive(defn.throwMethod, THROW) - - addPrimitive(jsdefn.PseudoUnion_from, UNION_FROM) - addPrimitive(jsdefn.PseudoUnion_fromTypeConstructor, UNION_FROM_TYPE_CONSTRUCTOR) - - addPrimitive(jsdefn.ReflectSelectable_selectDynamic, REFLECT_SELECTABLE_SELECTDYN) - addPrimitive(jsdefn.ReflectSelectable_applyDynamic, REFLECT_SELECTABLE_APPLYDYN) - - primitives - } - -} diff --git a/tests/pos-with-compiler-cc/backend/sjs/ScopedVar.scala b/tests/pos-with-compiler-cc/backend/sjs/ScopedVar.scala deleted file mode 100644 index af7570a6edca..000000000000 --- a/tests/pos-with-compiler-cc/backend/sjs/ScopedVar.scala +++ /dev/null @@ -1,38 +0,0 @@ -package dotty.tools.backend.sjs - -class ScopedVar[A](init: A) extends Pure { - import ScopedVar.Assignment - - private[ScopedVar] var value = init - - def this()(implicit ev: Null <:< A) = this(ev(null)) - - def get: A = value - def :=(newValue: A): Assignment[A] = new Assignment(this, newValue) -} - -object ScopedVar { - class Assignment[T](scVar: ScopedVar[T], value: T) { - private[ScopedVar] def push(): AssignmentStackElement[T] = { - val stack = new AssignmentStackElement(scVar, scVar.value) - scVar.value = value - stack - } - } - - private class AssignmentStackElement[T](scVar: ScopedVar[T], oldValue: T) { - private[ScopedVar] def pop(): Unit = { - scVar.value = oldValue - } - } - - implicit def toValue[T](scVar: ScopedVar[T]): T = scVar.get - - def withScopedVars[T](ass: Assignment[_]*)(body: => T): T = { - val stack = ass.map(_.push()) - try body - finally stack.reverse.foreach(_.pop()) - } - - final class VarBox[A](var value: A) -} diff --git 
a/tests/pos-with-compiler-cc/dotc/Bench.scala b/tests/pos-with-compiler-cc/dotc/Bench.scala deleted file mode 100644 index c9c032b0ae7d..000000000000 --- a/tests/pos-with-compiler-cc/dotc/Bench.scala +++ /dev/null @@ -1,64 +0,0 @@ -package dotty.tools -package dotc - -import core.Contexts._ -import reporting.Reporter -import io.AbstractFile - -import scala.annotation.internal.sharable - -/** A main class for running compiler benchmarks. Can instantiate a given - * number of compilers and run each (sequentially) a given number of times - * on the same sources. - */ -object Bench extends Driver: - - @sharable private var numRuns = 1 - - private def ntimes(n: Int)(op: => Reporter): Reporter = - (0 until n).foldLeft(emptyReporter)((_, _) => op) - - @sharable private var times: Array[Int] = _ - - override def doCompile(compiler: Compiler, files: List[AbstractFile])(using Context): Reporter = - times = new Array[Int](numRuns) - var reporter: Reporter = emptyReporter - for i <- 0 until numRuns do - val start = System.nanoTime() - reporter = super.doCompile(compiler, files) - times(i) = ((System.nanoTime - start) / 1000000).toInt - println(s"time elapsed: ${times(i)}ms") - if ctx.settings.Xprompt.value then - print("hit to continue >") - System.in.nn.read() - println() - reporter - - def extractNumArg(args: Array[String], name: String, default: Int = 1): (Int, Array[String]) = { - val pos = args indexOf name - if (pos < 0) (default, args) - else (args(pos + 1).toInt, (args take pos) ++ (args drop (pos + 2))) - } - - def reportTimes() = - val best = times.sorted - val measured = numRuns / 3 - val avgBest = best.take(measured).sum / measured - val avgLast = times.reverse.take(measured).sum / measured - println(s"best out of $numRuns runs: ${best(0)}") - println(s"average out of best $measured: $avgBest") - println(s"average out of last $measured: $avgLast") - - override def process(args: Array[String], rootCtx: Context): Reporter = - val (numCompilers, args1) = extractNumArg(args, "#compilers") - val (numRuns, args2) = extractNumArg(args1, "#runs") - this.numRuns = numRuns - var reporter: Reporter = emptyReporter - for i <- 0 until numCompilers do - reporter = super.process(args2, rootCtx) - reportTimes() - reporter - -end Bench - - diff --git a/tests/pos-with-compiler-cc/dotc/CompilationUnit.scala b/tests/pos-with-compiler-cc/dotc/CompilationUnit.scala deleted file mode 100644 index f70bda947129..000000000000 --- a/tests/pos-with-compiler-cc/dotc/CompilationUnit.scala +++ /dev/null @@ -1,167 +0,0 @@ -package dotty.tools -package dotc - -import core._ -import Contexts._ -import SymDenotations.ClassDenotation -import Symbols._ -import util.{FreshNameCreator, SourceFile, NoSource} -import util.Spans.Span -import ast.{tpd, untpd} -import tpd.{Tree, TreeTraverser} -import ast.Trees.{Import, Ident} -import typer.Nullables -import transform.SymUtils._ -import core.Decorators._ -import config.{SourceVersion, Feature} -import StdNames.nme -import scala.annotation.internal.sharable -import language.experimental.pureFunctions - -class CompilationUnit protected (val source: SourceFile) { - - override def toString: String = source.toString - - var untpdTree: untpd.Tree = untpd.EmptyTree - - var tpdTree: tpd.Tree = tpd.EmptyTree - - /** Is this the compilation unit of a Java file */ - def isJava: Boolean = source.file.name.endsWith(".java") - - /** The source version for this unit, as determined by a language import */ - var sourceVersion: Option[SourceVersion] = None - - /** Pickled TASTY binaries, indexed by 
class. */ - var pickled: Map[ClassSymbol, () -> Array[Byte]] = Map() - - /** The fresh name creator for the current unit. - * FIXME(#7661): This is not fine-grained enough to enable reproducible builds, - * see https://github.com/scala/scala/commit/f50ec3c866263448d803139e119b33afb04ec2bc - */ - val freshNames: FreshNameCreator = new FreshNameCreator.Default - - /** Will be set to `true` if there are inline call that must be inlined after typer. - * The information is used in phase `Inlining` in order to avoid traversing trees that need no transformations. - */ - var needsInlining: Boolean = false - - /** Set to `true` if inliner added anonymous mirrors that need to be completed */ - var needsMirrorSupport: Boolean = false - - /** Will be set to `true` if contains `Quote`. - * The information is used in phase `Staging`/`Splicing`/`PickleQuotes` in order to avoid traversing trees that need no transformations. - */ - var needsStaging: Boolean = false - - /** Will be set to true if the unit contains a captureChecking language import */ - var needsCaptureChecking: Boolean = false - - /** Will be set to true if the unit contains a pureFunctions language import */ - var knowsPureFuns: Boolean = false - - var suspended: Boolean = false - var suspendedAtInliningPhase: Boolean = false - - /** Can this compilation unit be suspended */ - def isSuspendable: Boolean = true - - /** Suspends the compilation unit by thowing a SuspendException - * and recording the suspended compilation unit - */ - def suspend()(using Context): Nothing = - assert(isSuspendable) - if !suspended then - if (ctx.settings.XprintSuspension.value) - report.echo(i"suspended: $this") - suspended = true - ctx.run.nn.suspendedUnits += this - if ctx.phase == Phases.inliningPhase then - suspendedAtInliningPhase = true - throw CompilationUnit.SuspendException() - - private var myAssignmentSpans: Map[Int, List[Span]] | Null = null - - /** A map from (name-) offsets of all local variables in this compilation unit - * that can be tracked for being not null to the list of spans of assignments - * to these variables. - */ - def assignmentSpans(using Context): Map[Int, List[Span]] = - if myAssignmentSpans == null then myAssignmentSpans = Nullables.assignmentSpans - myAssignmentSpans.nn -} - -@sharable object NoCompilationUnit extends CompilationUnit(NoSource) { - - override def isJava: Boolean = false - - override def suspend()(using Context): Nothing = - throw CompilationUnit.SuspendException() - - override def assignmentSpans(using Context): Map[Int, List[Span]] = Map.empty -} - -object CompilationUnit { - - class SuspendException extends Exception - - /** Make a compilation unit for top class `clsd` with the contents of the `unpickled` tree */ - def apply(clsd: ClassDenotation, unpickled: Tree, forceTrees: Boolean)(using Context): CompilationUnit = - val file = clsd.symbol.associatedFile.nn - apply(SourceFile(file, Array.empty[Char]), unpickled, forceTrees) - - /** Make a compilation unit, given picked bytes and unpickled tree */ - def apply(source: SourceFile, unpickled: Tree, forceTrees: Boolean)(using Context): CompilationUnit = { - assert(!unpickled.isEmpty, unpickled) - val unit1 = new CompilationUnit(source) - unit1.tpdTree = unpickled - if (forceTrees) { - val force = new Force - force.traverse(unit1.tpdTree) - unit1.needsStaging = force.containsQuote - unit1.needsInlining = force.containsInline - } - unit1 - } - - /** Create a compilation unit corresponding to `source`. 
- * If `mustExist` is true, this will fail if `source` does not exist. - */ - def apply(source: SourceFile, mustExist: Boolean = true)(using Context): CompilationUnit = { - val src = - if (!mustExist) - source - else if (source.file.isDirectory) { - report.error(em"expected file, received directory '${source.file.path}'") - NoSource - } - else if (!source.file.exists) { - report.error(em"source file not found: ${source.file.path}") - NoSource - } - else source - new CompilationUnit(src) - } - - /** Force the tree to be loaded */ - private class Force extends TreeTraverser { - var containsQuote = false - var containsInline = false - var containsCaptureChecking = false - def traverse(tree: Tree)(using Context): Unit = { - if (tree.symbol.isQuote) - containsQuote = true - if tree.symbol.is(Flags.Inline) then - containsInline = true - tree match - case Import(qual, selectors) => - tpd.languageImport(qual) match - case Some(prefix) => - for case untpd.ImportSelector(untpd.Ident(imported), untpd.EmptyTree, _) <- selectors do - Feature.handleGlobalLanguageImport(prefix, imported) - case _ => - case _ => - traverseChildren(tree) - } - } -} diff --git a/tests/pos-with-compiler-cc/dotc/Compiler.scala b/tests/pos-with-compiler-cc/dotc/Compiler.scala deleted file mode 100644 index b121a47781e1..000000000000 --- a/tests/pos-with-compiler-cc/dotc/Compiler.scala +++ /dev/null @@ -1,171 +0,0 @@ -package dotty.tools -package dotc - -import core._ -import Contexts._ -import typer.{TyperPhase, RefChecks} -import cc.CheckCaptures -import parsing.Parser -import Phases.Phase -import transform._ -import dotty.tools.backend -import backend.jvm.{CollectSuperCalls, GenBCode} -import localopt.StringInterpolatorOpt - -/** The central class of the dotc compiler. The job of a compiler is to create - * runs, which process given `phases` in a given `rootContext`. - */ -class Compiler { - - /** Meta-ordering constraint: - * - * DenotTransformers that change the signature of their denotation's info must go - * after erasure. The reason is that denotations are permanently referred to by - * TermRefs which contain a signature. If the signature of a symbol would change, - * all refs to it would become outdated - they could not be dereferenced in the - * new phase. - * - * After erasure, signature changing denot-transformers are OK because signatures - * are never recomputed later than erasure. 
- */ - def phases: List[List[Phase]] = - frontendPhases ::: picklerPhases ::: transformPhases ::: backendPhases - - /** Phases dealing with the frontend up to trees ready for TASTY pickling */ - protected def frontendPhases: List[List[Phase]] = - List(new Parser) :: // Compiler frontend: scanner, parser - List(new TyperPhase) :: // Compiler frontend: namer, typer - List(new YCheckPositions) :: // YCheck positions - List(new sbt.ExtractDependencies) :: // Sends information on classes' dependencies to sbt via callbacks - List(new semanticdb.ExtractSemanticDB) :: // Extract info into .semanticdb files - List(new PostTyper) :: // Additional checks and cleanups after type checking - List(new sjs.PrepJSInterop) :: // Additional checks and transformations for Scala.js (Scala.js only) - List(new sbt.ExtractAPI) :: // Sends a representation of the API of classes to sbt via callbacks - List(new SetRootTree) :: // Set the `rootTreeOrProvider` on class symbols - Nil - - /** Phases dealing with TASTY tree pickling and unpickling */ - protected def picklerPhases: List[List[Phase]] = - List(new Pickler) :: // Generate TASTY info - List(new Inlining) :: // Inline and execute macros - List(new PostInlining) :: // Add mirror support for inlined code - List(new Staging) :: // Check staging levels and heal staged types - List(new Splicing) :: // Replace level 1 splices with holes - List(new PickleQuotes) :: // Turn quoted trees into explicit run-time data structures - Nil - - /** Phases dealing with the transformation from pickled trees to backend trees */ - protected def transformPhases: List[List[Phase]] = - List(new InstrumentCoverage) :: // Perform instrumentation for code coverage (if -coverage-out is set) - List(new FirstTransform, // Some transformations to put trees into a canonical form - new CheckReentrant, // Internal use only: Check that compiled program has no data races involving global vars - new ElimPackagePrefixes, // Eliminate references to package prefixes in Select nodes - new CookComments, // Cook the comments: expand variables, doc, etc. 
- new CheckStatic, // Check restrictions that apply to @static members - new CheckLoopingImplicits, // Check that implicit defs do not call themselves in an infinite loop - new BetaReduce, // Reduce closure applications - new InlineVals, // Check right hand-sides of an `inline val`s - new ExpandSAMs, // Expand single abstract method closures to anonymous classes - new ElimRepeated, // Rewrite vararg parameters and arguments - new RefChecks) :: // Various checks mostly related to abstract members and overriding - List(new init.Checker) :: // Check initialization of objects - List(new CrossVersionChecks, // Check issues related to deprecated and experimental - new ProtectedAccessors, // Add accessors for protected members - new ExtensionMethods, // Expand methods of value classes with extension methods - new UncacheGivenAliases, // Avoid caching RHS of simple parameterless given aliases - new ElimByName, // Map by-name parameters to functions - new HoistSuperArgs, // Hoist complex arguments of supercalls to enclosing scope - new ForwardDepChecks, // Check that there are no forward references to local vals - new SpecializeApplyMethods, // Adds specialized methods to FunctionN - new TryCatchPatterns, // Compile cases in try/catch - new PatternMatcher) :: // Compile pattern matches - List(new TestRecheck.Pre) :: // Test only: run rechecker, enabled under -Yrecheck-test - List(new TestRecheck) :: // Test only: run rechecker, enabled under -Yrecheck-test - List(new CheckCaptures.Pre) :: // Preparations for check captures phase, enabled under captureChecking - List(new CheckCaptures) :: // Check captures, enabled under captureChecking - List(new ElimOpaque, // Turn opaque into normal aliases - new sjs.ExplicitJSClasses, // Make all JS classes explicit (Scala.js only) - new ExplicitOuter, // Add accessors to outer classes from nested ones. - new ExplicitSelf, // Make references to non-trivial self types explicit as casts - new StringInterpolatorOpt) :: // Optimizes raw and s and f string interpolators by rewriting them to string concatenations or formats - List(new PruneErasedDefs, // Drop erased definitions from scopes and simplify erased expressions - new UninitializedDefs, // Replaces `compiletime.uninitialized` by `_` - new InlinePatterns, // Remove placeholders of inlined patterns - new VCInlineMethods, // Inlines calls to value class methods - new SeqLiterals, // Express vararg arguments as arrays - new InterceptedMethods, // Special handling of `==`, `|=`, `getClass` methods - new Getters, // Replace non-private vals and vars with getter defs (fields are added later) - new SpecializeFunctions, // Specialized Function{0,1,2} by replacing super with specialized super - new SpecializeTuples, // Specializes Tuples by replacing tuple construction and selection trees - new LiftTry, // Put try expressions that might execute on non-empty stacks into their own methods - new CollectNullableFields, // Collect fields that can be nulled out after use in lazy initialization - new ElimOuterSelect, // Expand outer selections - new ResolveSuper, // Implement super accessors - new FunctionXXLForwarders, // Add forwarders for FunctionXXL apply method - new ParamForwarding, // Add forwarders for aliases of superclass parameters - new TupleOptimizations, // Optimize generic operations on tuples - new LetOverApply, // Lift blocks from receivers of applications - new ArrayConstructors) :: // Intercept creation of (non-generic) arrays and intrinsify. 
- List(new Erasure) :: // Rewrite types to JVM model, erasing all type parameters, abstract types and refinements. - List(new ElimErasedValueType, // Expand erased value types to their underlying implmementation types - new PureStats, // Remove pure stats from blocks - new VCElideAllocations, // Peep-hole optimization to eliminate unnecessary value class allocations - new EtaReduce, // Reduce eta expansions of pure paths to the underlying function reference - new ArrayApply, // Optimize `scala.Array.apply([....])` and `scala.Array.apply(..., [....])` into `[...]` - new sjs.AddLocalJSFakeNews, // Adds fake new invocations to local JS classes in calls to `createLocalJSClass` - new ElimPolyFunction, // Rewrite PolyFunction subclasses to FunctionN subclasses - new TailRec, // Rewrite tail recursion to loops - new CompleteJavaEnums, // Fill in constructors for Java enums - new Mixin, // Expand trait fields and trait initializers - new LazyVals, // Expand lazy vals - new Memoize, // Add private fields to getters and setters - new NonLocalReturns, // Expand non-local returns - new CapturedVars) :: // Represent vars captured by closures as heap objects - List(new Constructors, // Collect initialization code in primary constructors - // Note: constructors changes decls in transformTemplate, no InfoTransformers should be added after it - new Instrumentation) :: // Count calls and allocations under -Yinstrument - List(new LambdaLift, // Lifts out nested functions to class scope, storing free variables in environments - // Note: in this mini-phase block scopes are incorrect. No phases that rely on scopes should be here - new ElimStaticThis, // Replace `this` references to static objects by global identifiers - new CountOuterAccesses) :: // Identify outer accessors that can be dropped - List(new DropOuterAccessors, // Drop unused outer accessors - new CheckNoSuperThis, // Check that supercalls don't contain references to `this` - new Flatten, // Lift all inner classes to package scope - new TransformWildcards, // Replace wildcards with default values - new MoveStatics, // Move static methods from companion to the class itself - new ExpandPrivate, // Widen private definitions accessed from nested classes - new RestoreScopes, // Repair scopes rendered invalid by moving definitions in prior phases of the group - new SelectStatic, // get rid of selects that would be compiled into GetStatic - new sjs.JUnitBootstrappers, // Generate JUnit-specific bootstrapper classes for Scala.js (not enabled by default) - new CollectEntryPoints, // Collect all entry points and save them in the context - new CollectSuperCalls, // Find classes that are called with super - new RepeatableAnnotations) :: // Aggregate repeatable annotations - Nil - - /** Generate the output of the compilation */ - protected def backendPhases: List[List[Phase]] = - List(new backend.sjs.GenSJSIR) :: // Generate .sjsir files for Scala.js (not enabled by default) - List(new GenBCode) :: // Generate JVM bytecode - Nil - - var runId: Int = 1 - def nextRunId: Int = { - runId += 1; runId - } - - def reset()(using Context): Unit = { - ctx.base.reset() - val run = ctx.run - if (run != null) run.reset() - } - - def newRun(using Context): Run = { - reset() - val rctx = - if ctx.settings.Xsemanticdb.value then - ctx.addMode(Mode.ReadPositions) - else - ctx - new Run(this, rctx) - } -} diff --git a/tests/pos-with-compiler-cc/dotc/Driver.scala b/tests/pos-with-compiler-cc/dotc/Driver.scala deleted file mode 100644 index b85f1365243b..000000000000 --- 
a/tests/pos-with-compiler-cc/dotc/Driver.scala +++ /dev/null @@ -1,207 +0,0 @@ -package dotty.tools.dotc - -import dotty.tools.FatalError -import config.CompilerCommand -import core.Comments.{ContextDoc, ContextDocstrings} -import core.Contexts._ -import core.{MacroClassLoader, TypeError} -import dotty.tools.dotc.ast.Positioned -import dotty.tools.io.AbstractFile -import reporting._ -import core.Decorators._ -import config.Feature - -import scala.util.control.NonFatal -import fromtasty.{TASTYCompiler, TastyFileUtil} - -/** Run the Dotty compiler. - * - * Extending this class lets you customize many aspect of the compilation - * process, but in most cases you only need to call [[process]] on the - * existing object [[Main]]. - */ -class Driver { - - protected def newCompiler(using Context): Compiler = - if (ctx.settings.fromTasty.value) new TASTYCompiler - else new Compiler - - protected def emptyReporter: Reporter = new StoreReporter(null) - - protected def doCompile(compiler: Compiler, files: List[AbstractFile])(using Context): Reporter = - if files.nonEmpty then - try - val run = compiler.newRun - run.compile(files) - finish(compiler, run) - catch - case ex: FatalError => - report.error(ex.getMessage.nn) // signals that we should fail compilation. - case ex: TypeError => - println(s"${ex.toMessage} while compiling ${files.map(_.path).mkString(", ")}") - throw ex - case ex: Throwable => - println(s"$ex while compiling ${files.map(_.path).mkString(", ")}") - throw ex - ctx.reporter - - protected def finish(compiler: Compiler, run: Run)(using Context): Unit = - run.printSummary() - if !ctx.reporter.errorsReported && run.suspendedUnits.nonEmpty then - val suspendedUnits = run.suspendedUnits.toList - if (ctx.settings.XprintSuspension.value) - report.echo(i"compiling suspended $suspendedUnits%, %") - val run1 = compiler.newRun - for unit <- suspendedUnits do unit.suspended = false - run1.compileUnits(suspendedUnits) - finish(compiler, run1)(using MacroClassLoader.init(ctx.fresh)) - - protected def initCtx: Context = (new ContextBase).initialCtx - - protected def sourcesRequired: Boolean = true - - protected def command: CompilerCommand = ScalacCommand - - /** Setup context with initialized settings from CLI arguments, then check if there are any settings that - * would change the default behaviour of the compiler. - * - * @return If there is no setting like `-help` preventing us from continuing compilation, - * this method returns a list of files to compile and an updated Context. - * If compilation should be interrupted, this method returns None. 
- */ - def setup(args: Array[String], rootCtx: Context): Option[(List[AbstractFile], DetachedContext)] = { - val ictx = rootCtx.fresh - val summary = command.distill(args, ictx.settings)(ictx.settingsState)(using ictx) - ictx.setSettings(summary.sstate) - Feature.checkExperimentalSettings(using ictx) - MacroClassLoader.init(ictx) - Positioned.init(using ictx) - - inContext(ictx) { - if !ctx.settings.YdropComments.value || ctx.settings.YreadComments.value then - ictx.setProperty(ContextDoc, new ContextDocstrings) - val fileNamesOrNone = command.checkUsage(summary, sourcesRequired)(using ctx.settings)(using ctx.settingsState) - fileNamesOrNone.map { fileNames => - val files = fileNames.map(ctx.getFile) - (files, fromTastySetup(files).detach) - } - } - } - - /** Setup extra classpath of tasty and jar files */ - protected def fromTastySetup(files: List[AbstractFile])(using Context): Context = - if ctx.settings.fromTasty.value then - val newEntries: List[String] = files - .flatMap { file => - if !file.exists then - report.error(em"File does not exist: ${file.path}") - None - else file.extension match - case "jar" => Some(file.path) - case "tasty" => - TastyFileUtil.getClassPath(file) match - case Some(classpath) => Some(classpath) - case _ => - report.error(em"Could not load classname from: ${file.path}") - None - case _ => - report.error(em"File extension is not `tasty` or `jar`: ${file.path}") - None - } - .distinct - val ctx1 = ctx.fresh - val fullClassPath = - (newEntries :+ ctx.settings.classpath.value).mkString(java.io.File.pathSeparator.nn) - ctx1.setSetting(ctx1.settings.classpath, fullClassPath) - else ctx - - /** Entry point to the compiler that can be conveniently used with Java reflection. - * - * This entry point can easily be used without depending on the `dotty` package, - * you only need to depend on `dotty-interfaces` and call this method using - * reflection. This allows you to write code that will work against multiple - * versions of dotty without recompilation. - * - * The trade-off is that you can only pass a SimpleReporter to this method - * and not a normal Reporter which is more powerful. - * - * Usage example: [[https://github.com/lampepfl/dotty/tree/master/compiler/test/dotty/tools/dotc/InterfaceEntryPointTest.scala]] - * - * @param args Arguments to pass to the compiler. - * @param simple Used to log errors, warnings, and info messages. - * The default reporter is used if this is `null`. - * @param callback Used to execute custom code during the compilation - * process. No callbacks will be executed if this is `null`. - * @return - */ - final def process(args: Array[String], simple: interfaces.SimpleReporter | Null, - callback: interfaces.CompilerCallback | Null): interfaces.ReporterResult = { - val reporter = if (simple == null) null else Reporter.fromSimpleReporter(simple) - process(args, reporter, callback) - } - - /** Principal entry point to the compiler. - * - * Usage example: [[https://github.com/lampepfl/dotty/tree/master/compiler/test/dotty/tools/dotc/EntryPointsTest.scala.disabled]] - * in method `runCompiler` - * - * @param args Arguments to pass to the compiler. - * @param reporter Used to log errors, warnings, and info messages. - * The default reporter is used if this is `null`. - * @param callback Used to execute custom code during the compilation - * process. No callbacks will be executed if this is `null`. - * @return The `Reporter` used. Use `Reporter#hasErrors` to check - * if compilation succeeded. 
- */ - final def process(args: Array[String], reporter: Reporter | Null = null, - callback: interfaces.CompilerCallback | Null = null): Reporter = { - val compileCtx = initCtx.fresh - if (reporter != null) - compileCtx.setReporter(reporter) - if (callback != null) - compileCtx.setCompilerCallback(callback) - process(args, compileCtx) - } - - /** Entry point to the compiler with no optional arguments. - * - * This overload is provided for compatibility reasons: the - * `RawCompiler` of sbt expects this method to exist and calls - * it using reflection. Keeping it means that we can change - * the other overloads without worrying about breaking compatibility - * with sbt. - */ - final def process(args: Array[String]): Reporter = - process(args, null: Reporter | Null, null: interfaces.CompilerCallback | Null) - - /** Entry point to the compiler using a custom `Context`. - * - * In most cases, you do not need a custom `Context` and should - * instead use one of the other overloads of `process`. However, - * the other overloads cannot be overridden, instead you - * should override this one which they call internally. - * - * Usage example: [[https://github.com/lampepfl/dotty/tree/master/compiler/test/dotty/tools/dotc/EntryPointsTest.scala.disabled]] - * in method `runCompilerWithContext` - * - * @param args Arguments to pass to the compiler. - * @param rootCtx The root Context to use. - * @return The `Reporter` used. Use `Reporter#hasErrors` to check - * if compilation succeeded. - */ - def process(args: Array[String], rootCtx: Context): Reporter = { - setup(args, rootCtx) match - case Some((files, compileCtx)) => - doCompile(newCompiler(using compileCtx), files)(using compileCtx) - case None => - rootCtx.reporter - } - - def main(args: Array[String]): Unit = { - // Preload scala.util.control.NonFatal. Otherwise, when trying to catch a StackOverflowError, - // we may try to load it but fail with another StackOverflowError and lose the original exception, - // see . - val _ = NonFatal - sys.exit(if (process(args).hasErrors) 1 else 0) - } -} diff --git a/tests/pos-with-compiler-cc/dotc/Main.scala b/tests/pos-with-compiler-cc/dotc/Main.scala deleted file mode 100644 index 3288fded52a2..000000000000 --- a/tests/pos-with-compiler-cc/dotc/Main.scala +++ /dev/null @@ -1,5 +0,0 @@ -package dotty.tools -package dotc - -/** Main class of the `dotc` batch compiler. */ -object Main extends Driver diff --git a/tests/pos-with-compiler-cc/dotc/MissingCoreLibraryException.scala b/tests/pos-with-compiler-cc/dotc/MissingCoreLibraryException.scala deleted file mode 100644 index ae20d81226c9..000000000000 --- a/tests/pos-with-compiler-cc/dotc/MissingCoreLibraryException.scala +++ /dev/null @@ -1,9 +0,0 @@ -package dotty.tools.dotc - -import dotty.tools.FatalError - -class MissingCoreLibraryException(rootPackage: String) extends FatalError( - s"""Could not find package $rootPackage from compiler core libraries. - |Make sure the compiler core libraries are on the classpath. - """.stripMargin -) diff --git a/tests/pos-with-compiler-cc/dotc/Resident.scala b/tests/pos-with-compiler-cc/dotc/Resident.scala deleted file mode 100644 index 9ebeaaaeb1c2..000000000000 --- a/tests/pos-with-compiler-cc/dotc/Resident.scala +++ /dev/null @@ -1,61 +0,0 @@ -package dotty.tools -package dotc - -import core.Contexts._ -import reporting.Reporter -import java.io.EOFException -import scala.annotation.tailrec - -/** A compiler which stays resident between runs. 
This is more of a PoC than - * something that's expected to be used often - * - * Usage: - * - * > scala dotty.tools.dotc.Resident - * - * dotc> "more options and files to compile" - * - * ... - * - * dotc> :reset // reset all options to the ones passed on the command line - * - * ... - * - * dotc> :q // quit - */ -class Resident extends Driver { - - object residentCompiler extends Compiler - - override def sourcesRequired: Boolean = false - - private val quit = ":q" - private val reset = ":reset" - private val prompt = "dotc> " - - private def getLine() = { - Console.print(prompt) - try scala.io.StdIn.readLine() catch { case _: EOFException => quit } - } - - final override def process(args: Array[String], rootCtx: Context): Reporter = { - @tailrec def loop(args: Array[String], prevCtx: Context): Reporter = { - setup(args, prevCtx) match - case Some((files, ctx)) => - inContext(ctx) { - doCompile(residentCompiler, files) - } - var nextCtx: DetachedContext = ctx - var line = getLine() - while (line == reset) { - nextCtx = rootCtx.detach - line = getLine() - } - if line.startsWith(quit) then ctx.reporter - else loop((line split "\\s+").asInstanceOf[Array[String]], nextCtx) - case None => - prevCtx.reporter - } - loop(args, rootCtx) - } -} diff --git a/tests/pos-with-compiler-cc/dotc/Run.scala b/tests/pos-with-compiler-cc/dotc/Run.scala deleted file mode 100644 index 96f8c6a7b06f..000000000000 --- a/tests/pos-with-compiler-cc/dotc/Run.scala +++ /dev/null @@ -1,404 +0,0 @@ -package dotty.tools -package dotc - -import core._ -import Contexts._ -import Periods._ -import Symbols._ -import Scopes._ -import Names.Name -import Denotations.Denotation -import typer.Typer -import typer.ImportInfo.withRootImports -import Decorators._ -import io.AbstractFile -import Phases.unfusedPhases - -import util._ -import reporting.{Suppression, Action, Profile, ActiveProfile, NoProfile} -import reporting.Diagnostic -import reporting.Diagnostic.Warning -import rewrites.Rewrites -import profile.Profiler -import printing.XprintMode -import typer.ImplicitRunInfo -import config.Feature -import StdNames.nme - -import java.io.{BufferedWriter, OutputStreamWriter} -import java.nio.charset.StandardCharsets - -import scala.collection.mutable -import scala.util.control.NonFatal -import scala.io.Codec -import annotation.constructorOnly -import annotation.unchecked.uncheckedCaptures - -/** A compiler run. Exports various methods to compile source files */ -class Run(comp: Compiler, @constructorOnly ictx0: Context) extends ImplicitRunInfo with ConstraintRunInfo { - - val ictx = ictx0.detach - - /** Default timeout to stop looking for further implicit suggestions, in ms. - * This is usually for the first import suggestion; subsequent suggestions - * may get smaller timeouts. @see ImportSuggestions.reduceTimeBudget - */ - private var myImportSuggestionBudget: Int = - Int.MinValue // sentinel value; means whatever is set in command line option - - def importSuggestionBudget = - if myImportSuggestionBudget == Int.MinValue then ictx.settings.XimportSuggestionTimeout.value - else myImportSuggestionBudget - - def importSuggestionBudget_=(x: Int) = - myImportSuggestionBudget = x - - /** If this variable is set to `true`, some core typer operations will - * return immediately. Currently these early abort operations are - * `Typer.typed` and `Implicits.typedImplicit`. 
- */ - @volatile var isCancelled = false - - private var compiling = false - - private var myUnits: List[CompilationUnit] = Nil - private var myUnitsCached: List[CompilationUnit] = Nil - private var myFiles: Set[AbstractFile] = _ - - // `@nowarn` annotations by source file, populated during typer - private val mySuppressions: mutable.LinkedHashMap[SourceFile, mutable.ListBuffer[Suppression]] = mutable.LinkedHashMap.empty - // source files whose `@nowarn` annotations are processed - private val mySuppressionsComplete: mutable.Set[SourceFile] = mutable.Set.empty - // warnings issued before a source file's `@nowarn` annotations are processed, suspended so that `@nowarn` can filter them - private val mySuspendedMessages: mutable.LinkedHashMap[SourceFile, mutable.LinkedHashSet[Warning]] = mutable.LinkedHashMap.empty - - object suppressions: - // When the REPL creates a new run (ReplDriver.compile), parsing is already done in the old context, with the - // previous Run. Parser warnings were suspended in the old run and need to be copied over so they are not lost. - // Same as scala/scala/commit/79ca1408c7. - def initSuspendedMessages(oldRun: Run | Null) = if oldRun != null then - mySuspendedMessages.clear() - mySuspendedMessages ++= oldRun.mySuspendedMessages - - def suppressionsComplete(source: SourceFile) = source == NoSource || mySuppressionsComplete(source) - - def addSuspendedMessage(warning: Warning) = - mySuspendedMessages.getOrElseUpdate(warning.pos.source, mutable.LinkedHashSet.empty) += warning - - def nowarnAction(dia: Diagnostic): Action.Warning.type | Action.Verbose.type | Action.Silent.type = - mySuppressions.getOrElse(dia.pos.source, Nil).find(_.matches(dia)) match { - case Some(s) => - s.markUsed() - if (s.verbose) Action.Verbose - else Action.Silent - case _ => - Action.Warning - } - - def addSuppression(sup: Suppression): Unit = - val source = sup.annotPos.source - mySuppressions.getOrElseUpdate(source, mutable.ListBuffer.empty) += sup - - def reportSuspendedMessages(source: SourceFile)(using Context): Unit = { - // sort suppressions. they are not added in any particular order because of lazy type completion - for (sups <- mySuppressions.get(source)) - mySuppressions(source) = sups.sortBy(sup => 0 - sup.start) - mySuppressionsComplete += source - mySuspendedMessages.remove(source).foreach(_.foreach(ctx.reporter.issueIfNotSuppressed)) - } - - def runFinished(hasErrors: Boolean): Unit = - // report suspended messages (in case the run finished before typer) - mySuspendedMessages.keysIterator.toList.foreach(reportSuspendedMessages) - // report unused nowarns only if all all phases are done - if !hasErrors && ctx.settings.WunusedHas.nowarn then - for { - source <- mySuppressions.keysIterator.toList - sups <- mySuppressions.remove(source) - sup <- sups.reverse - } if (!sup.used) - report.warning("@nowarn annotation does not suppress any warnings", sup.annotPos) - - /** The compilation units currently being compiled, this may return different - * results over time. - */ - def units: List[CompilationUnit] = myUnits - - private def units_=(us: List[CompilationUnit]): Unit = - myUnits = us - - var suspendedUnits: mutable.ListBuffer[CompilationUnit] = mutable.ListBuffer() - - def checkSuspendedUnits(newUnits: List[CompilationUnit])(using Context): Unit = - if newUnits.isEmpty && suspendedUnits.nonEmpty && !ctx.reporter.errorsReported then - val where = - if suspendedUnits.size == 1 then i"in ${suspendedUnits.head}." 
- else i"""among - | - | ${suspendedUnits.toList}%, % - |""" - val enableXprintSuspensionHint = - if ctx.settings.XprintSuspension.value then "" - else "\n\nCompiling with -Xprint-suspension gives more information." - report.error(em"""Cyclic macro dependencies $where - |Compilation stopped since no further progress can be made. - | - |To fix this, place macros in one set of files and their callers in another.$enableXprintSuspensionHint""") - - /** The files currently being compiled (active or suspended). - * This may return different results over time. - * These files do not have to be source files since it's possible to compile - * from TASTY. - */ - def files: Set[AbstractFile] = { - if (myUnits ne myUnitsCached) { - myUnitsCached = myUnits - myFiles = (myUnits ++ suspendedUnits).map(_.source.file).toSet - } - myFiles - } - - /** The source files of all late entered symbols, as a set */ - private var lateFiles = mutable.Set[AbstractFile]() - - /** A cache for static references to packages and classes */ - val staticRefs = util.EqHashMap[Name, Denotation](initialCapacity = 1024) - - /** Actions that need to be performed at the end of the current compilation run */ - @uncheckedCaptures - private var finalizeActions = mutable.ListBuffer[() => Unit]() - - /** Will be set to true if any of the compiled compilation units contains - * a pureFunctions language import. - */ - var pureFunsImportEncountered = false - - /** Will be set to true if any of the compiled compilation units contains - * a captureChecking language import. - */ - var ccImportEncountered = false - - def compile(files: List[AbstractFile]): Unit = - try - val codec = Codec(runContext.settings.encoding.value) - val sources = files.map(runContext.getSource(_, codec)) - compileSources(sources) - catch - case NonFatal(ex) => - if units.nonEmpty then report.echo(i"exception occurred while compiling $units%, %") - else report.echo(s"exception occurred while compiling ${files.map(_.name).mkString(", ")}") - throw ex - - /** TODO: There's a fundamental design problem here: We assemble phases using `fusePhases` - * when we first build the compiler. But we modify them with -Yskip, -Ystop - * on each run. That modification needs to either transform the tree structure, - * or we need to assemble phases on each run, and take -Yskip, -Ystop into - * account. I think the latter would be preferable. 
- */ - def compileSources(sources: List[SourceFile]): Unit = - if (sources forall (_.exists)) { - units = sources.map(CompilationUnit(_)) - compileUnits() - } - - - def compileUnits(us: List[CompilationUnit]): Unit = { - units = us - compileUnits() - } - - def compileUnits(us: List[CompilationUnit], ctx: Context): Unit = { - units = us - compileUnits()(using ctx) - } - - var profile: Profile = NoProfile - - private def compileUnits()(using Context) = Stats.maybeMonitored { - if (!ctx.mode.is(Mode.Interactive)) // IDEs might have multi-threaded access, accesses are synchronized - ctx.base.checkSingleThreaded() - - compiling = true - - profile = - if ctx.settings.Vprofile.value - || !ctx.settings.VprofileSortedBy.value.isEmpty - || ctx.settings.VprofileDetails.value != 0 - then ActiveProfile(ctx.settings.VprofileDetails.value.max(0).min(1000)) - else NoProfile - - // If testing pickler, make sure to stop after pickling phase: - val stopAfter = - if (ctx.settings.YtestPickler.value) List("pickler") - else ctx.settings.YstopAfter.value - - val pluginPlan = ctx.base.addPluginPhases(ctx.base.phasePlan) - val phases = ctx.base.fusePhases(pluginPlan, - ctx.settings.Yskip.value, ctx.settings.YstopBefore.value, stopAfter, ctx.settings.Ycheck.value) - ctx.base.usePhases(phases) - - def runPhases(using Context) = { - var lastPrintedTree: PrintedTree = NoPrintedTree - val profiler = ctx.profiler - var phasesWereAdjusted = false - - for (phase <- ctx.base.allPhases) - if (phase.isRunnable) - Stats.trackTime(s"$phase ms ") { - val start = System.currentTimeMillis - val profileBefore = profiler.beforePhase(phase) - units = phase.runOn(units) - profiler.afterPhase(phase, profileBefore) - if (ctx.settings.Xprint.value.containsPhase(phase)) - for (unit <- units) - lastPrintedTree = - printTree(lastPrintedTree)(using ctx.fresh.setPhase(phase.next).setCompilationUnit(unit)) - report.informTime(s"$phase ", start) - Stats.record(s"total trees at end of $phase", ast.Trees.ntrees) - for (unit <- units) - Stats.record(s"retained typed trees at end of $phase", unit.tpdTree.treeSize) - ctx.typerState.gc() - } - if !phasesWereAdjusted then - phasesWereAdjusted = true - if !Feature.ccEnabledSomewhere then - ctx.base.unlinkPhaseAsDenotTransformer(Phases.checkCapturesPhase.prev) - ctx.base.unlinkPhaseAsDenotTransformer(Phases.checkCapturesPhase) - - profiler.finished() - } - - val runCtx = ctx.fresh - runCtx.setProfiler(Profiler()) - unfusedPhases.foreach(_.initContext(runCtx)) - runPhases(using runCtx) - if (!ctx.reporter.hasErrors) - Rewrites.writeBack() - suppressions.runFinished(hasErrors = ctx.reporter.hasErrors) - while (finalizeActions.nonEmpty) { - val action = finalizeActions.remove(0) - action() - } - compiling = false - } - - /** Enter top-level definitions of classes and objects contained in source file `file`. - * The newly added symbols replace any previously entered symbols. - * If `typeCheck = true`, also run typer on the compilation unit, and set - * `rootTreeOrProvider`. 
- */ - def lateCompile(file: AbstractFile, typeCheck: Boolean)(using Context): Unit = - if (!files.contains(file) && !lateFiles.contains(file)) { - lateFiles += file - - val codec = Codec(ctx.settings.encoding.value) - val unit = CompilationUnit(ctx.getSource(file, codec)) - val unitCtx = runContext.fresh - .setCompilationUnit(unit) - .withRootImports - - def process()(using Context) = - ctx.typer.lateEnterUnit(doTypeCheck => - if typeCheck then - if compiling then finalizeActions += doTypeCheck - else doTypeCheck() - ) - - process()(using unitCtx) - } - - private sealed trait PrintedTree - private /*final*/ case class SomePrintedTree(phase: String, tree: String) extends PrintedTree - private object NoPrintedTree extends PrintedTree - - private def printTree(last: PrintedTree)(using Context): PrintedTree = { - val unit = ctx.compilationUnit - val fusedPhase = ctx.phase.prevMega - val echoHeader = f"[[syntax trees at end of $fusedPhase%25s]] // ${unit.source}" - val tree = if ctx.isAfterTyper then unit.tpdTree else unit.untpdTree - val treeString = fusedPhase.show(tree) - - last match { - case SomePrintedTree(phase, lastTreeString) if lastTreeString == treeString => - report.echo(s"$echoHeader: unchanged since $phase") - last - - case SomePrintedTree(phase, lastTreeString) if ctx.settings.XprintDiff.value || ctx.settings.XprintDiffDel.value => - val diff = DiffUtil.mkColoredCodeDiff(treeString, lastTreeString, ctx.settings.XprintDiffDel.value) - report.echo(s"$echoHeader\n$diff\n") - SomePrintedTree(fusedPhase.phaseName, treeString) - - case _ => - report.echo(s"$echoHeader\n$treeString\n") - SomePrintedTree(fusedPhase.phaseName, treeString) - } - } - - def compileFromStrings(scalaSources: List[String], javaSources: List[String] = Nil): Unit = { - def sourceFile(source: String, isJava: Boolean): SourceFile = { - val uuid = java.util.UUID.randomUUID().toString - val ext = if (isJava) "java" else "scala" - val name = s"compileFromString-$uuid.$ext" - SourceFile.virtual(name, source) - } - val sources = - scalaSources.map(sourceFile(_, isJava = false)) ++ - javaSources.map(sourceFile(_, isJava = true)) - - compileSources(sources) - } - - /** Print summary of warnings and errors encountered */ - def printSummary(): Unit = { - printMaxConstraint() - val r = runContext.reporter - if !r.errorsReported then - profile.printSummary() - r.summarizeUnreportedWarnings() - r.printSummary() - } - - override def reset(): Unit = { - super[ImplicitRunInfo].reset() - super[ConstraintRunInfo].reset() - myCtx = null - myUnits = Nil - myUnitsCached = Nil - } - - /** Produces the following contexts, from outermost to innermost - * - * bootStrap: A context with next available runId and a scope consisting of - * the RootPackage _root_ - * start A context with RootClass as owner and the necessary initializations - * for type checking. 
- * imports For each element of RootImports, an import context - */ - protected def rootContext(using Context): DetachedContext = { - ctx.initialize() - ctx.base.setPhasePlan(comp.phases) - val rootScope = new MutableScope(0) - val bootstrap = ctx.fresh - .setPeriod(Period(comp.nextRunId, FirstPhaseId)) - .setScope(rootScope) - rootScope.enter(ctx.definitions.RootPackage)(using bootstrap) - var start = bootstrap.fresh - .setOwner(defn.RootClass) - .setTyper(new Typer) - .addMode(Mode.ImplicitsEnabled) - .setTyperState(ctx.typerState.fresh(ctx.reporter)) - if ctx.settings.YexplicitNulls.value && !Feature.enabledBySetting(nme.unsafeNulls) then - start = start.addMode(Mode.SafeNulls) - ctx.initialize()(using start) // re-initialize the base context with start - - // `this` must be unchecked for safe initialization because by being passed to setRun during - // initialization, it is not yet considered fully initialized by the initialization checker - start.setRun(this: @unchecked).detach - } - - private var myCtx: DetachedContext | Null = rootContext(using ictx) - - /** The context created for this run */ - given runContext[Dummy_so_its_a_def]: DetachedContext = myCtx.nn - assert(runContext.runId <= Periods.MaxPossibleRunId) -} diff --git a/tests/pos-with-compiler-cc/dotc/ScalacCommand.scala b/tests/pos-with-compiler-cc/dotc/ScalacCommand.scala deleted file mode 100644 index 2e0d9a08f25d..000000000000 --- a/tests/pos-with-compiler-cc/dotc/ScalacCommand.scala +++ /dev/null @@ -1,9 +0,0 @@ -package dotty.tools.dotc - -import config.Properties._ -import config.CompilerCommand - -object ScalacCommand extends CompilerCommand: - override def cmdName: String = "scalac" - override def versionMsg: String = s"Scala compiler $versionString -- $copyrightString" - override def ifErrorsMsg: String = " scalac -help gives more information" diff --git a/tests/pos-with-compiler-cc/dotc/ast/CheckTrees.scala.disabled b/tests/pos-with-compiler-cc/dotc/ast/CheckTrees.scala.disabled deleted file mode 100644 index 6bf7530faf24..000000000000 --- a/tests/pos-with-compiler-cc/dotc/ast/CheckTrees.scala.disabled +++ /dev/null @@ -1,258 +0,0 @@ -package dotty.tools -package dotc -package ast - -import core._ -import util.Spans._, Types._, Contexts._, Constants._, Names._, Flags._ -import SymDenotations._, Symbols._, StdNames._, Annotations._, Trees._ - -// TODO: revise, integrate in a checking phase. 
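// The object that follows (removed by this change) uses an assertion-style checking
// pattern: a `check` helper with a lazily evaluated message, plus one structural
// traversal that asserts shape-specific invariants for each kind of tree. A minimal,
// self-contained sketch of the same pattern over a toy ADT is shown below; the `Expr`
// type and its invariants are illustrative only and are not compiler API.
object CheckExprs {
  sealed trait Expr
  case class Lit(value: Int) extends Expr
  case class Add(left: Expr, right: Expr) extends Expr
  case class Div(left: Expr, right: Expr) extends Expr

  // Lazy-message assertion helper, mirroring the `check` used below
  def check(p: Boolean, msg: => String = ""): Unit = assert(p, msg)

  // One traversal, one invariant per tree shape
  def checkExpr(e: Expr): Unit = e match {
    case Lit(_)    => ()
    case Add(l, r) => checkExpr(l); checkExpr(r)
    case Div(l, r) =>
      checkExpr(l)
      checkExpr(r)
      check(r != Lit(0), s"division by literal zero in $e")
  }
}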
-object CheckTrees { - - import tpd._ - - def check(p: Boolean, msg: => String = "")(using Context): Unit = assert(p, msg) - - def checkTypeArg(arg: Tree, bounds: TypeBounds)(using Context): Unit = { - check(arg.isValueType) - check(bounds contains arg.tpe) - } - - def escapingRefs(block: Block)(using Context): collection.Set[NamedType] = { - var hoisted: Set[Symbol] = Set() - lazy val locals = ctx.typeAssigner.localSyms(block.stats).toSet - def isLocal(sym: Symbol): Boolean = - (locals contains sym) && !isHoistableClass(sym) - def isHoistableClass(sym: Symbol) = - sym.isClass && { - (hoisted contains sym) || { - hoisted += sym - !classLeaks(sym.asClass) - } - } - def leakingTypes(tp: Type): collection.Set[NamedType] = - tp namedPartsWith (tp => isLocal(tp.symbol)) - def typeLeaks(tp: Type): Boolean = leakingTypes(tp).nonEmpty - def classLeaks(sym: ClassSymbol): Boolean = - (ctx.owner is Method) || // can't hoist classes out of method bodies - (sym.info.parents exists typeLeaks) || - (sym.decls.toList exists (t => typeLeaks(t.info))) - leakingTypes(block.tpe) - } - - def checkType(tree: Tree)(using Context): Unit = tree match { - case Ident(name) => - case Select(qualifier, name) => - check(qualifier.isValue) - check(qualifier.tpe =:= tree.tpe.normalizedPrefix) - val denot = qualifier.tpe.member(name) - check(denot.exists) - check(denot.hasAltWith(_.symbol == tree.symbol)) - case This(cls) => - case Super(qual, mixin) => - check(qual.isValue) - val cls = qual.tpe.typeSymbol - check(cls.isClass) - case Apply(fn, args) => - def checkArg(arg: Tree, name: Name, formal: Type): Unit = { - arg match { - case NamedArg(argName, _) => - check(argName == name) - case _ => - check(arg.isValue) - } - check(arg.tpe <:< formal) - } - val MethodType(paramNames, paramTypes) = fn.tpe.widen // checked already at construction - args.lazyZip(paramNames).lazyZip(paramTypes) foreach checkArg - case TypeApply(fn, args) => - val pt @ PolyType(_) = fn.tpe.widen // checked already at construction - args.lazyZip(pt.instantiateBounds(args map (_.tpe))) foreach checkTypeArg - case Literal(const: Constant) => - case New(tpt) => - check(tpt.isValueType) - val cls = tpt.tpe.typeSymbol - check(cls.isClass) - check(!(cls is AbstractOrTrait)) - case Pair(left, right) => - check(left.isValue) - check(right.isValue) - case Typed(expr, tpt) => - check(tpt.isValueType) - expr.tpe.widen match { - case tp: MethodType => - val cls = tpt.tpe.typeSymbol - check(cls.isClass) - check((cls is Trait) || - cls.primaryConstructor.info.paramTypess.flatten.isEmpty) - val absMembers = tpt.tpe.abstractTermMembers - check(absMembers.size == 1) - check(tp <:< absMembers.head.info) - case _ => - check(expr.isValueOrPattern) - check(expr.tpe <:< tpt.tpe.translateParameterized(defn.RepeatedParamClass, defn.SeqClass)) - } - case NamedArg(name, arg) => - case Assign(lhs, rhs) => - check(lhs.isValue); check(rhs.isValue) - lhs.tpe match { - case ltpe: TermRef => - check(ltpe.symbol is Mutable) - case _ => - check(false) - } - check(rhs.tpe <:< lhs.tpe.widen) - case tree @ Block(stats, expr) => - check(expr.isValue) - check(escapingRefs(tree).isEmpty) - case If(cond, thenp, elsep) => - check(cond.isValue); check(thenp.isValue); check(elsep.isValue) - check(cond.tpe isRef defn.BooleanClass) - case Closure(env, meth, target) => - meth.tpe.widen match { - case mt @ MethodType(_, paramTypes) => - if (target.isEmpty) { - check(env.length < paramTypes.length) - for ((arg, formal) <- env zip paramTypes) - check(arg.tpe <:< formal) - } - else - // env is stored 
in class, not method - target.tpe match { - case SAMType(targetMeth) => - check(mt <:< targetMeth.info) - } - } - case Match(selector, cases) => - check(selector.isValue) - // are any checks that relate selector and patterns desirable? - case CaseDef(pat, guard, body) => - check(pat.isValueOrPattern); check(guard.isValue); check(body.isValue) - check(guard.tpe.derivesFrom(defn.BooleanClass)) - case Return(expr, from) => - check(expr.isValue); check(from.isTerm) - check(from.tpe.termSymbol.isRealMethod) - case Try(block, handler, finalizer) => - check(block.isTerm) - check(finalizer.isTerm) - check(handler.isTerm) - check(handler.tpe derivesFrom defn.FunctionClass(1)) - check(handler.tpe.baseArgInfos(defn.FunctionClass(1)).head <:< defn.ThrowableType) - case Throw(expr) => - check(expr.isValue) - check(expr.tpe.derivesFrom(defn.ThrowableClass)) - case SeqLiteral(elems) => - val elemtp = tree.tpe.elemType - for (elem <- elems) { - check(elem.isValue) - check(elem.tpe <:< elemtp) - } - case TypeTree(original) => - if (!original.isEmpty) { - check(original.isValueType) - check(original.tpe == tree.tpe) - } - case SingletonTypeTree(ref) => - check(ref.isValue) - check(ref.symbol.isStable) - case SelectFromTypeTree(qualifier, name) => - check(qualifier.isValueType) - check(qualifier.tpe =:= tree.tpe.normalizedPrefix) - val denot = qualifier.tpe.member(name) - check(denot.exists) - check(denot.symbol == tree.symbol) - case AndTypeTree(left, right) => - check(left.isValueType); check(right.isValueType) - case OrTypeTree(left, right) => - check(left.isValueType); check(right.isValueType) - case RefinedTypeTree(tpt, refinements) => - check(tpt.isValueType) - def checkRefinements(forbidden: Set[Symbol], rs: List[Tree]): Unit = rs match { - case r :: rs1 => - val rsym = r.symbol - check(rsym.isTerm || rsym.isAbstractOrAliasType) - if (rsym.isAbstractType) check(tpt.tpe.member(rsym.name).exists) - check(rsym.info forallParts { - case nt: NamedType => !(forbidden contains nt.symbol) - case _ => true - }) - checkRefinements(forbidden - rsym, rs1) - case nil => - } - checkRefinements(ctx.typeAssigner.localSyms(refinements).toSet, refinements) - case AppliedTypeTree(tpt, args) => - check(tpt.isValueType) - val tparams = tpt.tpe.typeParams - check(sameLength(tparams, args)) - args.lazyZip(tparams map (_.info.bounds)) foreach checkTypeArg - case TypeBoundsTree(lo, hi) => - check(lo.isValueType); check(hi.isValueType) - check(lo.tpe <:< hi.tpe) - case Bind(sym, body) => - check(body.isValueOrPattern) - check(!(tree.symbol is Method)) - body match { - case Ident(nme.WILDCARD) => - case _ => check(body.tpe.widen =:= tree.symbol.info) - } - case Alternative(alts) => - for (alt <- alts) check(alt.isValueOrPattern) - case UnApply(fun, implicits, args) => // todo: review - check(fun.isTerm) - for (arg <- args) check(arg.isValueOrPattern) - val funtpe @ MethodType(_, _) = fun.tpe.widen - fun.symbol.name match { // check arg arity - case nme.unapplySeq => - // args need to be wrapped in (...: _*) - check(args.length == 1) - check(args.head.isInstanceOf[SeqLiteral]) - case nme.unapply => - val rtp = funtpe.resultType - if (rtp isRef defn.BooleanClass) - check(args.isEmpty) - else { - check(rtp isRef defn.OptionClass) - val normArgs = rtp.argTypesHi match { - case optionArg :: Nil => - optionArg.argTypesHi match { - case Nil => - optionArg :: Nil - case tupleArgs if defn.isTupleNType(optionArg) => - tupleArgs - } - case _ => - check(false) - Nil - } - check(sameLength(normArgs, args)) - } - } - case ValDef(mods, name, 
tpt, rhs) => - check(!(tree.symbol is Method)) - if (!rhs.isEmpty) { - check(rhs.isValue) - check(rhs.tpe <:< tpt.tpe) - } - case DefDef(mods, name, tparams, vparamss, tpt, rhs) => - check(tree.symbol is Method) - if (!rhs.isEmpty) { - check(rhs.isValue) - check(rhs.tpe <:< tpt.tpe) - } - case TypeDef(mods, name, tpt) => - check(tpt.isInstanceOf[Template] || tpt.tpe.isInstanceOf[TypeBounds]) - case Template(constr, parents, selfType, body) => - case Import(expr, selectors) => - check(expr.isValue) - check(expr.tpe.termSymbol.isStable) - case PackageDef(pid, stats) => - check(pid.isTerm) - check(pid.symbol is Package) - case Annotated(annot, arg) => - check(annot.isInstantiation) - check(annot.symbol.owner.isSubClass(defn.AnnotationClass)) - check(arg.isValueType || arg.isValue) - case EmptyTree => - } -} - diff --git a/tests/pos-with-compiler-cc/dotc/ast/Desugar.scala b/tests/pos-with-compiler-cc/dotc/ast/Desugar.scala deleted file mode 100644 index 390e58d89245..000000000000 --- a/tests/pos-with-compiler-cc/dotc/ast/Desugar.scala +++ /dev/null @@ -1,1979 +0,0 @@ -package dotty.tools -package dotc -package ast - -import core._ -import util.Spans._, Types._, Contexts._, Constants._, Names._, NameOps._, Flags._ -import Symbols._, StdNames._, Trees._, ContextOps._ -import Decorators._, transform.SymUtils._ -import Annotations.Annotation -import NameKinds.{UniqueName, EvidenceParamName, DefaultGetterName, WildcardParamName} -import typer.{Namer, Checking} -import util.{Property, SourceFile, SourcePosition, Chars} -import config.Feature.{sourceVersion, migrateTo3, enabled} -import config.SourceVersion._ -import collection.mutable.ListBuffer -import reporting._ -import annotation.constructorOnly -import printing.Formatting.hl -import config.Printers - -import scala.annotation.internal.sharable - -object desugar { - import untpd._ - import DesugarEnums._ - - /** An attachment for companion modules of classes that have a `derives` clause. - * The position value indicates the start position of the template of the - * deriving class. - */ - val DerivingCompanion: Property.Key[SourcePosition] = Property.Key() - - /** An attachment for match expressions generated from a PatDef or GenFrom. - * Value of key == one of IrrefutablePatDef, IrrefutableGenFrom - */ - val CheckIrrefutable: Property.Key[MatchCheck] = Property.StickyKey() - - /** A multi-line infix operation with the infix operator starting a new line. - * Used for explaining potential errors. - */ - val MultiLineInfix: Property.Key[Unit] = Property.StickyKey() - - /** An attachment key to indicate that a ValDef originated from parameter untupling. - */ - val UntupledParam: Property.Key[Unit] = Property.StickyKey() - - /** What static check should be applied to a Match? */ - enum MatchCheck { - case None, Exhaustive, IrrefutablePatDef, IrrefutableGenFrom - } - - /** Is `name` the name of a method that can be invalidated as a compiler-generated - * case class method if it clashes with a user-defined method? - */ - def isRetractableCaseClassMethodName(name: Name)(using Context): Boolean = name match { - case nme.apply | nme.unapply | nme.unapplySeq | nme.copy => true - case DefaultGetterName(nme.copy, _) => true - case _ => false - } - - /** Is `name` the name of a method that is added unconditionally to case classes? 
*/ - def isDesugaredCaseClassMethodName(name: Name)(using Context): Boolean = - isRetractableCaseClassMethodName(name) || name.isSelectorName - -// ----- DerivedTypeTrees ----------------------------------- - - class SetterParamTree(implicit @constructorOnly src: SourceFile) extends DerivedTypeTree { - def derivedTree(sym: Symbol)(using Context): tpd.TypeTree = tpd.TypeTree(sym.info.resultType) - } - - class TypeRefTree(implicit @constructorOnly src: SourceFile) extends DerivedTypeTree { - def derivedTree(sym: Symbol)(using Context): tpd.TypeTree = tpd.TypeTree(sym.typeRef) - } - - class TermRefTree(implicit @constructorOnly src: SourceFile) extends DerivedTypeTree { - def derivedTree(sym: Symbol)(using Context): tpd.Tree = tpd.ref(sym) - } - - /** A type tree that computes its type from an existing parameter. */ - class DerivedFromParamTree()(implicit @constructorOnly src: SourceFile) extends DerivedTypeTree { - - /** Complete the appropriate constructors so that OriginalSymbol attachments are - * pushed to DerivedTypeTrees. - */ - override def ensureCompletions(using Context): Unit = { - def completeConstructor(sym: Symbol) = - sym.infoOrCompleter match { - case completer: Namer#ClassCompleter => - completer.completeConstructor(sym) - case _ => - } - - if (!ctx.owner.is(Package)) - if (ctx.owner.isClass) { - completeConstructor(ctx.owner) - if (ctx.owner.is(ModuleClass)) - completeConstructor(ctx.owner.linkedClass) - } - else ensureCompletions(using ctx.outer) - } - - /** Return info of original symbol, where all references to siblings of the - * original symbol (i.e. sibling and original symbol have the same owner) - * are rewired to same-named parameters or accessors in the scope enclosing - * the current scope. The current scope is the scope owned by the defined symbol - * itself, that's why we have to look one scope further out. If the resulting - * type is an alias type, dealias it. This is necessary because the - * accessor of a type parameter is a private type alias that cannot be accessed - * from subclasses. 
- */ - def derivedTree(sym: Symbol)(using Context): tpd.TypeTree = { - val dctx = ctx.detach - val relocate = new TypeMap(using dctx) { - val originalOwner = sym.owner - def apply(tp: Type) = tp match { - case tp: NamedType if tp.symbol.exists && (tp.symbol.owner eq originalOwner) => - val defctx = mapCtx.detach.outersIterator.dropWhile(_.scope eq mapCtx.scope).next() - var local = defctx.denotNamed(tp.name).suchThat(_.isParamOrAccessor).symbol - if (local.exists) (defctx.owner.thisType select local).dealiasKeepAnnots - else { - def msg = - em"no matching symbol for ${tp.symbol.showLocated} in ${defctx.owner} / ${defctx.effectiveScope.toList}" - ErrorType(msg).assertingErrorsReported(msg) - } - case _ => - mapOver(tp) - } - } - tpd.TypeTree(relocate(sym.info)) - } - } - - /** A type definition copied from `tdef` with a rhs typetree derived from it */ - def derivedTypeParam(tdef: TypeDef)(using Context): TypeDef = - cpy.TypeDef(tdef)( - rhs = DerivedFromParamTree().withSpan(tdef.rhs.span).watching(tdef) - ) - - /** A derived type definition watching `sym` */ - def derivedTypeParamWithVariance(sym: TypeSymbol)(using Context): TypeDef = - val variance = VarianceFlags & sym.flags - TypeDef(sym.name, DerivedFromParamTree().watching(sym)).withFlags(TypeParam | Synthetic | variance) - - /** A value definition copied from `vdef` with a tpt typetree derived from it */ - def derivedTermParam(vdef: ValDef)(using Context): ValDef = - cpy.ValDef(vdef)( - tpt = DerivedFromParamTree().withSpan(vdef.tpt.span).watching(vdef)) - -// ----- Desugar methods ------------------------------------------------- - - /** Setter generation is needed for: - * - non-private class members - * - all trait members - * - all package object members - */ - def isSetterNeeded(valDef: ValDef)(using Context): Boolean = { - val mods = valDef.mods - mods.is(Mutable) - && ctx.owner.isClass - && (!mods.is(Private) || ctx.owner.is(Trait) || ctx.owner.isPackageObject) - } - - /** var x: Int = expr - * ==> - * def x: Int = expr - * def x_=($1: ): Unit = () - * - * Generate setter where needed - */ - def valDef(vdef0: ValDef)(using Context): Tree = - val vdef @ ValDef(_, tpt, rhs) = vdef0 - val valName = normalizeName(vdef, tpt).asTermName - var mods1 = vdef.mods - - def dropInto(tpt: Tree): Tree = tpt match - case Into(tpt1) => - mods1 = vdef.mods.withAddedAnnotation( - TypedSplice( - Annotation(defn.AllowConversionsAnnot).tree.withSpan(tpt.span.startPos))) - tpt1 - case ByNameTypeTree(tpt1) => - cpy.ByNameTypeTree(tpt)(dropInto(tpt1)) - case PostfixOp(tpt1, op) if op.name == tpnme.raw.STAR => - cpy.PostfixOp(tpt)(dropInto(tpt1), op) - case _ => - tpt - - val vdef1 = cpy.ValDef(vdef)(name = valName, tpt = dropInto(tpt)) - .withMods(mods1) - - if isSetterNeeded(vdef) then - val setterParam = makeSyntheticParameter(tpt = SetterParamTree().watching(vdef)) - // The rhs gets filled in later, when field is generated and getter has parameters (see Memoize miniphase) - val setterRhs = if (vdef.rhs.isEmpty) EmptyTree else unitLiteral - val setter = cpy.DefDef(vdef)( - name = valName.setterName, - paramss = (setterParam :: Nil) :: Nil, - tpt = TypeTree(defn.UnitType), - rhs = setterRhs - ).withMods((vdef.mods | Accessor) &~ (CaseAccessor | GivenOrImplicit | Lazy)) - .dropEndMarker() // the end marker should only appear on the getter definition - Thicket(vdef1, setter) - else vdef1 - end valDef - - def makeImplicitParameters(tpts: List[Tree], implicitFlag: FlagSet, forPrimaryConstructor: Boolean = false)(using Context): List[ValDef] = - for (tpt 
<- tpts) yield { - val paramFlags: FlagSet = if (forPrimaryConstructor) LocalParamAccessor else Param - val epname = EvidenceParamName.fresh() - ValDef(epname, tpt, EmptyTree).withFlags(paramFlags | implicitFlag) - } - - def mapParamss(paramss: List[ParamClause]) - (mapTypeParam: TypeDef => TypeDef) - (mapTermParam: ValDef => ValDef)(using Context): List[ParamClause] = - paramss.mapConserve { - case TypeDefs(tparams) => tparams.mapConserve(mapTypeParam) - case ValDefs(vparams) => vparams.mapConserve(mapTermParam) - case _ => unreachable() - } - - /** 1. Expand context bounds to evidence params. E.g., - * - * def f[T >: L <: H : B](params) - * ==> - * def f[T >: L <: H](params)(implicit evidence$0: B[T]) - * - * 2. Expand default arguments to default getters. E.g, - * - * def f[T: B](x: Int = 1)(y: String = x + "m") = ... - * ==> - * def f[T](x: Int)(y: String)(implicit evidence$0: B[T]) = ... - * def f$default$1[T] = 1 - * def f$default$2[T](x: Int) = x + "m" - */ - private def defDef(meth: DefDef, isPrimaryConstructor: Boolean = false)(using Context): Tree = - addDefaultGetters(elimContextBounds(meth, isPrimaryConstructor)) - - private def elimContextBounds(meth: DefDef, isPrimaryConstructor: Boolean)(using Context): DefDef = - val DefDef(_, paramss, tpt, rhs) = meth - val evidenceParamBuf = ListBuffer[ValDef]() - - def desugarContextBounds(rhs: Tree): Tree = rhs match - case ContextBounds(tbounds, cxbounds) => - val iflag = if sourceVersion.isAtLeast(`future`) then Given else Implicit - evidenceParamBuf ++= makeImplicitParameters( - cxbounds, iflag, forPrimaryConstructor = isPrimaryConstructor) - tbounds - case LambdaTypeTree(tparams, body) => - cpy.LambdaTypeTree(rhs)(tparams, desugarContextBounds(body)) - case _ => - rhs - - val paramssNoContextBounds = - mapParamss(paramss) { - tparam => cpy.TypeDef(tparam)(rhs = desugarContextBounds(tparam.rhs)) - }(identity) - - rhs match - case MacroTree(call) => - cpy.DefDef(meth)(rhs = call).withMods(meth.mods | Macro | Erased) - case _ => - addEvidenceParams( - cpy.DefDef(meth)( - name = normalizeName(meth, tpt).asTermName, - paramss = paramssNoContextBounds), - evidenceParamBuf.toList) - end elimContextBounds - - def addDefaultGetters(meth: DefDef)(using Context): Tree = - - /** The longest prefix of parameter lists in paramss whose total number of - * ValDefs does not exceed `n` - */ - def takeUpTo(paramss: List[ParamClause], n: Int): List[ParamClause] = paramss match - case ValDefs(vparams) :: paramss1 => - val len = vparams.length - if len <= n then vparams :: takeUpTo(paramss1, n - len) else Nil - case TypeDefs(tparams) :: paramss1 => - tparams :: takeUpTo(paramss1, n) - case _ => - Nil - - def dropContextBounds(tparam: TypeDef): TypeDef = - def dropInRhs(rhs: Tree): Tree = rhs match - case ContextBounds(tbounds, _) => - tbounds - case rhs @ LambdaTypeTree(tparams, body) => - cpy.LambdaTypeTree(rhs)(tparams, dropInRhs(body)) - case _ => - rhs - cpy.TypeDef(tparam)(rhs = dropInRhs(tparam.rhs)) - - def paramssNoRHS = mapParamss(meth.paramss)(identity) { - vparam => - if vparam.rhs.isEmpty then vparam - else cpy.ValDef(vparam)(rhs = EmptyTree).withMods(vparam.mods | HasDefault) - } - - def getterParamss(n: Int): List[ParamClause] = - mapParamss(takeUpTo(paramssNoRHS, n)) { - tparam => dropContextBounds(toDefParam(tparam, keepAnnotations = true)) - } { - vparam => toDefParam(vparam, keepAnnotations = true, keepDefault = false) - } - - def defaultGetters(paramss: List[ParamClause], n: Int): List[DefDef] = paramss match - case ValDefs(vparam :: 
vparams) :: paramss1 => - def defaultGetter: DefDef = - DefDef( - name = DefaultGetterName(meth.name, n), - paramss = getterParamss(n), - tpt = TypeTree(), - rhs = vparam.rhs - ) - .withMods(Modifiers( - meth.mods.flags & (AccessFlags | Synthetic) | (vparam.mods.flags & Inline), - meth.mods.privateWithin)) - val rest = defaultGetters(vparams :: paramss1, n + 1) - if vparam.rhs.isEmpty then rest else defaultGetter :: rest - case _ :: paramss1 => // skip empty parameter lists and type parameters - defaultGetters(paramss1, n) - case Nil => - Nil - - val defGetters = defaultGetters(meth.paramss, 0) - if defGetters.isEmpty then meth - else Thicket(cpy.DefDef(meth)(paramss = paramssNoRHS) :: defGetters) - end addDefaultGetters - - /** Add an explicit ascription to the `expectedTpt` to every tail splice. - * - * - `'{ x }` -> `'{ x }` - * - `'{ $x }` -> `'{ $x: T }` - * - `'{ if (...) $x else $y }` -> `'{ if (...) ($x: T) else ($y: T) }` - * - * Note that the splice `$t: T` will be typed as `${t: Expr[T]}` - */ - def quotedPattern(tree: untpd.Tree, expectedTpt: untpd.Tree)(using Context): untpd.Tree = { - def adaptToExpectedTpt(tree: untpd.Tree): untpd.Tree = tree match { - // Add the expected type as an ascription - case _: untpd.Splice => - untpd.Typed(tree, expectedTpt).withSpan(tree.span) - case Typed(expr: untpd.Splice, tpt) => - cpy.Typed(tree)(expr, untpd.makeAndType(tpt, expectedTpt).withSpan(tpt.span)) - - // Propagate down the expected type to the leafs of the expression - case Block(stats, expr) => - cpy.Block(tree)(stats, adaptToExpectedTpt(expr)) - case If(cond, thenp, elsep) => - cpy.If(tree)(cond, adaptToExpectedTpt(thenp), adaptToExpectedTpt(elsep)) - case untpd.Parens(expr) => - cpy.Parens(tree)(adaptToExpectedTpt(expr)) - case Match(selector, cases) => - val newCases = cases.map(cdef => cpy.CaseDef(cdef)(body = adaptToExpectedTpt(cdef.body))) - cpy.Match(tree)(selector, newCases) - case untpd.ParsedTry(expr, handler, finalizer) => - cpy.ParsedTry(tree)(adaptToExpectedTpt(expr), adaptToExpectedTpt(handler), finalizer) - - // Tree does not need to be ascribed - case _ => - tree - } - adaptToExpectedTpt(tree) - } - - /** Add all evidence parameters in `params` as implicit parameters to `meth`. - * If the parameters of `meth` end in an implicit parameter list or using clause, - * evidence parameters are added in front of that list. Otherwise they are added - * as a separate parameter clause. 
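 *
 *  For example (names are illustrative only), a method declared as
 *
 *      def f[T: Ordering](x: T)(using ctx: Ctx): T
 *
 *  already ends in a using clause, so the evidence parameter produced for the
 *  context bound is merged in front of that clause rather than forming a new one:
 *
 *      def f[T](x: T)(using evidence$0: Ordering[T], ctx: Ctx): T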
- */ - private def addEvidenceParams(meth: DefDef, params: List[ValDef])(using Context): DefDef = - params match - case Nil => - meth - case evidenceParams => - val paramss1 = meth.paramss.reverse match - case ValDefs(vparams @ (vparam :: _)) :: rparamss if vparam.mods.isOneOf(GivenOrImplicit) => - ((evidenceParams ++ vparams) :: rparamss).reverse - case _ => - meth.paramss :+ evidenceParams - cpy.DefDef(meth)(paramss = paramss1) - - /** The implicit evidence parameters of `meth`, as generated by `desugar.defDef` */ - private def evidenceParams(meth: DefDef)(using Context): List[ValDef] = - meth.paramss.reverse match { - case ValDefs(vparams @ (vparam :: _)) :: _ if vparam.mods.isOneOf(GivenOrImplicit) => - vparams.takeWhile(_.name.is(EvidenceParamName)) - case _ => - Nil - } - - @sharable private val synthetic = Modifiers(Synthetic) - - private def toDefParam(tparam: TypeDef, keepAnnotations: Boolean): TypeDef = { - var mods = tparam.rawMods - if (!keepAnnotations) mods = mods.withAnnotations(Nil) - tparam.withMods(mods & EmptyFlags | Param) - } - private def toDefParam(vparam: ValDef, keepAnnotations: Boolean, keepDefault: Boolean): ValDef = { - var mods = vparam.rawMods - if (!keepAnnotations) mods = mods.withAnnotations(Nil) - val hasDefault = if keepDefault then HasDefault else EmptyFlags - vparam.withMods(mods & (GivenOrImplicit | Erased | hasDefault) | Param) - } - - def mkApply(fn: Tree, paramss: List[ParamClause])(using Context): Tree = - paramss.foldLeft(fn) { (fn, params) => params match - case TypeDefs(params) => - TypeApply(fn, params.map(refOfDef)) - case (vparam: ValDef) :: _ if vparam.mods.is(Given) => - Apply(fn, params.map(refOfDef)).setApplyKind(ApplyKind.Using) - case _ => - Apply(fn, params.map(refOfDef)) - } - - /** The expansion of a class definition. See inline comments for what is involved */ - def classDef(cdef: TypeDef)(using Context): Tree = { - val impl @ Template(constr0, _, self, _) = cdef.rhs: @unchecked - val className = normalizeName(cdef, impl).asTypeName - val parents = impl.parents - val mods = cdef.mods - val companionMods = mods - .withFlags((mods.flags & (AccessFlags | Final)).toCommonFlags) - .withMods(Nil) - .withAnnotations(Nil) - - var defaultGetters: List[Tree] = Nil - - def decompose(ddef: Tree): DefDef = ddef match { - case meth: DefDef => meth - case Thicket((meth: DefDef) :: defaults) => - defaultGetters = defaults - meth - } - - val constr1 = decompose(defDef(impl.constr, isPrimaryConstructor = true)) - - // The original type and value parameters in the constructor already have the flags - // needed to be type members (i.e. param, and possibly also private and local unless - // prefixed by type or val). `tparams` and `vparamss` are the type parameters that - // go in `constr`, the constructor after desugaring. - - /** Does `tree' look like a reference to AnyVal? 
Temporary test before we have inline classes */ - def isAnyVal(tree: Tree): Boolean = tree match { - case Ident(tpnme.AnyVal) => true - case Select(qual, tpnme.AnyVal) => isScala(qual) - case _ => false - } - def isScala(tree: Tree): Boolean = tree match { - case Ident(nme.scala) => true - case Select(Ident(nme.ROOTPKG), nme.scala) => true - case _ => false - } - - def namePos = cdef.sourcePos.withSpan(cdef.nameSpan) - - val isObject = mods.is(Module) - val isCaseClass = mods.is(Case) && !isObject - val isCaseObject = mods.is(Case) && isObject - val isEnum = mods.isEnumClass && !mods.is(Module) - def isEnumCase = mods.isEnumCase - def isNonEnumCase = !isEnumCase && (isCaseClass || isCaseObject) - val isValueClass = parents.nonEmpty && isAnyVal(parents.head) - // This is not watertight, but `extends AnyVal` will be replaced by `inline` later. - - val originalTparams = constr1.leadingTypeParams - val originalVparamss = asTermOnly(constr1.trailingParamss) - lazy val derivedEnumParams = enumClass.typeParams.map(derivedTypeParamWithVariance) - val impliedTparams = - if (isEnumCase) { - val tparamReferenced = typeParamIsReferenced( - enumClass.typeParams, originalTparams, originalVparamss, parents) - if (originalTparams.isEmpty && (parents.isEmpty || tparamReferenced)) - derivedEnumParams.map(tdef => tdef.withFlags(tdef.mods.flags | PrivateLocal)) - else originalTparams - } - else originalTparams - - if mods.is(Trait) then - for vparams <- originalVparamss; vparam <- vparams do - if isByNameType(vparam.tpt) then - report.error(em"implementation restriction: traits cannot have by name parameters", vparam.srcPos) - - // Annotations on class _type_ parameters are set on the derived parameters - // but not on the constructor parameters. The reverse is true for - // annotations on class _value_ parameters. - val constrTparams = impliedTparams.map(toDefParam(_, keepAnnotations = false)) - val constrVparamss = - if (originalVparamss.isEmpty) { // ensure parameter list is non-empty - if (isCaseClass) - report.error(CaseClassMissingParamList(cdef), namePos) - ListOfNil - } - else if (isCaseClass && originalVparamss.head.exists(_.mods.isOneOf(GivenOrImplicit))) { - report.error(CaseClassMissingNonImplicitParamList(cdef), namePos) - ListOfNil - } - else originalVparamss.nestedMap(toDefParam(_, keepAnnotations = true, keepDefault = true)) - val derivedTparams = - constrTparams.zipWithConserve(impliedTparams)((tparam, impliedParam) => - derivedTypeParam(tparam).withAnnotations(impliedParam.mods.annotations)) - val derivedVparamss = - constrVparamss.nestedMap(vparam => - derivedTermParam(vparam).withAnnotations(Nil)) - - val constr = cpy.DefDef(constr1)(paramss = joinParams(constrTparams, constrVparamss)) - - val (normalizedBody, enumCases, enumCompanionRef) = { - // Add constructor type parameters and evidence implicit parameters - // to auxiliary constructors; set defaultGetters as a side effect. 
- def expandConstructor(tree: Tree) = tree match { - case ddef: DefDef if ddef.name.isConstructorName => - decompose( - defDef( - addEvidenceParams( - cpy.DefDef(ddef)(paramss = joinParams(constrTparams, ddef.paramss)), - evidenceParams(constr1).map(toDefParam(_, keepAnnotations = false, keepDefault = false))))) - case stat => - stat - } - // The Identifiers defined by a case - def caseIds(tree: Tree): List[Ident] = tree match { - case tree: MemberDef => Ident(tree.name.toTermName) :: Nil - case PatDef(_, ids: List[Ident] @ unchecked, _, _) => ids - } - - val stats0 = impl.body.map(expandConstructor) - val stats = - if (ctx.owner eq defn.ScalaPackageClass) && defn.hasProblematicGetClass(className) then - stats0.filterConserve { - case ddef: DefDef => - ddef.name ne nme.getClass_ - case _ => - true - } - else - stats0 - - if (isEnum) { - val (enumCases, enumStats) = stats.partition(DesugarEnums.isEnumCase) - if (enumCases.isEmpty) - report.error(EnumerationsShouldNotBeEmpty(cdef), namePos) - else - enumCases.last.pushAttachment(DesugarEnums.DefinesEnumLookupMethods, ()) - val enumCompanionRef = TermRefTree() - val enumImport = - Import(enumCompanionRef, enumCases.flatMap(caseIds).map( - enumCase => - ImportSelector(enumCase.withSpan(enumCase.span.startPos)) - ) - ) - (enumImport :: enumStats, enumCases, enumCompanionRef) - } - else (stats, Nil, EmptyTree) - } - - def anyRef = ref(defn.AnyRefAlias.typeRef) - - val arity = constrVparamss.head.length - - val classTycon: Tree = TypeRefTree() // watching is set at end of method - - def appliedTypeTree(tycon: Tree, args: List[Tree]) = - (if (args.isEmpty) tycon else AppliedTypeTree(tycon, args)) - .withSpan(cdef.span.startPos) - - def isHK(tparam: Tree): Boolean = tparam match { - case TypeDef(_, LambdaTypeTree(tparams, body)) => true - case TypeDef(_, rhs: DerivedTypeTree) => isHK(rhs.watched) - case _ => false - } - - def appliedRef(tycon: Tree, tparams: List[TypeDef] = constrTparams, widenHK: Boolean = false) = { - val targs = for (tparam <- tparams) yield { - val targ = refOfDef(tparam) - def fullyApplied(tparam: Tree): Tree = tparam match { - case TypeDef(_, LambdaTypeTree(tparams, body)) => - AppliedTypeTree(targ, tparams.map(_ => WildcardTypeBoundsTree())) - case TypeDef(_, rhs: DerivedTypeTree) => - fullyApplied(rhs.watched) - case _ => - targ - } - if (widenHK) fullyApplied(tparam) else targ - } - appliedTypeTree(tycon, targs) - } - - def isRepeated(tree: Tree): Boolean = stripByNameType(tree) match { - case PostfixOp(_, Ident(tpnme.raw.STAR)) => true - case _ => false - } - - // a reference to the class type bound by `cdef`, with type parameters coming from the constructor - val classTypeRef = appliedRef(classTycon) - - // a reference to `enumClass`, with type parameters coming from the case constructor - lazy val enumClassTypeRef = - if (enumClass.typeParams.isEmpty) - enumClassRef - else if (originalTparams.isEmpty) - appliedRef(enumClassRef) - else { - report.error(TypedCaseDoesNotExplicitlyExtendTypedEnum(enumClass, cdef) - , cdef.srcPos.startPos) - appliedTypeTree(enumClassRef, constrTparams map (_ => anyRef)) - } - - // new C[Ts](paramss) - lazy val creatorExpr = - val vparamss = constrVparamss match - case (vparam :: _) :: _ if vparam.mods.is(Implicit) => // add a leading () to match class parameters - Nil :: constrVparamss - case _ => - if constrVparamss.nonEmpty && constrVparamss.forall { - case vparam :: _ => vparam.mods.is(Given) - case _ => false - } - then constrVparamss :+ Nil // add a trailing () to match class parameters 
- else constrVparamss - val nu = vparamss.foldLeft(makeNew(classTypeRef)) { (nu, vparams) => - val app = Apply(nu, vparams.map(refOfDef)) - vparams match { - case vparam :: _ if vparam.mods.is(Given) => app.setApplyKind(ApplyKind.Using) - case _ => app - } - } - ensureApplied(nu) - - val copiedAccessFlags = if migrateTo3 then EmptyFlags else AccessFlags - - // Methods to add to a case class C[..](p1: T1, ..., pN: Tn)(moreParams) - // def _1: T1 = this.p1 - // ... - // def _N: TN = this.pN (unless already given as valdef or parameterless defdef) - // def copy(p1: T1 = p1..., pN: TN = pN)(moreParams) = - // new C[...](p1, ..., pN)(moreParams) - val (caseClassMeths, enumScaffolding) = { - def syntheticProperty(name: TermName, tpt: Tree, rhs: Tree) = - DefDef(name, Nil, tpt, rhs).withMods(synthetic) - - def productElemMeths = - val caseParams = derivedVparamss.head.toArray - val selectorNamesInBody = normalizedBody.collect { - case vdef: ValDef if vdef.name.isSelectorName => - vdef.name - case ddef: DefDef if ddef.name.isSelectorName && ddef.paramss.isEmpty => - ddef.name - } - for i <- List.range(0, arity) - selName = nme.selectorName(i) - if (selName ne caseParams(i).name) && !selectorNamesInBody.contains(selName) - yield syntheticProperty(selName, caseParams(i).tpt, - Select(This(EmptyTypeIdent), caseParams(i).name)) - - def enumCaseMeths = - if isEnumCase then - val (ordinal, scaffolding) = nextOrdinal(className, CaseKind.Class, definesEnumLookupMethods(cdef)) - (ordinalMethLit(ordinal) :: Nil, scaffolding) - else (Nil, Nil) - def copyMeths = { - val hasRepeatedParam = constrVparamss.nestedExists { - case ValDef(_, tpt, _) => isRepeated(tpt) - } - if (mods.is(Abstract) || hasRepeatedParam) Nil // cannot have default arguments for repeated parameters, hence copy method is not issued - else { - val copyFirstParams = derivedVparamss.head.map(vparam => - cpy.ValDef(vparam)(rhs = refOfDef(vparam))) - val copyRestParamss = derivedVparamss.tail.nestedMap(vparam => - cpy.ValDef(vparam)(rhs = EmptyTree)) - DefDef( - nme.copy, - joinParams(derivedTparams, copyFirstParams :: copyRestParamss), - TypeTree(), - creatorExpr - ).withMods(Modifiers(Synthetic | constr1.mods.flags & copiedAccessFlags, constr1.mods.privateWithin)) :: Nil - } - } - - if isCaseClass then - val (enumMeths, enumScaffolding) = enumCaseMeths - (copyMeths ::: enumMeths ::: productElemMeths, enumScaffolding) - else (Nil, Nil) - } - - var parents1: List[untpd.Tree] = parents // !cc! 
need explicit type to make capture checking pass - if (isEnumCase && parents.isEmpty) - parents1 = enumClassTypeRef :: Nil - if (isNonEnumCase) - parents1 = parents1 :+ scalaDot(str.Product.toTypeName) :+ scalaDot(nme.Serializable.toTypeName) - if (isEnum) - parents1 = parents1 :+ ref(defn.EnumClass) - - // derived type classes of non-module classes go to their companions - val (clsDerived, companionDerived) = - if (mods.is(Module)) (impl.derived, Nil) else (Nil, impl.derived) - - // The thicket which is the desugared version of the companion object - // synthetic object C extends parentTpt derives class-derived { defs } - def companionDefs(parentTpt: Tree, defs: List[Tree]) = { - val mdefs = moduleDef( - ModuleDef( - className.toTermName, Template(emptyConstructor, parentTpt :: Nil, companionDerived, EmptyValDef, defs)) - .withMods(companionMods | Synthetic)) - .withSpan(cdef.span).toList - if (companionDerived.nonEmpty) - for (case modClsDef @ TypeDef(_, _) <- mdefs) - modClsDef.putAttachment(DerivingCompanion, impl.srcPos.startPos) - mdefs - } - - val companionMembers = defaultGetters ::: enumCases - - // The companion object definitions, if a companion is needed, Nil otherwise. - // companion definitions include: - // 1. If class is a case class case class C[Ts](p1: T1, ..., pN: TN)(moreParams): - // def apply[Ts](p1: T1, ..., pN: TN)(moreParams) = new C[Ts](p1, ..., pN)(moreParams) (unless C is abstract) - // def unapply[Ts]($1: C[Ts]) = $1 // if not repeated - // def unapplySeq[Ts]($1: C[Ts]) = $1 // if repeated - // 2. The default getters of the constructor - // The parent of the companion object of a non-parameterized case class - // (T11, ..., T1N) => ... => (TM1, ..., TMN) => C - // For all other classes, the parent is AnyRef. - val companions = - if (isCaseClass) { - val applyMeths = - if (mods.is(Abstract)) Nil - else { - val appMods = - Modifiers(Synthetic | constr1.mods.flags & copiedAccessFlags).withPrivateWithin(constr1.mods.privateWithin) - val appParamss = - derivedVparamss.nestedZipWithConserve(constrVparamss)((ap, cp) => - ap.withMods(ap.mods | (cp.mods.flags & HasDefault))) - DefDef(nme.apply, joinParams(derivedTparams, appParamss), TypeTree(), creatorExpr) - .withMods(appMods) :: Nil - } - val unapplyMeth = { - val hasRepeatedParam = constrVparamss.head.exists { - case ValDef(_, tpt, _) => isRepeated(tpt) - } - val methName = if (hasRepeatedParam) nme.unapplySeq else nme.unapply - val unapplyParam = makeSyntheticParameter(tpt = classTypeRef) - val unapplyRHS = if (arity == 0) Literal(Constant(true)) else Ident(unapplyParam.name) - val unapplyResTp = if (arity == 0) Literal(Constant(true)) else TypeTree() - DefDef( - methName, - joinParams(derivedTparams, (unapplyParam :: Nil) :: Nil), - unapplyResTp, - unapplyRHS - ).withMods(synthetic) - } - val toStringMeth = - DefDef(nme.toString_, Nil, TypeTree(), Literal(Constant(className.toString))).withMods(Modifiers(Override | Synthetic)) - - companionDefs(anyRef, applyMeths ::: unapplyMeth :: toStringMeth :: companionMembers) - } - else if (companionMembers.nonEmpty || companionDerived.nonEmpty || isEnum) - companionDefs(anyRef, companionMembers) - else if (isValueClass) - companionDefs(anyRef, Nil) - else Nil - - enumCompanionRef match { - case ref: TermRefTree => // have the enum import watch the companion object - val (modVal: ValDef) :: _ = companions: @unchecked - ref.watching(modVal) - case _ => - } - - // For an implicit class C[Ts](p11: T11, ..., p1N: T1N) ... 
(pM1: TM1, .., pMN: TMN), the method - // synthetic implicit C[Ts](p11: T11, ..., p1N: T1N) ... (pM1: TM1, ..., pMN: TMN): C[Ts] = - // new C[Ts](p11, ..., p1N) ... (pM1, ..., pMN) = - val implicitWrappers = - if (!mods.isOneOf(GivenOrImplicit)) - Nil - else if (ctx.owner.is(Package)) { - report.error(TopLevelImplicitClass(cdef), cdef.srcPos) - Nil - } - else if (mods.is(Trait)) { - report.error(TypesAndTraitsCantBeImplicit(), cdef.srcPos) - Nil - } - else if (isCaseClass) { - report.error(ImplicitCaseClass(cdef), cdef.srcPos) - Nil - } - else if (arity != 1 && !mods.is(Given)) { - report.error(ImplicitClassPrimaryConstructorArity(), cdef.srcPos) - Nil - } - else { - val defParamss = constrVparamss match { - case Nil :: paramss => - paramss // drop leading () that got inserted by class - // TODO: drop this once we do not silently insert empty class parameters anymore - case paramss => paramss - } - // implicit wrapper is typechecked in same scope as constructor, so - // we can reuse the constructor parameters; no derived params are needed. - DefDef( - className.toTermName, joinParams(constrTparams, defParamss), - classTypeRef, creatorExpr) - .withMods(companionMods | mods.flags.toTermFlags & (GivenOrImplicit | Inline) | Final) - .withSpan(cdef.span) :: Nil - } - - val self1 = { - val selfType = if (self.tpt.isEmpty) classTypeRef else self.tpt - if (self.isEmpty) self - else cpy.ValDef(self)(tpt = selfType).withMods(self.mods | SelfName) - } - - val cdef1 = addEnumFlags { - val tparamAccessors = { - val impliedTparamsIt = impliedTparams.iterator - derivedTparams.map(_.withMods(impliedTparamsIt.next().mods)) - } - val caseAccessor = if (isCaseClass) CaseAccessor else EmptyFlags - val vparamAccessors = { - val originalVparamsIt = originalVparamss.iterator.flatten - derivedVparamss match { - case first :: rest => - first.map(_.withMods(originalVparamsIt.next().mods | caseAccessor)) ++ - rest.flatten.map(_.withMods(originalVparamsIt.next().mods)) - case _ => - Nil - } - } - if mods.isAllOf(Given | Inline | Transparent) then - report.error("inline given instances cannot be trasparent", cdef) - val classMods = if mods.is(Given) then mods &~ (Inline | Transparent) | Synthetic else mods - cpy.TypeDef(cdef: TypeDef)( - name = className, - rhs = cpy.Template(impl)(constr, parents1, clsDerived, self1, - tparamAccessors ::: vparamAccessors ::: normalizedBody ::: caseClassMeths) - ).withMods(classMods) - } - - // install the watch on classTycon - classTycon match { - case tycon: DerivedTypeTree => tycon.watching(cdef1) - case _ => - } - - flatTree(cdef1 :: companions ::: implicitWrappers ::: enumScaffolding) - }.showing(i"desugared: $cdef --> $result", Printers.desugar) - - /** Expand - * - * package object name { body } - * - * to: - * - * package name { - * object `package` { body } - * } - */ - def packageModuleDef(mdef: ModuleDef)(using Context): Tree = - val impl = mdef.impl - val mods = mdef.mods - val moduleName = normalizeName(mdef, impl).asTermName - if mods.is(Package) then - checkPackageName(mdef) - PackageDef(Ident(moduleName), - cpy.ModuleDef(mdef)(nme.PACKAGE, impl).withMods(mods &~ Package) :: Nil) - else - mdef - - /** Expand - * - * object name extends parents { self => body } - * - * to: - * - * val name: name$ = New(name$) - * final class name$ extends parents { self: name.type => body } - */ - def moduleDef(mdef: ModuleDef)(using Context): Tree = { - val impl = mdef.impl - val mods = mdef.mods - val moduleName = normalizeName(mdef, impl).asTermName - def isEnumCase = mods.isEnumCase 
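  // A concrete instance of the two schematic expansions documented above; the
  // names `utils` and `Greeter` are purely illustrative.
  //
  //   package object utils { def twice(x: Int): Int = 2 * x }
  //
  // expands to
  //
  //   package utils {
  //     object `package` { def twice(x: Int): Int = 2 * x }
  //   }
  //
  // and
  //
  //   object Greeter extends Runnable { def run(): Unit = println("hi") }
  //
  // expands, conceptually, to
  //
  //   val Greeter: Greeter$ = new Greeter$
  //   final class Greeter$ extends Runnable { self: Greeter.type =>
  //     def run(): Unit = println("hi")
  //   }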
- Checking.checkWellFormedModule(mdef) - - if (mods.is(Package)) - packageModuleDef(mdef) - else if (isEnumCase) { - typeParamIsReferenced(enumClass.typeParams, Nil, Nil, impl.parents) - // used to check there are no illegal references to enum's type parameters in parents - expandEnumModule(moduleName, impl, mods, definesEnumLookupMethods(mdef), mdef.span) - } - else { - val clsName = moduleName.moduleClassName - val clsRef = Ident(clsName) - val modul = ValDef(moduleName, clsRef, New(clsRef, Nil)) - .withMods(mods.toTermFlags & RetainedModuleValFlags | ModuleValCreationFlags) - .withSpan(mdef.span.startPos) - val ValDef(selfName, selfTpt, _) = impl.self - val selfMods = impl.self.mods - if (!selfTpt.isEmpty) report.error(ObjectMayNotHaveSelfType(mdef), impl.self.srcPos) - val clsSelf = ValDef(selfName, SingletonTypeTree(Ident(moduleName)), impl.self.rhs) - .withMods(selfMods) - .withSpan(impl.self.span.orElse(impl.span.startPos)) - val clsTmpl = cpy.Template(impl)(self = clsSelf, body = impl.body) - val cls = TypeDef(clsName, clsTmpl) - .withMods(mods.toTypeFlags & RetainedModuleClassFlags | ModuleClassCreationFlags) - .withEndMarker(copyFrom = mdef) // copy over the end marker position to the module class def - Thicket(modul, classDef(cls).withSpan(mdef.span)) - } - } - - def extMethod(mdef: DefDef, extParamss: List[ParamClause])(using Context): DefDef = - cpy.DefDef(mdef)( - name = normalizeName(mdef, mdef.tpt).asTermName, - paramss = - if mdef.name.isRightAssocOperatorName then - val (typaramss, paramss) = mdef.paramss.span(isTypeParamClause) // first extract type parameters - - paramss match - case params :: paramss1 => // `params` must have a single parameter and without `given` flag - - def badRightAssoc(problem: String) = - report.error(em"right-associative extension method $problem", mdef.srcPos) - extParamss ++ mdef.paramss - - params match - case ValDefs(vparam :: Nil) => - if !vparam.mods.is(Given) then - // we merge the extension parameters with the method parameters, - // swapping the operator arguments: - // e.g. - // extension [A](using B)(c: C)(using D) - // def %:[E](f: F)(g: G)(using H): Res = ??? - // will be encoded as - // def %:[A](using B)[E](f: F)(c: C)(using D)(g: G)(using H): Res = ??? - val (leadingUsing, otherExtParamss) = extParamss.span(isUsingOrTypeParamClause) - leadingUsing ::: typaramss ::: params :: otherExtParamss ::: paramss1 - else - badRightAssoc("cannot start with using clause") - case _ => - badRightAssoc("must start with a single parameter") - case _ => - // no value parameters, so not an infix operator. 
- extParamss ++ mdef.paramss - else - extParamss ++ mdef.paramss - ).withMods(mdef.mods | ExtensionMethod) - - /** Transform extension construct to list of extension methods */ - def extMethods(ext: ExtMethods)(using Context): Tree = flatTree { - ext.methods map { - case exp: Export => exp - case mdef: DefDef => defDef(extMethod(mdef, ext.paramss)) - } - } - /** Transforms - * - * type t >: Low <: Hi - * to - * - * @patternType type $T >: Low <: Hi - * - * if the type has a pattern variable name - */ - def quotedPatternTypeDef(tree: TypeDef)(using Context): TypeDef = { - assert(ctx.mode.is(Mode.QuotedPattern)) - if tree.name.isVarPattern && !tree.isBackquoted then - val patternTypeAnnot = New(ref(defn.QuotedRuntimePatterns_patternTypeAnnot.typeRef)).withSpan(tree.span) - val mods = tree.mods.withAddedAnnotation(patternTypeAnnot) - tree.withMods(mods) - else if tree.name.startsWith("$") && !tree.isBackquoted then - report.error( - """Quoted pattern variable names starting with $ are not supported anymore. - |Use lower cases type pattern name instead. - |""".stripMargin, - tree.srcPos) - tree - else tree - } - - def checkPackageName(mdef: ModuleDef | PackageDef)(using Context): Unit = - - def check(name: Name, errSpan: Span): Unit = name match - case name: SimpleName if !errSpan.isSynthetic && name.exists(Chars.willBeEncoded) => - report.warning(em"The package name `$name` will be encoded on the classpath, and can lead to undefined behaviour.", mdef.source.atSpan(errSpan)) - case _ => - - def loop(part: RefTree): Unit = part match - case part @ Ident(name) => check(name, part.span) - case part @ Select(qual: RefTree, name) => - check(name, part.nameSpan) - loop(qual) - case _ => - - mdef match - case pdef: PackageDef => loop(pdef.pid) - case mdef: ModuleDef if mdef.mods.is(Package) => check(mdef.name, mdef.nameSpan) - case _ => - end checkPackageName - - /** The normalized name of `mdef`. This means - * 1. Check that the name does not redefine a Scala core class. - * If it does redefine, issue an error and return a mangled name instead - * of the original one. - * 2. If the name is missing (this can be the case for instance definitions), - * invent one instead. - */ - def normalizeName(mdef: MemberDef, impl: Tree)(using Context): Name = { - var name = mdef.name - if (name.isEmpty) name = name.likeSpaced(inventGivenOrExtensionName(impl)) - def errPos = mdef.source.atSpan(mdef.nameSpan) - if (ctx.owner == defn.ScalaPackageClass && defn.reservedScalaClassNames.contains(name.toTypeName)) { - val kind = if (name.isTypeName) "class" else "object" - report.error(IllegalRedefinitionOfStandardKind(kind, name), errPos) - name = name.errorName - } - name - } - - /** Invent a name for an anonympus given of type or template `impl`. 
*/ - def inventGivenOrExtensionName(impl: Tree)(using Context): SimpleName = - val str = impl match - case impl: Template => - if impl.parents.isEmpty then - report.error(AnonymousInstanceCannotBeEmpty(impl), impl.srcPos) - nme.ERROR.toString - else - impl.parents.map(inventTypeName(_)).mkString("given_", "_", "") - case impl: Tree => - "given_" ++ inventTypeName(impl) - str.toTermName.asSimpleName - - private class NameExtractor(followArgs: Boolean) extends UntypedTreeAccumulator[String] { - private def extractArgs(args: List[Tree])(using Context): String = - args.map(argNameExtractor.apply("", _)).mkString("_") - override def apply(x: String, tree: Tree)(using Context): String = - if (x.isEmpty) - tree match { - case Select(pre, nme.CONSTRUCTOR) => foldOver(x, pre) - case tree: RefTree => - if tree.name.isTypeName then tree.name.toString - else s"${tree.name}_type" - case tree: TypeDef => tree.name.toString - case tree: AppliedTypeTree if followArgs && tree.args.nonEmpty => - s"${apply(x, tree.tpt)}_${extractArgs(tree.args)}" - case InfixOp(left, op, right) => - if followArgs then s"${op.name}_${extractArgs(List(left, right))}" - else op.name.toString - case tree: LambdaTypeTree => - apply(x, tree.body) - case tree: Tuple => - extractArgs(tree.trees) - case tree: Function if tree.args.nonEmpty => - if followArgs then s"${extractArgs(tree.args)}_to_${apply("", tree.body)}" - else "Function" - case _ => foldOver(x, tree) - } - else x - } - private val typeNameExtractor = NameExtractor(followArgs = true) - private val argNameExtractor = NameExtractor(followArgs = false) - - private def inventTypeName(tree: Tree)(using Context): String = typeNameExtractor("", tree) - - /**This will check if this def tree is marked to define enum lookup methods, - * this is not recommended to call more than once per tree - */ - private def definesEnumLookupMethods(ddef: DefTree): Boolean = - ddef.removeAttachment(DefinesEnumLookupMethods).isDefined - - /** val p1, ..., pN: T = E - * ==> - * makePatDef[[val p1: T1 = E]]; ...; makePatDef[[val pN: TN = E]] - * - * case e1, ..., eN - * ==> - * expandSimpleEnumCase([case e1]); ...; expandSimpleEnumCase([case eN]) - */ - def patDef(pdef: PatDef)(using Context): Tree = flatTree { - val PatDef(mods, pats, tpt, rhs) = pdef - if mods.isEnumCase then - def expand(id: Ident, definesLookups: Boolean) = - expandSimpleEnumCase(id.name.asTermName, mods, definesLookups, - Span(id.span.start, id.span.end, id.span.start)) - - val ids = pats.asInstanceOf[List[Ident]] - if definesEnumLookupMethods(pdef) then - ids.init.map(expand(_, false)) ::: expand(ids.last, true) :: Nil - else - ids.map(expand(_, false)) - else { - val pats1 = if (tpt.isEmpty) pats else pats map (Typed(_, tpt)) - pats1 map (makePatDef(pdef, mods, _, rhs)) - } - } - - /** The selector of a match, which depends of the given `checkMode`. 
- * @param sel the original selector - * @return if `checkMode` is - * - None : sel @unchecked - * - Exhaustive : sel - * - IrrefutablePatDef, - * IrrefutableGenFrom: sel with attachment `CheckIrrefutable -> checkMode` - */ - def makeSelector(sel: Tree, checkMode: MatchCheck)(using Context): Tree = - checkMode match - case MatchCheck.None => - Annotated(sel, New(ref(defn.UncheckedAnnot.typeRef))) - - case MatchCheck.Exhaustive => - sel - - case MatchCheck.IrrefutablePatDef | MatchCheck.IrrefutableGenFrom => - // TODO: use `pushAttachment` and investigate duplicate attachment - sel.withAttachment(CheckIrrefutable, checkMode) - sel - end match - - /** If `pat` is a variable pattern, - * - * val/var/lazy val p = e - * - * Otherwise, in case there is exactly one variable x_1 in pattern - * val/var/lazy val p = e ==> val/var/lazy val x_1 = (e: @unchecked) match (case p => (x_1)) - * - * in case there are zero or more than one variables in pattern - * val/var/lazy p = e ==> private[this] synthetic [lazy] val t$ = (e: @unchecked) match (case p => (x_1, ..., x_N)) - * val/var/def x_1 = t$._1 - * ... - * val/var/def x_N = t$._N - * If the original pattern variable carries a type annotation, so does the corresponding - * ValDef or DefDef. - */ - def makePatDef(original: Tree, mods: Modifiers, pat: Tree, rhs: Tree)(using Context): Tree = pat match { - case IdPattern(id, tpt) => - val id1 = - if id.name == nme.WILDCARD - then cpy.Ident(id)(WildcardParamName.fresh()) - else id - derivedValDef(original, id1, tpt, rhs, mods) - case _ => - - def filterWildcardGivenBinding(givenPat: Bind): Boolean = - givenPat.name != nme.WILDCARD - - def errorOnGivenBinding(bind: Bind)(using Context): Boolean = - report.error( - em"""${hl("given")} patterns are not allowed in a ${hl("val")} definition, - |please bind to an identifier and use an alias given.""", bind) - false - - def isTuplePattern(arity: Int): Boolean = pat match { - case Tuple(pats) if pats.size == arity => - pats.forall(isVarPattern) - case _ => false - } - val isMatchingTuple: Tree => Boolean = { - case Tuple(es) => isTuplePattern(es.length) - case _ => false - } - - // We can only optimize `val pat = if (...) 
e1 else e2` if: - // - `e1` and `e2` are both tuples of arity N - // - `pat` is a tuple of N variables or wildcard patterns like `(x1, x2, ..., xN)` - val tupleOptimizable = forallResults(rhs, isMatchingTuple) - - val inAliasGenerator = original match - case _: GenAlias => true - case _ => false - - val vars = - if (tupleOptimizable) // include `_` - pat match - case Tuple(pats) => pats.map { case id: Ident => id -> TypeTree() } - else - getVariables( - tree = pat, - shouldAddGiven = - if inAliasGenerator then - filterWildcardGivenBinding - else - errorOnGivenBinding - ) // no `_` - - val ids = for ((named, _) <- vars) yield Ident(named.name) - val matchExpr = - if (tupleOptimizable) rhs - else - val caseDef = CaseDef(pat, EmptyTree, makeTuple(ids)) - Match(makeSelector(rhs, MatchCheck.IrrefutablePatDef), caseDef :: Nil) - vars match { - case Nil if !mods.is(Lazy) => - matchExpr - case (named, tpt) :: Nil => - derivedValDef(original, named, tpt, matchExpr, mods) - case _ => - val tmpName = UniqueName.fresh() - val patMods = - mods & Lazy | Synthetic | (if (ctx.owner.isClass) PrivateLocal else EmptyFlags) - val firstDef = - ValDef(tmpName, TypeTree(), matchExpr) - .withSpan(pat.span.union(rhs.span)).withMods(patMods) - val useSelectors = vars.length <= 22 - def selector(n: Int) = - if useSelectors then Select(Ident(tmpName), nme.selectorName(n)) - else Apply(Select(Ident(tmpName), nme.apply), Literal(Constant(n)) :: Nil) - val restDefs = - for (((named, tpt), n) <- vars.zipWithIndex if named.name != nme.WILDCARD) - yield - if mods.is(Lazy) then - DefDef(named.name.asTermName, Nil, tpt, selector(n)) - .withMods(mods &~ Lazy) - .withSpan(named.span) - else - valDef( - ValDef(named.name.asTermName, tpt, selector(n)) - .withMods(mods) - .withSpan(named.span) - ) - flatTree(firstDef :: restDefs) - } - } - - /** Expand variable identifier x to x @ _ */ - def patternVar(tree: Tree)(using Context): Bind = { - val Ident(name) = unsplice(tree): @unchecked - Bind(name, Ident(nme.WILDCARD)).withSpan(tree.span) - } - - /** The type of tests that check whether a MemberDef is OK for some flag. - * The test succeeds if the partial function is defined and returns true. - */ - type MemberDefTest = PartialFunction[MemberDef, Boolean] - - val legalOpaque: MemberDefTest = { - case TypeDef(_, rhs) => - def rhsOK(tree: Tree): Boolean = tree match { - case bounds: TypeBoundsTree => !bounds.alias.isEmpty - case _: Template | _: MatchTypeTree => false - case LambdaTypeTree(_, body) => rhsOK(body) - case _ => true - } - rhsOK(rhs) - } - - def checkOpaqueAlias(tree: MemberDef)(using Context): MemberDef = - def check(rhs: Tree): MemberDef = rhs match - case bounds: TypeBoundsTree if bounds.alias.isEmpty => - report.error(em"opaque type must have a right-hand side", tree.srcPos) - tree.withMods(tree.mods.withoutFlags(Opaque)) - case LambdaTypeTree(_, body) => check(body) - case _ => tree - if !tree.mods.is(Opaque) then tree - else tree match - case TypeDef(_, rhs) => check(rhs) - case _ => tree - - /** Check that modifiers are legal for the definition `tree`. - * Right now, we only check for `opaque`. TODO: Move other modifier checks here. 
- */ - def checkModifiers(tree: Tree)(using Context): Tree = tree match { - case tree: MemberDef => - var tested: MemberDef = tree - def checkApplicable(flag: Flag, test: MemberDefTest): MemberDef = - if (tested.mods.is(flag) && !test.applyOrElse(tree, (md: MemberDef) => false)) { - report.error(ModifierNotAllowedForDefinition(flag), tree.srcPos) - tested.withMods(tested.mods.withoutFlags(flag)) - } else tested - tested = checkOpaqueAlias(tested) - tested = checkApplicable(Opaque, legalOpaque) - tested - case _ => - tree - } - - def defTree(tree: Tree)(using Context): Tree = - checkModifiers(tree) match { - case tree: ValDef => valDef(tree) - case tree: TypeDef => - if (tree.isClassDef) classDef(tree) - else if (ctx.mode.is(Mode.QuotedPattern)) quotedPatternTypeDef(tree) - else tree - case tree: DefDef => - if (tree.name.isConstructorName) tree // was already handled by enclosing classDef - else defDef(tree) - case tree: ModuleDef => moduleDef(tree) - case tree: PatDef => patDef(tree) - } - - /** { stats; } - * ==> - * { stats; () } - */ - def block(tree: Block)(using Context): Block = tree.expr match { - case EmptyTree => - cpy.Block(tree)(tree.stats, - unitLiteral.withSpan(if (tree.stats.isEmpty) tree.span else tree.span.endPos)) - case _ => - tree - } - - /** Translate infix operation expression - * - * l op r ==> l.op(r) if op is left-associative - * ==> r.op(l) if op is right-associative - */ - def binop(left: Tree, op: Ident, right: Tree)(using Context): Apply = { - def assignToNamedArg(arg: Tree) = arg match { - case Assign(Ident(name), rhs) => cpy.NamedArg(arg)(name, rhs) - case _ => arg - } - def makeOp(fn: Tree, arg: Tree, selectPos: Span) = - val sel = Select(fn, op.name).withSpan(selectPos) - if (left.sourcePos.endLine < op.sourcePos.startLine) - sel.pushAttachment(MultiLineInfix, ()) - arg match - case Parens(arg) => - Apply(sel, assignToNamedArg(arg) :: Nil) - case Tuple(args) if args.exists(_.isInstanceOf[Assign]) => - Apply(sel, args.mapConserve(assignToNamedArg)) - case Tuple(args) => - Apply(sel, arg :: Nil).setApplyKind(ApplyKind.InfixTuple) - case _ => - Apply(sel, arg :: Nil) - - if op.name.isRightAssocOperatorName then - makeOp(right, left, Span(op.span.start, right.span.end)) - else - makeOp(left, right, Span(left.span.start, op.span.end, op.span.start)) - } - - /** Translate throws type `A throws E1 | ... | En` to - * $throws[... $throws[A, E1] ... , En]. 
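A user-level check of the left- versus right-associative rewrite performed by `binop` above; illustration only, with invented names.

object InfixDesugarDemo:
  val rest = List(2, 3)
  def demo(): Unit =
    // `+` is left-associative, so `1 + 2` becomes `1.+(2)`;
    // `::` ends in a colon, so `1 :: rest` becomes `rest.::(1)`.
    assert((1 + 2) == 1.+(2))
    assert((1 :: rest) == rest.::(1))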
- */ - def throws(tpt: Tree, op: Ident, excepts: Tree)(using Context): AppliedTypeTree = excepts match - case Parens(excepts1) => - throws(tpt, op, excepts1) - case InfixOp(l, bar @ Ident(tpnme.raw.BAR), r) => - throws(throws(tpt, op, l), bar, r) - case e => - AppliedTypeTree( - TypeTree(defn.throwsAlias.typeRef).withSpan(op.span), tpt :: excepts :: Nil) - - /** Translate tuple expressions of arity <= 22 - * - * () ==> () - * (t) ==> t - * (t1, ..., tN) ==> TupleN(t1, ..., tN) - */ - def smallTuple(tree: Tuple)(using Context): Tree = { - val ts = tree.trees - val arity = ts.length - assert(arity <= Definitions.MaxTupleArity) - def tupleTypeRef = defn.TupleType(arity).nn - if (arity == 0) - if (ctx.mode is Mode.Type) TypeTree(defn.UnitType) else unitLiteral - else if (ctx.mode is Mode.Type) AppliedTypeTree(ref(tupleTypeRef), ts) - else Apply(ref(tupleTypeRef.classSymbol.companionModule.termRef), ts) - } - - private def isTopLevelDef(stat: Tree)(using Context): Boolean = stat match - case _: ValDef | _: PatDef | _: DefDef | _: Export | _: ExtMethods => true - case stat: ModuleDef => - stat.mods.isOneOf(GivenOrImplicit) - case stat: TypeDef => - !stat.isClassDef || stat.mods.isOneOf(GivenOrImplicit) - case _ => - false - - /** Assuming `src` contains top-level definition, returns the name that should - * be using for the package object that will wrap them. - */ - def packageObjectName(src: SourceFile): TermName = - val fileName = src.file.name - val sourceName = fileName.take(fileName.lastIndexOf('.')) - (sourceName ++ str.TOPLEVEL_SUFFIX).toTermName - - /** Group all definitions that can't be at the toplevel in - * an object named `$package` where `` is the name of the source file. - * Definitions that can't be at the toplevel are: - * - * - all pattern, value and method definitions - * - non-class type definitions - * - implicit classes and objects - * - "companion objects" of wrapped type definitions - * (i.e. objects having the same name as a wrapped type) - */ - def packageDef(pdef: PackageDef)(using Context): PackageDef = { - checkPackageName(pdef) - val wrappedTypeNames = pdef.stats.collectCC { - case stat: TypeDef if isTopLevelDef(stat) => stat.name - } - def inPackageObject(stat: Tree) = - isTopLevelDef(stat) || { - stat match - case stat: ModuleDef => - wrappedTypeNames.contains(stat.name.stripModuleClassSuffix.toTypeName) - case _ => - false - } - val (nestedStats, topStats) = pdef.stats.partition(inPackageObject) - if (nestedStats.isEmpty) pdef - else { - val name = packageObjectName(ctx.source) - val grouped = - ModuleDef(name, Template(emptyConstructor, Nil, Nil, EmptyValDef, nestedStats)) - .withMods(Modifiers(Synthetic)) - cpy.PackageDef(pdef)(pdef.pid, topStats :+ grouped) - } - } - - /** Make closure corresponding to function. - * params => body - * ==> - * def $anonfun(params) = body - * Closure($anonfun) - */ - def makeClosure(params: List[ValDef], body: Tree, tpt: Tree | Null = null, isContextual: Boolean, span: Span)(using Context): Block = - Block( - DefDef(nme.ANON_FUN, params :: Nil, if (tpt == null) TypeTree() else tpt, body) - .withSpan(span) - .withMods(synthetic | Artifact), - Closure(Nil, Ident(nme.ANON_FUN), if (isContextual) ContextualEmptyTree else EmptyTree)) - - /** If `nparams` == 1, expand partial function - * - * { cases } - * ==> - * x$1 => (x$1 @unchecked?) match { cases } - * - * If `nparams` != 1, expand instead to - * - * (x$1, ..., x$n) => (x$0, ..., x${n-1} @unchecked?) 
match { cases } - */ - def makeCaseLambda(cases: List[CaseDef], checkMode: MatchCheck, nparams: Int = 1)(using Context): Function = { - val params = (1 to nparams).toList.map(makeSyntheticParameter(_)) - val selector = makeTuple(params.map(p => Ident(p.name))) - Function(params, Match(makeSelector(selector, checkMode), cases)) - } - - /** Map n-ary function `(x1: T1, ..., xn: Tn) => body` where n != 1 to unary function as follows: - * - * (x$1: (T1, ..., Tn)) => { - * def x1: T1 = x$1._1 - * ... - * def xn: Tn = x$1._n - * body - * } - * - * or if `isGenericTuple` - * - * (x$1: (T1, ... Tn) => { - * def x1: T1 = x$1.apply(0) - * ... - * def xn: Tn = x$1.apply(n-1) - * body - * } - * - * If some of the Ti's are absent, omit the : (T1, ..., Tn) type ascription - * in the selector. - */ - def makeTupledFunction(params: List[ValDef], body: Tree, isGenericTuple: Boolean)(using Context): Tree = { - val param = makeSyntheticParameter( - tpt = - if params.exists(_.tpt.isEmpty) then TypeTree() - else Tuple(params.map(_.tpt))) - def selector(n: Int) = - if (isGenericTuple) Apply(Select(refOfDef(param), nme.apply), Literal(Constant(n))) - else Select(refOfDef(param), nme.selectorName(n)) - val vdefs = - params.zipWithIndex.map { - case (param, idx) => - ValDef(param.name, param.tpt, selector(idx)) - .withSpan(param.span) - .withAttachment(UntupledParam, ()) - .withFlags(Synthetic) - } - Function(param :: Nil, Block(vdefs, body)) - } - - /** Convert a tuple pattern with given `elems` to a sequence of `ValDefs`, - * skipping elements that are not convertible. - */ - def patternsToParams(elems: List[Tree])(using Context): List[ValDef] = - def toParam(elem: Tree, tpt: Tree): Tree = - elem match - case Annotated(elem1, _) => toParam(elem1, tpt) - case Typed(elem1, tpt1) => toParam(elem1, tpt1) - case Ident(id: TermName) => ValDef(id, tpt, EmptyTree).withFlags(Param) - case _ => EmptyTree - elems.map(param => toParam(param, TypeTree()).withSpan(param.span)).collect { - case vd: ValDef => vd - } - - def makeContextualFunction(formals: List[Tree], body: Tree, isErased: Boolean)(using Context): Function = { - val mods = if (isErased) Given | Erased else Given - val params = makeImplicitParameters(formals, mods) - FunctionWithMods(params, body, Modifiers(mods)) - } - - private def derivedValDef(original: Tree, named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers)(using Context) = { - val vdef = ValDef(named.name.asTermName, tpt, rhs) - .withMods(mods) - .withSpan(original.span.withPoint(named.span.start)) - val mayNeedSetter = valDef(vdef) - mayNeedSetter - } - - private def derivedDefDef(original: Tree, named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers)(implicit src: SourceFile) = - DefDef(named.name.asTermName, Nil, tpt, rhs) - .withMods(mods) - .withSpan(original.span.withPoint(named.span.start)) - - /** Main desugaring method */ - def apply(tree: Tree, pt: Type = NoType)(using Context): Tree = { - - /** Create tree for for-comprehension `` or - * `` where mapName and flatMapName are chosen - * corresponding to whether this is a for-do or a for-yield. - * The creation performs the following rewrite rules: - * - * 1. - * - * for (P <- G) E ==> G.foreach (P => E) - * - * Here and in the following (P => E) is interpreted as the function (P => E) - * if P is a variable pattern and as the partial function { case P => E } otherwise. - * - * 2. - * - * for (P <- G) yield E ==> G.map (P => E) - * - * 3. - * - * for (P_1 <- G_1; P_2 <- G_2; ...) ... - * ==> - * G_1.flatMap (P_1 => for (P_2 <- G_2; ...) ...) 
- * - * 4. - * - * for (P <- G; E; ...) ... - * => - * for (P <- G.filter (P => E); ...) ... - * - * 5. For any N: - * - * for (P_1 <- G; P_2 = E_2; val P_N = E_N; ...) - * ==> - * for (TupleN(P_1, P_2, ... P_N) <- - * for (x_1 @ P_1 <- G) yield { - * val x_2 @ P_2 = E_2 - * ... - * val x_N & P_N = E_N - * TupleN(x_1, ..., x_N) - * } ...) - * - * If any of the P_i are variable patterns, the corresponding `x_i @ P_i` is not generated - * and the variable constituting P_i is used instead of x_i - * - * @param mapName The name to be used for maps (either map or foreach) - * @param flatMapName The name to be used for flatMaps (either flatMap or foreach) - * @param enums The enumerators in the for expression - * @param body The body of the for expression - */ - def makeFor(mapName: TermName, flatMapName: TermName, enums: List[Tree], body: Tree): Tree = trace(i"make for ${ForYield(enums, body)}", show = true) { - - /** Let `pat` be `gen`'s pattern. Make a function value `pat => body`. - * If `pat` is a var pattern `id: T` then this gives `(id: T) => body`. - * Otherwise this gives `{ case pat => body }`, where `pat` is checked to be - * irrefutable if `gen`'s checkMode is GenCheckMode.Check. - */ - def makeLambda(gen: GenFrom, body: Tree): Tree = gen.pat match { - case IdPattern(named, tpt) if gen.checkMode != GenCheckMode.FilterAlways => - Function(derivedValDef(gen.pat, named, tpt, EmptyTree, Modifiers(Param)) :: Nil, body) - case _ => - val matchCheckMode = - if (gen.checkMode == GenCheckMode.Check || gen.checkMode == GenCheckMode.CheckAndFilter) MatchCheck.IrrefutableGenFrom - else MatchCheck.None - makeCaseLambda(CaseDef(gen.pat, EmptyTree, body) :: Nil, matchCheckMode) - } - - /** If `pat` is not an Identifier, a Typed(Ident, _), or a Bind, wrap - * it in a Bind with a fresh name. Return the transformed pattern, and the identifier - * that refers to the bound variable for the pattern. Wildcard Binds are - * also replaced by Binds with fresh names. - */ - def makeIdPat(pat: Tree): (Tree, Ident) = pat match { - case bind @ Bind(name, pat1) => - if name == nme.WILDCARD then - val name = UniqueName.fresh() - (cpy.Bind(pat)(name, pat1).withMods(bind.mods), Ident(name)) - else (pat, Ident(name)) - case id: Ident if isVarPattern(id) && id.name != nme.WILDCARD => (id, id) - case Typed(id: Ident, _) if isVarPattern(id) && id.name != nme.WILDCARD => (pat, id) - case _ => - val name = UniqueName.fresh() - (Bind(name, pat), Ident(name)) - } - - /** Make a pattern filter: - * rhs.withFilter { case pat => true case _ => false } - * - * On handling irrefutable patterns: - * The idea is to wait until the pattern matcher sees a call - * - * xs withFilter { cases } - * - * where cases can be proven to be refutable i.e. cases would be - * equivalent to { case _ => true } - * - * In that case, compile to - * - * xs withFilter alwaysTrue - * - * where `alwaysTrue` is a predefined function value: - * - * val alwaysTrue: Any => Boolean = true - * - * In the libraries operations can take advantage of alwaysTrue to shortcircuit the - * withFilter call. 
- * - * def withFilter(f: Elem => Boolean) = - * if (f eq alwaysTrue) this // or rather identity filter monadic applied to this - * else real withFilter - */ - def makePatFilter(rhs: Tree, pat: Tree): Tree = { - val cases = List( - CaseDef(pat, EmptyTree, Literal(Constant(true))), - CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false)))) - Apply(Select(rhs, nme.withFilter), makeCaseLambda(cases, MatchCheck.None)) - } - - /** Is pattern `pat` irrefutable when matched against `rhs`? - * We only can do a simple syntactic check here; a more refined check - * is done later in the pattern matcher (see discussion in @makePatFilter). - */ - def isIrrefutable(pat: Tree, rhs: Tree): Boolean = { - def matchesTuple(pats: List[Tree], rhs: Tree): Boolean = rhs match { - case Tuple(trees) => (pats corresponds trees)(isIrrefutable) - case Parens(rhs1) => matchesTuple(pats, rhs1) - case Block(_, rhs1) => matchesTuple(pats, rhs1) - case If(_, thenp, elsep) => matchesTuple(pats, thenp) && matchesTuple(pats, elsep) - case Match(_, cases) => cases forall (matchesTuple(pats, _)) - case CaseDef(_, _, rhs1) => matchesTuple(pats, rhs1) - case Throw(_) => true - case _ => false - } - pat match { - case Bind(_, pat1) => isIrrefutable(pat1, rhs) - case Parens(pat1) => isIrrefutable(pat1, rhs) - case Tuple(pats) => matchesTuple(pats, rhs) - case _ => isVarPattern(pat) - } - } - - /** Is `pat` of the form `x`, `x T`, or `given T`? when used as the lhs of a generator, - * these are all considered irrefutable. - */ - def isVarBinding(pat: Tree): Boolean = pat match - case pat @ Bind(_, pat1) if pat.mods.is(Given) => isVarBinding(pat1) - case IdPattern(_) => true - case _ => false - - def needsNoFilter(gen: GenFrom): Boolean = gen.checkMode match - case GenCheckMode.FilterAlways => false // pattern was prefixed by `case` - case GenCheckMode.FilterNow | GenCheckMode.CheckAndFilter => isVarBinding(gen.pat) || isIrrefutable(gen.pat, gen.expr) - case GenCheckMode.Check => true - case GenCheckMode.Ignore => true - - /** rhs.name with a pattern filter on rhs unless `pat` is irrefutable when - * matched against `rhs`. 
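The enumerator rewrite rules sketched above can be observed directly on collections; a minimal sketch with invented values, not compiler output.

object ForDesugarDemo:
  val xs = List(1, 2, 3)
  val ys = List(10, 20)

  // Rules 2 and 3: a yield over two generators becomes flatMap + map.
  val viaFor   = for x <- xs; y <- ys yield x + y
  val viaCalls = xs.flatMap(x => ys.map(y => x + y))

  // Rule 4: an interleaved guard becomes a withFilter on the generator.
  val evensFor  = for x <- xs if x % 2 == 0 yield x * 10
  val evensCall = xs.withFilter(x => x % 2 == 0).map(x => x * 10)

  def demo(): Unit = assert(viaFor == viaCalls && evensFor == evensCall)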
- */ - def rhsSelect(gen: GenFrom, name: TermName) = { - val rhs = if (needsNoFilter(gen)) gen.expr else makePatFilter(gen.expr, gen.pat) - Select(rhs, name) - } - - enums match { - case (gen: GenFrom) :: Nil => - Apply(rhsSelect(gen, mapName), makeLambda(gen, body)) - case (gen: GenFrom) :: (rest @ (GenFrom(_, _, _) :: _)) => - val cont = makeFor(mapName, flatMapName, rest, body) - Apply(rhsSelect(gen, flatMapName), makeLambda(gen, cont)) - case (gen: GenFrom) :: (rest @ GenAlias(_, _) :: _) => - val (valeqs, rest1) = rest.span(_.isInstanceOf[GenAlias]) - val pats = valeqs map { case GenAlias(pat, _) => pat } - val rhss = valeqs map { case GenAlias(_, rhs) => rhs } - val (defpat0, id0) = makeIdPat(gen.pat) - val (defpats, ids) = (pats map makeIdPat).unzip - val pdefs = valeqs.lazyZip(defpats).lazyZip(rhss).map { (valeq, defpat, rhs) => - val mods = defpat match - case defTree: DefTree => defTree.mods - case _ => Modifiers() - makePatDef(valeq, mods, defpat, rhs) - } - val rhs1 = makeFor(nme.map, nme.flatMap, GenFrom(defpat0, gen.expr, gen.checkMode) :: Nil, Block(pdefs, makeTuple(id0 :: ids))) - val allpats = gen.pat :: pats - val vfrom1 = GenFrom(makeTuple(allpats), rhs1, GenCheckMode.Ignore) - makeFor(mapName, flatMapName, vfrom1 :: rest1, body) - case (gen: GenFrom) :: test :: rest => - val filtered = Apply(rhsSelect(gen, nme.withFilter), makeLambda(gen, test)) - val genFrom = GenFrom(gen.pat, filtered, GenCheckMode.Ignore) - makeFor(mapName, flatMapName, genFrom :: rest, body) - case _ => - EmptyTree //may happen for erroneous input - } - } - - def makePolyFunction(targs: List[Tree], body: Tree, pt: Type): Tree = body match { - case Parens(body1) => - makePolyFunction(targs, body1, pt) - case Block(Nil, body1) => - makePolyFunction(targs, body1, pt) - case Function(vargs, res) => - assert(targs.nonEmpty) - // TODO: Figure out if we need a `PolyFunctionWithMods` instead. - val mods = body match { - case body: FunctionWithMods => body.mods - case _ => untpd.EmptyModifiers - } - val polyFunctionTpt = ref(defn.PolyFunctionType) - val applyTParams = targs.asInstanceOf[List[TypeDef]] - if (ctx.mode.is(Mode.Type)) { - // Desugar [T_1, ..., T_M] -> (P_1, ..., P_N) => R - // Into scala.PolyFunction { def apply[T_1, ..., T_M](x$1: P_1, ..., x$N: P_N): R } - - val applyVParams = vargs.zipWithIndex.map { - case (p: ValDef, _) => p.withAddedFlags(mods.flags) - case (p, n) => makeSyntheticParameter(n + 1, p).withAddedFlags(mods.flags) - } - RefinedTypeTree(polyFunctionTpt, List( - DefDef(nme.apply, applyTParams :: applyVParams :: Nil, res, EmptyTree).withFlags(Synthetic) - )) - } - else { - // Desugar [T_1, ..., T_M] -> (x_1: P_1, ..., x_N: P_N) => body - // with pt [S_1, ..., S_M] -> (O_1, ..., O_N) => R - // Into new scala.PolyFunction { def apply[T_1, ..., T_M](x_1: P_1, ..., x_N: P_N): R2 = body } - // where R2 is R, with all references to S_1..S_M replaced with T1..T_M. 
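For orientation, the source-level shape handled here: a polymorphic function literal and the PolyFunction refinement it corresponds to; illustration only, with invented names.

object PolyFunctionDemo:
  // The type `[T] => (List[T], Int) => T` stands for the refinement
  // `PolyFunction { def apply[T](xs: List[T], i: Int): T }`.
  val nth: [T] => (List[T], Int) => T =
    [T] => (xs: List[T], i: Int) => xs(i)

  def demo(): Unit = assert(nth(List("a", "b"), 1) == "b")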
- - def typeTree(tp: Type) = tp match - case RefinedType(parent, nme.apply, PolyType(_, mt)) if parent.typeSymbol eq defn.PolyFunctionClass => - var bail = false - def mapper(tp: Type, topLevel: Boolean = false): Tree = tp match - case tp: TypeRef => ref(tp) - case tp: TypeParamRef => Ident(applyTParams(tp.paramNum).name) - case AppliedType(tycon, args) => AppliedTypeTree(mapper(tycon), args.map(mapper(_))) - case _ => if topLevel then TypeTree() else { bail = true; genericEmptyTree } - val mapped = mapper(mt.resultType, topLevel = true) - if bail then TypeTree() else mapped - case _ => TypeTree() - - val applyVParams = vargs.asInstanceOf[List[ValDef]] - .map(varg => varg.withAddedFlags(mods.flags | Param)) - New(Template(emptyConstructor, List(polyFunctionTpt), Nil, EmptyValDef, - List(DefDef(nme.apply, applyTParams :: applyVParams :: Nil, typeTree(pt), res)) - )) - } - case _ => - // may happen for erroneous input. An error will already have been reported. - assert(ctx.reporter.errorsReported) - EmptyTree - } - - // begin desugar - - // Special case for `Parens` desugaring: unlike all the desugarings below, - // its output is not a new tree but an existing one whose position should - // be preserved, so we shouldn't call `withPos` on it. - tree match { - case Parens(t) => - return t - case _ => - } - - val desugared = tree match { - case PolyFunction(targs, body) => - makePolyFunction(targs, body, pt) orElse tree - case SymbolLit(str) => - Apply( - ref(defn.ScalaSymbolClass.companionModule.termRef), - Literal(Constant(str)) :: Nil) - case InterpolatedString(id, segments) => - val strs = segments map { - case ts: Thicket => ts.trees.head - case t => t - } - val elems = segments flatMap { - case ts: Thicket => ts.trees.tail - case t => Nil - } map { (t: Tree) => t match - // !cc! explicitly typed parameter (t: Tree) is needed since otherwise - // we get an error similar to #16268. (The explicit type constrains the type of `segments` - // which is otherwise List[{*} tree]) - case Block(Nil, EmptyTree) => Literal(Constant(())) // for s"... ${} ..." 
- case Block(Nil, expr) => expr // important for interpolated string as patterns, see i1773.scala - case t => t - } - // This is a deliberate departure from scalac, where StringContext is not rooted (See #4732) - Apply(Select(Apply(scalaDot(nme.StringContext), strs), id).withSpan(tree.span), elems) - case PostfixOp(t, op) => - if (ctx.mode is Mode.Type) && !isBackquoted(op) && op.name == tpnme.raw.STAR then - if ctx.isJava then - AppliedTypeTree(ref(defn.RepeatedParamType), t) - else - Annotated( - AppliedTypeTree(ref(defn.SeqType), t), - New(ref(defn.RepeatedAnnot.typeRef), Nil :: Nil)) - else - assert(ctx.mode.isExpr || ctx.reporter.errorsReported || ctx.mode.is(Mode.Interactive), ctx.mode) - Select(t, op.name) - case PrefixOp(op, t) => - val nspace = if (ctx.mode.is(Mode.Type)) tpnme else nme - Select(t, nspace.UNARY_PREFIX ++ op.name) - case ForDo(enums, body) => - makeFor(nme.foreach, nme.foreach, enums, body) orElse tree - case ForYield(enums, body) => - makeFor(nme.map, nme.flatMap, enums, body) orElse tree - case PatDef(mods, pats, tpt, rhs) => - val pats1 = if (tpt.isEmpty) pats else pats map (Typed(_, tpt)) - flatTree(pats1 map (makePatDef(tree, mods, _, rhs))) - case ext: ExtMethods => - Block(List(ext), Literal(Constant(())).withSpan(ext.span)) - case CapturingTypeTree(refs, parent) => - // convert `{refs} T` to `T @retains refs` - // `{refs}-> T` to `-> (T @retainsByName refs)` - def annotate(annotName: TypeName, tp: Tree) = - Annotated(tp, New(scalaAnnotationDot(annotName), List(refs))) - parent match - case ByNameTypeTree(restpt) => - cpy.ByNameTypeTree(parent)(annotate(tpnme.retainsByName, restpt)) - case _ => - annotate(tpnme.retains, parent) - } - desugared.withSpan(tree.span) - } - - /** Turn a fucntion value `handlerFun` into a catch case for a try. - * If `handlerFun` is a partial function, translate to - * - * case ex => - * val ev$1 = handlerFun - * if ev$1.isDefinedAt(ex) then ev$1.apply(ex) else throw ex - * - * Otherwise translate to - * - * case ex => handlerFun.apply(ex) - */ - def makeTryCase(handlerFun: tpd.Tree)(using Context): CaseDef = - val handler = TypedSplice(handlerFun) - val excId = Ident(nme.DEFAULT_EXCEPTION_NAME) - val rhs = - if handlerFun.tpe.widen.isRef(defn.PartialFunctionClass) then - val tmpName = UniqueName.fresh() - val tmpId = Ident(tmpName) - val init = ValDef(tmpName, TypeTree(), handler) - val test = If( - Apply(Select(tmpId, nme.isDefinedAt), excId), - Apply(Select(tmpId, nme.apply), excId), - Throw(excId)) - Block(init :: Nil, test) - else - Apply(Select(handler, nme.apply), excId) - CaseDef(excId, EmptyTree, rhs) - - /** Create a class definition with the same info as the refined type given by `parent` - * and `refinements`. - * - * parent { refinements } - * ==> - * trait extends core { this: self => refinements } - * - * Here, `core` is the (possibly parameterized) class part of `parent`. - * If `parent` is the same as `core`, self is empty. Otherwise `self` is `parent`. - * - * Example: Given - * - * class C - * type T1 = C { type T <: A } - * - * the refined type - * - * T1 { type T <: B } - * - * is expanded to - * - * trait extends C { this: T1 => type T <: A } - * - * The result of this method is used for validity checking, is thrown away afterwards. 
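A concrete instance of the refinement example discussed above; illustration only, member and bound names invented.

class C0 { type T }
type T1 = C0 { type T <: AnyRef }
// A use of a further refinement, as in the comment's example:
def useRefined(x: T1 { type T <: String }): x.T = ???
// For validity checking the compiler models `T1 { type T <: String }` as a
// synthetic, throw-away trait extending C0 whose self type is T1.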
- * @param parent The type of `parent` - */ - def refinedTypeToClass(parent: tpd.Tree, refinements: List[Tree])(using Context): TypeDef = { - def stripToCore(tp: Type): List[Type] = tp match { - case tp: AppliedType => tp :: Nil - case tp: TypeRef if tp.symbol.isClass => tp :: Nil // monomorphic class type - case tp: TypeProxy => stripToCore(tp.underlying) - case AndType(tp1, tp2) => stripToCore(tp1) ::: stripToCore(tp2) - case _ => defn.AnyType :: Nil - } - val parentCores = stripToCore(parent.tpe) - val untpdParent = TypedSplice(parent) - val (classParents, self) = - if (parentCores.length == 1 && (parent.tpe eq parentCores.head)) (untpdParent :: Nil, EmptyValDef) - else (parentCores map TypeTree, ValDef(nme.WILDCARD, untpdParent, EmptyTree)) - val impl = Template(emptyConstructor, classParents, Nil, self, refinements) - TypeDef(tpnme.REFINE_CLASS, impl).withFlags(Trait) - } - - /** Returns list of all pattern variables, possibly with their types, - * without duplicates - */ - private def getVariables(tree: Tree, shouldAddGiven: Context ?=> Bind => Boolean)(using Context): List[VarInfo] = { - val buf = ListBuffer[VarInfo]() - def seenName(name: Name) = buf exists (_._1.name == name) - def add(named: NameTree, t: Tree): Unit = - if (!seenName(named.name) && named.name.isTermName) buf += ((named, t)) - def collect(tree: Tree): Unit = tree match { - case tree @ Bind(nme.WILDCARD, tree1) => - if tree.mods.is(Given) then - val Typed(_, tpt) = tree1: @unchecked - if shouldAddGiven(tree) then - add(tree, tpt) - collect(tree1) - case tree @ Bind(_, Typed(tree1, tpt)) => - if !(tree.mods.is(Given) && !shouldAddGiven(tree)) then - add(tree, tpt) - collect(tree1) - case tree @ Bind(_, tree1) => - add(tree, TypeTree()) - collect(tree1) - case Typed(id: Ident, t) if isVarPattern(id) && id.name != nme.WILDCARD && !isWildcardStarArg(tree) => - add(id, t) - case id: Ident if isVarPattern(id) && id.name != nme.WILDCARD => - add(id, TypeTree()) - case Apply(_, args) => - args foreach collect - case Typed(expr, _) => - collect(expr) - case NamedArg(_, arg) => - collect(arg) - case SeqLiteral(elems, _) => - elems foreach collect - case Alternative(trees) => - for (tree <- trees; (vble, _) <- getVariables(tree, shouldAddGiven)) - report.error(IllegalVariableInPatternAlternative(vble.symbol.name), vble.srcPos) - case Annotated(arg, _) => - collect(arg) - case InterpolatedString(_, segments) => - segments foreach collect - case InfixOp(left, _, right) => - collect(left) - collect(right) - case PrefixOp(_, od) => - collect(od) - case Parens(tree) => - collect(tree) - case Tuple(trees) => - trees foreach collect - case Thicket(trees) => - trees foreach collect - case Block(Nil, expr) => - collect(expr) - case Quote(expr) => - new UntypedTreeTraverser { - def traverse(tree: untpd.Tree)(using Context): Unit = tree match { - case Splice(expr) => collect(expr) - case _ => traverseChildren(tree) - } - }.traverse(expr) - case CapturingTypeTree(refs, parent) => - collect(parent) - case _ => - } - collect(tree) - buf.toList - } -} diff --git a/tests/pos-with-compiler-cc/dotc/ast/DesugarEnums.scala b/tests/pos-with-compiler-cc/dotc/ast/DesugarEnums.scala deleted file mode 100644 index a1c3c0ed0775..000000000000 --- a/tests/pos-with-compiler-cc/dotc/ast/DesugarEnums.scala +++ /dev/null @@ -1,310 +0,0 @@ -package dotty.tools -package dotc -package ast - -import core._ -import util.Spans._, Types._, Contexts._, Constants._, Names._, Flags._ -import Symbols._, StdNames._, Trees._ -import Decorators._ -import util.{Property, 
SourceFile} -import typer.ErrorReporting._ -import transform.SyntheticMembers.ExtendsSingletonMirror - -import scala.annotation.internal.sharable - -/** Helper methods to desugar enums */ -object DesugarEnums { - import untpd._ - - enum CaseKind: - case Simple, Object, Class - - final case class EnumConstraints(minKind: CaseKind, maxKind: CaseKind, enumCases: List[(Int, RefTree)]): - require(minKind.ordinal <= maxKind.ordinal && !(cached && enumCases.isEmpty)) - def requiresCreator = minKind == CaseKind.Simple - def isEnumeration = maxKind.ordinal < CaseKind.Class.ordinal - def cached = minKind.ordinal < CaseKind.Class.ordinal - end EnumConstraints - - /** Attachment containing the number of enum cases, the smallest kind that was seen so far, - * and a list of all the value cases with their ordinals. - */ - val EnumCaseCount: Property.Key[(Int, CaseKind, CaseKind, List[(Int, TermName)])] = Property.Key() - - /** Attachment signalling that when this definition is desugared, it should add any additional - * lookup methods for enums. - */ - val DefinesEnumLookupMethods: Property.Key[Unit] = Property.Key() - - /** The enumeration class that belongs to an enum case. This works no matter - * whether the case is still in the enum class or it has been transferred to the - * companion object. - */ - def enumClass(using Context): Symbol = { - val cls = ctx.owner - if (cls.is(Module)) cls.linkedClass else cls - } - - def enumCompanion(using Context): Symbol = { - val cls = ctx.owner - if (cls.is(Module)) cls.sourceModule else cls.linkedClass.sourceModule - } - - /** Is `tree` an (untyped) enum case? */ - def isEnumCase(tree: Tree)(using Context): Boolean = tree match { - case tree: MemberDef => tree.mods.isEnumCase - case PatDef(mods, _, _, _) => mods.isEnumCase - case _ => false - } - - /** A reference to the enum class `E`, possibly followed by type arguments. - * Each covariant type parameter is approximated by its lower bound. - * Each contravariant type parameter is approximated by its upper bound. - * It is an error if a type parameter is non-variant, or if its approximation - * refers to pther type parameters. 
- */ - def interpolatedEnumParent(span: Span)(using Context): Tree = { - val tparams = enumClass.typeParams - def isGround(tp: Type) = tp.subst(tparams, tparams.map(_ => NoType)) eq tp - val targs = tparams map { tparam => - if (tparam.is(Covariant) && isGround(tparam.info.bounds.lo)) - tparam.info.bounds.lo - else if (tparam.is(Contravariant) && isGround(tparam.info.bounds.hi)) - tparam.info.bounds.hi - else { - def problem = - if (!tparam.isOneOf(VarianceFlags)) "is invariant" - else "has bounds that depend on a type parameter in the same parameter list" - errorType(em"""cannot determine type argument for enum parent $enumClass, - |type parameter $tparam $problem""", ctx.source.atSpan(span)) - } - } - TypeTree(enumClass.typeRef.appliedTo(targs)).withSpan(span) - } - - /** A type tree referring to `enumClass` */ - def enumClassRef(using Context): Tree = - if (enumClass.exists) TypeTree(enumClass.typeRef) else TypeTree() - - /** Add implied flags to an enum class or an enum case */ - def addEnumFlags(cdef: TypeDef)(using Context): TypeDef = - if (cdef.mods.isEnumClass) cdef.withMods(cdef.mods.withAddedFlags(Abstract | Sealed, cdef.span)) - else if (isEnumCase(cdef)) cdef.withMods(cdef.mods.withAddedFlags(Final, cdef.span)) - else cdef - - private def valuesDot(name: PreName)(implicit src: SourceFile) = - Select(Ident(nme.DOLLAR_VALUES), name.toTermName) - - private def ArrayLiteral(values: List[Tree], tpt: Tree)(using Context): Tree = - val clazzOf = TypeApply(ref(defn.Predef_classOf.termRef), tpt :: Nil) - val ctag = Apply(TypeApply(ref(defn.ClassTagModule_apply.termRef), tpt :: Nil), clazzOf :: Nil) - val apply = Select(ref(defn.ArrayModule.termRef), nme.apply) - Apply(Apply(TypeApply(apply, tpt :: Nil), values), ctag :: Nil) - - /** The following lists of definitions for an enum type E and known value cases e_0, ..., e_n: - * - * private val $values = Array[E](this.e_0,...,this.e_n)(ClassTag[E](classOf[E])) - * def values = $values.clone - * def valueOf($name: String) = $name match { - * case "e_0" => this.e_0 - * ... 
- * case "e_n" => this.e_n - * case _ => throw new IllegalArgumentException("case not found: " + $name) - * } - */ - private def enumScaffolding(enumValues: List[RefTree])(using Context): List[Tree] = { - val rawEnumClassRef = rawRef(enumClass.typeRef) - extension (tpe: NamedType) def ofRawEnum = AppliedTypeTree(ref(tpe), rawEnumClassRef) - - val privateValuesDef = - ValDef(nme.DOLLAR_VALUES, TypeTree(), ArrayLiteral(enumValues, rawEnumClassRef)) - .withFlags(Private | Synthetic) - - val valuesDef = - DefDef(nme.values, Nil, defn.ArrayType.ofRawEnum, valuesDot(nme.clone_)) - .withFlags(Synthetic) - - val valuesOfBody: Tree = - val defaultCase = - val msg = Apply(Select(Literal(Constant("enum case not found: ")), nme.PLUS), Ident(nme.nameDollar)) - CaseDef(Ident(nme.WILDCARD), EmptyTree, - Throw(New(TypeTree(defn.IllegalArgumentExceptionType), List(msg :: Nil)))) - val stringCases = enumValues.map(enumValue => - CaseDef(Literal(Constant(enumValue.name.toString)), EmptyTree, enumValue) - ) ::: defaultCase :: Nil - Match(Ident(nme.nameDollar), stringCases) - val valueOfDef = DefDef(nme.valueOf, List(param(nme.nameDollar, defn.StringType) :: Nil), - TypeTree(), valuesOfBody) - .withFlags(Synthetic) - - privateValuesDef :: - valuesDef :: - valueOfDef :: Nil - } - - private def enumLookupMethods(constraints: EnumConstraints)(using Context): List[Tree] = - def scaffolding: List[Tree] = - if constraints.isEnumeration then enumScaffolding(constraints.enumCases.map(_._2)) else Nil - def valueCtor: List[Tree] = if constraints.requiresCreator then enumValueCreator :: Nil else Nil - def fromOrdinal: Tree = - def throwArg(ordinal: Tree) = - Throw(New(TypeTree(defn.NoSuchElementExceptionType), List(Select(ordinal, nme.toString_) :: Nil))) - if !constraints.cached then - fromOrdinalMeth(throwArg) - else - def default(ordinal: Tree) = - CaseDef(Ident(nme.WILDCARD), EmptyTree, throwArg(ordinal)) - if constraints.isEnumeration then - fromOrdinalMeth(ordinal => - Try(Apply(valuesDot(nme.apply), ordinal), default(ordinal) :: Nil, EmptyTree)) - else - fromOrdinalMeth(ordinal => - Match(ordinal, - constraints.enumCases.map((i, enumValue) => CaseDef(Literal(Constant(i)), EmptyTree, enumValue)) - :+ default(ordinal))) - - if !enumClass.exists then - // in the case of a double definition of an enum that only defines class cases (see tests/neg/i4470c.scala) - // it seems `enumClass` might be `NoSymbol`; in this case we provide no scaffolding. - Nil - else - scaffolding ::: valueCtor ::: fromOrdinal :: Nil - end enumLookupMethods - - /** A creation method for a value of enum type `E`, which is defined as follows: - * - * private def $new(_$ordinal: Int, $name: String) = new E with scala.runtime.EnumValue { - * def ordinal = _$ordinal // if `E` does not derive from `java.lang.Enum` - * } - */ - private def enumValueCreator(using Context) = { - val creator = New(Template( - constr = emptyConstructor, - parents = enumClassRef :: scalaRuntimeDot(tpnme.EnumValue) :: Nil, - derived = Nil, - self = EmptyValDef, - body = Nil - ).withAttachment(ExtendsSingletonMirror, ())) - DefDef(nme.DOLLAR_NEW, - List(List(param(nme.ordinalDollar_, defn.IntType), param(nme.nameDollar, defn.StringType))), - TypeTree(), creator).withFlags(Private | Synthetic) - } - - /** Is a type parameter in `enumTypeParams` referenced from an enum class case that has - * given type parameters `caseTypeParams`, value parameters `vparamss` and parents `parents`? - * Issues an error if that is the case but the reference is illegal. 
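The `$values`/`values`/`valueOf`/`fromOrdinal` definitions generated above back the standard enum companion API; a minimal user-level check with an invented enum.

enum Color:
  case Red, Green, Blue

def enumScaffoldingDemo(): Unit =
  assert(Color.values.toList == List(Color.Red, Color.Green, Color.Blue))
  assert(Color.valueOf("Green") == Color.Green)
  assert(Color.fromOrdinal(2) == Color.Blue)
  assert(Color.Blue.ordinal == 2)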
- * The reference could be illegal for two reasons: - * - explicit type parameters are given - * - it's a value case, i.e. no value parameters are given - */ - def typeParamIsReferenced( - enumTypeParams: List[TypeSymbol], - caseTypeParams: List[TypeDef], - vparamss: List[List[ValDef]], - parents: List[Tree])(using Context): Boolean = { - - object searchRef extends UntypedTreeAccumulator[Boolean] { - var tparamNames = enumTypeParams.map(_.name).toSet[Name] - def underBinders(binders: List[MemberDef], op: => Boolean): Boolean = { - val saved = tparamNames - tparamNames = tparamNames -- binders.map(_.name) - try op - finally tparamNames = saved - } - def apply(x: Boolean, tree: Tree)(using Context): Boolean = x || { - tree match { - case Ident(name) => - val matches = tparamNames.contains(name) - if (matches && (caseTypeParams.nonEmpty || vparamss.isEmpty)) - report.error(em"illegal reference to type parameter $name from enum case", tree.srcPos) - matches - case LambdaTypeTree(lambdaParams, body) => - underBinders(lambdaParams, foldOver(x, tree)) - case RefinedTypeTree(parent, refinements) => - val refinementDefs = refinements collect { case r: MemberDef => r } - underBinders(refinementDefs, foldOver(x, tree)) - case _ => foldOver(x, tree) - } - } - def apply(tree: Tree)(using Context): Boolean = - underBinders(caseTypeParams, apply(false, tree)) - } - - def typeHasRef(tpt: Tree) = searchRef(tpt) - def valDefHasRef(vd: ValDef) = typeHasRef(vd.tpt) - def parentHasRef(parent: Tree): Boolean = parent match { - case Apply(fn, _) => parentHasRef(fn) - case TypeApply(_, targs) => targs.exists(typeHasRef) - case Select(nu, nme.CONSTRUCTOR) => parentHasRef(nu) - case New(tpt) => typeHasRef(tpt) - case parent => parent.isType && typeHasRef(parent) - } - - vparamss.nestedExists(valDefHasRef) || parents.exists(parentHasRef) - } - - /** A pair consisting of - * - the next enum tag - * - scaffolding containing the necessary definitions for singleton enum cases - * unless that scaffolding was already generated by a previous call to `nextEnumKind`. 
- */ - def nextOrdinal(name: Name, kind: CaseKind, definesLookups: Boolean)(using Context): (Int, List[Tree]) = { - val (ordinal, seenMinKind, seenMaxKind, seenCases) = - ctx.tree.removeAttachment(EnumCaseCount).getOrElse((0, CaseKind.Class, CaseKind.Simple, Nil)) - val minKind = if kind.ordinal < seenMinKind.ordinal then kind else seenMinKind - val maxKind = if kind.ordinal > seenMaxKind.ordinal then kind else seenMaxKind - val cases = name match - case name: TermName => (ordinal, name) :: seenCases - case _ => seenCases - if definesLookups then - val thisRef = This(EmptyTypeIdent) - val cachedValues = cases.reverse.map((i, name) => (i, Select(thisRef, name))) - (ordinal, enumLookupMethods(EnumConstraints(minKind, maxKind, cachedValues))) - else - ctx.tree.pushAttachment(EnumCaseCount, (ordinal + 1, minKind, maxKind, cases)) - (ordinal, Nil) - } - - def param(name: TermName, typ: Type)(using Context): ValDef = param(name, TypeTree(typ)) - def param(name: TermName, tpt: Tree)(using Context): ValDef = ValDef(name, tpt, EmptyTree).withFlags(Param) - - def ordinalMeth(body: Tree)(using Context): DefDef = - DefDef(nme.ordinal, Nil, TypeTree(defn.IntType), body).withAddedFlags(Synthetic) - - def ordinalMethLit(ord: Int)(using Context): DefDef = - ordinalMeth(Literal(Constant(ord))) - - def fromOrdinalMeth(body: Tree => Tree)(using Context): DefDef = - DefDef(nme.fromOrdinal, (param(nme.ordinal, defn.IntType) :: Nil) :: Nil, - rawRef(enumClass.typeRef), body(Ident(nme.ordinal))).withFlags(Synthetic) - - /** Expand a module definition representing a parameterless enum case */ - def expandEnumModule(name: TermName, impl: Template, mods: Modifiers, definesLookups: Boolean, span: Span)(using Context): Tree = { - assert(impl.body.isEmpty) - if (!enumClass.exists) EmptyTree - else if (impl.parents.isEmpty) - expandSimpleEnumCase(name, mods, definesLookups, span) - else { - val (tag, scaffolding) = nextOrdinal(name, CaseKind.Object, definesLookups) - val impl1 = cpy.Template(impl)(parents = impl.parents :+ scalaRuntimeDot(tpnme.EnumValue), body = Nil) - .withAttachment(ExtendsSingletonMirror, ()) - val vdef = ValDef(name, TypeTree(), New(impl1)).withMods(mods.withAddedFlags(EnumValue, span)) - flatTree(vdef :: scaffolding).withSpan(span) - } - } - - /** Expand a simple enum case */ - def expandSimpleEnumCase(name: TermName, mods: Modifiers, definesLookups: Boolean, span: Span)(using Context): Tree = - if (!enumClass.exists) EmptyTree - else if (enumClass.typeParams.nonEmpty) { - val parent = interpolatedEnumParent(span) - val impl = Template(emptyConstructor, parent :: Nil, Nil, EmptyValDef, Nil) - expandEnumModule(name, impl, mods, definesLookups, span) - } - else { - val (tag, scaffolding) = nextOrdinal(name, CaseKind.Simple, definesLookups) - val creator = Apply(Ident(nme.DOLLAR_NEW), List(Literal(Constant(tag)), Literal(Constant(name.toString)))) - val vdef = ValDef(name, enumClassRef, creator).withMods(mods.withAddedFlags(EnumValue, span)) - flatTree(vdef :: scaffolding).withSpan(span) - } -} diff --git a/tests/pos-with-compiler-cc/dotc/ast/MainProxies.scala b/tests/pos-with-compiler-cc/dotc/ast/MainProxies.scala deleted file mode 100644 index c0cf2c0d1b81..000000000000 --- a/tests/pos-with-compiler-cc/dotc/ast/MainProxies.scala +++ /dev/null @@ -1,449 +0,0 @@ -package dotty.tools.dotc -package ast - -import core._ -import Symbols._, Types._, Contexts._, Decorators._, util.Spans._, Flags._, Constants._ -import StdNames.{nme, tpnme} -import ast.Trees._ -import Names.Name -import Comments.Comment 
-import NameKinds.DefaultGetterName -import Annotations.Annotation - -object MainProxies { - - /** Generate proxy classes for @main functions and @myMain functions where myMain <:< MainAnnotation */ - def proxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = { - mainAnnotationProxies(stats) ++ mainProxies(stats) - } - - /** Generate proxy classes for @main functions. - * A function like - * - * @main def f(x: S, ys: T*) = ... - * - * would be translated to something like - * - * import CommandLineParser._ - * class f { - * @static def main(args: Array[String]): Unit = - * try - * f( - * parseArgument[S](args, 0), - * parseRemainingArguments[T](args, 1): _* - * ) - * catch case err: ParseError => showError(err) - * } - */ - private def mainProxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = { - import tpd._ - def mainMethods(stats: List[Tree]): List[Symbol] = stats.flatMap { - case stat: DefDef if stat.symbol.hasAnnotation(defn.MainAnnot) => - stat.symbol :: Nil - case stat @ TypeDef(name, impl: Template) if stat.symbol.is(Module) => - mainMethods(impl.body) - case _ => - Nil - } - mainMethods(stats).flatMap(mainProxy) - } - - import untpd._ - private def mainProxy(mainFun: Symbol)(using Context): List[TypeDef] = { - val mainAnnotSpan = mainFun.getAnnotation(defn.MainAnnot).get.tree.span - def pos = mainFun.sourcePos - val argsRef = Ident(nme.args) - - def addArgs(call: untpd.Tree, mt: MethodType, idx: Int): untpd.Tree = - if (mt.isImplicitMethod) { - report.error(em"@main method cannot have implicit parameters", pos) - call - } - else { - val args = mt.paramInfos.zipWithIndex map { - (formal, n) => - val (parserSym, formalElem) = - if (formal.isRepeatedParam) (defn.CLP_parseRemainingArguments, formal.argTypes.head) - else (defn.CLP_parseArgument, formal) - val arg = Apply( - TypeApply(ref(parserSym.termRef), TypeTree(formalElem) :: Nil), - argsRef :: Literal(Constant(idx + n)) :: Nil) - if (formal.isRepeatedParam) repeated(arg) else arg - } - val call1 = Apply(call, args) - mt.resType match { - case restpe: MethodType => - if (mt.paramInfos.lastOption.getOrElse(NoType).isRepeatedParam) - report.error(em"varargs parameter of @main method must come last", pos) - addArgs(call1, restpe, idx + args.length) - case _ => - call1 - } - } - - var result: List[TypeDef] = Nil - if (!mainFun.owner.isStaticOwner) - report.error(em"@main method is not statically accessible", pos) - else { - var call = ref(mainFun.termRef) - mainFun.info match { - case _: ExprType => - case mt: MethodType => - call = addArgs(call, mt, 0) - case _: PolyType => - report.error(em"@main method cannot have type parameters", pos) - case _ => - report.error(em"@main can only annotate a method", pos) - } - val errVar = Ident(nme.error) - val handler = CaseDef( - Typed(errVar, TypeTree(defn.CLP_ParseError.typeRef)), - EmptyTree, - Apply(ref(defn.CLP_showError.termRef), errVar :: Nil)) - val body = Try(call, handler :: Nil, EmptyTree) - val mainArg = ValDef(nme.args, TypeTree(defn.ArrayType.appliedTo(defn.StringType)), EmptyTree) - .withFlags(Param) - /** Replace typed `Ident`s that have been typed with a TypeSplice with the reference to the symbol. - * The annotations will be retype-checked in another scope that may not have the same imports. 
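The proxy class described above is what makes a plain @main method runnable; a minimal sketch with invented names (the generated wrapper is not user-visible).

// User-written entry point:
@main def add(x: Int, ys: Int*): Unit =
  println(x + ys.sum)

// Conceptually, the compiler emits a class `add` with a static
// `main(args: Array[String])` that parses `x` from args(0), the varargs `ys`
// from the remaining arguments, and calls `add(...)`, reporting a ParseError
// on malformed input; invoking it with arguments `1 2 3` prints 6.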
- */ - def insertTypeSplices = new TreeMap { - override def transform(tree: Tree)(using Context): Tree = tree match - case tree: tpd.Ident @unchecked => TypedSplice(tree) - case tree => super.transform(tree) - } - val annots = mainFun.annotations - .filterNot(_.matches(defn.MainAnnot)) - .map(annot => insertTypeSplices.transform(annot.tree)) - val mainMeth = DefDef(nme.main, (mainArg :: Nil) :: Nil, TypeTree(defn.UnitType), body) - .withFlags(JavaStatic | Synthetic) - .withAnnotations(annots) - val mainTempl = Template(emptyConstructor, Nil, Nil, EmptyValDef, mainMeth :: Nil) - val mainCls = TypeDef(mainFun.name.toTypeName, mainTempl) - .withFlags(Final | Invisible) - - if (!ctx.reporter.hasErrors) - result = mainCls.withSpan(mainAnnotSpan.toSynthetic) :: Nil - } - result - } - - private type DefaultValueSymbols = Map[Int, Symbol] - private type ParameterAnnotationss = Seq[Seq[Annotation]] - - /** - * Generate proxy classes for main functions. - * A function like - * - * /** - * * Lorem ipsum dolor sit amet - * * consectetur adipiscing elit. - * * - * * @param x my param x - * * @param ys all my params y - * */ - * @myMain(80) def f( - * @myMain.Alias("myX") x: S, - * y: S, - * ys: T* - * ) = ... - * - * would be translated to something like - * - * final class f { - * static def main(args: Array[String]): Unit = { - * val annotation = new myMain(80) - * val info = new Info( - * name = "f", - * documentation = "Lorem ipsum dolor sit amet consectetur adipiscing elit.", - * parameters = Seq( - * new scala.annotation.MainAnnotation.Parameter("x", "S", false, false, "my param x", Seq(new scala.main.Alias("myX"))), - * new scala.annotation.MainAnnotation.Parameter("y", "S", true, false, "", Seq()), - * new scala.annotation.MainAnnotation.Parameter("ys", "T", false, true, "all my params y", Seq()) - * ) - * ), - * val command = annotation.command(info, args) - * if command.isDefined then - * val cmd = command.get - * val args0: () => S = annotation.argGetter[S](info.parameters(0), cmd(0), None) - * val args1: () => S = annotation.argGetter[S](info.parameters(1), mainArgs(1), Some(() => sum$default$1())) - * val args2: () => Seq[T] = annotation.varargGetter[T](info.parameters(2), cmd.drop(2)) - * annotation.run(() => f(args0(), args1(), args2()*)) - * } - * } - */ - private def mainAnnotationProxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = { - import tpd._ - - /** - * Computes the symbols of the default values of the function. Since they cannot be inferred anymore at this - * point of the compilation, they must be explicitly passed by [[mainProxy]]. - */ - def defaultValueSymbols(scope: Tree, funSymbol: Symbol): DefaultValueSymbols = - scope match { - case TypeDef(_, template: Template) => - template.body.flatMap((_: Tree) match { - case dd: DefDef if dd.name.is(DefaultGetterName) && dd.name.firstPart == funSymbol.name => - val DefaultGetterName.NumberedInfo(index) = dd.name.info: @unchecked - List(index -> dd.symbol) - case _ => Nil - }).toMap - case _ => Map.empty - } - - /** Computes the list of main methods present in the code. 
*/ - def mainMethods(scope: Tree, stats: List[Tree]): List[(Symbol, ParameterAnnotationss, DefaultValueSymbols, Option[Comment])] = stats.flatMap { - case stat: DefDef => - val sym = stat.symbol - sym.annotations.filter(_.matches(defn.MainAnnotationClass)) match { - case Nil => - Nil - case _ :: Nil => - val paramAnnotations = stat.paramss.flatMap(_.map( - valdef => valdef.symbol.annotations.filter(_.matches(defn.MainAnnotationParameterAnnotation)) - )) - (sym, paramAnnotations.toVector, defaultValueSymbols(scope, sym), stat.rawComment) :: Nil - case mainAnnot :: others => - report.error(em"method cannot have multiple main annotations", mainAnnot.tree) - Nil - } - case stat @ TypeDef(_, impl: Template) if stat.symbol.is(Module) => - mainMethods(stat, impl.body) - case _ => - Nil - } - - // Assuming that the top-level object was already generated, all main methods will have a scope - mainMethods(EmptyTree, stats).flatMap(mainAnnotationProxy) - } - - private def mainAnnotationProxy(mainFun: Symbol, paramAnnotations: ParameterAnnotationss, defaultValueSymbols: DefaultValueSymbols, docComment: Option[Comment])(using Context): Option[TypeDef] = { - val mainAnnot = mainFun.getAnnotation(defn.MainAnnotationClass).get - def pos = mainFun.sourcePos - - val documentation = new Documentation(docComment) - - /** () => value */ - def unitToValue(value: Tree): Tree = - val defDef = DefDef(nme.ANON_FUN, List(Nil), TypeTree(), value) - Block(defDef, Closure(Nil, Ident(nme.ANON_FUN), EmptyTree)) - - /** Generate a list of trees containing the ParamInfo instantiations. - * - * A ParamInfo has the following shape - * ``` - * new scala.annotation.MainAnnotation.Parameter("x", "S", false, false, "my param x", Seq(new scala.main.Alias("myX"))) - * ``` - */ - def parameterInfos(mt: MethodType): List[Tree] = - extension (tree: Tree) def withProperty(sym: Symbol, args: List[Tree]) = - Apply(Select(tree, sym.name), args) - - for ((formal, paramName), idx) <- mt.paramInfos.zip(mt.paramNames).zipWithIndex yield - val param = paramName.toString - val paramType0 = if formal.isRepeatedParam then formal.argTypes.head.dealias else formal.dealias - val paramType = paramType0.dealias - val paramTypeOwner = paramType.typeSymbol.owner - val paramTypeStr = - if paramTypeOwner == defn.EmptyPackageClass then paramType.show - else paramTypeOwner.showFullName + "." + paramType.show - val hasDefault = defaultValueSymbols.contains(idx) - val isRepeated = formal.isRepeatedParam - val paramDoc = documentation.argDocs.getOrElse(param, "") - val paramAnnots = - val annotationTrees = paramAnnotations(idx).map(instantiateAnnotation).toList - Apply(ref(defn.SeqModule.termRef), annotationTrees) - - val constructorArgs = List(param, paramTypeStr, hasDefault, isRepeated, paramDoc) - .map(value => Literal(Constant(value))) - - New(TypeTree(defn.MainAnnotationParameter.typeRef), List(constructorArgs :+ paramAnnots)) - - end parameterInfos - - /** - * Creates a list of references and definitions of arguments. - * The goal is to create the - * `val args0: () => S = annotation.argGetter[S](0, cmd(0), None)` - * part of the code. 
- */ - def argValDefs(mt: MethodType): List[ValDef] = - for ((formal, paramName), idx) <- mt.paramInfos.zip(mt.paramNames).zipWithIndex yield - val argName = nme.args ++ idx.toString - val isRepeated = formal.isRepeatedParam - val formalType = if isRepeated then formal.argTypes.head else formal - val getterName = if isRepeated then nme.varargGetter else nme.argGetter - val defaultValueGetterOpt = defaultValueSymbols.get(idx) match - case None => ref(defn.NoneModule.termRef) - case Some(dvSym) => - val value = unitToValue(ref(dvSym.termRef)) - Apply(ref(defn.SomeClass.companionModule.termRef), value) - val argGetter0 = TypeApply(Select(Ident(nme.annotation), getterName), TypeTree(formalType) :: Nil) - val index = Literal(Constant(idx)) - val paramInfo = Apply(Select(Ident(nme.info), nme.parameters), index) - val argGetter = - if isRepeated then Apply(argGetter0, List(paramInfo, Apply(Select(Ident(nme.cmd), nme.drop), List(index)))) - else Apply(argGetter0, List(paramInfo, Apply(Ident(nme.cmd), List(index)), defaultValueGetterOpt)) - ValDef(argName, TypeTree(), argGetter) - end argValDefs - - - /** Create a list of argument references that will be passed as argument to the main method. - * `args0`, ...`argn*` - */ - def argRefs(mt: MethodType): List[Tree] = - for ((formal, paramName), idx) <- mt.paramInfos.zip(mt.paramNames).zipWithIndex yield - val argRef = Apply(Ident(nme.args ++ idx.toString), Nil) - if formal.isRepeatedParam then repeated(argRef) else argRef - end argRefs - - - /** Turns an annotation (e.g. `@main(40)`) into an instance of the class (e.g. `new scala.main(40)`). */ - def instantiateAnnotation(annot: Annotation): Tree = - val argss = { - def recurse(t: tpd.Tree, acc: List[List[Tree]]): List[List[Tree]] = t match { - case Apply(t, args: List[tpd.Tree]) => recurse(t, extractArgs(args) :: acc) - case _ => acc - } - - def extractArgs(args: List[tpd.Tree]): List[Tree] = - args.flatMap { - case Typed(SeqLiteral(varargs, _), _) => varargs.map(arg => TypedSplice(arg)) - case arg: Select if arg.name.is(DefaultGetterName) => Nil // Ignore default values, they will be added later by the compiler - case arg => List(TypedSplice(arg)) - } - - recurse(annot.tree, Nil) - } - - New(TypeTree(annot.symbol.typeRef), argss) - end instantiateAnnotation - - def generateMainClass(mainCall: Tree, args: List[Tree], parameterInfos: List[Tree]): TypeDef = - val cmdInfo = - val nameTree = Literal(Constant(mainFun.showName)) - val docTree = Literal(Constant(documentation.mainDoc)) - val paramInfos = Apply(ref(defn.SeqModule.termRef), parameterInfos) - New(TypeTree(defn.MainAnnotationInfo.typeRef), List(List(nameTree, docTree, paramInfos))) - - val annotVal = ValDef( - nme.annotation, - TypeTree(), - instantiateAnnotation(mainAnnot) - ) - val infoVal = ValDef( - nme.info, - TypeTree(), - cmdInfo - ) - val command = ValDef( - nme.command, - TypeTree(), - Apply( - Select(Ident(nme.annotation), nme.command), - List(Ident(nme.info), Ident(nme.args)) - ) - ) - val argsVal = ValDef( - nme.cmd, - TypeTree(), - Select(Ident(nme.command), nme.get) - ) - val run = Apply(Select(Ident(nme.annotation), nme.run), mainCall) - val body0 = If( - Select(Ident(nme.command), nme.isDefined), - Block(argsVal :: args, run), - EmptyTree - ) - val body = Block(List(annotVal, infoVal, command), body0) // TODO add `if (cmd.nonEmpty)` - - val mainArg = ValDef(nme.args, TypeTree(defn.ArrayType.appliedTo(defn.StringType)), EmptyTree) - .withFlags(Param) - /** Replace typed `Ident`s that have been typed with a TypeSplice with the 
reference to the symbol. - * The annotations will be retype-checked in another scope that may not have the same imports. - */ - def insertTypeSplices = new TreeMap { - override def transform(tree: Tree)(using Context): Tree = tree match - case tree: tpd.Ident @unchecked => TypedSplice(tree) - case tree => super.transform(tree) - } - val annots = mainFun.annotations - .filterNot(_.matches(defn.MainAnnotationClass)) - .map(annot => insertTypeSplices.transform(annot.tree)) - val mainMeth = DefDef(nme.main, (mainArg :: Nil) :: Nil, TypeTree(defn.UnitType), body) - .withFlags(JavaStatic) - .withAnnotations(annots) - val mainTempl = Template(emptyConstructor, Nil, Nil, EmptyValDef, mainMeth :: Nil) - val mainCls = TypeDef(mainFun.name.toTypeName, mainTempl) - .withFlags(Final | Invisible) - mainCls.withSpan(mainAnnot.tree.span.toSynthetic) - end generateMainClass - - if (!mainFun.owner.isStaticOwner) - report.error(em"main method is not statically accessible", pos) - None - else mainFun.info match { - case _: ExprType => - Some(generateMainClass(unitToValue(ref(mainFun.termRef)), Nil, Nil)) - case mt: MethodType => - if (mt.isImplicitMethod) - report.error(em"main method cannot have implicit parameters", pos) - None - else mt.resType match - case restpe: MethodType => - report.error(em"main method cannot be curried", pos) - None - case _ => - Some(generateMainClass(unitToValue(Apply(ref(mainFun.termRef), argRefs(mt))), argValDefs(mt), parameterInfos(mt))) - case _: PolyType => - report.error(em"main method cannot have type parameters", pos) - None - case _ => - report.error(em"main can only annotate a method", pos) - None - } - } - - /** A class responsible for extracting the docstrings of a method. */ - private class Documentation(docComment: Option[Comment]): - import util.CommentParsing._ - - /** The main part of the documentation. */ - lazy val mainDoc: String = _mainDoc - /** The parameters identified by @param. Maps from parameter name to its documentation. 
*/ - lazy val argDocs: Map[String, String] = _argDocs - - private var _mainDoc: String = "" - private var _argDocs: Map[String, String] = Map() - - docComment match { - case Some(comment) => if comment.isDocComment then parseDocComment(comment.raw) else _mainDoc = comment.raw - case None => - } - - private def cleanComment(raw: String): String = - var lines: Seq[String] = raw.trim.nn.split('\n').nn.toSeq - lines = lines.map(l => l.substring(skipLineLead(l, -1), l.length).nn.trim.nn) - var s = lines.foldLeft("") { - case ("", s2) => s2 - case (s1, "") if s1.last == '\n' => s1 // Multiple newlines are kept as single newlines - case (s1, "") => s1 + '\n' - case (s1, s2) if s1.last == '\n' => s1 + s2 - case (s1, s2) => s1 + ' ' + s2 - } - s.replaceAll(raw"\[\[", "").nn.replaceAll(raw"\]\]", "").nn.trim.nn - - private def parseDocComment(raw: String): Unit = - // Positions of the sections (@) in the docstring - val tidx: List[(Int, Int)] = tagIndex(raw) - - // Parse main comment - var mainComment: String = raw.substring(skipLineLead(raw, 0), startTag(raw, tidx)).nn - _mainDoc = cleanComment(mainComment) - - // Parse arguments comments - val argsCommentsSpans: Map[String, (Int, Int)] = paramDocs(raw, "@param", tidx) - val argsCommentsTextSpans = argsCommentsSpans.view.mapValues(extractSectionText(raw, _)) - val argsCommentsTexts = argsCommentsTextSpans.mapValues({ case (beg, end) => raw.substring(beg, end).nn }) - _argDocs = argsCommentsTexts.mapValues(cleanComment(_)).toMap - end Documentation -} diff --git a/tests/pos-with-compiler-cc/dotc/ast/NavigateAST.scala b/tests/pos-with-compiler-cc/dotc/ast/NavigateAST.scala deleted file mode 100644 index 054ffe66f323..000000000000 --- a/tests/pos-with-compiler-cc/dotc/ast/NavigateAST.scala +++ /dev/null @@ -1,129 +0,0 @@ -package dotty.tools.dotc -package ast - -import core.Contexts._ -import core.Decorators._ -import util.Spans._ -import Trees.{MemberDef, DefTree, WithLazyField} -import dotty.tools.dotc.core.Types.AnnotatedType -import dotty.tools.dotc.core.Types.ImportType -import dotty.tools.dotc.core.Types.Type - -/** Utility functions to go from typed to untyped ASTs */ -// TODO: Handle trees with mixed source files -object NavigateAST { - - /** The untyped tree corresponding to typed tree `tree` in the compilation - * unit specified by `ctx` - */ - def toUntyped(tree: tpd.Tree)(using Context): untpd.Tree = - untypedPath(tree, exactMatch = true) match { - case (utree: untpd.Tree) :: _ => - utree - case _ => - val loosePath = untypedPath(tree, exactMatch = false) - throw new - Error(i"""no untyped tree for $tree, pos = ${tree.sourcePos} - |best matching path =\n$loosePath%\n====\n% - |path positions = ${loosePath.map(_.sourcePos)}""") - } - - /** The reverse path of untyped trees starting with a tree that closest matches - * `tree` and ending in the untyped tree at the root of the compilation unit - * specified by `ctx`. - * @param exactMatch If `true`, the path must start with a node that exactly - * matches `tree`, or `Nil` is returned. - * If `false` the path might start with a node enclosing - * the logical position of `tree`. - * Note: A complication concerns member definitions. ValDefs and DefDefs - * have after desugaring a position that spans just the name of the symbol being - * defined and nothing else. So we look instead for an untyped tree approximating the - * envelope of the definition, and declare success if we find another DefTree. 
- */ - def untypedPath(tree: tpd.Tree, exactMatch: Boolean = false)(using Context): List[Positioned] = - tree match { - case tree: MemberDef[?] => - untypedPath(tree.span) match { - case path @ (last: DefTree[?]) :: _ => path - case path if !exactMatch => path - case _ => Nil - } - case _ => - untypedPath(tree.span) match { - case (path @ last :: _) if last.span == tree.span || !exactMatch => path - case _ => Nil - } - } - - /** The reverse part of the untyped root of the compilation unit of `ctx` to - * the given `span`. - */ - def untypedPath(span: Span)(using Context): List[Positioned] = - pathTo(span, List(ctx.compilationUnit.untpdTree)) - - - /** The reverse path from any node in `from` to the node that closest encloses `span`, - * or `Nil` if no such path exists. If a non-empty path is returned it starts with - * the node closest enclosing `span` and ends with one of the nodes in `from`. - * - * @param skipZeroExtent If true, skip over zero-extent nodes in the search. These nodes - * do not correspond to code the user wrote since their start and - * end point are the same, so this is useful when trying to reconcile - * nodes with source code. - */ - def pathTo(span: Span, from: List[Positioned], skipZeroExtent: Boolean = false)(using Context): List[Positioned] = { - def childPath(it: Iterator[Any], path: List[Positioned]): List[Positioned] = { - var bestFit: List[Positioned] = path - while (it.hasNext) { - val path1 = it.next() match { - case p: Positioned => singlePath(p, path) - case m: untpd.Modifiers => childPath(m.productIterator, path) - case xs: List[?] => childPath(xs.iterator, path) - case _ => path - } - if ((path1 ne path) && - ((bestFit eq path) || - bestFit.head.span != path1.head.span && - bestFit.head.span.contains(path1.head.span))) - bestFit = path1 - } - bestFit - } - /* - * Annotations trees are located in the Type - */ - def unpackAnnotations(t: Type, path: List[Positioned]): List[Positioned] = - t match { - case ann: AnnotatedType => - unpackAnnotations(ann.parent, childPath(ann.annot.tree.productIterator, path)) - case imp: ImportType => - childPath(imp.expr.productIterator, path) - case other => - path - } - def singlePath(p: Positioned, path: List[Positioned]): List[Positioned] = - if (p.span.exists && !(skipZeroExtent && p.span.isZeroExtent) && p.span.contains(span)) { - // FIXME: We shouldn't be manually forcing trees here, we should replace - // our usage of `productIterator` by something in `Positioned` that takes - // care of low-level details like this for us. - p match { - case p: WithLazyField[?] => - p.forceIfLazy - case _ => - } - val iterator = p match - case defdef: DefTree[?] 
=> - p.productIterator ++ defdef.mods.productIterator - case _ => - p.productIterator - childPath(iterator, p :: path) - } - else { - p match { - case t: untpd.TypeTree => unpackAnnotations(t.typeOpt, path) - case _ => path - } - } - childPath(from.iterator, Nil) - } -} diff --git a/tests/pos-with-compiler-cc/dotc/ast/Positioned.scala b/tests/pos-with-compiler-cc/dotc/ast/Positioned.scala deleted file mode 100644 index 7b558c65e425..000000000000 --- a/tests/pos-with-compiler-cc/dotc/ast/Positioned.scala +++ /dev/null @@ -1,246 +0,0 @@ -package dotty.tools -package dotc -package ast - -import util.Spans._ -import util.{SourceFile, SourcePosition, SrcPos} -import core.Contexts._ -import core.Decorators._ -import core.NameOps._ -import core.Flags.{JavaDefined, ExtensionMethod} -import core.StdNames.nme -import ast.Trees.mods -import annotation.constructorOnly -import annotation.internal.sharable - -/** A base class for things that have positions (currently: modifiers and trees) - */ -abstract class Positioned(implicit @constructorOnly src: SourceFile) extends SrcPos, Product, Cloneable, Pure { - import Positioned.{ids, nextId, debugId} - - private var mySpan: Span = _ - - private var mySource: SourceFile = src - - /** A unique identifier in case -Yshow-tree-ids, or -Ydebug-tree-with-id - * is set, -1 otherwise. - */ - def uniqueId: Int = - if ids != null && ids.nn.containsKey(this) then ids.nn.get(this).nn else -1 - - private def allocateId() = - if ids != null then - val ownId = nextId - nextId += 1 - ids.nn.put(this: @unchecked, ownId) - if ownId == debugId then - println(s"Debug tree (id=$debugId) creation \n${this: @unchecked}\n") - Thread.dumpStack() - - allocateId() - - /** The span part of the item's position */ - def span: Span = mySpan - - def span_=(span: Span): Unit = - mySpan = span - - span = envelope(src) - - def source: SourceFile = mySource - - def sourcePos(using Context): SourcePosition = source.atSpan(span) - - /** This positioned item, widened to `SrcPos`. Used to make clear we only need the - * position, typically for error reporting. - */ - final def srcPos: SrcPos = this - - /** A positioned item like this one with given `span`. - * If the positioned item is source-derived, a clone is returned. - * If the positioned item is synthetic, the position is updated - * destructively and the item itself is returned. - */ - def withSpan(span: Span): this.type = - if (span == mySpan) this - else { - val newpd: this.type = - if !mySpan.exists then - if span.exists then envelope(source, span.startPos) // fill in children spans - this - else - cloneIn(source) - newpd.span = span - newpd - } - - /** The union of startSpan and the spans of all positioned children that - * have the same source as this node, except that Inlined nodes only - * consider their `call` child. - * - * Side effect: Any descendants without spans have but with the same source as this - * node have their span set to the end position of the envelope of all children to - * the left, or, if that one does not exist, to the start position of the envelope - * of all children to the right. 
- */ - def envelope(src: SourceFile, startSpan: Span = NoSpan): Span = (this: @unchecked) match { - case Trees.Inlined(call, _, _) => - call.span - case _ => - def include(span: Span, x: Any): Span = x match { - case p: Positioned => - if (p.source != src) span - else if (p.span.exists) span.union(p.span) - else if (span.exists) { - if (span.end != MaxOffset) - p.span = p.envelope(src, span.endPos) - span - } - else // No span available to assign yet, signal this by returning a span with MaxOffset end - Span(MaxOffset, MaxOffset) - case m: untpd.Modifiers => - include(include(span, m.mods), m.annotations) - case y :: ys => - include(include(span, y), ys) - case _ => span - } - val limit = productArity - def includeChildren(span: Span, n: Int): Span = - if (n < limit) includeChildren(include(span, productElement(n): @unchecked), n + 1) - else span - val span1 = includeChildren(startSpan, 0) - val span2 = - if (!span1.exists || span1.end != MaxOffset) - span1 - else if (span1.start == MaxOffset) - // No positioned child was found - NoSpan - else - ///println(s"revisit $uniqueId with $span1") - // We have some children left whose span could not be assigned. - // Go through it again with the known start position. - includeChildren(span1.startPos, 0) - span2.toSynthetic - } - - /** Clone this node but assign it a fresh id which marks it as a node in `file`. */ - def cloneIn(src: SourceFile): this.type = { - val newpd: this.type = clone.asInstanceOf[this.type] - newpd.allocateId() - newpd.mySource = src - newpd - } - - def contains(that: Positioned): Boolean = { - def isParent(x: Any): Boolean = x match { - case x: Positioned => - x.contains(that) - case m: untpd.Modifiers => - m.mods.exists(isParent) || m.annotations.exists(isParent) - case xs: List[?] => - xs.exists(isParent) - case _ => - false - } - (this eq that) || - (this.span contains that.span) && { - var n = productArity - var found = false - while (!found && n > 0) { - n -= 1 - found = isParent(productElement(n)) - } - found - } - } - - /** Check that all positioned items in this tree satisfy the following conditions: - * - Parent spans contain child spans - * - If item is a non-empty tree, it has a position - */ - def checkPos(nonOverlapping: Boolean)(using Context): Unit = try { - import untpd._ - var lastPositioned: Positioned | Null = null - var lastSpan = NoSpan - def check(p: Any): Unit = p match { - case p: Positioned => - assert(span contains p.span, - i"""position error, parent span does not contain child span - |parent = $this # $uniqueId, - |parent span = $span, - |child = $p # ${p.uniqueId}, - |child span = ${p.span}""".stripMargin) - p match { - case tree: Tree if !tree.isEmpty => - assert(tree.span.exists, - s"position error: position not set for $tree # ${tree.uniqueId}") - case _ => - } - if nonOverlapping then - this match { - case _: XMLBlock => - // FIXME: Trees generated by the XML parser do not satisfy `checkPos` - case _: WildcardFunction - if lastPositioned.isInstanceOf[ValDef] && !p.isInstanceOf[ValDef] => - // ignore transition from last wildcard parameter to body - case _ => - assert(!lastSpan.exists || !p.span.exists || lastSpan.end <= p.span.start, - i"""position error, child positions overlap or in wrong order - |parent = $this - |1st child = $lastPositioned - |1st child span = $lastSpan - |2nd child = $p - |2nd child span = ${p.span}""".stripMargin) - } - lastPositioned = p - lastSpan = p.span - p.checkPos(nonOverlapping) - case m: untpd.Modifiers => - m.annotations.foreach(check) - m.mods.foreach(check) 
- case xs: List[?] => - xs.foreach(check) - case _ => - } - this match { - case tree: DefDef if tree.name == nme.CONSTRUCTOR && tree.mods.is(JavaDefined) => - // Special treatment for constructors coming from Java: - // Leave out leading type params, they are copied with wrong positions from parent class - check(tree.mods) - check(tree.trailingParamss) - case tree: DefDef if tree.mods.is(ExtensionMethod) => - tree.paramss match - case vparams1 :: vparams2 :: rest if tree.name.isRightAssocOperatorName => - // omit check for right-associatiove extension methods; their parameters were swapped - case _ => - check(tree.paramss) - check(tree.tpt) - check(tree.rhs) - case _ => - val end = productArity - var n = 0 - while (n < end) { - check(productElement(n)) - n += 1 - } - } - } - catch { - case ex: AssertionError => - println(i"error while checking $this") - throw ex - } -} - -object Positioned { - @sharable private var debugId = Int.MinValue - @sharable private var ids: java.util.WeakHashMap[Positioned, Int] | Null = null - @sharable private var nextId: Int = 0 - - def init(using Context): Unit = - debugId = ctx.settings.YdebugTreeWithId.value - if ids == null && ctx.settings.YshowTreeIds.value - || debugId != ctx.settings.YdebugTreeWithId.default - then - ids = java.util.WeakHashMap() -} diff --git a/tests/pos-with-compiler-cc/dotc/ast/TreeInfo.scala b/tests/pos-with-compiler-cc/dotc/ast/TreeInfo.scala deleted file mode 100644 index b650a0088de4..000000000000 --- a/tests/pos-with-compiler-cc/dotc/ast/TreeInfo.scala +++ /dev/null @@ -1,1070 +0,0 @@ -package dotty.tools -package dotc -package ast - -import core._ -import Flags._, Trees._, Types._, Contexts._ -import Names._, StdNames._, NameOps._, Symbols._ -import typer.ConstFold -import reporting.trace -import dotty.tools.dotc.transform.SymUtils._ -import Decorators._ -import Constants.Constant -import scala.collection.mutable - -import scala.annotation.tailrec - -trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => - - def unsplice(tree: Trees.Tree[T]): Trees.Tree[T] = tree - - def isDeclarationOrTypeDef(tree: Tree): Boolean = unsplice(tree) match { - case DefDef(_, _, _, EmptyTree) - | ValDef(_, _, EmptyTree) - | TypeDef(_, _) => true - case _ => false - } - - def isOpAssign(tree: Tree): Boolean = unsplice(tree) match { - case Apply(fn, _ :: _) => - unsplice(fn) match { - case Select(_, name) if name.isOpAssignmentName => true - case _ => false - } - case _ => false - } - - class MatchingArgs(params: List[Symbol], args: List[Tree])(using Context) { - def foreach(f: (Symbol, Tree) => Unit): Boolean = { - def recur(params: List[Symbol], args: List[Tree]): Boolean = params match { - case Nil => args.isEmpty - case param :: params1 => - if (param.info.isRepeatedParam) { - for (arg <- args) f(param, arg) - true - } - else args match { - case Nil => false - case arg :: args1 => - f(param, args.head) - recur(params1, args1) - } - } - recur(params, args) - } - def zipped: List[(Symbol, Tree)] = map((_, _)) - def map[R](f: (Symbol, Tree) => R): List[R] = { - val b = List.newBuilder[R] - foreach(b += f(_, _)) - b.result() - } - } - - /** The method part of an application node, possibly enclosed in a block - * with only valdefs as statements. the reason for also considering blocks - * is that named arguments can transform a call into a block, e.g. 
- * (b = foo, a = bar) - * is transformed to - * { val x$1 = foo - * val x$2 = bar - * (x$2, x$1) - * } - */ - def methPart(tree: Tree): Tree = stripApply(tree) match { - case TypeApply(fn, _) => methPart(fn) - case AppliedTypeTree(fn, _) => methPart(fn) // !!! should not be needed - case Block(stats, expr) => methPart(expr) - case mp => mp - } - - /** If this is an application, its function part, stripping all - * Apply nodes (but leaving TypeApply nodes in). Otherwise the tree itself. - */ - def stripApply(tree: Tree): Tree = unsplice(tree) match { - case Apply(fn, _) => stripApply(fn) - case _ => tree - } - - /** If this is a block, its expression part */ - def stripBlock(tree: Tree): Tree = unsplice(tree) match { - case Block(_, expr) => stripBlock(expr) - case Inlined(_, _, expr) => stripBlock(expr) - case _ => tree - } - - def stripInlined(tree: Tree): Tree = unsplice(tree) match { - case Inlined(_, _, expr) => stripInlined(expr) - case _ => tree - } - - def stripAnnotated(tree: Tree): Tree = tree match { - case Annotated(arg, _) => arg - case _ => tree - } - - /** The number of arguments in an application */ - def numArgs(tree: Tree): Int = unsplice(tree) match { - case Apply(fn, args) => numArgs(fn) + args.length - case TypeApply(fn, _) => numArgs(fn) - case Block(_, expr) => numArgs(expr) - case _ => 0 - } - - /** All term arguments of an application in a single flattened list */ - def allArguments(tree: Tree): List[Tree] = unsplice(tree) match { - case Apply(fn, args) => allArguments(fn) ::: args - case TypeApply(fn, _) => allArguments(fn) - case Block(_, expr) => allArguments(expr) - case _ => Nil - } - - /** Is tree explicitly parameterized with type arguments? */ - def hasExplicitTypeArgs(tree: Tree): Boolean = tree match - case TypeApply(tycon, args) => - args.exists(arg => !arg.span.isZeroExtent && !tycon.span.contains(arg.span)) - case _ => false - - /** Is tree a path? */ - def isPath(tree: Tree): Boolean = unsplice(tree) match { - case Ident(_) | This(_) | Super(_, _) => true - case Select(qual, _) => isPath(qual) - case _ => false - } - - /** Is tree a self constructor call this(...)? I.e. a call to a constructor of the - * same object? - */ - def isSelfConstrCall(tree: Tree): Boolean = methPart(tree) match { - case Ident(nme.CONSTRUCTOR) | Select(This(_), nme.CONSTRUCTOR) => true - case _ => false - } - - /** Is tree a super constructor call? - */ - def isSuperConstrCall(tree: Tree): Boolean = methPart(tree) match { - case Select(Super(_, _), nme.CONSTRUCTOR) => true - case _ => false - } - - def isSuperSelection(tree: Tree): Boolean = unsplice(tree) match { - case Select(Super(_, _), _) => true - case _ => false - } - - def isSelfOrSuperConstrCall(tree: Tree): Boolean = methPart(tree) match { - case Ident(nme.CONSTRUCTOR) - | Select(This(_), nme.CONSTRUCTOR) - | Select(Super(_, _), nme.CONSTRUCTOR) => true - case _ => false - } - - /** Is tree a backquoted identifier or definition */ - def isBackquoted(tree: Tree): Boolean = tree.hasAttachment(Backquoted) - - /** Is tree a variable pattern? */ - def isVarPattern(pat: Tree): Boolean = unsplice(pat) match { - case x: Ident => x.name.isVarPattern && !isBackquoted(x) - case _ => false - } - - /** The first constructor definition in `stats` */ - def firstConstructor(stats: List[Tree]): Tree = stats match { - case (meth: DefDef) :: _ if meth.name.isConstructorName => meth - case stat :: stats => firstConstructor(stats) - case nil => EmptyTree - } - - /** Is tpt a vararg type of the form T* or => T*? 
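The `methPart` comment above is easier to follow with a concrete shape in hand. Below is a toy sketch, with illustrative names only and not the compiler's `tpd.Tree`, of why the `Block` produced by named-argument lifting, as well as `Apply` and `TypeApply` layers, must be looked through to reach the underlying method reference.

object MethPartSketch:
  // A toy tree shape: just enough structure to mirror stripApply/methPart.
  sealed trait Tree
  case class Ident(name: String)                      extends Tree
  case class Apply(fn: Tree, args: List[Tree])        extends Tree
  case class TypeApply(fn: Tree, targs: List[String]) extends Tree
  case class ValDef(name: String, rhs: Tree)          extends Tree
  case class Block(stats: List[Tree], expr: Tree)     extends Tree

  // stripApply drops value-argument applications only.
  def stripApply(t: Tree): Tree = t match
    case Apply(fn, _) => stripApply(fn)
    case _            => t

  // methPart additionally looks through TypeApply and through the Block
  // that named-argument lifting wraps around a call.
  def methPart(t: Tree): Tree = stripApply(t) match
    case TypeApply(fn, _) => methPart(fn)
    case Block(_, expr)   => methPart(expr)
    case t1               => t1

  @main def methPartDemo(): Unit =
    // f[Int](b = foo, a = bar) is lifted to
    // { val x$1 = foo; val x$2 = bar; f[Int](x$2, x$1) }
    val lifted = Block(
      List(ValDef("x$1", Ident("foo")), ValDef("x$2", Ident("bar"))),
      Apply(TypeApply(Ident("f"), List("Int")), List(Ident("x$2"), Ident("x$1"))))
    println(methPart(lifted)) // Ident(f)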
*/ - def isRepeatedParamType(tpt: Tree)(using Context): Boolean = stripByNameType(tpt) match { - case tpt: TypeTree => tpt.typeOpt.isRepeatedParam - case AppliedTypeTree(Select(_, tpnme.REPEATED_PARAM_CLASS), _) => true - case _ => false - } - - /** Is this argument node of the form *, or is it a reference to - * such an argument ? The latter case can happen when an argument is lifted. - */ - def isWildcardStarArg(tree: Tree)(using Context): Boolean = unbind(tree) match { - case Typed(Ident(nme.WILDCARD_STAR), _) => true - case Typed(_, Ident(tpnme.WILDCARD_STAR)) => true - case Typed(_, tpt: TypeTree) => tpt.typeOpt.isRepeatedParam - case NamedArg(_, arg) => isWildcardStarArg(arg) - case arg => arg.typeOpt.widen.isRepeatedParam - } - - /** Is tree a type tree of the form `=> T` or (under pureFunctions) `{refs}-> T`? */ - def isByNameType(tree: Tree)(using Context): Boolean = - stripByNameType(tree) ne tree - - /** Strip `=> T` to `T` and (under pureFunctions) `{refs}-> T` to `T` */ - def stripByNameType(tree: Tree)(using Context): Tree = unsplice(tree) match - case ByNameTypeTree(t1) => t1 - case untpd.CapturingTypeTree(_, parent) => - val parent1 = stripByNameType(parent) - if parent1 eq parent then tree else parent1 - case _ => tree - - /** All type and value parameter symbols of this DefDef */ - def allParamSyms(ddef: DefDef)(using Context): List[Symbol] = - ddef.paramss.flatten.map(_.symbol) - - /** Does this argument list end with an argument of the form : _* ? */ - def isWildcardStarArgList(trees: List[Tree])(using Context): Boolean = - trees.nonEmpty && isWildcardStarArg(trees.last) - - /** Is the argument a wildcard argument of the form `_` or `x @ _`? - */ - def isWildcardArg(tree: Tree): Boolean = unbind(tree) match { - case Ident(nme.WILDCARD) => true - case _ => false - } - - /** Does this list contain a named argument tree? */ - def hasNamedArg(args: List[Any]): Boolean = args exists isNamedArg - val isNamedArg: Any => Boolean = (arg: Any) => arg.isInstanceOf[Trees.NamedArg[_]] - - /** Is this pattern node a catch-all (wildcard or variable) pattern? */ - def isDefaultCase(cdef: CaseDef): Boolean = cdef match { - case CaseDef(pat, EmptyTree, _) => isWildcardArg(pat) - case _ => false - } - - /** Does this CaseDef catch Throwable? */ - def catchesThrowable(cdef: CaseDef)(using Context): Boolean = - catchesAllOf(cdef, defn.ThrowableType) - - /** Does this CaseDef catch everything of a certain Type? */ - def catchesAllOf(cdef: CaseDef, threshold: Type)(using Context): Boolean = - isDefaultCase(cdef) || - cdef.guard.isEmpty && { - unbind(cdef.pat) match { - case Typed(Ident(nme.WILDCARD), tpt) => threshold <:< tpt.typeOpt - case _ => false - } - } - - /** Is this case guarded? */ - def isGuardedCase(cdef: CaseDef): Boolean = cdef.guard ne EmptyTree - - /** Is this parameter list a using clause? 
*/ - def isUsingClause(params: ParamClause)(using Context): Boolean = params match - case ValDefs(vparam :: _) => - val sym = vparam.symbol - if sym.exists then sym.is(Given) else vparam.mods.is(Given) - case _ => - false - - def isUsingOrTypeParamClause(params: ParamClause)(using Context): Boolean = params match - case TypeDefs(_) => true - case _ => isUsingClause(params) - - def isTypeParamClause(params: ParamClause)(using Context): Boolean = params match - case TypeDefs(_) => true - case _ => false - - private val languageSubCategories = Set(nme.experimental, nme.deprecated) - - /** If `path` looks like a language import, `Some(name)` where name - * is `experimental` if that sub-module is imported, and the empty - * term name otherwise. - */ - def languageImport(path: Tree): Option[TermName] = path match - case Select(p1, name: TermName) if languageSubCategories.contains(name) => - languageImport(p1) match - case Some(EmptyTermName) => Some(name) - case _ => None - case p1: RefTree if p1.name == nme.language => - p1.qualifier match - case EmptyTree => Some(EmptyTermName) - case p2: RefTree if p2.name == nme.scala => - p2.qualifier match - case EmptyTree => Some(EmptyTermName) - case Ident(nme.ROOTPKG) => Some(EmptyTermName) - case _ => None - case _ => None - case _ => None - - /** The underlying pattern ignoring any bindings */ - def unbind(x: Tree): Tree = unsplice(x) match { - case Bind(_, y) => unbind(y) - case y => y - } - - /** The largest subset of {NoInits, PureInterface} that a - * trait or class with these parents can have as flags. - */ - def parentsKind(parents: List[Tree])(using Context): FlagSet = parents match { - case Nil => NoInitsInterface - case Apply(_, _ :: _) :: _ => EmptyFlags - case _ :: parents1 => parentsKind(parents1) - } - - /** Checks whether predicate `p` is true for all result parts of this expression, - * where we zoom into Ifs, Matches, and Blocks. - */ - def forallResults(tree: Tree, p: Tree => Boolean): Boolean = tree match { - case If(_, thenp, elsep) => forallResults(thenp, p) && forallResults(elsep, p) - case Match(_, cases) => cases forall (c => forallResults(c.body, p)) - case Block(_, expr) => forallResults(expr, p) - case _ => p(tree) - } -} - -trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] => - import untpd._ - - /** The underlying tree when stripping any TypedSplice or Parens nodes */ - override def unsplice(tree: Tree): Tree = tree match { - case TypedSplice(tree1) => tree1 - case Parens(tree1) => unsplice(tree1) - case _ => tree - } - - def functionWithUnknownParamType(tree: Tree): Option[Tree] = tree match { - case Function(args, _) => - if (args.exists { - case ValDef(_, tpt, _) => tpt.isEmpty - case _ => false - }) Some(tree) - else None - case Match(EmptyTree, _) => - Some(tree) - case Block(Nil, expr) => - functionWithUnknownParamType(expr) - case _ => - None - } - - def isFunctionWithUnknownParamType(tree: Tree): Boolean = - functionWithUnknownParamType(tree).isDefined - - def isFunction(tree: Tree): Boolean = tree match - case Function(_, _) | Match(EmptyTree, _) => true - case Block(Nil, expr) => isFunction(expr) - case _ => false - - /** Is `tree` an context function or closure, possibly nested in a block? 
*/ - def isContextualClosure(tree: Tree)(using Context): Boolean = unsplice(tree) match { - case tree: FunctionWithMods => tree.mods.is(Given) - case Function((param: untpd.ValDef) :: _, _) => param.mods.is(Given) - case Closure(_, meth, _) => true - case Block(Nil, expr) => isContextualClosure(expr) - case Block(DefDef(nme.ANON_FUN, params :: _, _, _) :: Nil, cl: Closure) => - if params.isEmpty then - cl.tpt.eq(untpd.ContextualEmptyTree) || defn.isContextFunctionType(cl.tpt.typeOpt) - else - isUsingClause(params) - case _ => false - } - - /** The largest subset of {NoInits, PureInterface} that a - * trait or class enclosing this statement can have as flags. - */ - private def defKind(tree: Tree)(using Context): FlagSet = unsplice(tree) match { - case EmptyTree | _: Import => NoInitsInterface - case tree: TypeDef => if (tree.isClassDef) NoInits else NoInitsInterface - case tree: DefDef => - if tree.unforcedRhs == EmptyTree - && tree.paramss.forall { - case ValDefs(vparams) => vparams.forall(_.rhs.isEmpty) - case _ => true - } - then - NoInitsInterface - else if tree.mods.is(Given) && tree.paramss.isEmpty then - EmptyFlags // might become a lazy val: TODO: check whether we need to suppress NoInits once we have new lazy val impl - else - NoInits - case tree: ValDef => if (tree.unforcedRhs == EmptyTree) NoInitsInterface else EmptyFlags - case _ => EmptyFlags - } - - /** The largest subset of {NoInits, PureInterface} that a - * trait or class with this body can have as flags. - */ - def bodyKind(body: List[Tree])(using Context): FlagSet = - body.foldLeft(NoInitsInterface)((fs, stat) => fs & defKind(stat)) - - /** Info of a variable in a pattern: The named tree and its type */ - type VarInfo = (NameTree, Tree) - - /** An extractor for trees of the form `id` or `id: T` */ - object IdPattern { - def unapply(tree: Tree)(using Context): Option[VarInfo] = tree match { - case id: Ident if id.name != nme.WILDCARD => Some(id, TypeTree()) - case Typed(id: Ident, tpt) => Some((id, tpt)) - case _ => None - } - } - - /** Under pureFunctions: A builder and extractor for `=> T`, which is an alias for `{*}-> T`. - * Only trees of the form `=> T` are matched; trees written directly as `{*}-> T` - * are ignored by the extractor. - */ - object ImpureByNameTypeTree: - - def apply(tp: ByNameTypeTree)(using Context): untpd.CapturingTypeTree = - untpd.CapturingTypeTree( - untpd.captureRoot.withSpan(tp.span.startPos) :: Nil, tp) - - def unapply(tp: Tree)(using Context): Option[ByNameTypeTree] = tp match - case untpd.CapturingTypeTree(id @ Select(_, nme.CAPTURE_ROOT) :: Nil, bntp: ByNameTypeTree) - if id.span == bntp.span.startPos => Some(bntp) - case _ => None - end ImpureByNameTypeTree -} - -trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => - import TreeInfo._ - import tpd._ - - /** The purity level of this statement. - * @return Pure if statement has no side effects - * Idempotent if running the statement a second time has no side effects - * Impure otherwise - */ - def statPurity(tree: Tree)(using Context): PurityLevel = unsplice(tree) match { - case EmptyTree - | TypeDef(_, _) - | Import(_, _) - | DefDef(_, _, _, _) => - Pure - case vdef @ ValDef(_, _, _) => - if (vdef.symbol.flags is Mutable) Impure else exprPurity(vdef.rhs) `min` Pure - case _ => - Impure - // TODO: It seem like this should be exprPurity(tree) - // But if we do that the repl/vars test break. Need to figure out why that's the case. - } - - /** The purity level of this expression. 
See docs for PurityLevel for what that means - * - * Note that purity and idempotency are treated differently. - * References to modules and lazy vals are impure (side-effecting) both because - * side-effecting code may be executed and because the first reference - * takes a different code path than all to follow; but they are idempotent - * because running the expression a second time gives the cached result. - */ - def exprPurity(tree: Tree)(using Context): PurityLevel = unsplice(tree) match { - case EmptyTree - | This(_) - | Super(_, _) - | Literal(_) => - PurePath - case Ident(_) => - refPurity(tree) - case Select(qual, _) => - if (tree.symbol.is(Erased)) Pure - else refPurity(tree) `min` exprPurity(qual) - case New(_) | Closure(_, _, _) => - Pure - case TypeApply(fn, _) => - if (fn.symbol.is(Erased) || fn.symbol == defn.QuotedTypeModule_of || fn.symbol == defn.Predef_classOf) Pure else exprPurity(fn) - case Apply(fn, args) => - if isPureApply(tree, fn) then - minOf(exprPurity(fn), args.map(exprPurity)) `min` Pure - else if fn.symbol.is(Erased) then - Pure - else if fn.symbol.isStableMember /* && fn.symbol.is(Lazy) */ then - minOf(exprPurity(fn), args.map(exprPurity)) `min` Idempotent - else - Impure - case Typed(expr, _) => - exprPurity(expr) - case Block(stats, expr) => - minOf(exprPurity(expr), stats.map(statPurity)) - case Inlined(_, bindings, expr) => - minOf(exprPurity(expr), bindings.map(statPurity)) - case NamedArg(_, expr) => - exprPurity(expr) - case _ => - Impure - } - - private def minOf(l0: PurityLevel, ls: List[PurityLevel]) = ls.foldLeft(l0)(_ `min` _) - - def isPurePath(tree: Tree)(using Context): Boolean = tree.tpe match { - case tpe: ConstantType => exprPurity(tree) >= Pure - case _ => exprPurity(tree) == PurePath - } - - def isPureExpr(tree: Tree)(using Context): Boolean = - exprPurity(tree) >= Pure - - def isIdempotentPath(tree: Tree)(using Context): Boolean = tree.tpe match { - case tpe: ConstantType => exprPurity(tree) >= Idempotent - case _ => exprPurity(tree) >= IdempotentPath - } - - def isIdempotentExpr(tree: Tree)(using Context): Boolean = - exprPurity(tree) >= Idempotent - - def isPureBinding(tree: Tree)(using Context): Boolean = statPurity(tree) >= Pure - - /** Is the application `tree` with function part `fn` known to be pure? - * Function value and arguments can still be impure. - */ - def isPureApply(tree: Tree, fn: Tree)(using Context): Boolean = - def isKnownPureOp(sym: Symbol) = - sym.owner.isPrimitiveValueClass - || sym.owner == defn.StringClass - || defn.pureMethods.contains(sym) - tree.tpe.isInstanceOf[ConstantType] && tree.symbol != NoSymbol && isKnownPureOp(tree.symbol) // A constant expression with pure arguments is pure. - || fn.symbol.isStableMember && !fn.symbol.is(Lazy) // constructors of no-inits classes are stable - - /** The purity level of this reference. - * @return - * PurePath if reference is (nonlazy and stable) - * or to a parameterized function - * or its type is a constant type - * IdempotentPath if reference is lazy and stable - * Impure otherwise - * @DarkDimius: need to make sure that lazy accessor methods have Lazy and Stable - * flags set. 
- */ - def refPurity(tree: Tree)(using Context): PurityLevel = { - val sym = tree.symbol - if (!tree.hasType) Impure - else if !tree.tpe.widen.isParameterless then PurePath - else if sym.is(Erased) then PurePath - else if tree.tpe.isInstanceOf[ConstantType] then PurePath - else if (!sym.isStableMember) Impure - else if (sym.is(Module)) - if (sym.moduleClass.isNoInitsRealClass) PurePath else IdempotentPath - else if (sym.is(Lazy)) IdempotentPath - else if sym.isAllOf(InlineParam) then Impure - else PurePath - } - - def isPureRef(tree: Tree)(using Context): Boolean = - refPurity(tree) == PurePath - def isIdempotentRef(tree: Tree)(using Context): Boolean = - refPurity(tree) >= IdempotentPath - - /** (1) If `tree` is a constant expression, its value as a Literal, - * or `tree` itself otherwise. - * - * Note: Demanding idempotency instead of purity in literalize is strictly speaking too loose. - * Example - * - * object O { final val x = 42; println("43") } - * O.x - * - * Strictly speaking we can't replace `O.x` with `42`. But this would make - * most expressions non-constant. Maybe we can change the spec to accept this - * kind of eliding behavior. Or else enforce true purity in the compiler. - * The choice will be affected by what we will do with `inline` and with - * Singleton type bounds (see SIP 23). Presumably - * - * object O1 { val x: Singleton = 42; println("43") } - * object O2 { inline val x = 42; println("43") } - * - * should behave differently. - * - * O1.x should have the same effect as { println("43"); 42 } - * - * whereas - * - * O2.x = 42 - * - * Revisit this issue once we have standardized on `inline`. Then we can demand - * purity of the prefix unless the selection goes to a inline val. - * - * Note: This method should be applied to all term tree nodes that are not literals, - * that can be idempotent, and that can have constant types. So far, only nodes - * of the following classes qualify: - * - * Ident - * Select - * TypeApply - * - * (2) A primitive unary operator expression `pre.op` where `op` is one of `+`, `-`, `~`, `!` - * that has a constant type `ConstantType(v)` but that is not a constant expression - * (i.e. `pre` has side-effects) is translated to - * - * { pre; v } - * - * (3) An expression `pre.getClass[..]()` that has a constant type `ConstantType(v)` but where - * `pre` has side-effects is translated to: - * - * { pre; v } - * - * This avoids the situation where we have a Select node that does not have a symbol. - */ - def constToLiteral(tree: Tree)(using Context): Tree = { - assert(!tree.isType) - val tree1 = ConstFold(tree) - tree1.tpe.widenTermRefExpr.dealias.normalized match { - case ConstantType(Constant(_: Type)) if tree.isInstanceOf[Block] => - // We can't rewrite `{ class A; classOf[A] }` to `classOf[A]`, so we leave - // blocks returning a class literal alone, even if they're idempotent. 
- tree1 - case ConstantType(value) => - def dropOp(t: Tree): Tree = t match - case Select(pre, _) if t.tpe.isInstanceOf[ConstantType] => - // it's a primitive unary operator - pre - case Apply(TypeApply(Select(pre, nme.getClass_), _), Nil) => - pre - case _ => - tree1 - - val countsAsPure = - if dropOp(tree1).symbol.isInlineVal - then isIdempotentExpr(tree1) - else isPureExpr(tree1) - - if countsAsPure then Literal(value).withSpan(tree.span) - else - val pre = dropOp(tree1) - if pre eq tree1 then tree1 - else - // it's a primitive unary operator or getClass call; - // Simplify `pre.op` to `{ pre; v }` where `v` is the value of `pre.op` - Block(pre :: Nil, Literal(value)).withSpan(tree.span) - case _ => tree1 - } - } - - def isExtMethodApply(tree: Tree)(using Context): Boolean = methPart(tree) match - case Inlined(call, _, _) => isExtMethodApply(call) - case tree @ Select(qual, nme.apply) => tree.symbol.is(ExtensionMethod) || isExtMethodApply(qual) - case tree => tree.symbol.is(ExtensionMethod) - - /** Is symbol potentially a getter of a mutable variable? - */ - def mayBeVarGetter(sym: Symbol)(using Context): Boolean = { - def maybeGetterType(tpe: Type): Boolean = tpe match { - case _: ExprType => true - case tpe: MethodType => tpe.isImplicitMethod - case tpe: PolyType => maybeGetterType(tpe.resultType) - case _ => false - } - sym.owner.isClass && !sym.isStableMember && maybeGetterType(sym.info) - } - - /** Is tree a reference to a mutable variable, or to a potential getter - * that has a setter in the same class? - */ - def isVariableOrGetter(tree: Tree)(using Context): Boolean = { - def sym = tree.symbol - def isVar = sym.is(Mutable) - def isGetter = - mayBeVarGetter(sym) && sym.owner.info.member(sym.name.asTermName.setterName).exists - - unsplice(tree) match { - case Ident(_) => isVar - case Select(_, _) => isVar || isGetter - case Apply(_, _) => - methPart(tree) match { - case Select(qual, nme.apply) => qual.tpe.member(nme.update).exists - case _ => false - } - case _ => false - } - } - - /** Is tree a `this` node which belongs to `enclClass`? 
*/ - def isSelf(tree: Tree, enclClass: Symbol)(using Context): Boolean = unsplice(tree) match { - case This(_) => tree.symbol == enclClass - case _ => false - } - - /** Strips layers of `.asInstanceOf[T]` / `_.$asInstanceOf[T]()` from an expression */ - def stripCast(tree: Tree)(using Context): Tree = { - def isCast(sel: Tree) = sel.symbol.isTypeCast - unsplice(tree) match { - case TypeApply(sel @ Select(inner, _), _) if isCast(sel) => - stripCast(inner) - case Apply(TypeApply(sel @ Select(inner, _), _), Nil) if isCast(sel) => - stripCast(inner) - case t => - t - } - } - - /** The type arguments of a possibly curried call */ - def typeArgss(tree: Tree): List[List[Tree]] = - @tailrec - def loop(tree: Tree, argss: List[List[Tree]]): List[List[Tree]] = tree match - case TypeApply(fn, args) => loop(fn, args :: argss) - case Apply(fn, args) => loop(fn, argss) - case _ => argss - loop(tree, Nil) - - /** The term arguments of a possibly curried call */ - def termArgss(tree: Tree): List[List[Tree]] = - @tailrec - def loop(tree: Tree, argss: List[List[Tree]]): List[List[Tree]] = tree match - case Apply(fn, args) => loop(fn, args :: argss) - case TypeApply(fn, args) => loop(fn, argss) - case _ => argss - loop(tree, Nil) - - /** The type and term arguments of a possibly curried call, in the order they are given */ - def allArgss(tree: Tree): List[List[Tree]] = - @tailrec - def loop(tree: Tree, argss: List[List[Tree]]): List[List[Tree]] = tree match - case tree: GenericApply => loop(tree.fun, tree.args :: argss) - case _ => argss - loop(tree, Nil) - - /** The function part of a possibly curried call. Unlike `methPart` this one does - * not decompose blocks - */ - def funPart(tree: Tree): Tree = tree match - case tree: GenericApply => funPart(tree.fun) - case tree => tree - - /** Decompose a template body into parameters and other statements */ - def decomposeTemplateBody(body: List[Tree])(using Context): (List[Tree], List[Tree]) = - body.partition { - case stat: TypeDef => stat.symbol is Flags.Param - case stat: ValOrDefDef => - stat.symbol.is(Flags.ParamAccessor) && !stat.symbol.isSetter - case _ => false - } - - /** An extractor for closures, either contained in a block or standalone. - */ - object closure { - def unapply(tree: Tree): Option[(List[Tree], Tree, Tree)] = tree match { - case Block(_, expr) => unapply(expr) - case Closure(env, meth, tpt) => Some(env, meth, tpt) - case Typed(expr, _) => unapply(expr) - case _ => None - } - } - - /** An extractor for def of a closure contained the block of the closure. */ - object closureDef { - def unapply(tree: Tree)(using Context): Option[DefDef] = tree match { - case Block((meth : DefDef) :: Nil, closure: Closure) if meth.symbol == closure.meth.symbol => - Some(meth) - case Block(Nil, expr) => - unapply(expr) - case _ => - None - } - } - - /** If tree is a closure, its body, otherwise tree itself */ - def closureBody(tree: Tree)(using Context): Tree = tree match { - case closureDef(meth) => meth.rhs - case _ => tree - } - - /** The variables defined by a pattern, in reverse order of their appearance. */ - def patVars(tree: Tree)(using Context): List[Symbol] = { - val acc = new TreeAccumulator[List[Symbol]] { - def apply(syms: List[Symbol], tree: Tree)(using Context) = tree match { - case Bind(_, body) => apply(tree.symbol :: syms, body) - case Annotated(tree, id @ Ident(tpnme.BOUNDTYPE_ANNOT)) => apply(id.symbol :: syms, tree) - case _ => foldOver(syms, tree) - } - } - acc(Nil, tree) - } - - /** Is this pattern node a catch-all or type-test pattern? 
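The `typeArgss` and `termArgss` helpers deleted above decompose a possibly curried call into its argument lists, innermost application first, each skipping the other kind of application node. A compact sketch of the same accumulator recursion on a toy call shape (illustrative, not the compiler's trees):

object ArgssSketch:
  sealed trait Tree
  case class Fun(name: String)                        extends Tree
  case class Apply(fn: Tree, args: List[String])      extends Tree
  case class TypeApply(fn: Tree, targs: List[String]) extends Tree

  // Mirrors termArgss: collect value-argument lists, innermost first,
  // looking through type applications.
  def termArgss(t: Tree, acc: List[List[String]] = Nil): List[List[String]] = t match
    case Apply(fn, args)  => termArgss(fn, args :: acc)
    case TypeApply(fn, _) => termArgss(fn, acc)
    case _                => acc

  @main def argssDemo(): Unit =
    // f[Int](a, b)(c)
    val call = Apply(Apply(TypeApply(Fun("f"), List("Int")), List("a", "b")), List("c"))
    println(termArgss(call)) // List(List(a, b), List(c))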
*/ - def isCatchCase(cdef: CaseDef)(using Context): Boolean = cdef match { - case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) => - isSimpleThrowable(tpt.tpe) - case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) => - isSimpleThrowable(tpt.tpe) - case _ => - isDefaultCase(cdef) - } - - private def isSimpleThrowable(tp: Type)(using Context): Boolean = tp match { - case tp @ TypeRef(pre, _) => - (pre == NoPrefix || pre.typeSymbol.isStatic) && - (tp.symbol derivesFrom defn.ThrowableClass) && !tp.symbol.is(Trait) - case _ => - false - } - - /** The symbols defined locally in a statement list */ - def localSyms(stats: List[Tree])(using Context): List[Symbol] = - val locals = new mutable.ListBuffer[Symbol] - for stat <- stats do - if stat.isDef && stat.symbol.exists then locals += stat.symbol - locals.toList - - /** If `tree` is a DefTree, the symbol defined by it, otherwise NoSymbol */ - def definedSym(tree: Tree)(using Context): Symbol = - if (tree.isDef) tree.symbol else NoSymbol - - /** Going from child to parent, the path of tree nodes that starts - * with a definition of symbol `sym` and ends with `root`, or Nil - * if no such path exists. - * Pre: `sym` must have a position. - */ - def defPath(sym: Symbol, root: Tree)(using Context): List[Tree] = trace.onDebug(s"defpath($sym with position ${sym.span}, ${root.show})") { - require(sym.span.exists, sym) - object accum extends TreeAccumulator[List[Tree]] { - def apply(x: List[Tree], tree: Tree)(using Context): List[Tree] = - if (tree.span.contains(sym.span)) - if (definedSym(tree) == sym) tree :: x - else { - val x1 = foldOver(x, tree) - if (x1 ne x) tree :: x1 else x1 - } - else x - } - accum(Nil, root) - } - - /** The top level classes in this tree, including only those module classes that - * are not a linked class of some other class in the result. - */ - def topLevelClasses(tree: Tree)(using Context): List[ClassSymbol] = tree match { - case PackageDef(_, stats) => stats.flatMap(topLevelClasses) - case tdef: TypeDef if tdef.symbol.isClass => tdef.symbol.asClass :: Nil - case _ => Nil - } - - /** The tree containing only the top-level classes and objects matching either `cls` or its companion object */ - def sliceTopLevel(tree: Tree, cls: ClassSymbol)(using Context): List[Tree] = tree match { - case PackageDef(pid, stats) => - val slicedStats = stats.flatMap(sliceTopLevel(_, cls)) - val isEffectivelyEmpty = slicedStats.forall(_.isInstanceOf[Import]) - if isEffectivelyEmpty then Nil - else cpy.PackageDef(tree)(pid, slicedStats) :: Nil - case tdef: TypeDef => - val sym = tdef.symbol - assert(sym.isClass) - if (cls == sym || cls == sym.linkedClass) tdef :: Nil - else Nil - case vdef: ValDef => - val sym = vdef.symbol - assert(sym.is(Module)) - if (cls == sym.companionClass || cls == sym.moduleClass) vdef :: Nil - else Nil - case tree => - tree :: Nil - } - - /** The statement sequence that contains a definition of `sym`, or Nil - * if none was found. - * For a tree to be found, The symbol must have a position and its definition - * tree must be reachable from come tree stored in an enclosing context. 
- */ - def definingStats(sym: Symbol)(using Context): List[Tree] = - if (!sym.span.exists || (ctx eq NoContext) || (ctx.compilationUnit eq NoCompilationUnit)) Nil - else defPath(sym, ctx.compilationUnit.tpdTree) match { - case defn :: encl :: _ => - def verify(stats: List[Tree]) = - if (stats exists (definedSym(_) == sym)) stats else Nil - encl match { - case Block(stats, _) => verify(stats) - case encl: Template => verify(encl.body) - case PackageDef(_, stats) => verify(stats) - case _ => Nil - } - case nil => - Nil - } - - /** If `tree` is an instance of `TupleN[...](e1, ..., eN)`, the arguments `e1, ..., eN` - * otherwise the empty list. - */ - def tupleArgs(tree: Tree)(using Context): List[Tree] = tree match { - case Block(Nil, expr) => tupleArgs(expr) - case Inlined(_, Nil, expr) => tupleArgs(expr) - case Apply(fn: NameTree, args) - if fn.name == nme.apply && - fn.symbol.owner.is(Module) && - defn.isTupleClass(fn.symbol.owner.companionClass) => args - case _ => Nil - } - - /** The qualifier part of a Select or Ident. - * For an Ident, this is the `This` of the current class. - */ - def qualifier(tree: Tree)(using Context): Tree = tree match { - case Select(qual, _) => qual - case tree: Ident => desugarIdentPrefix(tree) - case _ => This(ctx.owner.enclosingClass.asClass) - } - - /** Is this a (potentially applied) selection of a member of a structural type - * that is not a member of an underlying class or trait? - */ - def isStructuralTermSelectOrApply(tree: Tree)(using Context): Boolean = { - def isStructuralTermSelect(tree: Select) = - def hasRefinement(qualtpe: Type): Boolean = qualtpe.dealias match - case RefinedType(parent, rname, rinfo) => - rname == tree.name || hasRefinement(parent) - case tp: TypeProxy => - hasRefinement(tp.superType) - case tp: AndType => - hasRefinement(tp.tp1) || hasRefinement(tp.tp2) - case tp: OrType => - hasRefinement(tp.tp1) || hasRefinement(tp.tp2) - case _ => - false - !tree.symbol.exists - && tree.isTerm - && { - val qualType = tree.qualifier.tpe - hasRefinement(qualType) && !qualType.derivesFrom(defn.PolyFunctionClass) - } - def loop(tree: Tree): Boolean = tree match - case TypeApply(fun, _) => - loop(fun) - case Apply(fun, _) => - loop(fun) - case tree: Select => - isStructuralTermSelect(tree) - case _ => - false - loop(tree) - } - - /** Return a pair consisting of (supercall, rest) - * - * - supercall: the superclass call, excluding trait constr calls - * - * The supercall is always the first statement (if it exists) - */ - final def splitAtSuper(constrStats: List[Tree])(implicit ctx: Context): (List[Tree], List[Tree]) = - constrStats.toList match { - case (sc: Apply) :: rest if sc.symbol.isConstructor => (sc :: Nil, rest) - case (block @ Block(_, sc: Apply)) :: rest if sc.symbol.isConstructor => (block :: Nil, rest) - case stats => (Nil, stats) - } - - /** Structural tree comparison (since == on trees is reference equality). 
- * For the moment, only Ident, Select, Literal, Apply and TypeApply are supported - */ - extension (t1: Tree) { - def === (t2: Tree)(using Context): Boolean = (t1, t2) match { - case (t1: Ident, t2: Ident) => - t1.symbol == t2.symbol - case (t1 @ Select(q1, _), t2 @ Select(q2, _)) => - t1.symbol == t2.symbol && q1 === q2 - case (Literal(c1), Literal(c2)) => - c1 == c2 - case (Apply(f1, as1), Apply(f2, as2)) => - f1 === f2 && as1.corresponds(as2)(_ === _) - case (TypeApply(f1, ts1), TypeApply(f2, ts2)) => - f1 === f2 && ts1.tpes.corresponds(ts2.tpes)(_ =:= _) - case _ => - false - } - def hash(using Context): Int = - t1.getClass.hashCode * 37 + { - t1 match { - case t1: Ident => t1.symbol.hashCode - case t1 @ Select(q1, _) => t1.symbol.hashCode * 41 + q1.hash - case Literal(c1) => c1.hashCode - case Apply(f1, as1) => as1.foldLeft(f1.hash)((h, arg) => h * 41 + arg.hash) - case TypeApply(f1, ts1) => ts1.foldLeft(f1.hash)((h, arg) => h * 41 + arg.tpe.hash) - case _ => t1.hashCode - } - } - } - - def assertAllPositioned(tree: Tree)(using Context): Unit = - tree.foreachSubTree { - case t: WithoutTypeOrPos[_] => - case t => assert(t.span.exists, i"$t") - } - - /** Extractors for quotes */ - object Quoted { - /** Extracts the content of a quoted tree. - * The result can be the contents of a term or type quote, which - * will return a term or type tree respectively. - */ - def unapply(tree: tpd.Apply)(using Context): Option[tpd.Tree] = - if tree.symbol == defn.QuotedRuntime_exprQuote then - // quoted.runtime.Expr.quote[T]() - Some(tree.args.head) - else if tree.symbol == defn.QuotedTypeModule_of then - // quoted.Type.of[](quotes) - val TypeApply(_, body :: _) = tree.fun: @unchecked - Some(body) - else None - } - - /** Extractors for splices */ - object Spliced { - /** Extracts the content of a spliced expression tree. - * The result can be the contents of a term splice, which - * will return a term tree. - */ - def unapply(tree: tpd.Apply)(using Context): Option[tpd.Tree] = - if tree.symbol.isExprSplice then Some(tree.args.head) else None - } - - /** Extractors for type splices */ - object SplicedType { - /** Extracts the content of a spliced type tree. - * The result can be the contents of a type splice, which - * will return a type tree. - */ - def unapply(tree: tpd.Select)(using Context): Option[tpd.Tree] = - if tree.symbol.isTypeSplice then Some(tree.qualifier) else None - } - - /** Extractor for not-null assertions. - * A not-null assertion for reference `x` has the form `x.$asInstanceOf$[x.type & T]`. 
- */ - object AssertNotNull : - def apply(tree: tpd.Tree, tpnn: Type)(using Context): tpd.Tree = - tree.select(defn.Any_typeCast).appliedToType(AndType(tree.tpe, tpnn)) - - def unapply(tree: tpd.TypeApply)(using Context): Option[tpd.Tree] = tree match - case TypeApply(Select(qual: RefTree, nme.asInstanceOfPM), arg :: Nil) => - arg.tpe match - case AndType(ref, nn1) if qual.tpe eq ref => - qual.tpe.widen match - case OrNull(nn2) if nn1 eq nn2 => - Some(qual) - case _ => None - case _ => None - case _ => None - end AssertNotNull - - object ConstantValue { - def unapply(tree: Tree)(using Context): Option[Any] = - tree match - case Typed(expr, _) => unapply(expr) - case Inlined(_, Nil, expr) => unapply(expr) - case Block(Nil, expr) => unapply(expr) - case _ => - tree.tpe.widenTermRefExpr.normalized match - case ConstantType(Constant(x)) => Some(x) - case _ => None - } -} - -object TreeInfo { - /** A purity level is represented as a bitset (expressed as an Int) */ - class PurityLevel(val x: Int) extends AnyVal { - /** `this` contains the bits of `that` */ - def >= (that: PurityLevel): Boolean = (x & that.x) == that.x - - /** The intersection of the bits of `this` and `that` */ - def min(that: PurityLevel): PurityLevel = new PurityLevel(x & that.x) - } - - /** An expression is a stable path. Requires that expression is at least idempotent */ - val Path: PurityLevel = new PurityLevel(4) - - /** The expression has no side effects */ - val Pure: PurityLevel = new PurityLevel(3) - - /** Running the expression a second time has no side effects. Implied by `Pure`. */ - val Idempotent: PurityLevel = new PurityLevel(1) - - val Impure: PurityLevel = new PurityLevel(0) - - /** A stable path that is evaluated without side effects */ - val PurePath: PurityLevel = new PurityLevel(Pure.x | Path.x) - - /** A stable path that is also idempotent */ - val IdempotentPath: PurityLevel = new PurityLevel(Idempotent.x | Path.x) -} diff --git a/tests/pos-with-compiler-cc/dotc/ast/TreeMapWithImplicits.scala b/tests/pos-with-compiler-cc/dotc/ast/TreeMapWithImplicits.scala deleted file mode 100644 index caf8d68442f6..000000000000 --- a/tests/pos-with-compiler-cc/dotc/ast/TreeMapWithImplicits.scala +++ /dev/null @@ -1,82 +0,0 @@ -package dotty.tools.dotc -package ast - -import Trees._ -import core.Contexts._ -import core.ContextOps.enter -import core.Flags._ -import core.Symbols._ -import core.TypeError - -/** A TreeMap that maintains the necessary infrastructure to support - * contextual implicit searches (type-scope implicits are supported anyway). - * - * This incudes implicits defined in scope as well as imported implicits. 
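The `PurityLevel` values in the deleted `TreeInfo` object above form a small bitset lattice: `min` is bitwise AND, so combining subtrees keeps only the guarantees they all share, and `>=` tests bit containment. A self-contained sketch with the same encoding, where a plain case class stands in for the original value class:

object PurityLevelSketch:
  final case class PurityLevel(bits: Int):
    def >=(that: PurityLevel): Boolean      = (bits & that.bits) == that.bits // contains all bits of `that`
    def min(that: PurityLevel): PurityLevel = PurityLevel(bits & that.bits)   // keep shared guarantees only

  val Path           = PurityLevel(4)
  val Pure           = PurityLevel(3)  // includes the Idempotent bit, so Pure >= Idempotent holds
  val Idempotent     = PurityLevel(1)
  val Impure         = PurityLevel(0)
  val PurePath       = PurityLevel(Pure.bits | Path.bits)        // 7
  val IdempotentPath = PurityLevel(Idempotent.bits | Path.bits)  // 5

  @main def purityLatticeDemo(): Unit =
    assert(Pure >= Idempotent && !(Idempotent >= Pure))
    assert(Pure.min(Idempotent) == Idempotent)  // pure and idempotent subtrees are only idempotent together
    assert(PurePath.min(Pure) == Pure)          // the path property is lost, purity is kept
    assert(Pure.min(Impure) == Impure)          // one impure subtree makes the whole expression impure
    println("PurityLevel lattice behaves as documented")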
- */ -class TreeMapWithImplicits extends tpd.TreeMapWithPreciseStatContexts { - import tpd._ - - def transformSelf(vd: ValDef)(using Context): ValDef = - cpy.ValDef(vd)(tpt = transform(vd.tpt)) - - private def nestedScopeCtx(defs: List[Tree])(using Context): Context = { - val nestedCtx = ctx.fresh.setNewScope - defs foreach { - case d: DefTree if d.symbol.isOneOf(GivenOrImplicitVal) => nestedCtx.enter(d.symbol) - case _ => - } - nestedCtx - } - - private def patternScopeCtx(pattern: Tree)(using Context): Context = { - val nestedCtx = ctx.fresh.setNewScope - new TreeTraverser { - def traverse(tree: Tree)(using Context): Unit = { - tree match { - case d: DefTree if d.symbol.isOneOf(GivenOrImplicitVal) => - nestedCtx.enter(d.symbol) - case _ => - } - traverseChildren(tree) - } - }.traverse(pattern) - nestedCtx - } - - override def transform(tree: Tree)(using Context): Tree = { - try tree match { - case Block(stats, expr) => - super.transform(tree)(using nestedScopeCtx(stats)) - case tree: DefDef => - inContext(localCtx(tree)) { - cpy.DefDef(tree)( - tree.name, - transformParamss(tree.paramss), - transform(tree.tpt), - transform(tree.rhs)(using nestedScopeCtx(tree.paramss.flatten))) - } - case impl @ Template(constr, parents, self, _) => - cpy.Template(tree)( - transformSub(constr), - transform(parents)(using ctx.superCallContext), - Nil, - transformSelf(self), - transformStats(impl.body, tree.symbol)) - case tree: CaseDef => - val patCtx = patternScopeCtx(tree.pat)(using ctx) - cpy.CaseDef(tree)( - transform(tree.pat), - transform(tree.guard)(using patCtx), - transform(tree.body)(using patCtx) - ) - case _ => - super.transform(tree) - } - catch { - case ex: TypeError => - report.error(ex, tree.srcPos) - tree - } - } -} - diff --git a/tests/pos-with-compiler-cc/dotc/ast/TreeTypeMap.scala b/tests/pos-with-compiler-cc/dotc/ast/TreeTypeMap.scala deleted file mode 100644 index 3b250118f9b3..000000000000 --- a/tests/pos-with-compiler-cc/dotc/ast/TreeTypeMap.scala +++ /dev/null @@ -1,232 +0,0 @@ -package dotty.tools -package dotc -package ast - -import core._ -import Types._, Contexts._, Flags._ -import Symbols._, Annotations._, Trees._, Symbols._, Constants.Constant -import Decorators._ -import dotty.tools.dotc.transform.SymUtils._ -import language.experimental.pureFunctions - -/** A map that applies three functions and a substitution together to a tree and - * makes sure they are coordinated so that the result is well-typed. The functions are - * @param typeMap A function from Type to Type that gets applied to the - * type of every tree node and to all locally defined symbols, - * followed by the substitution [substFrom := substTo]. - * @param treeMap A transformer that translates all encountered subtrees in - * prefix traversal orders - * @param oldOwners Previous owners. If a top-level local symbol in the mapped tree - * has one of these as an owner, the owner is replaced by the corresponding - * symbol in `newOwners`. - * @param newOwners New owners, replacing previous owners. - * @param substFrom The symbols that need to be substituted. - * @param substTo The substitution targets. - * - * The reason the substitution is broken out from the rest of the type map is - * that all symbols have to be substituted at the same time. If we do not do this, - * we risk data races on named types. Example: Say we have `outer#1.inner#2` and we - * have two substitutions S1 = [outer#1 := outer#3], S2 = [inner#2 := inner#4] where - * hashtags precede symbol ids. If we do S1 first, we get outer#2.inner#3. 
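The data-race example in the `TreeTypeMap` comment above is easiest to see spelled out. Applying S1 = [outer#1 := outer#3] to `outer#1.inner#2` rewrites the prefix, giving `outer#3.inner#2`; applying S2 afterwards gives `outer#3.inner#4`, so the prefix `outer#3` would be seen with two different `inner` denotations in the same period, which is exactly why `substFrom`/`substTo` are applied in one pass. A toy sketch of sequential versus simultaneous substitution (illustrative types, not the compiler's `NamedType`):

object SimultaneousSubstSketch:
  // Toy stand-ins: Ref("outer", 1) plays the role of the symbol outer#1,
  // Select(prefix, member) the role of a named type prefix.member.
  final case class Ref(name: String, id: Int)
  final case class Select(prefix: Ref, member: Ref)

  def subst(t: Select, from: List[Ref], to: List[Ref]): Select =
    def s(r: Ref) = from.indexOf(r) match
      case -1 => r
      case i  => to(i)
    Select(s(t.prefix), s(t.member))

  @main def substDemo(): Unit =
    val outer1 = Ref("outer", 1); val outer3 = Ref("outer", 3)
    val inner2 = Ref("inner", 2); val inner4 = Ref("inner", 4)
    val tp = Select(outer1, inner2)                            // outer#1.inner#2

    // Sequential: S1 then S2 goes through the mixed state outer#3.inner#2.
    val afterS1 = subst(tp, List(outer1), List(outer3))        // outer#3.inner#2
    val afterS2 = subst(afterS1, List(inner2), List(inner4))   // outer#3.inner#4

    // Simultaneous, as TreeTypeMap's substFrom/substTo do it: no mixed state.
    val atOnce = subst(tp, List(outer1, inner2), List(outer3, inner4))
    assert(atOnce == afterS2 && atOnce == Select(outer3, inner4))
    println(s"sequential intermediate = $afterS1, final = $atOnce")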
If we then - * do S2 we get outer#2.inner#4. But that means that the named type outer#2.inner - * gets two different denotations in the same period. Hence, if -Yno-double-bindings is - * set, we would get a data race assertion error. - */ -class TreeTypeMap( - val typeMap: Type -> Type = IdentityTypeMap, - val treeMap: tpd.Tree -> tpd.Tree = identity[tpd.Tree](_), // !cc! need explicit instantiation of default argument - val oldOwners: List[Symbol] = Nil, - val newOwners: List[Symbol] = Nil, - val substFrom: List[Symbol] = Nil, - val substTo: List[Symbol] = Nil, - cpy: tpd.TreeCopier = tpd.cpy)(using DetachedContext) extends tpd.TreeMap(cpy) { - import tpd._ - - def copy( - typeMap: Type -> Type, - treeMap: tpd.Tree -> tpd.Tree, - oldOwners: List[Symbol], - newOwners: List[Symbol], - substFrom: List[Symbol], - substTo: List[Symbol])(using Context): TreeTypeMap = - new TreeTypeMap(typeMap, treeMap, oldOwners, newOwners, substFrom, substTo) - - /** If `sym` is one of `oldOwners`, replace by corresponding symbol in `newOwners` */ - def mapOwner(sym: Symbol): Symbol = sym.subst(oldOwners, newOwners) - - /** Replace occurrences of `This(oldOwner)` in some prefix of a type - * by the corresponding `This(newOwner)`. - */ - private val mapOwnerThis = new TypeMap with cc.CaptureSet.IdempotentCaptRefMap { - private def mapPrefix(from: List[Symbol], to: List[Symbol], tp: Type): Type = from match { - case Nil => tp - case (cls: ClassSymbol) :: from1 => mapPrefix(from1, to.tail, tp.substThis(cls, to.head.thisType)) - case _ :: from1 => mapPrefix(from1, to.tail, tp) - } - def apply(tp: Type): Type = tp match { - case tp: NamedType => tp.derivedSelect(mapPrefix(oldOwners, newOwners, tp.prefix)) - case _ => mapOver(tp) - } - } - - def mapType(tp: Type): Type = - mapOwnerThis(typeMap(tp).substSym(substFrom, substTo)) - - private def updateDecls(prevStats: List[Tree], newStats: List[Tree]): Unit = - if (prevStats.isEmpty) assert(newStats.isEmpty) - else { - prevStats.head match { - case pdef: MemberDef => - val prevSym = pdef.symbol - val newSym = newStats.head.symbol - val newCls = newSym.owner.asClass - if (prevSym != newSym) newCls.replace(prevSym, newSym) - case _ => - } - updateDecls(prevStats.tail, newStats.tail) - } - - def transformInlined(tree: tpd.Inlined)(using Context): tpd.Tree = - val Inlined(call, bindings, expanded) = tree - val (tmap1, bindings1) = transformDefs(bindings) - val expanded1 = tmap1.transform(expanded) - cpy.Inlined(tree)(call, bindings1, expanded1) - - override def transform(tree: tpd.Tree)(using Context): tpd.Tree = treeMap(tree) match { - case impl @ Template(constr, parents, self, _) => - val tmap = withMappedSyms(localSyms(impl :: self :: Nil)) - cpy.Template(impl)( - constr = tmap.transformSub(constr), - parents = parents.mapconserve(transform), - self = tmap.transformSub(self), - body = impl.body mapconserve - (tmap.transform(_)(using ctx.withOwner(mapOwner(impl.symbol.owner)))) - ).withType(tmap.mapType(impl.tpe)) - case tree1 => - tree1.withType(mapType(tree1.tpe)) match { - case id: Ident if tpd.needsSelect(id.tpe) => - ref(id.tpe.asInstanceOf[TermRef]).withSpan(id.span) - case ddef @ DefDef(name, paramss, tpt, _) => - val (tmap1, paramss1) = transformAllParamss(paramss) - val res = cpy.DefDef(ddef)(name, paramss1, tmap1.transform(tpt), tmap1.transform(ddef.rhs)) - res.symbol.setParamssFromDefs(paramss1) - res.symbol.transformAnnotations { - case ann: BodyAnnotation => ann.derivedAnnotation(transform(ann.tree)) - case ann => ann - } - res - case tdef @ 
LambdaTypeTree(tparams, body) => - val (tmap1, tparams1) = transformDefs(tparams) - cpy.LambdaTypeTree(tdef)(tparams1, tmap1.transform(body)) - case blk @ Block(stats, expr) => - val (tmap1, stats1) = transformDefs(stats) - val expr1 = tmap1.transform(expr) - cpy.Block(blk)(stats1, expr1) - case inlined: Inlined => - transformInlined(inlined) - case cdef @ CaseDef(pat, guard, rhs) => - val tmap = withMappedSyms(patVars(pat)) - val pat1 = tmap.transform(pat) - val guard1 = tmap.transform(guard) - val rhs1 = tmap.transform(rhs) - cpy.CaseDef(cdef)(pat1, guard1, rhs1) - case labeled @ Labeled(bind, expr) => - val tmap = withMappedSyms(bind.symbol :: Nil) - val bind1 = tmap.transformSub(bind) - val expr1 = tmap.transform(expr) - cpy.Labeled(labeled)(bind1, expr1) - case tree @ Hole(_, _, args, content, tpt) => - val args1 = args.mapConserve(transform) - val content1 = transform(content) - val tpt1 = transform(tpt) - cpy.Hole(tree)(args = args1, content = content1, tpt = tpt1) - case lit @ Literal(Constant(tpe: Type)) => - cpy.Literal(lit)(Constant(mapType(tpe))) - case tree1 => - super.transform(tree1) - } - } - - override def transformStats(trees: List[tpd.Tree], exprOwner: Symbol)(using Context): List[Tree] = - transformDefs(trees)._2 - - def transformDefs[TT <: tpd.Tree](trees: List[TT])(using Context): (TreeTypeMap, List[TT]) = { - val tmap = withMappedSyms(tpd.localSyms(trees)) - (tmap, tmap.transformSub(trees)) - } - - private def transformAllParamss(paramss: List[ParamClause]): (TreeTypeMap, List[ParamClause]) = paramss match - case params :: paramss1 => - val (tmap1, params1: ParamClause) = ((params: @unchecked) match - case ValDefs(vparams) => transformDefs(vparams) - case TypeDefs(tparams) => transformDefs(tparams) - ): @unchecked - val (tmap2, paramss2) = tmap1.transformAllParamss(paramss1) - (tmap2, params1 :: paramss2) - case nil => - (this, paramss) - - def apply[ThisTree <: tpd.Tree](tree: ThisTree): ThisTree = transform(tree).asInstanceOf[ThisTree] - - def apply(annot: Annotation): Annotation = annot.derivedAnnotation(apply(annot.tree)) - - /** The current tree map composed with a substitution [from -> to] */ - def withSubstitution(from: List[Symbol], to: List[Symbol]): TreeTypeMap = - if (from eq to) this - else { - // assert that substitution stays idempotent, assuming its parts are - // TODO: It might be better to cater for the asserted-away conditions, by - // setting up a proper substitution abstraction with a compose operator that - // guarantees idempotence. But this might be too inefficient in some cases. - // We'll cross that bridge when we need to. - assert(!from.exists(substTo contains _)) - assert(!to.exists(substFrom contains _)) - assert(!from.exists(newOwners contains _)) - assert(!to.exists(oldOwners contains _)) - copy( - typeMap, - treeMap, - from ++ oldOwners, - to ++ newOwners, - from ++ substFrom, - to ++ substTo) - } - - /** Apply `typeMap` and `ownerMap` to given symbols `syms` - * and return a treemap that contains the substitution - * between original and mapped symbols. - */ - def withMappedSyms(syms: List[Symbol]): TreeTypeMap = - withMappedSyms(syms, mapSymbols(syms, this)) - - /** The tree map with the substitution between originals `syms` - * and mapped symbols `mapped`. Also goes into mapped classes - * and substitutes their declarations. 
- */ - def withMappedSyms(syms: List[Symbol], mapped: List[Symbol]): TreeTypeMap = - if syms eq mapped then this - else - val substMap = withSubstitution(syms, mapped) - lazy val origCls = mapped.zip(syms).filter(_._1.isClass).toMap - mapped.filter(_.isClass).foldLeft(substMap) { (tmap, cls) => - val origDcls = cls.info.decls.toList.filterNot(_.is(TypeParam)) - val tmap0 = tmap.withSubstitution(origCls(cls).typeParams, cls.typeParams) - val mappedDcls = mapSymbols(origDcls, tmap0, mapAlways = true) - val tmap1 = tmap.withMappedSyms( - origCls(cls).typeParams ::: origDcls, - cls.typeParams ::: mappedDcls) - origDcls.lazyZip(mappedDcls).foreach(cls.asClass.replace) - tmap1 - } - - override def toString = - def showSyms(syms: List[Symbol]) = - syms.map(sym => s"$sym#${sym.id}").mkString(", ") - s"""TreeTypeMap( - |typeMap = $typeMap - |treeMap = $treeMap - |oldOwners = ${showSyms(oldOwners)} - |newOwners = ${showSyms(newOwners)} - |substFrom = ${showSyms(substFrom)} - |substTo = ${showSyms(substTo)}""".stripMargin -} diff --git a/tests/pos-with-compiler-cc/dotc/ast/Trees.scala b/tests/pos-with-compiler-cc/dotc/ast/Trees.scala deleted file mode 100644 index 0b1842603316..000000000000 --- a/tests/pos-with-compiler-cc/dotc/ast/Trees.scala +++ /dev/null @@ -1,1787 +0,0 @@ -package dotty.tools -package dotc -package ast - -import core._ -import Types._, Names._, NameOps._, Flags._, util.Spans._, Contexts._, Constants._ -import typer.{ ConstFold, ProtoTypes } -import SymDenotations._, Symbols._, Denotations._, StdNames._, Comments._ -import collection.mutable.ListBuffer -import printing.Printer -import printing.Texts.Text -import util.{Stats, Attachment, Property, SourceFile, NoSource, SrcPos, SourcePosition} -import config.Config -import config.Printers.overload -import annotation.internal.sharable -import annotation.unchecked.uncheckedVariance -import annotation.constructorOnly -import compiletime.uninitialized -import Decorators._ -import annotation.retains -import language.experimental.pureFunctions - -object Trees { - - type Untyped = Type | Null - - /** The total number of created tree nodes, maintained if Stats.enabled */ - @sharable var ntrees: Int = 0 - - /** Property key for trees with documentation strings attached */ - val DocComment: Property.StickyKey[Comments.Comment] = Property.StickyKey() - - /** Property key for backquoted identifiers and definitions */ - val Backquoted: Property.StickyKey[Unit] = Property.StickyKey() - - /** Trees take a parameter indicating what the type of their `tpe` field - * is. Two choices: `Type` or `Untyped`. - * Untyped trees have type `Tree[Untyped]`. - * - * Tree typing uses a copy-on-write implementation: - * - * - You can never observe a `tpe` which is `null` (throws an exception) - * - So when creating a typed tree with `withType` we can re-use - * the existing tree transparently, assigning its `tpe` field. - * - It is impossible to embed untyped trees in typed ones. - * - Typed trees can be embedded in untyped ones provided they are rooted - * in a TypedSplice node. - * - Type checking an untyped tree should remove all embedded `TypedSplice` - * nodes. 
- */ - abstract class Tree[+T <: Untyped](implicit @constructorOnly src: SourceFile) - extends Positioned, SrcPos, Product, Attachment.Container, printing.Showable { - - if (Stats.enabled) ntrees += 1 - - /** The type constructor at the root of the tree */ - type ThisTree[T <: Untyped] <: Tree[T] - - protected var myTpe: T @uncheckedVariance = uninitialized - - /** Destructively set the type of the tree. This should be called only when it is known that - * it is safe under sharing to do so. One use-case is in the withType method below - * which implements copy-on-write. Another use-case is in method interpolateAndAdapt in Typer, - * where we overwrite with a simplified version of the type itself. - */ - private[dotc] def overwriteType(tpe: T @uncheckedVariance): Unit = - myTpe = tpe - - /** The type of the tree. In case of an untyped tree, - * an UnAssignedTypeException is thrown. (Overridden by empty trees) - */ - final def tpe: T = - if myTpe == null then throw UnAssignedTypeException(this) - myTpe.uncheckedNN - - /** Copy `tpe` attribute from tree `from` into this tree, independently - * whether it is null or not. - final def copyAttr[U <: Untyped](from: Tree[U]): ThisTree[T] = { - val t1 = this.withSpan(from.span) - val t2 = - if (from.myTpe != null) t1.withType(from.myTpe.asInstanceOf[Type]) - else t1 - t2.asInstanceOf[ThisTree[T]] - } - */ - - /** Return a typed tree that's isomorphic to this tree, but has given - * type. (Overridden by empty trees) - */ - def withType(tpe: Type)(using Context): ThisTree[Type] = { - if (tpe.isInstanceOf[ErrorType]) - assert(!Config.checkUnreportedErrors || - ctx.reporter.errorsReported || - ctx.settings.YshowPrintErrors.value - // under -Yshow-print-errors, errors might arise during printing, but they do not count as reported - ) - else if (Config.checkTreesConsistent) - checkChildrenTyped(productIterator) - withTypeUnchecked(tpe) - } - - /** Check that typed trees don't refer to untyped ones, except if - * - the parent tree is an import, or - * - the child tree is an identifier, or - * - errors were reported - */ - private def checkChildrenTyped(it: Iterator[Any])(using Context): Unit = - if (!this.isInstanceOf[Import[?]]) - while (it.hasNext) - it.next() match { - case x: Ident[?] => // untyped idents are used in a number of places in typed trees - case x: Tree[?] => - assert(x.hasType || ctx.reporter.errorsReported, - s"$this has untyped child $x") - case xs: List[?] => checkChildrenTyped(xs.iterator) - case _ => - } - - def withTypeUnchecked(tpe: Type): ThisTree[Type] = { - val tree = - (if (myTpe == null || - (myTpe.asInstanceOf[AnyRef] eq tpe.asInstanceOf[AnyRef])) this - else cloneIn(source)).asInstanceOf[Tree[Type]] - tree overwriteType tpe - tree.asInstanceOf[ThisTree[Type]] - } - - /** Does the tree have its type field set? Note: this operation is not - * referentially transparent, because it can observe the withType - * modifications. Should be used only in special circumstances (we - * need it for printing trees with optional type info). - */ - final def hasType: Boolean = myTpe != null - - final def typeOpt: Type = myTpe match - case tp: Type => tp - case null => NoType - - /** The denotation referred to by this tree. - * Defined for `DenotingTree`s and `ProxyTree`s, NoDenotation for other - * kinds of trees - */ - def denot(using Context): Denotation = NoDenotation - - /** Shorthand for `denot.symbol`. */ - final def symbol(using Context): Symbol = denot.symbol - - /** Does this tree represent a type? 
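The copy-on-write typing described above can be made concrete with a small standalone sketch, assuming toy `Node`/`Tpe` classes rather than the real `Tree`/`Type`: assigning a type mutates the node only while its type slot is still empty, and clones it otherwise, so an already-typed tree is never observably changed.

// Hedged sketch of copy-on-write typing; not the dotty classes.
final case class Tpe(show: String)

final class Node(val name: String):
  private var myTpe: Tpe = null

  def hasType: Boolean = myTpe != null

  def tpe: Tpe =
    if myTpe == null then throw new IllegalStateException(s"type of $name is not assigned")
    myTpe

  // Re-use `this` when the slot is empty or already holds the same type,
  // otherwise return a typed clone and leave `this` untouched.
  def withType(t: Tpe): Node =
    if myTpe == null || (myTpe eq t) then { myTpe = t; this }
    else { val cloned = new Node(name); cloned.myTpe = t; cloned }

@main def copyOnWriteDemo(): Unit =
  val n = new Node("x")
  val typed = n.withType(Tpe("Int"))
  println(typed eq n)                         // true: first typing re-uses the node
  println(typed.withType(Tpe("String")) eq n) // false: retyping clones instead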
*/ - def isType: Boolean = false - - /** Does this tree represent a term? */ - def isTerm: Boolean = false - - /** Is this a legal part of a pattern which is not at the same time a term? */ - def isPattern: Boolean = false - - /** Does this tree define a new symbol that is not defined elsewhere? */ - def isDef: Boolean = false - - /** Is this tree either the empty tree or the empty ValDef or an empty type ident? */ - def isEmpty: Boolean = false - - /** Convert tree to a list. Gives a singleton list, except - * for thickets which return their element trees. - */ - def toList: List[Tree[T]] = this :: Nil - - /** if this tree is the empty tree, the alternative, else this tree */ - inline def orElse[U >: T <: Untyped](inline that: Tree[U]): Tree[U] = - if (this eq genericEmptyTree) that else this - - /** The number of nodes in this tree */ - def treeSize: Int = { - var s = 1 - def addSize(elem: Any): Unit = elem match { - case t: Tree[?] => s += t.treeSize - case ts: List[?] => ts foreach addSize - case _ => - } - productIterator foreach addSize - s - } - - /** If this is a thicket, perform `op` on each of its trees - * otherwise, perform `op` ion tree itself. - */ - def foreachInThicket(op: Tree[T] => Unit): Unit = op(this) - - override def toText(printer: Printer): Text = printer.toText(this) - - def sameTree(that: Tree[?]): Boolean = { - def isSame(x: Any, y: Any): Boolean = - x.asInstanceOf[AnyRef].eq(y.asInstanceOf[AnyRef]) || { - x match { - case x: Tree[?] => - y match { - case y: Tree[?] => x.sameTree(y) - case _ => false - } - case x: List[?] => - y match { - case y: List[?] => x.corresponds(y)(isSame) - case _ => false - } - case _ => - false - } - } - this.getClass == that.getClass && { - val it1 = this.productIterator - val it2 = that.productIterator - it1.corresponds(it2)(isSame) - } - } - - override def hashCode(): Int = System.identityHashCode(this) - override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef] - } - - class UnAssignedTypeException[T <: Untyped](tree: Tree[T]) extends RuntimeException { - override def getMessage: String = s"type of $tree is not assigned" - } - - type LazyTree[+T <: Untyped] = Tree[T] | Lazy[Tree[T]] - type LazyTreeList[+T <: Untyped] = List[Tree[T]] | Lazy[List[Tree[T]]] - - // ------ Categories of trees ----------------------------------- - - /** Instances of this class are trees for which isType is definitely true. - * Note that some trees have isType = true without being TypTrees (e.g. Ident, Annotated) - */ - trait TypTree[+T <: Untyped] extends Tree[T] { - type ThisTree[+T <: Untyped] <: TypTree[T] - override def isType: Boolean = true - } - - /** Instances of this class are trees for which isTerm is definitely true. - * Note that some trees have isTerm = true without being TermTrees (e.g. Ident, Annotated) - */ - trait TermTree[+T <: Untyped] extends Tree[T] { - type ThisTree[+T <: Untyped] <: TermTree[T] - override def isTerm: Boolean = true - } - - /** Instances of this class are trees which are not terms but are legal - * parts of patterns. 
- */ - trait PatternTree[+T <: Untyped] extends Tree[T] { - type ThisTree[+T <: Untyped] <: PatternTree[T] - override def isPattern: Boolean = true - } - - /** Tree's denotation can be derived from its type */ - abstract class DenotingTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[+T <: Untyped] <: DenotingTree[T] - override def denot(using Context): Denotation = typeOpt.stripped match - case tpe: NamedType => tpe.denot - case tpe: ThisType => tpe.cls.denot - case _ => NoDenotation - } - - /** Tree's denot/isType/isTerm properties come from a subtree - * identified by `forwardTo`. - */ - abstract class ProxyTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[+T <: Untyped] <: ProxyTree[T] - def forwardTo: Tree[T] - override def denot(using Context): Denotation = forwardTo.denot - override def isTerm: Boolean = forwardTo.isTerm - override def isType: Boolean = forwardTo.isType - } - - /** Tree has a name */ - abstract class NameTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends DenotingTree[T] { - type ThisTree[+T <: Untyped] <: NameTree[T] - def name: Name - } - - /** Tree refers by name to a denotation */ - abstract class RefTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends NameTree[T] { - type ThisTree[+T <: Untyped] <: RefTree[T] - def qualifier: Tree[T] - override def isType: Boolean = name.isTypeName - override def isTerm: Boolean = name.isTermName - } - - /** Tree defines a new symbol */ - trait DefTree[+T <: Untyped] extends DenotingTree[T] { - type ThisTree[+T <: Untyped] <: DefTree[T] - - private var myMods: untpd.Modifiers | Null = uninitialized - - private[dotc] def rawMods: untpd.Modifiers = - if (myMods == null) untpd.EmptyModifiers else myMods.uncheckedNN - - def withAnnotations(annots: List[untpd.Tree]): ThisTree[Untyped] = withMods(rawMods.withAnnotations(annots)) - - def withMods(mods: untpd.Modifiers): ThisTree[Untyped] = { - val tree = if (myMods == null || (myMods == mods)) this else cloneIn(source) - tree.setMods(mods) - tree.asInstanceOf[ThisTree[Untyped]] - } - - def withFlags(flags: FlagSet): ThisTree[Untyped] = withMods(untpd.Modifiers(flags)) - def withAddedFlags(flags: FlagSet): ThisTree[Untyped] = withMods(rawMods | flags) - - /** Destructively update modifiers. To be used with care. */ - def setMods(mods: untpd.Modifiers): Unit = myMods = mods - - override def isDef: Boolean = true - def namedType: NamedType = tpe.asInstanceOf[NamedType] - } - - extension (mdef: untpd.DefTree) def mods: untpd.Modifiers = mdef.rawMods - - sealed trait WithEndMarker[+T <: Untyped]: - self: PackageDef[T] | NamedDefTree[T] => - - import WithEndMarker.* - - final def endSpan(using Context): Span = - if hasEndMarker then - val realName = srcName.stripModuleClassSuffix.lastPart - span.withStart(span.end - realName.length) - else - NoSpan - - /** The name in source code that represents this construct, - * and is the name that the user must write to create a valid - * end marker. - * e.g. a constructor definition is terminated in the source - * code by `end this`, so it's `srcName` should return `this`. 
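`endSpan` above recovers the position of an end marker purely from the definition's span and the name the user has to write after `end`: the marker's name occupies the last `srcName.length` characters of the definition. A standalone sketch of that arithmetic, with a simplified `Span` (illustrative only; real spans also carry a point and synthetic flags):

final case class Span(start: Int, end: Int):
  def withStart(s: Int): Span = copy(start = s)

// The name written after `end` closes the definition, so it sits in the last
// `srcName.length` characters of the definition's span.
def endMarkerNameSpan(defSpan: Span, srcName: String): Span =
  defSpan.withStart(defSpan.end - srcName.length)

@main def endMarkerDemo(): Unit =
  // Suppose `def foo = ... end foo` occupies offsets [10, 40) in the source.
  println(endMarkerNameSpan(Span(10, 40), "foo")) // Span(37,40)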
- */ - protected def srcName(using Context): Name - - final def withEndMarker(): self.type = - self.withAttachment(HasEndMarker, ()) - - final def withEndMarker(copyFrom: WithEndMarker[?]): self.type = - if copyFrom.hasEndMarker then - this.withEndMarker() - else - this - - final def dropEndMarker(): self.type = - self.removeAttachment(HasEndMarker) - this - - protected def hasEndMarker: Boolean = self.hasAttachment(HasEndMarker) - - object WithEndMarker: - /** Property key that signals the tree was terminated - * with an `end` marker in the source code - */ - private val HasEndMarker: Property.StickyKey[Unit] = Property.StickyKey() - - end WithEndMarker - - abstract class NamedDefTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) - extends NameTree[T] with DefTree[T] with WithEndMarker[T] { - type ThisTree[+T <: Untyped] <: NamedDefTree[T] - - protected def srcName(using Context): Name = - if name == nme.CONSTRUCTOR then nme.this_ - else if symbol.isPackageObject then symbol.owner.name - else name - - /** The position of the name defined by this definition. - * This is a point position if the definition is synthetic, or a range position - * if the definition comes from source. - * It might also be that the definition does not have a position (for instance when synthesized by - * a calling chain from `viewExists`), in that case the return position is NoSpan. - * Overridden in Bind - */ - def nameSpan(using Context): Span = - if (span.exists) { - val point = span.point - if (rawMods.is(Synthetic) || span.isSynthetic || name.toTermName == nme.ERROR) Span(point) - else { - val realName = srcName.stripModuleClassSuffix.lastPart - Span(point, point + realName.length, point) - } - } - else span - - /** The source position of the name defined by this definition. - * This is a point position if the definition is synthetic, or a range position - * if the definition comes from source. - */ - def namePos(using Context): SourcePosition = source.atSpan(nameSpan) - } - - /** Tree defines a new symbol and carries modifiers. - * The position of a MemberDef contains only the defined identifier or pattern. - * The envelope of a MemberDef contains the whole definition and has its point - * on the opening keyword (or the next token after that if keyword is missing). 
- */ - abstract class MemberDef[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends NamedDefTree[T] { - type ThisTree[+T <: Untyped] <: MemberDef[T] - - def rawComment: Option[Comment] = getAttachment(DocComment) - - def setComment(comment: Option[Comment]): this.type = { - comment.map(putAttachment(DocComment, _)) - this - } - - def name: Name - } - - /** A ValDef or DefDef tree */ - abstract class ValOrDefDef[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends MemberDef[T] with WithLazyField[Tree[T]] { - type ThisTree[+T <: Untyped] <: ValOrDefDef[T] - def name: TermName - def tpt: Tree[T] - def unforcedRhs: LazyTree[T] = unforced - def rhs(using Context): Tree[T] = forceIfLazy - } - - trait ValOrTypeDef[+T <: Untyped] extends MemberDef[T]: - type ThisTree[+T <: Untyped] <: ValOrTypeDef[T] - - type ParamClause[T <: Untyped] = List[ValDef[T]] | List[TypeDef[T]] - - // ----------- Tree case classes ------------------------------------ - - /** name */ - case class Ident[+T <: Untyped] private[ast] (name: Name)(implicit @constructorOnly src: SourceFile) - extends RefTree[T] { - type ThisTree[+T <: Untyped] = Ident[T] - def qualifier: Tree[T] = genericEmptyTree - - def isBackquoted: Boolean = hasAttachment(Backquoted) - } - - class SearchFailureIdent[+T <: Untyped] private[ast] (name: Name, expl: -> String)(implicit @constructorOnly src: SourceFile) - extends Ident[T](name) { - def explanation = expl - override def toString: String = s"SearchFailureIdent($explanation)" - } - - /** qualifier.name, or qualifier#name, if qualifier is a type */ - case class Select[+T <: Untyped] private[ast] (qualifier: Tree[T], name: Name)(implicit @constructorOnly src: SourceFile) - extends RefTree[T] { - type ThisTree[+T <: Untyped] = Select[T] - - override def denot(using Context): Denotation = typeOpt match - case ConstantType(_) if ConstFold.foldedUnops.contains(name) => - // Recover the denotation of a constant-folded selection - qualifier.typeOpt.member(name).atSignature(Signature.NotAMethod, name) - case _ => - super.denot - - def nameSpan(using Context): Span = - if span.exists then - val point = span.point - if name.toTermName == nme.ERROR then - Span(point) - else if qualifier.span.start > span.start then // right associative - val realName = name.stripModuleClassSuffix.lastPart - Span(span.start, span.start + realName.length, point) - else - Span(point, span.end, point) - else span - } - - class SelectWithSig[+T <: Untyped] private[ast] (qualifier: Tree[T], name: Name, val sig: Signature)(implicit @constructorOnly src: SourceFile) - extends Select[T](qualifier, name) { - override def toString: String = s"SelectWithSig($qualifier, $name, $sig)" - } - - /** qual.this */ - case class This[+T <: Untyped] private[ast] (qual: untpd.Ident)(implicit @constructorOnly src: SourceFile) - extends DenotingTree[T] with TermTree[T] { - type ThisTree[+T <: Untyped] = This[T] - // Denotation of a This tree is always the underlying class; needs correction for modules. 
- override def denot(using Context): Denotation = - typeOpt match { - case tpe @ TermRef(pre, _) if tpe.symbol.is(Module) => - tpe.symbol.moduleClass.denot.asSeenFrom(pre) - case _ => - super.denot - } - } - - /** C.super[mix], where qual = C.this */ - case class Super[+T <: Untyped] private[ast] (qual: Tree[T], mix: untpd.Ident)(implicit @constructorOnly src: SourceFile) - extends ProxyTree[T] with TermTree[T] { - type ThisTree[+T <: Untyped] = Super[T] - def forwardTo: Tree[T] = qual - } - - abstract class GenericApply[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TermTree[T] { - type ThisTree[+T <: Untyped] <: GenericApply[T] - val fun: Tree[T] - val args: List[Tree[T]] - def forwardTo: Tree[T] = fun - } - - object GenericApply: - def unapply[T <: Untyped](tree: Tree[T]): Option[(Tree[T], List[Tree[T]])] = tree match - case tree: GenericApply[T] => Some((tree.fun, tree.args)) - case _ => None - - /** The kind of application */ - enum ApplyKind: - case Regular // r.f(x) - case Using // r.f(using x) - case InfixTuple // r f (x1, ..., xN) where N != 1; needs to be treated specially for an error message in typedApply - - /** fun(args) */ - case class Apply[+T <: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) - extends GenericApply[T] { - type ThisTree[+T <: Untyped] = Apply[T] - - def setApplyKind(kind: ApplyKind) = - putAttachment(untpd.KindOfApply, kind) - this - - /** The kind of this application. Works reliably only for untyped trees; typed trees - * are under no obligation to update it correctly. - */ - def applyKind: ApplyKind = - attachmentOrElse(untpd.KindOfApply, ApplyKind.Regular) - } - - /** fun[args] */ - case class TypeApply[+T <: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) - extends GenericApply[T] { - type ThisTree[+T <: Untyped] = TypeApply[T] - } - - /** const */ - case class Literal[+T <: Untyped] private[ast] (const: Constant)(implicit @constructorOnly src: SourceFile) - extends Tree[T] with TermTree[T] { - type ThisTree[+T <: Untyped] = Literal[T] - } - - /** new tpt, but no constructor call */ - case class New[+T <: Untyped] private[ast] (tpt: Tree[T])(implicit @constructorOnly src: SourceFile) - extends Tree[T] with TermTree[T] { - type ThisTree[+T <: Untyped] = New[T] - } - - /** expr : tpt */ - case class Typed[+T <: Untyped] private[ast] (expr: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) - extends ProxyTree[T] with TermTree[T] { - type ThisTree[+T <: Untyped] = Typed[T] - def forwardTo: Tree[T] = expr - } - - /** name = arg, in a parameter list */ - case class NamedArg[+T <: Untyped] private[ast] (name: Name, arg: Tree[T])(implicit @constructorOnly src: SourceFile) - extends Tree[T] { - type ThisTree[+T <: Untyped] = NamedArg[T] - } - - /** name = arg, outside a parameter list */ - case class Assign[+T <: Untyped] private[ast] (lhs: Tree[T], rhs: Tree[T])(implicit @constructorOnly src: SourceFile) - extends TermTree[T] { - type ThisTree[+T <: Untyped] = Assign[T] - } - - /** { stats; expr } */ - case class Block[+T <: Untyped] private[ast] (stats: List[Tree[T]], expr: Tree[T])(implicit @constructorOnly src: SourceFile) - extends Tree[T] { - type ThisTree[+T <: Untyped] = Block[T] - override def isType: Boolean = expr.isType - override def isTerm: Boolean = !isType // this will classify empty trees as terms, which is necessary - } - - /** if cond then thenp else elsep */ - case class If[+T <: 
Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: Tree[T])(implicit @constructorOnly src: SourceFile) - extends TermTree[T] { - type ThisTree[+T <: Untyped] = If[T] - def isInline = false - } - class InlineIf[+T <: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: Tree[T])(implicit @constructorOnly src: SourceFile) - extends If(cond, thenp, elsep) { - override def isInline = true - override def toString = s"InlineIf($cond, $thenp, $elsep)" - } - - /** A closure with an environment and a reference to a method. - * @param env The captured parameters of the closure - * @param meth A ref tree that refers to the method of the closure. - * The first (env.length) parameters of that method are filled - * with env values. - * @param tpt Either EmptyTree or a TypeTree. If tpt is EmptyTree the type - * of the closure is a function type, otherwise it is the type - * given in `tpt`, which must be a SAM type. - */ - case class Closure[+T <: Untyped] private[ast] (env: List[Tree[T]], meth: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) - extends TermTree[T] { - type ThisTree[+T <: Untyped] = Closure[T] - } - - /** selector match { cases } */ - case class Match[+T <: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) - extends TermTree[T] { - type ThisTree[+T <: Untyped] = Match[T] - def isInline = false - } - class InlineMatch[+T <: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) - extends Match(selector, cases) { - override def isInline = true - override def toString = s"InlineMatch($selector, $cases)" - } - - /** case pat if guard => body */ - case class CaseDef[+T <: Untyped] private[ast] (pat: Tree[T], guard: Tree[T], body: Tree[T])(implicit @constructorOnly src: SourceFile) - extends Tree[T] { - type ThisTree[+T <: Untyped] = CaseDef[T] - } - - /** label[tpt]: { expr } */ - case class Labeled[+T <: Untyped] private[ast] (bind: Bind[T], expr: Tree[T])(implicit @constructorOnly src: SourceFile) - extends NameTree[T] { - type ThisTree[+T <: Untyped] = Labeled[T] - def name: Name = bind.name - } - - /** return expr - * where `from` refers to the method or label from which the return takes place - * After program transformations this is not necessarily the enclosing method, because - * closures can intervene. - */ - case class Return[+T <: Untyped] private[ast] (expr: Tree[T], from: Tree[T] = genericEmptyTree)(implicit @constructorOnly src: SourceFile) - extends TermTree[T] { - type ThisTree[+T <: Untyped] = Return[T] - } - - /** while (cond) { body } */ - case class WhileDo[+T <: Untyped] private[ast] (cond: Tree[T], body: Tree[T])(implicit @constructorOnly src: SourceFile) - extends TermTree[T] { - type ThisTree[+T <: Untyped] = WhileDo[T] - } - - /** try block catch cases finally finalizer */ - case class Try[+T <: Untyped] private[ast] (expr: Tree[T], cases: List[CaseDef[T]], finalizer: Tree[T])(implicit @constructorOnly src: SourceFile) - extends TermTree[T] { - type ThisTree[+T <: Untyped] = Try[T] - } - - /** Seq(elems) - * @param tpt The element type of the sequence. 
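The Closure node documented above pairs captured values (`env`) with a reference to a method whose first `env.length` parameters receive those values. A standalone sketch of that encoding, assuming plain Scala values instead of trees:

// Hedged sketch: closure conversion as "environment plus lifted method".
final case class Closure(env: List[Int], meth: (List[Int], Int) => Int):
  // Applying the closure supplies the captured environment before the argument.
  def apply(arg: Int): Int = meth(env, arg)

@main def closureDemo(): Unit =
  // x => x + a + b, with a = 1 and b = 2 captured in the environment
  def lifted(env: List[Int], x: Int): Int = x + env(0) + env(1)
  val c = Closure(List(1, 2), lifted)
  println(c(10)) // 13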
- */ - case class SeqLiteral[+T <: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])(implicit @constructorOnly src: SourceFile) - extends Tree[T] { - type ThisTree[+T <: Untyped] = SeqLiteral[T] - } - - /** Array(elems) */ - class JavaSeqLiteral[+T <: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])(implicit @constructorOnly src: SourceFile) - extends SeqLiteral(elems, elemtpt) { - override def toString: String = s"JavaSeqLiteral($elems, $elemtpt)" - } - - /** A tree representing inlined code. - * - * @param call Info about the original call that was inlined - * Until PostTyper, this is the full call, afterwards only - * a reference to the toplevel class from which the call was inlined. - * @param bindings Bindings for proxies to be used in the inlined code - * @param expansion The inlined tree, minus bindings. - * - * The full inlined code is equivalent to - * - * { bindings; expansion } - * - * The reason to keep `bindings` separate is because they are typed in a - * different context: `bindings` represent the arguments to the inlined - * call, whereas `expansion` represents the body of the inlined function. - */ - case class Inlined[+T <: Untyped] private[ast] (call: tpd.Tree, bindings: List[MemberDef[T]], expansion: Tree[T])(implicit @constructorOnly src: SourceFile) - extends Tree[T] { - type ThisTree[+T <: Untyped] = Inlined[T] - override def isTerm = expansion.isTerm - override def isType = expansion.isType - } - - /** A type tree that represents an existing or inferred type */ - case class TypeTree[+T <: Untyped]()(implicit @constructorOnly src: SourceFile) - extends DenotingTree[T] with TypTree[T] { - type ThisTree[+T <: Untyped] = TypeTree[T] - override def isEmpty: Boolean = !hasType - override def toString: String = - s"TypeTree${if (hasType) s"[$typeOpt]" else ""}" - } - - /** A type tree whose type is inferred. These trees appear in two contexts - * - as an argument of a TypeApply. In that case its type is always a TypeVar - * - as a (result-)type of an inferred ValDef or DefDef. - * Every TypeVar is created as the type of one InferredTypeTree. - */ - class InferredTypeTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends TypeTree[T] - - /** ref.type */ - case class SingletonTypeTree[+T <: Untyped] private[ast] (ref: Tree[T])(implicit @constructorOnly src: SourceFile) - extends DenotingTree[T] with TypTree[T] { - type ThisTree[+T <: Untyped] = SingletonTypeTree[T] - } - - /** tpt { refinements } */ - case class RefinedTypeTree[+T <: Untyped] private[ast] (tpt: Tree[T], refinements: List[Tree[T]])(implicit @constructorOnly src: SourceFile) - extends ProxyTree[T] with TypTree[T] { - type ThisTree[+T <: Untyped] = RefinedTypeTree[T] - def forwardTo: Tree[T] = tpt - } - - /** tpt[args] */ - case class AppliedTypeTree[+T <: Untyped] private[ast] (tpt: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) - extends ProxyTree[T] with TypTree[T] { - type ThisTree[+T <: Untyped] = AppliedTypeTree[T] - def forwardTo: Tree[T] = tpt - } - - /** [typeparams] -> tpt - * - * Note: the type of such a tree is not necessarily a `HKTypeLambda`, it can - * also be a `TypeBounds` where the upper bound is an `HKTypeLambda`, and the - * lower bound is either a reference to `Nothing` or an `HKTypeLambda`, - * this happens because these trees are typed by `HKTypeLambda#fromParams` which - * makes sure to move bounds outside of the type lambda itself to simplify their - * handling in the compiler. 
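The Inlined node above keeps the argument bindings separate from the expansion, but, as its comment says, the whole thing behaves like the block `{ bindings; expansion }`. A hedged sketch of that equivalence for a toy expression language (not compiler trees), where a let binding plays the role of an argument proxy:

enum Expr:
  case Num(value: Int)
  case Ref(name: String)
  case Add(lhs: Expr, rhs: Expr)
  case Let(name: String, rhs: Expr, body: Expr) // the "binding" part

import Expr.*

// Inlining `double(arg)` for `inline def double(x) = x + x`: a binding for the
// argument proxy, then the expanded body referring to that proxy.
def inlineDouble(arg: Expr): Expr =
  Let("x", arg, Add(Ref("x"), Ref("x")))

def eval(e: Expr, env: Map[String, Int]): Int = e match
  case Num(v)            => v
  case Ref(n)            => env(n)
  case Add(l, r)         => eval(l, env) + eval(r, env)
  case Let(n, rhs, body) => eval(body, env + (n -> eval(rhs, env)))

@main def inlinedDemo(): Unit =
  println(eval(inlineDouble(Num(21)), Map.empty)) // 42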
- * - * You may ask: why not normalize the trees too? That way, - * - * LambdaTypeTree(X, TypeBoundsTree(A, B)) - * - * would become, - * - * TypeBoundsTree(LambdaTypeTree(X, A), LambdaTypeTree(X, B)) - * - * which would maintain consistency between a tree and its type. The problem - * with this definition is that the same tree `X` appears twice, therefore - * we'd have to create two symbols for it which makes it harder to relate the - * source code written by the user with the trees used by the compiler (for - * example, to make "find all references" work in the IDE). - */ - case class LambdaTypeTree[+T <: Untyped] private[ast] (tparams: List[TypeDef[T]], body: Tree[T])(implicit @constructorOnly src: SourceFile) - extends TypTree[T] { - type ThisTree[+T <: Untyped] = LambdaTypeTree[T] - } - - case class TermLambdaTypeTree[+T <: Untyped] private[ast] (params: List[ValDef[T]], body: Tree[T])(implicit @constructorOnly src: SourceFile) - extends TypTree[T] { - type ThisTree[+T <: Untyped] = TermLambdaTypeTree[T] - } - - /** [bound] selector match { cases } */ - case class MatchTypeTree[+T <: Untyped] private[ast] (bound: Tree[T], selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) - extends TypTree[T] { - type ThisTree[+T <: Untyped] = MatchTypeTree[T] - } - - /** => T */ - case class ByNameTypeTree[+T <: Untyped] private[ast] (result: Tree[T])(implicit @constructorOnly src: SourceFile) - extends TypTree[T] { - type ThisTree[+T <: Untyped] = ByNameTypeTree[T] - } - - /** >: lo <: hi - * >: lo <: hi = alias for RHS of bounded opaque type - */ - case class TypeBoundsTree[+T <: Untyped] private[ast] (lo: Tree[T], hi: Tree[T], alias: Tree[T])(implicit @constructorOnly src: SourceFile) - extends TypTree[T] { - type ThisTree[+T <: Untyped] = TypeBoundsTree[T] - } - - /** name @ body */ - case class Bind[+T <: Untyped] private[ast] (name: Name, body: Tree[T])(implicit @constructorOnly src: SourceFile) - extends NamedDefTree[T] with PatternTree[T] { - type ThisTree[+T <: Untyped] = Bind[T] - override def isType: Boolean = name.isTypeName - override def isTerm: Boolean = name.isTermName - - override def nameSpan(using Context): Span = - if span.exists then Span(span.start, span.start + name.toString.length) else span - } - - /** tree_1 | ... | tree_n */ - case class Alternative[+T <: Untyped] private[ast] (trees: List[Tree[T]])(implicit @constructorOnly src: SourceFile) - extends PatternTree[T] { - type ThisTree[+T <: Untyped] = Alternative[T] - } - - /** The typed translation of `extractor(patterns)` in a pattern. The translation has the following - * components: - * - * @param fun is `extractor.unapply` (or, for backwards compatibility, `extractor.unapplySeq`) - * possibly with type parameters - * @param implicits Any implicit parameters passed to the unapply after the selector - * @param patterns The argument patterns in the pattern match. 
- * - * It is typed with same type as first `fun` argument - * Given a match selector `sel` a pattern UnApply(fun, implicits, patterns) is roughly translated as follows - * - * val result = fun(sel)(implicits) - * if (result.isDefined) "match patterns against result" - */ - case class UnApply[+T <: Untyped] private[ast] (fun: Tree[T], implicits: List[Tree[T]], patterns: List[Tree[T]])(implicit @constructorOnly src: SourceFile) - extends ProxyTree[T] with PatternTree[T] { - type ThisTree[+T <: Untyped] = UnApply[T] - def forwardTo = fun - } - - /** mods val name: tpt = rhs */ - case class ValDef[+T <: Untyped] private[ast] (name: TermName, tpt: Tree[T], private var preRhs: LazyTree[T])(implicit @constructorOnly src: SourceFile) - extends ValOrDefDef[T], ValOrTypeDef[T] { - type ThisTree[+T <: Untyped] = ValDef[T] - assert(isEmpty || (tpt ne genericEmptyTree)) - def unforced: LazyTree[T] = preRhs - protected def force(x: Tree[T @uncheckedVariance]): Unit = preRhs = x - } - - /** mods def name[tparams](vparams_1)...(vparams_n): tpt = rhs */ - case class DefDef[+T <: Untyped] private[ast] (name: TermName, - paramss: List[ParamClause[T]], tpt: Tree[T], private var preRhs: LazyTree[T])(implicit @constructorOnly src: SourceFile) - extends ValOrDefDef[T] { - type ThisTree[+T <: Untyped] = DefDef[T] - assert(tpt ne genericEmptyTree) - def unforced: LazyTree[T] = preRhs - protected def force(x: Tree[T @uncheckedVariance]): Unit = preRhs = x - - def leadingTypeParams(using Context): List[TypeDef[T]] = paramss match - case (tparams @ (tparam: TypeDef[_]) :: _) :: _ => tparams.asInstanceOf[List[TypeDef[T]]] - case _ => Nil - - def trailingParamss(using Context): List[ParamClause[T]] = paramss match - case ((tparam: TypeDef[_]) :: _) :: paramss1 => paramss1 - case _ => paramss - - def termParamss(using Context): List[List[ValDef[T]]] = - (if ctx.erasedTypes then paramss else untpd.termParamssIn(paramss)) - .asInstanceOf[List[List[ValDef[T]]]] - } - - /** mods class name template or - * mods trait name template or - * mods type name = rhs or - * mods type name >: lo <: hi, if rhs = TypeBoundsTree(lo, hi) or - * mods type name >: lo <: hi = rhs if rhs = TypeBoundsTree(lo, hi, alias) and opaque in mods - */ - case class TypeDef[+T <: Untyped] private[ast] (name: TypeName, rhs: Tree[T])(implicit @constructorOnly src: SourceFile) - extends MemberDef[T], ValOrTypeDef[T] { - type ThisTree[+T <: Untyped] = TypeDef[T] - - /** Is this a definition of a class? */ - def isClassDef: Boolean = rhs.isInstanceOf[Template[?]] - - def isBackquoted: Boolean = hasAttachment(Backquoted) - } - - /** extends parents { self => body } - * @param parentsOrDerived A list of parents followed by a list of derived classes, - * if this is of class untpd.DerivingTemplate. - * Typed templates only have parents. 
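The UnApply documentation just above sketches the translation: call the extractor on the selector, test `isDefined`, then match the sub-patterns against the result. Written out by hand for an ordinary extractor, it looks roughly like this (the `Pair` extractor is made up for the example):

object Pair:
  def unapply(s: String): Option[(String, String)] =
    val i = s.indexOf('=')
    if i < 0 then None else Some((s.take(i), s.drop(i + 1)))

@main def unapplyDemo(): Unit =
  val sel = "key=value"

  // The surface syntax: an extractor pattern.
  sel match
    case Pair(k, v) => println(s"$k -> $v")
    case _          => println("no match")

  // Roughly what it translates to: apply the extractor, check isDefined,
  // then bind the components of the result.
  val result = Pair.unapply(sel)
  if result.isDefined then
    val (k, v) = result.get
    println(s"$k -> $v")
  else println("no match")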
- */ - case class Template[+T <: Untyped] private[ast] (constr: DefDef[T], parentsOrDerived: List[Tree[T]], self: ValDef[T], private var preBody: LazyTreeList[T])(implicit @constructorOnly src: SourceFile) - extends DefTree[T] with WithLazyField[List[Tree[T]]] { - type ThisTree[+T <: Untyped] = Template[T] - def unforcedBody: LazyTreeList[T] = unforced - def unforced: LazyTreeList[T] = preBody - protected def force(x: List[Tree[T @uncheckedVariance]]): Unit = preBody = x - def body(using Context): List[Tree[T]] = forceIfLazy - - def parents: List[Tree[T]] = parentsOrDerived // overridden by DerivingTemplate - def derived: List[untpd.Tree] = Nil // overridden by DerivingTemplate - } - - - abstract class ImportOrExport[+T <: Untyped](implicit @constructorOnly src: SourceFile) - extends DenotingTree[T] { - type ThisTree[+T <: Untyped] <: ImportOrExport[T] - val expr: Tree[T] - val selectors: List[untpd.ImportSelector] - } - - /** import expr.selectors - * where a selector is either an untyped `Ident`, `name` or - * an untyped thicket consisting of `name` and `rename`. - */ - case class Import[+T <: Untyped] private[ast] (expr: Tree[T], selectors: List[untpd.ImportSelector])(implicit @constructorOnly src: SourceFile) - extends ImportOrExport[T] { - type ThisTree[+T <: Untyped] = Import[T] - } - - /** export expr.selectors - * where a selector is either an untyped `Ident`, `name` or - * an untyped thicket consisting of `name` and `rename`. - */ - case class Export[+T <: Untyped] private[ast] (expr: Tree[T], selectors: List[untpd.ImportSelector])(implicit @constructorOnly src: SourceFile) - extends ImportOrExport[T] { - type ThisTree[+T <: Untyped] = Export[T] - } - - /** package pid { stats } */ - case class PackageDef[+T <: Untyped] private[ast] (pid: RefTree[T], stats: List[Tree[T]])(implicit @constructorOnly src: SourceFile) - extends ProxyTree[T] with WithEndMarker[T] { - type ThisTree[+T <: Untyped] = PackageDef[T] - def forwardTo: RefTree[T] = pid - protected def srcName(using Context): Name = pid.name - } - - /** arg @annot */ - case class Annotated[+T <: Untyped] private[ast] (arg: Tree[T], annot: Tree[T])(implicit @constructorOnly src: SourceFile) - extends ProxyTree[T] { - type ThisTree[+T <: Untyped] = Annotated[T] - def forwardTo: Tree[T] = arg - } - - trait WithoutTypeOrPos[+T <: Untyped] extends Tree[T] { - override def withTypeUnchecked(tpe: Type): ThisTree[Type] = this.asInstanceOf[ThisTree[Type]] - override def span: Span = NoSpan - override def span_=(span: Span): Unit = {} - } - - /** Temporary class that results from translation of ModuleDefs - * (and possibly other statements). - * The contained trees will be integrated when transformed with - * a `transform(List[Tree])` call. 
- */ - case class Thicket[+T <: Untyped](trees: List[Tree[T]])(implicit @constructorOnly src: SourceFile) - extends Tree[T] with WithoutTypeOrPos[T] { - myTpe = NoType.asInstanceOf[T] - type ThisTree[+T <: Untyped] = Thicket[T] - - def mapElems[U >: T <: Untyped](op: Tree[T] => Tree[U]): Thicket[U] = { - val newTrees = trees.mapConserve(op) - if (trees eq newTrees) - this - else - Thicket[U](newTrees)(source).asInstanceOf[this.type] - } - - override def foreachInThicket(op: Tree[T] => Unit): Unit = - trees foreach (_.foreachInThicket(op)) - - override def isEmpty: Boolean = trees.isEmpty - override def toList: List[Tree[T]] = flatten(trees) - override def toString: String = if (isEmpty) "EmptyTree" else "Thicket(" + trees.mkString(", ") + ")" - override def span: Span = - def combine(s: Span, ts: List[Tree[T]]): Span = ts match - case t :: ts1 => combine(s.union(t.span), ts1) - case nil => s - combine(NoSpan, trees) - - override def withSpan(span: Span): this.type = - mapElems(_.withSpan(span)).asInstanceOf[this.type] - } - - class EmptyTree[T <: Untyped] extends Thicket(Nil)(NoSource) { - // assert(uniqueId != 1492) - override def withSpan(span: Span) = throw AssertionError("Cannot change span of EmptyTree") - } - - class EmptyValDef[T <: Untyped] extends ValDef[T]( - nme.WILDCARD, genericEmptyTree[T], genericEmptyTree[T])(NoSource) with WithoutTypeOrPos[T] { - myTpe = NoType.asInstanceOf[T] - setMods(untpd.Modifiers(PrivateLocal)) - override def isEmpty: Boolean = true - override def withSpan(span: Span) = throw AssertionError("Cannot change span of EmptyValDef") - } - - @sharable val theEmptyTree = new EmptyTree[Type]() - @sharable val theEmptyValDef = new EmptyValDef[Type]() - - def genericEmptyValDef[T <: Untyped]: ValDef[T] = theEmptyValDef.asInstanceOf[ValDef[T]] - def genericEmptyTree[T <: Untyped]: Thicket[T] = theEmptyTree.asInstanceOf[Thicket[T]] - - /** Tree that replaces a level 1 splices in pickled (level 0) quotes. - * It is only used when picking quotes (will never be in a TASTy file). - * - * @param isTermHole If this hole is a term, otherwise it is a type hole. - * @param idx The index of the hole in it's enclosing level 0 quote. - * @param args The arguments of the splice to compute its content - * @param content Lambda that computes the content of the hole. This tree is empty when in a quote pickle. - * @param tpt Type of the hole - */ - case class Hole[+T <: Untyped](isTermHole: Boolean, idx: Int, args: List[Tree[T]], content: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { - type ThisTree[+T <: Untyped] <: Hole[T] - override def isTerm: Boolean = isTermHole - override def isType: Boolean = !isTermHole - } - - def flatten[T <: Untyped](trees: List[Tree[T]]): List[Tree[T]] = { - def recur(buf: ListBuffer[Tree[T]] | Null, remaining: List[Tree[T]]): ListBuffer[Tree[T]] | Null = - remaining match { - case Thicket(elems) :: remaining1 => - var buf1 = buf - if (buf1 == null) { - buf1 = new ListBuffer[Tree[T]] - var scanned = trees - while (scanned `ne` remaining) { - buf1 += scanned.head - scanned = scanned.tail - } - } - recur(recur(buf1, elems), remaining1) - case tree :: remaining1 => - if (buf != null) buf += tree - recur(buf, remaining1) - case nil => - buf - } - val buf = recur(null, trees) - if (buf != null) buf.toList else trees - } - - // ----- Lazy trees and tree sequences - - /** A tree that can have a lazy field - * The field is represented by some private `var` which is - * accessed by `unforced` and `force`. 
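`flatten` above walks the statement list and only allocates a buffer once it actually meets a Thicket; a list with no thickets is returned unchanged, preserving sharing. The same strategy in a standalone form, assuming a toy `Item` type where `Group` plays the role of Thicket:

import scala.collection.mutable.ListBuffer

enum Item:
  case Leaf(value: Int)
  case Group(items: List[Item]) // stands in for Thicket

import Item.*

def flattenItems(items: List[Item]): List[Item] =
  // The buffer is only created when the first Group is found; until then the
  // already-scanned prefix is left where it is.
  def recur(buf: Option[ListBuffer[Item]], remaining: List[Item]): Option[ListBuffer[Item]] =
    remaining match
      case Group(elems) :: rest =>
        val b = buf.getOrElse {
          val fresh = new ListBuffer[Item]
          var scanned = items
          while scanned ne remaining do // copy the prefix scanned so far
            fresh += scanned.head
            scanned = scanned.tail
          fresh
        }
        recur(recur(Some(b), elems), rest)
      case item :: rest =>
        buf.foreach(_ += item)
        recur(buf, rest)
      case Nil => buf
  recur(None, items).fold(items)(_.toList) // share the original list if nothing changed

@main def flattenDemo(): Unit =
  val alreadyFlat = List(Leaf(1), Leaf(2))
  println(flattenItems(alreadyFlat) eq alreadyFlat) // true: no copy was made
  println(flattenItems(List(Leaf(1), Group(List(Leaf(2), Leaf(3), Group(Nil))))))
  // List(Leaf(1), Leaf(2), Leaf(3))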
Forcing the field will - * set the `var` to the underlying value. - */ - trait WithLazyField[+T <: AnyRef] { - def unforced: T | Lazy[T] - protected def force(x: T @uncheckedVariance): Unit - def forceIfLazy(using Context): T = unforced match { - case lzy: Lazy[T @unchecked] => - val x = lzy.complete - force(x) - x - case x: T @ unchecked => x - } - } - - /** A base trait for lazy tree fields. - * These can be instantiated with Lazy instances which - * can delay tree construction until the field is first demanded. - */ - trait Lazy[+T <: AnyRef] { - def complete(using Context): T - } - - // ----- Generic Tree Instances, inherited from `tpt` and `untpd`. - - abstract class Instance[T <: Untyped] { inst => - - type Tree = Trees.Tree[T] - type TypTree = Trees.TypTree[T] - type TermTree = Trees.TermTree[T] - type PatternTree = Trees.PatternTree[T] - type DenotingTree = Trees.DenotingTree[T] - type ProxyTree = Trees.ProxyTree[T] - type NameTree = Trees.NameTree[T] - type RefTree = Trees.RefTree[T] - type DefTree = Trees.DefTree[T] - type NamedDefTree = Trees.NamedDefTree[T] - type MemberDef = Trees.MemberDef[T] - type ValOrDefDef = Trees.ValOrDefDef[T] - type ValOrTypeDef = Trees.ValOrTypeDef[T] - type LazyTree = Trees.LazyTree[T] - type LazyTreeList = Trees.LazyTreeList[T] - type ParamClause = Trees.ParamClause[T] - - type Ident = Trees.Ident[T] - type SearchFailureIdent = Trees.SearchFailureIdent[T] - type Select = Trees.Select[T] - type SelectWithSig = Trees.SelectWithSig[T] - type This = Trees.This[T] - type Super = Trees.Super[T] - type Apply = Trees.Apply[T] - type TypeApply = Trees.TypeApply[T] - type GenericApply = Trees.GenericApply[T] - type Literal = Trees.Literal[T] - type New = Trees.New[T] - type Typed = Trees.Typed[T] - type NamedArg = Trees.NamedArg[T] - type Assign = Trees.Assign[T] - type Block = Trees.Block[T] - type If = Trees.If[T] - type InlineIf = Trees.InlineIf[T] - type Closure = Trees.Closure[T] - type Match = Trees.Match[T] - type InlineMatch = Trees.InlineMatch[T] - type CaseDef = Trees.CaseDef[T] - type Labeled = Trees.Labeled[T] - type Return = Trees.Return[T] - type WhileDo = Trees.WhileDo[T] - type Try = Trees.Try[T] - type SeqLiteral = Trees.SeqLiteral[T] - type JavaSeqLiteral = Trees.JavaSeqLiteral[T] - type Inlined = Trees.Inlined[T] - type TypeTree = Trees.TypeTree[T] - type InferredTypeTree = Trees.InferredTypeTree[T] - type SingletonTypeTree = Trees.SingletonTypeTree[T] - type RefinedTypeTree = Trees.RefinedTypeTree[T] - type AppliedTypeTree = Trees.AppliedTypeTree[T] - type LambdaTypeTree = Trees.LambdaTypeTree[T] - type TermLambdaTypeTree = Trees.TermLambdaTypeTree[T] - type MatchTypeTree = Trees.MatchTypeTree[T] - type ByNameTypeTree = Trees.ByNameTypeTree[T] - type TypeBoundsTree = Trees.TypeBoundsTree[T] - type Bind = Trees.Bind[T] - type Alternative = Trees.Alternative[T] - type UnApply = Trees.UnApply[T] - type ValDef = Trees.ValDef[T] - type DefDef = Trees.DefDef[T] - type TypeDef = Trees.TypeDef[T] - type Template = Trees.Template[T] - type Import = Trees.Import[T] - type Export = Trees.Export[T] - type ImportOrExport = Trees.ImportOrExport[T] - type PackageDef = Trees.PackageDef[T] - type Annotated = Trees.Annotated[T] - type Thicket = Trees.Thicket[T] - - type Hole = Trees.Hole[T] - - @sharable val EmptyTree: Thicket = genericEmptyTree - @sharable val EmptyValDef: ValDef = genericEmptyValDef - @sharable val ContextualEmptyTree: Thicket = new EmptyTree() // an empty tree marking a contextual closure - - // ----- Auxiliary creation methods 
------------------ - - def Thicket(): Thicket = EmptyTree - def Thicket(x1: Tree, x2: Tree)(implicit src: SourceFile): Thicket = new Thicket(x1 :: x2 :: Nil) - def Thicket(x1: Tree, x2: Tree, x3: Tree)(implicit src: SourceFile): Thicket = new Thicket(x1 :: x2 :: x3 :: Nil) - def Thicket(xs: List[Tree])(implicit src: SourceFile) = new Thicket(xs) - - def flatTree(xs: List[Tree])(implicit src: SourceFile): Tree = flatten(xs) match { - case x :: Nil => x - case ys => Thicket(ys) - } - - // ----- Helper classes for copying, transforming, accumulating ----------------- - - val cpy: TreeCopier - - /** A class for copying trees. The copy methods avoid creating a new tree - * If all arguments stay the same. - * - * Note: Some of the copy methods take a context. - * These are exactly those methods that are overridden in TypedTreeCopier - * so that they selectively retype themselves. Retyping needs a context. - */ - abstract class TreeCopier { - protected def postProcess(tree: Tree, copied: untpd.Tree): copied.ThisTree[T] - protected def postProcess(tree: Tree, copied: untpd.MemberDef): copied.ThisTree[T] - - /** Soucre of the copied tree */ - protected def sourceFile(tree: Tree): SourceFile = tree.source - - protected def finalize(tree: Tree, copied: untpd.Tree): copied.ThisTree[T] = - Stats.record(s"TreeCopier.finalize/${tree.getClass == copied.getClass}") - postProcess(tree, copied.withSpan(tree.span).withAttachmentsFrom(tree)) - - protected def finalize(tree: Tree, copied: untpd.MemberDef): copied.ThisTree[T] = - Stats.record(s"TreeCopier.finalize/${tree.getClass == copied.getClass}") - postProcess(tree, copied.withSpan(tree.span).withAttachmentsFrom(tree)) - - def Ident(tree: Tree)(name: Name)(using Context): Ident = tree match { - case tree: Ident if name == tree.name => tree - case _ => finalize(tree, untpd.Ident(name)(sourceFile(tree))) - } - def Select(tree: Tree)(qualifier: Tree, name: Name)(using Context): Select = tree match { - case tree: SelectWithSig => - if ((qualifier eq tree.qualifier) && (name == tree.name)) tree - else finalize(tree, SelectWithSig(qualifier, name, tree.sig)(sourceFile(tree))) - case tree: Select if (qualifier eq tree.qualifier) && (name == tree.name) => tree - case _ => finalize(tree, untpd.Select(qualifier, name)(sourceFile(tree))) - } - /** Copy Ident or Select trees */ - def Ref(tree: RefTree)(name: Name)(using Context): RefTree = tree match { - case Ident(_) => Ident(tree)(name) - case Select(qual, _) => Select(tree)(qual, name) - } - def This(tree: Tree)(qual: untpd.Ident)(using Context): This = tree match { - case tree: This if (qual eq tree.qual) => tree - case _ => finalize(tree, untpd.This(qual)(sourceFile(tree))) - } - def Super(tree: Tree)(qual: Tree, mix: untpd.Ident)(using Context): Super = tree match { - case tree: Super if (qual eq tree.qual) && (mix eq tree.mix) => tree - case _ => finalize(tree, untpd.Super(qual, mix)(sourceFile(tree))) - } - def Apply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): Apply = tree match { - case tree: Apply if (fun eq tree.fun) && (args eq tree.args) => tree - case _ => finalize(tree, untpd.Apply(fun, args)(sourceFile(tree))) - //.ensuring(res => res.uniqueId != 2213, s"source = $tree, ${tree.uniqueId}, ${tree.span}") - } - def TypeApply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): TypeApply = tree match { - case tree: TypeApply if (fun eq tree.fun) && (args eq tree.args) => tree - case _ => finalize(tree, untpd.TypeApply(fun, args)(sourceFile(tree))) - } - def Literal(tree: Tree)(const: 
Constant)(using Context): Literal = tree match { - case tree: Literal if const == tree.const => tree - case _ => finalize(tree, untpd.Literal(const)(sourceFile(tree))) - } - def New(tree: Tree)(tpt: Tree)(using Context): New = tree match { - case tree: New if (tpt eq tree.tpt) => tree - case _ => finalize(tree, untpd.New(tpt)(sourceFile(tree))) - } - def Typed(tree: Tree)(expr: Tree, tpt: Tree)(using Context): Typed = tree match { - case tree: Typed if (expr eq tree.expr) && (tpt eq tree.tpt) => tree - case tree => finalize(tree, untpd.Typed(expr, tpt)(sourceFile(tree))) - } - def NamedArg(tree: Tree)(name: Name, arg: Tree)(using Context): NamedArg = tree match { - case tree: NamedArg if (name == tree.name) && (arg eq tree.arg) => tree - case _ => finalize(tree, untpd.NamedArg(name, arg)(sourceFile(tree))) - } - def Assign(tree: Tree)(lhs: Tree, rhs: Tree)(using Context): Assign = tree match { - case tree: Assign if (lhs eq tree.lhs) && (rhs eq tree.rhs) => tree - case _ => finalize(tree, untpd.Assign(lhs, rhs)(sourceFile(tree))) - } - def Block(tree: Tree)(stats: List[Tree], expr: Tree)(using Context): Block = tree match { - case tree: Block if (stats eq tree.stats) && (expr eq tree.expr) => tree - case _ => finalize(tree, untpd.Block(stats, expr)(sourceFile(tree))) - } - def If(tree: Tree)(cond: Tree, thenp: Tree, elsep: Tree)(using Context): If = tree match { - case tree: If if (cond eq tree.cond) && (thenp eq tree.thenp) && (elsep eq tree.elsep) => tree - case tree: InlineIf => finalize(tree, untpd.InlineIf(cond, thenp, elsep)(sourceFile(tree))) - case _ => finalize(tree, untpd.If(cond, thenp, elsep)(sourceFile(tree))) - } - def Closure(tree: Tree)(env: List[Tree], meth: Tree, tpt: Tree)(using Context): Closure = tree match { - case tree: Closure if (env eq tree.env) && (meth eq tree.meth) && (tpt eq tree.tpt) => tree - case _ => finalize(tree, untpd.Closure(env, meth, tpt)(sourceFile(tree))) - } - def Match(tree: Tree)(selector: Tree, cases: List[CaseDef])(using Context): Match = tree match { - case tree: Match if (selector eq tree.selector) && (cases eq tree.cases) => tree - case tree: InlineMatch => finalize(tree, untpd.InlineMatch(selector, cases)(sourceFile(tree))) - case _ => finalize(tree, untpd.Match(selector, cases)(sourceFile(tree))) - } - def CaseDef(tree: Tree)(pat: Tree, guard: Tree, body: Tree)(using Context): CaseDef = tree match { - case tree: CaseDef if (pat eq tree.pat) && (guard eq tree.guard) && (body eq tree.body) => tree - case _ => finalize(tree, untpd.CaseDef(pat, guard, body)(sourceFile(tree))) - } - def Labeled(tree: Tree)(bind: Bind, expr: Tree)(using Context): Labeled = tree match { - case tree: Labeled if (bind eq tree.bind) && (expr eq tree.expr) => tree - case _ => finalize(tree, untpd.Labeled(bind, expr)(sourceFile(tree))) - } - def Return(tree: Tree)(expr: Tree, from: Tree)(using Context): Return = tree match { - case tree: Return if (expr eq tree.expr) && (from eq tree.from) => tree - case _ => finalize(tree, untpd.Return(expr, from)(sourceFile(tree))) - } - def WhileDo(tree: Tree)(cond: Tree, body: Tree)(using Context): WhileDo = tree match { - case tree: WhileDo if (cond eq tree.cond) && (body eq tree.body) => tree - case _ => finalize(tree, untpd.WhileDo(cond, body)(sourceFile(tree))) - } - def Try(tree: Tree)(expr: Tree, cases: List[CaseDef], finalizer: Tree)(using Context): Try = tree match { - case tree: Try if (expr eq tree.expr) && (cases eq tree.cases) && (finalizer eq tree.finalizer) => tree - case _ => finalize(tree, untpd.Try(expr, cases, 
finalizer)(sourceFile(tree))) - } - def SeqLiteral(tree: Tree)(elems: List[Tree], elemtpt: Tree)(using Context): SeqLiteral = tree match { - case tree: JavaSeqLiteral => - if ((elems eq tree.elems) && (elemtpt eq tree.elemtpt)) tree - else finalize(tree, untpd.JavaSeqLiteral(elems, elemtpt)) - case tree: SeqLiteral if (elems eq tree.elems) && (elemtpt eq tree.elemtpt) => tree - case _ => finalize(tree, untpd.SeqLiteral(elems, elemtpt)(sourceFile(tree))) - } - def Inlined(tree: Tree)(call: tpd.Tree, bindings: List[MemberDef], expansion: Tree)(using Context): Inlined = tree match { - case tree: Inlined if (call eq tree.call) && (bindings eq tree.bindings) && (expansion eq tree.expansion) => tree - case _ => finalize(tree, untpd.Inlined(call, bindings, expansion)(sourceFile(tree))) - } - def SingletonTypeTree(tree: Tree)(ref: Tree)(using Context): SingletonTypeTree = tree match { - case tree: SingletonTypeTree if (ref eq tree.ref) => tree - case _ => finalize(tree, untpd.SingletonTypeTree(ref)(sourceFile(tree))) - } - def RefinedTypeTree(tree: Tree)(tpt: Tree, refinements: List[Tree])(using Context): RefinedTypeTree = tree match { - case tree: RefinedTypeTree if (tpt eq tree.tpt) && (refinements eq tree.refinements) => tree - case _ => finalize(tree, untpd.RefinedTypeTree(tpt, refinements)(sourceFile(tree))) - } - def AppliedTypeTree(tree: Tree)(tpt: Tree, args: List[Tree])(using Context): AppliedTypeTree = tree match { - case tree: AppliedTypeTree if (tpt eq tree.tpt) && (args eq tree.args) => tree - case _ => finalize(tree, untpd.AppliedTypeTree(tpt, args)(sourceFile(tree))) - } - def LambdaTypeTree(tree: Tree)(tparams: List[TypeDef], body: Tree)(using Context): LambdaTypeTree = tree match { - case tree: LambdaTypeTree if (tparams eq tree.tparams) && (body eq tree.body) => tree - case _ => finalize(tree, untpd.LambdaTypeTree(tparams, body)(sourceFile(tree))) - } - def TermLambdaTypeTree(tree: Tree)(params: List[ValDef], body: Tree)(using Context): TermLambdaTypeTree = tree match { - case tree: TermLambdaTypeTree if (params eq tree.params) && (body eq tree.body) => tree - case _ => finalize(tree, untpd.TermLambdaTypeTree(params, body)(sourceFile(tree))) - } - def MatchTypeTree(tree: Tree)(bound: Tree, selector: Tree, cases: List[CaseDef])(using Context): MatchTypeTree = tree match { - case tree: MatchTypeTree if (bound eq tree.bound) && (selector eq tree.selector) && (cases eq tree.cases) => tree - case _ => finalize(tree, untpd.MatchTypeTree(bound, selector, cases)(sourceFile(tree))) - } - def ByNameTypeTree(tree: Tree)(result: Tree)(using Context): ByNameTypeTree = tree match { - case tree: ByNameTypeTree if (result eq tree.result) => tree - case _ => finalize(tree, untpd.ByNameTypeTree(result)(sourceFile(tree))) - } - def TypeBoundsTree(tree: Tree)(lo: Tree, hi: Tree, alias: Tree)(using Context): TypeBoundsTree = tree match { - case tree: TypeBoundsTree if (lo eq tree.lo) && (hi eq tree.hi) && (alias eq tree.alias) => tree - case _ => finalize(tree, untpd.TypeBoundsTree(lo, hi, alias)(sourceFile(tree))) - } - def Bind(tree: Tree)(name: Name, body: Tree)(using Context): Bind = tree match { - case tree: Bind if (name eq tree.name) && (body eq tree.body) => tree - case _ => finalize(tree, untpd.Bind(name, body)(sourceFile(tree))) - } - def Alternative(tree: Tree)(trees: List[Tree])(using Context): Alternative = tree match { - case tree: Alternative if (trees eq tree.trees) => tree - case _ => finalize(tree, untpd.Alternative(trees)(sourceFile(tree))) - } - def UnApply(tree: Tree)(fun: Tree, 
implicits: List[Tree], patterns: List[Tree])(using Context): UnApply = tree match { - case tree: UnApply if (fun eq tree.fun) && (implicits eq tree.implicits) && (patterns eq tree.patterns) => tree - case _ => finalize(tree, untpd.UnApply(fun, implicits, patterns)(sourceFile(tree))) - } - def ValDef(tree: Tree)(name: TermName, tpt: Tree, rhs: LazyTree)(using Context): ValDef = tree match { - case tree: ValDef if (name == tree.name) && (tpt eq tree.tpt) && (rhs eq tree.unforcedRhs) => tree - case _ => finalize(tree, untpd.ValDef(name, tpt, rhs)(sourceFile(tree))) - } - def DefDef(tree: Tree)(name: TermName, paramss: List[ParamClause], tpt: Tree, rhs: LazyTree)(using Context): DefDef = tree match { - case tree: DefDef if (name == tree.name) && (paramss eq tree.paramss) && (tpt eq tree.tpt) && (rhs eq tree.unforcedRhs) => tree - case _ => finalize(tree, untpd.DefDef(name, paramss, tpt, rhs)(sourceFile(tree))) - } - def TypeDef(tree: Tree)(name: TypeName, rhs: Tree)(using Context): TypeDef = tree match { - case tree: TypeDef if (name == tree.name) && (rhs eq tree.rhs) => tree - case _ => finalize(tree, untpd.TypeDef(name, rhs)(sourceFile(tree))) - } - def Template(tree: Tree)(constr: DefDef, parents: List[Tree], derived: List[untpd.Tree], self: ValDef, body: LazyTreeList)(using Context): Template = tree match { - case tree: Template if (constr eq tree.constr) && (parents eq tree.parents) && (derived eq tree.derived) && (self eq tree.self) && (body eq tree.unforcedBody) => tree - case tree => finalize(tree, untpd.Template(constr, parents, derived, self, body)(sourceFile(tree))) - } - def Import(tree: Tree)(expr: Tree, selectors: List[untpd.ImportSelector])(using Context): Import = tree match { - case tree: Import if (expr eq tree.expr) && (selectors eq tree.selectors) => tree - case _ => finalize(tree, untpd.Import(expr, selectors)(sourceFile(tree))) - } - def Export(tree: Tree)(expr: Tree, selectors: List[untpd.ImportSelector])(using Context): Export = tree match { - case tree: Export if (expr eq tree.expr) && (selectors eq tree.selectors) => tree - case _ => finalize(tree, untpd.Export(expr, selectors)(sourceFile(tree))) - } - def PackageDef(tree: Tree)(pid: RefTree, stats: List[Tree])(using Context): PackageDef = tree match { - case tree: PackageDef if (pid eq tree.pid) && (stats eq tree.stats) => tree - case _ => finalize(tree, untpd.PackageDef(pid, stats)(sourceFile(tree))) - } - def Annotated(tree: Tree)(arg: Tree, annot: Tree)(using Context): Annotated = tree match { - case tree: Annotated if (arg eq tree.arg) && (annot eq tree.annot) => tree - case _ => finalize(tree, untpd.Annotated(arg, annot)(sourceFile(tree))) - } - def Thicket(tree: Tree)(trees: List[Tree])(using Context): Thicket = tree match { - case tree: Thicket if (trees eq tree.trees) => tree - case _ => finalize(tree, untpd.Thicket(trees)(sourceFile(tree))) - } - def Hole(tree: Tree)(isTerm: Boolean, idx: Int, args: List[Tree], content: Tree, tpt: Tree)(using Context): Hole = tree match { - case tree: Hole if isTerm == tree.isTerm && idx == tree.idx && args.eq(tree.args) && content.eq(tree.content) && content.eq(tree.content) => tree - case _ => finalize(tree, untpd.Hole(isTerm, idx, args, content, tpt)(sourceFile(tree))) - } - - // Copier methods with default arguments; these demand that the original tree - // is of the same class as the copy. We only include trees with more than 2 elements here. 
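Every copier method above follows the same pattern: if each child passed in is reference-identical (`eq`) to the corresponding child of the original node, the original node is returned; otherwise a fresh untyped node is finalized with the old span and attachments. A stripped-down sketch of that pattern over a toy tree (not the real TreeCopier, which also deals with sources, spans and retyping):

sealed trait Node
final case class Num(value: Int) extends Node
final case class Add(lhs: Node, rhs: Node) extends Node

object cpy:
  // Re-use the original node when its children are unchanged, so identity
  // (and anything attached to the node) is preserved across a no-op rewrite.
  def Add(tree: Node)(lhs: Node, rhs: Node): Add = tree match
    case tree: Add if (lhs eq tree.lhs) && (rhs eq tree.rhs) => tree
    case _ => new Add(lhs, rhs)

// A transform that changes nothing hands back the original tree.
def transform(tree: Node): Node = tree match
  case Add(l, r) => cpy.Add(tree)(transform(l), transform(r))
  case n: Num    => n

@main def copierDemo(): Unit =
  val t = Add(Num(1), Add(Num(2), Num(3)))
  println(transform(t) eq t) // true: untouched subtrees keep their identity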
- def If(tree: If)(cond: Tree = tree.cond, thenp: Tree = tree.thenp, elsep: Tree = tree.elsep)(using Context): If = - If(tree: Tree)(cond, thenp, elsep) - def Closure(tree: Closure)(env: List[Tree] = tree.env, meth: Tree = tree.meth, tpt: Tree = tree.tpt)(using Context): Closure = - Closure(tree: Tree)(env, meth, tpt) - def CaseDef(tree: CaseDef)(pat: Tree = tree.pat, guard: Tree = tree.guard, body: Tree = tree.body)(using Context): CaseDef = - CaseDef(tree: Tree)(pat, guard, body) - def Try(tree: Try)(expr: Tree = tree.expr, cases: List[CaseDef] = tree.cases, finalizer: Tree = tree.finalizer)(using Context): Try = - Try(tree: Tree)(expr, cases, finalizer) - def UnApply(tree: UnApply)(fun: Tree = tree.fun, implicits: List[Tree] = tree.implicits, patterns: List[Tree] = tree.patterns)(using Context): UnApply = - UnApply(tree: Tree)(fun, implicits, patterns) - def ValDef(tree: ValDef)(name: TermName = tree.name, tpt: Tree = tree.tpt, rhs: LazyTree = tree.unforcedRhs)(using Context): ValDef = - ValDef(tree: Tree)(name, tpt, rhs) - def DefDef(tree: DefDef)(name: TermName = tree.name, paramss: List[ParamClause] = tree.paramss, tpt: Tree = tree.tpt, rhs: LazyTree = tree.unforcedRhs)(using Context): DefDef = - DefDef(tree: Tree)(name, paramss, tpt, rhs) - def TypeDef(tree: TypeDef)(name: TypeName = tree.name, rhs: Tree = tree.rhs)(using Context): TypeDef = - TypeDef(tree: Tree)(name, rhs) - def Template(tree: Template)(constr: DefDef = tree.constr, parents: List[Tree] = tree.parents, derived: List[untpd.Tree] = tree.derived, self: ValDef = tree.self, body: LazyTreeList = tree.unforcedBody)(using Context): Template = - Template(tree: Tree)(constr, parents, derived, self, body) - def Hole(tree: Hole)(isTerm: Boolean = tree.isTerm, idx: Int = tree.idx, args: List[Tree] = tree.args, content: Tree = tree.content, tpt: Tree = tree.tpt)(using Context): Hole = - Hole(tree: Tree)(isTerm, idx, args, content, tpt) - - } - - /** Hook to indicate that a transform of some subtree should be skipped */ - protected def skipTransform(tree: Tree)(using Context): Boolean = false - - /** For untyped trees, this is just the identity. - * For typed trees, a context derived from `ctx` that records `call` as the - * innermost enclosing call for which the inlined version is currently - * processed. - */ - protected def inlineContext(call: tpd.Tree)(using Context): Context = ctx - - /** The context to use when mapping or accumulating over a tree */ - def localCtx(tree: Tree)(using Context): Context - - /** The context to use when transforming a tree. - * It ensures that the source is correct, and that the local context is used if - * that's necessary for transforming the whole tree.
- * TODO: ensure transform is always called with the correct context as argument - * @see https://github.com/lampepfl/dotty/pull/13880#discussion_r836395977 - */ - def transformCtx(tree: Tree)(using Context): Context = - val sourced = - if tree.source.exists && tree.source != ctx.source - then ctx.withSource(tree.source) - else ctx - tree match - case t: (MemberDef | PackageDef | LambdaTypeTree | TermLambdaTypeTree) => - localCtx(t)(using sourced) - case _ => - sourced - - abstract class TreeMap(val cpy: TreeCopier = inst.cpy) { self: TreeMap @retains(caps.cap) => - def transform(tree: Tree)(using Context): Tree = { - inContext(transformCtx(tree)) { - Stats.record(s"TreeMap.transform/$getClass") - if (skipTransform(tree)) tree - else tree match { - case Ident(name) => - tree - case Select(qualifier, name) => - cpy.Select(tree)(transform(qualifier), name) - case This(qual) => - tree - case Super(qual, mix) => - cpy.Super(tree)(transform(qual), mix) - case Apply(fun, args) => - cpy.Apply(tree)(transform(fun), transform(args)) - case TypeApply(fun, args) => - cpy.TypeApply(tree)(transform(fun), transform(args)) - case Literal(const) => - tree - case New(tpt) => - cpy.New(tree)(transform(tpt)) - case Typed(expr, tpt) => - cpy.Typed(tree)(transform(expr), transform(tpt)) - case NamedArg(name, arg) => - cpy.NamedArg(tree)(name, transform(arg)) - case Assign(lhs, rhs) => - cpy.Assign(tree)(transform(lhs), transform(rhs)) - case blk: Block => - transformBlock(blk) - case If(cond, thenp, elsep) => - cpy.If(tree)(transform(cond), transform(thenp), transform(elsep)) - case Closure(env, meth, tpt) => - cpy.Closure(tree)(transform(env), transform(meth), transform(tpt)) - case Match(selector, cases) => - cpy.Match(tree)(transform(selector), transformSub(cases)) - case CaseDef(pat, guard, body) => - cpy.CaseDef(tree)(transform(pat), transform(guard), transform(body)) - case Labeled(bind, expr) => - cpy.Labeled(tree)(transformSub(bind), transform(expr)) - case Return(expr, from) => - cpy.Return(tree)(transform(expr), transformSub(from)) - case WhileDo(cond, body) => - cpy.WhileDo(tree)(transform(cond), transform(body)) - case Try(block, cases, finalizer) => - cpy.Try(tree)(transform(block), transformSub(cases), transform(finalizer)) - case SeqLiteral(elems, elemtpt) => - cpy.SeqLiteral(tree)(transform(elems), transform(elemtpt)) - case Inlined(call, bindings, expansion) => - cpy.Inlined(tree)(call, transformSub(bindings), transform(expansion)(using inlineContext(call))) - case TypeTree() => - tree - case SingletonTypeTree(ref) => - cpy.SingletonTypeTree(tree)(transform(ref)) - case RefinedTypeTree(tpt, refinements) => - cpy.RefinedTypeTree(tree)(transform(tpt), transformSub(refinements)) - case AppliedTypeTree(tpt, args) => - cpy.AppliedTypeTree(tree)(transform(tpt), transform(args)) - case LambdaTypeTree(tparams, body) => - cpy.LambdaTypeTree(tree)(transformSub(tparams), transform(body)) - case TermLambdaTypeTree(params, body) => - cpy.TermLambdaTypeTree(tree)(transformSub(params), transform(body)) - case MatchTypeTree(bound, selector, cases) => - cpy.MatchTypeTree(tree)(transform(bound), transform(selector), transformSub(cases)) - case ByNameTypeTree(result) => - cpy.ByNameTypeTree(tree)(transform(result)) - case TypeBoundsTree(lo, hi, alias) => - cpy.TypeBoundsTree(tree)(transform(lo), transform(hi), transform(alias)) - case Bind(name, body) => - cpy.Bind(tree)(name, transform(body)) - case Alternative(trees) => - cpy.Alternative(tree)(transform(trees)) - case UnApply(fun, implicits, patterns) => - 
cpy.UnApply(tree)(transform(fun), transform(implicits), transform(patterns)) - case EmptyValDef => - tree - case tree @ ValDef(name, tpt, _) => - val tpt1 = transform(tpt) - val rhs1 = transform(tree.rhs) - cpy.ValDef(tree)(name, tpt1, rhs1) - case tree @ DefDef(name, paramss, tpt, _) => - cpy.DefDef(tree)(name, transformParamss(paramss), transform(tpt), transform(tree.rhs)) - case tree @ TypeDef(name, rhs) => - cpy.TypeDef(tree)(name, transform(rhs)) - case tree @ Template(constr, parents, self, _) if tree.derived.isEmpty => - cpy.Template(tree)(transformSub(constr), transform(tree.parents), Nil, transformSub(self), transformStats(tree.body, tree.symbol)) - case Import(expr, selectors) => - cpy.Import(tree)(transform(expr), selectors) - case Export(expr, selectors) => - cpy.Export(tree)(transform(expr), selectors) - case PackageDef(pid, stats) => - cpy.PackageDef(tree)(transformSub(pid), transformStats(stats, ctx.owner)) - case Annotated(arg, annot) => - cpy.Annotated(tree)(transform(arg), transform(annot)) - case Thicket(trees) => - val trees1 = transform(trees) - if (trees1 eq trees) tree else Thicket(trees1) - case tree @ Hole(_, _, args, content, tpt) => - cpy.Hole(tree)(args = transform(args), content = transform(content), tpt = transform(tpt)) - case _ => - transformMoreCases(tree) - } - } - } - - def transformStats(trees: List[Tree], exprOwner: Symbol)(using Context): List[Tree] = - transform(trees) - def transformBlock(blk: Block)(using Context): Block = - cpy.Block(blk)(transformStats(blk.stats, ctx.owner), transform(blk.expr)) - def transform(trees: List[Tree])(using Context): List[Tree] = - flatten(trees mapConserve (transform(_))) - def transformSub[Tr <: Tree](tree: Tr)(using Context): Tr = - transform(tree).asInstanceOf[Tr] - def transformSub[Tr <: Tree](trees: List[Tr])(using Context): List[Tr] = - transform(trees).asInstanceOf[List[Tr]] - def transformParams(params: ParamClause)(using Context): ParamClause = - transform(params).asInstanceOf[ParamClause] - def transformParamss(paramss: List[ParamClause])(using Context): List[ParamClause] = - paramss.mapConserve(transformParams) - - protected def transformMoreCases(tree: Tree)(using Context): Tree = { - assert(ctx.reporter.errorsReported) - tree - } - } - - abstract class TreeAccumulator[X] { self: TreeAccumulator[X] @retains(caps.cap) => - // Ties the knot of the traversal: call `foldOver(x, tree)` to dive into the `tree` node.
- def apply(x: X, tree: Tree)(using Context): X - - def apply(x: X, trees: List[Tree])(using Context): X = - def fold(x: X, trees: List[Tree]): X = trees match - case tree :: rest => fold(apply(x, tree), rest) - case Nil => x - fold(x, trees) - - def foldOver(x: X, tree: Tree)(using Context): X = - if (tree.source != ctx.source && tree.source.exists) - foldOver(x, tree)(using ctx.withSource(tree.source)) - else { - Stats.record(s"TreeAccumulator.foldOver/$getClass") - tree match { - case Ident(name) => - x - case Select(qualifier, name) => - this(x, qualifier) - case This(qual) => - x - case Super(qual, mix) => - this(x, qual) - case Apply(fun, args) => - this(this(x, fun), args) - case TypeApply(fun, args) => - this(this(x, fun), args) - case Literal(const) => - x - case New(tpt) => - this(x, tpt) - case Typed(expr, tpt) => - this(this(x, expr), tpt) - case NamedArg(name, arg) => - this(x, arg) - case Assign(lhs, rhs) => - this(this(x, lhs), rhs) - case Block(stats, expr) => - this(this(x, stats), expr) - case If(cond, thenp, elsep) => - this(this(this(x, cond), thenp), elsep) - case Closure(env, meth, tpt) => - this(this(this(x, env), meth), tpt) - case Match(selector, cases) => - this(this(x, selector), cases) - case CaseDef(pat, guard, body) => - this(this(this(x, pat), guard), body) - case Labeled(bind, expr) => - this(this(x, bind), expr) - case Return(expr, from) => - this(this(x, expr), from) - case WhileDo(cond, body) => - this(this(x, cond), body) - case Try(block, handler, finalizer) => - this(this(this(x, block), handler), finalizer) - case SeqLiteral(elems, elemtpt) => - this(this(x, elems), elemtpt) - case Inlined(call, bindings, expansion) => - this(this(x, bindings), expansion)(using inlineContext(call)) - case TypeTree() => - x - case SingletonTypeTree(ref) => - this(x, ref) - case RefinedTypeTree(tpt, refinements) => - this(this(x, tpt), refinements) - case AppliedTypeTree(tpt, args) => - this(this(x, tpt), args) - case LambdaTypeTree(tparams, body) => - inContext(localCtx(tree)) { - this(this(x, tparams), body) - } - case TermLambdaTypeTree(params, body) => - inContext(localCtx(tree)) { - this(this(x, params), body) - } - case MatchTypeTree(bound, selector, cases) => - this(this(this(x, bound), selector), cases) - case ByNameTypeTree(result) => - this(x, result) - case TypeBoundsTree(lo, hi, alias) => - this(this(this(x, lo), hi), alias) - case Bind(name, body) => - this(x, body) - case Alternative(trees) => - this(x, trees) - case UnApply(fun, implicits, patterns) => - this(this(this(x, fun), implicits), patterns) - case tree @ ValDef(_, tpt, _) => - inContext(localCtx(tree)) { - this(this(x, tpt), tree.rhs) - } - case tree @ DefDef(_, paramss, tpt, _) => - inContext(localCtx(tree)) { - this(this(paramss.foldLeft(x)(apply), tpt), tree.rhs) - } - case TypeDef(_, rhs) => - inContext(localCtx(tree)) { - this(x, rhs) - } - case tree @ Template(constr, parents, self, _) if tree.derived.isEmpty => - this(this(this(this(x, constr), parents), self), tree.body) - case Import(expr, _) => - this(x, expr) - case Export(expr, _) => - this(x, expr) - case PackageDef(pid, stats) => - this(this(x, pid), stats)(using localCtx(tree)) - case Annotated(arg, annot) => - this(this(x, arg), annot) - case Thicket(ts) => - this(x, ts) - case Hole(_, _, args, content, tpt) => - this(this(this(x, args), content), tpt) - case _ => - foldMoreCases(x, tree) - } - } - - def foldMoreCases(x: X, tree: Tree)(using Context): X = { - assert(ctx.reporter.hasUnreportedErrors - || ctx.reporter.errorsReported 
- || ctx.mode.is(Mode.Interactive), tree) - // In interactive mode, errors might come from previous runs. - // In case of errors it may be that typed trees point to untyped ones. - // The IDE can still traverse inside such trees, either in the run where errors - // are reported, or in subsequent ones. - x - } - } - - abstract class TreeTraverser extends TreeAccumulator[Unit] { - def traverse(tree: Tree)(using Context): Unit - def traverse(trees: List[Tree])(using Context) = apply((), trees) - def apply(x: Unit, tree: Tree)(using Context): Unit = traverse(tree) - protected def traverseChildren(tree: Tree)(using Context): Unit = foldOver((), tree) - } - - /** Fold `f` over all tree nodes, in depth-first, prefix order */ - class DeepFolder[X](f: (X, Tree) => X) extends TreeAccumulator[X] { - def apply(x: X, tree: Tree)(using Context): X = foldOver(f(x, tree), tree) - } - - /** Fold `f` over all tree nodes, in depth-first, prefix order, but don't visit - * subtrees where `f` returns a different result for the root, i.e. `f(x, root) ne x`. - */ - class ShallowFolder[X](f: (X, Tree) => X) extends TreeAccumulator[X] { - def apply(x: X, tree: Tree)(using Context): X = { - val x1 = f(x, tree) - if (x1.asInstanceOf[AnyRef] ne x.asInstanceOf[AnyRef]) x1 - else foldOver(x1, tree) - } - } - - def rename(tree: NameTree, newName: Name)(using Context): tree.ThisTree[T] = { - tree match { - case tree: Ident => cpy.Ident(tree)(newName) - case tree: Select => cpy.Select(tree)(tree.qualifier, newName) - case tree: Bind => cpy.Bind(tree)(newName, tree.body) - case tree: ValDef => cpy.ValDef(tree)(name = newName.asTermName) - case tree: DefDef => cpy.DefDef(tree)(name = newName.asTermName) - case tree: TypeDef => cpy.TypeDef(tree)(name = newName.asTypeName) - } - }.asInstanceOf[tree.ThisTree[T]] - - object TypeDefs: - def unapply(xs: List[Tree]): Option[List[TypeDef]] = xs match - case (x: TypeDef) :: _ => Some(xs.asInstanceOf[List[TypeDef]]) - case _ => None - - object ValDefs: - def unapply(xs: List[Tree]): Option[List[ValDef]] = xs match - case Nil => Some(Nil) - case (x: ValDef) :: _ => Some(xs.asInstanceOf[List[ValDef]]) - case _ => None - - def termParamssIn(paramss: List[ParamClause]): List[List[ValDef]] = paramss match - case ValDefs(vparams) :: paramss1 => - val paramss2 = termParamssIn(paramss1) - if paramss2 eq paramss1 then paramss.asInstanceOf[List[List[ValDef]]] - else vparams :: paramss2 - case _ :: paramss1 => - termParamssIn(paramss1) - case nil => - Nil - - /** If `tparams` is non-empty, add it to the left `paramss`, merging - * it with a leading type parameter list of `paramss`, if one exists. - */ - def joinParams(tparams: List[TypeDef], paramss: List[ParamClause]): List[ParamClause] = - if tparams.isEmpty then paramss - else paramss match - case TypeDefs(tparams1) :: paramss1 => (tparams ++ tparams1) :: paramss1 - case _ => tparams :: paramss - - def isTermOnly(paramss: List[ParamClause]): Boolean = paramss match - case Nil => true - case params :: paramss1 => - params match - case (param: untpd.TypeDef) :: _ => false - case _ => isTermOnly(paramss1) - - def asTermOnly(paramss: List[ParamClause]): List[List[ValDef]] = - assert(isTermOnly(paramss)) - paramss.asInstanceOf[List[List[ValDef]]] - - /** Delegate to FunProto or FunProtoTyped depending on whether the prefix is `untpd` or `tpd`. 
*/ - protected def FunProto(args: List[Tree], resType: Type)(using Context): ProtoTypes.FunProto - - /** Construct the application `$receiver.$method[$targs]($args)` using overloading resolution - * to find a matching overload of `$method` if necessary. - * This is useful when overloading resolution needs to be performed in a phase after typer. - * Note that this will not perform any kind of implicit search. - * - * @param expectedType An expected type of the application used to guide overloading resolution - */ - def applyOverloaded( - receiver: tpd.Tree, method: TermName, args: List[Tree], targs: List[Type], - expectedType: Type)(using parentCtx: Context): tpd.Tree = { - given ctx: Context = parentCtx.retractMode(Mode.ImplicitsEnabled) - import dotty.tools.dotc.ast.tpd.TreeOps - - val typer = ctx.typer - val proto = FunProto(args, expectedType) - val denot = receiver.tpe.member(method) - if !denot.exists then - overload.println(i"members = ${receiver.tpe.decls}") - report.error(em"no member $receiver . $method", receiver.srcPos) - val selected = - if (denot.isOverloaded) { - def typeParamCount(tp: Type) = tp.widen match { - case tp: PolyType => tp.paramInfos.length - case _ => 0 - } - val allAlts = denot.alternatives - .map(denot => TermRef(receiver.tpe, denot.symbol)) - .filter(tr => typeParamCount(tr) == targs.length) - .filter { _.widen match { - case MethodTpe(_, _, x: MethodType) => !x.isImplicitMethod - case _ => true - }} - val alternatives = ctx.typer.resolveOverloaded(allAlts, proto) - assert(alternatives.size == 1, - i"${if (alternatives.isEmpty) "no" else "multiple"} overloads available for " + - i"$method on ${receiver.tpe.widenDealiasKeepAnnots} with targs: $targs%, %; args: $args%, %; expectedType: $expectedType." + - i"all alternatives: ${allAlts.map(_.symbol.showDcl).mkString(", ")}\n" + - i"matching alternatives: ${alternatives.map(_.symbol.showDcl).mkString(", ")}.") // this is parsed from bytecode tree. 
there's nothing user can do about it - alternatives.head - } - else TermRef(receiver.tpe, denot.symbol) - val fun = receiver.select(selected).appliedToTypes(targs) - - val apply = untpd.Apply(fun, args) - typer.ApplyTo(apply, fun, selected, proto, expectedType) - } - - - def resolveConstructor(atp: Type, args: List[Tree])(using Context): tpd.Tree = { - val targs = atp.argTypes - withoutMode(Mode.PatternOrTypeBits) { - applyOverloaded(tpd.New(atp.typeConstructor), nme.CONSTRUCTOR, args, targs, atp) - } - } - } -} diff --git a/tests/pos-with-compiler-cc/dotc/ast/tpd.scala b/tests/pos-with-compiler-cc/dotc/ast/tpd.scala deleted file mode 100644 index f778824a18d3..000000000000 --- a/tests/pos-with-compiler-cc/dotc/ast/tpd.scala +++ /dev/null @@ -1,1546 +0,0 @@ -package dotty.tools -package dotc -package ast - -import dotty.tools.dotc.transform.{ExplicitOuter, Erasure} -import typer.ProtoTypes -import transform.SymUtils._ -import transform.TypeUtils._ -import core._ -import Scopes.newScope -import util.Spans._, Types._, Contexts._, Constants._, Names._, Flags._, NameOps._ -import Symbols._, StdNames._, Annotations._, Trees._, Symbols._ -import Decorators._, DenotTransformers._ -import collection.{immutable, mutable} -import util.{Property, SourceFile} -import NameKinds.{TempResultName, OuterSelectName} -import typer.ConstFold - -import scala.annotation.tailrec -import scala.collection.mutable.ListBuffer -import language.experimental.pureFunctions - -/** Some creators for typed trees */ -object tpd extends Trees.Instance[Type] with TypedTreeInfo { - - private def ta(using Context) = ctx.typeAssigner - - def Ident(tp: NamedType)(using Context): Ident = - ta.assignType(untpd.Ident(tp.name), tp) - - def Select(qualifier: Tree, name: Name)(using Context): Select = - ta.assignType(untpd.Select(qualifier, name), qualifier) - - def Select(qualifier: Tree, tp: NamedType)(using Context): Select = - untpd.Select(qualifier, tp.name).withType(tp) - - def This(cls: ClassSymbol)(using Context): This = - untpd.This(untpd.Ident(cls.name)).withType(cls.thisType) - - def Super(qual: Tree, mix: untpd.Ident, mixinClass: Symbol)(using Context): Super = - ta.assignType(untpd.Super(qual, mix), qual, mixinClass) - - def Super(qual: Tree, mixName: TypeName, mixinClass: Symbol = NoSymbol)(using Context): Super = - Super(qual, if (mixName.isEmpty) untpd.EmptyTypeIdent else untpd.Ident(mixName), mixinClass) - - def Apply(fn: Tree, args: List[Tree])(using Context): Apply = fn match - case Block(Nil, expr) => - Apply(expr, args) - case _: RefTree | _: GenericApply | _: Inlined | _: Hole => - ta.assignType(untpd.Apply(fn, args), fn, args) - - def TypeApply(fn: Tree, args: List[Tree])(using Context): TypeApply = fn match - case Block(Nil, expr) => - TypeApply(expr, args) - case _: RefTree | _: GenericApply => - ta.assignType(untpd.TypeApply(fn, args), fn, args) - - def Literal(const: Constant)(using Context): Literal = - ta.assignType(untpd.Literal(const)) - - def unitLiteral(using Context): Literal = - Literal(Constant(())) - - def nullLiteral(using Context): Literal = - Literal(Constant(null)) - - def New(tpt: Tree)(using Context): New = - ta.assignType(untpd.New(tpt), tpt) - - def New(tp: Type)(using Context): New = New(TypeTree(tp)) - - def Typed(expr: Tree, tpt: Tree)(using Context): Typed = - ta.assignType(untpd.Typed(expr, tpt), tpt) - - def NamedArg(name: Name, arg: Tree)(using Context): NamedArg = - ta.assignType(untpd.NamedArg(name, arg), arg) - - def Assign(lhs: Tree, rhs: Tree)(using Context): Assign = - 
ta.assignType(untpd.Assign(lhs, rhs)) - - def Block(stats: List[Tree], expr: Tree)(using Context): Block = - ta.assignType(untpd.Block(stats, expr), stats, expr) - - /** Join `stats` in front of `expr` creating a new block if necessary */ - def seq(stats: List[Tree], expr: Tree)(using Context): Tree = - if (stats.isEmpty) expr - else expr match { - case Block(_, _: Closure) => - Block(stats, expr) // leave closures in their own block - case Block(estats, eexpr) => - cpy.Block(expr)(stats ::: estats, eexpr).withType(ta.avoidingType(eexpr, stats)) - case _ => - Block(stats, expr) - } - - def If(cond: Tree, thenp: Tree, elsep: Tree)(using Context): If = - ta.assignType(untpd.If(cond, thenp, elsep), thenp, elsep) - - def InlineIf(cond: Tree, thenp: Tree, elsep: Tree)(using Context): If = - ta.assignType(untpd.InlineIf(cond, thenp, elsep), thenp, elsep) - - def Closure(env: List[Tree], meth: Tree, tpt: Tree)(using Context): Closure = - ta.assignType(untpd.Closure(env, meth, tpt), meth, tpt) - - /** A function def - * - * vparams => expr - * - * gets expanded to - * - * { def $anonfun(vparams) = expr; Closure($anonfun) } - * - * where the closure's type is the target type of the expression (FunctionN, unless - * otherwise specified). - */ - def Closure(meth: TermSymbol, rhsFn: List[List[Tree]] => Tree, targs: List[Tree] = Nil, targetType: Type = NoType)(using Context): Block = { - val targetTpt = if (targetType.exists) TypeTree(targetType) else EmptyTree - val call = - if (targs.isEmpty) Ident(TermRef(NoPrefix, meth)) - else TypeApply(Ident(TermRef(NoPrefix, meth)), targs) - Block( - DefDef(meth, rhsFn) :: Nil, - Closure(Nil, call, targetTpt)) - } - - /** A closure whose anonymous function has the given method type */ - def Lambda(tpe: MethodType, rhsFn: List[Tree] => Tree)(using Context): Block = { - val meth = newAnonFun(ctx.owner, tpe) - Closure(meth, tss => rhsFn(tss.head).changeOwner(ctx.owner, meth)) - } - - def CaseDef(pat: Tree, guard: Tree, body: Tree)(using Context): CaseDef = - ta.assignType(untpd.CaseDef(pat, guard, body), pat, body) - - def Match(selector: Tree, cases: List[CaseDef])(using Context): Match = - ta.assignType(untpd.Match(selector, cases), selector, cases) - - def InlineMatch(selector: Tree, cases: List[CaseDef])(using Context): Match = - ta.assignType(untpd.InlineMatch(selector, cases), selector, cases) - - def Labeled(bind: Bind, expr: Tree)(using Context): Labeled = - ta.assignType(untpd.Labeled(bind, expr)) - - def Labeled(sym: TermSymbol, expr: Tree)(using Context): Labeled = - Labeled(Bind(sym, EmptyTree), expr) - - def Return(expr: Tree, from: Tree)(using Context): Return = - ta.assignType(untpd.Return(expr, from)) - - def Return(expr: Tree, from: Symbol)(using Context): Return = - Return(expr, Ident(from.termRef)) - - def WhileDo(cond: Tree, body: Tree)(using Context): WhileDo = - ta.assignType(untpd.WhileDo(cond, body)) - - def Try(block: Tree, cases: List[CaseDef], finalizer: Tree)(using Context): Try = - ta.assignType(untpd.Try(block, cases, finalizer), block, cases) - - def SeqLiteral(elems: List[Tree], elemtpt: Tree)(using Context): SeqLiteral = - ta.assignType(untpd.SeqLiteral(elems, elemtpt), elems, elemtpt) - - def JavaSeqLiteral(elems: List[Tree], elemtpt: Tree)(using Context): JavaSeqLiteral = - ta.assignType(untpd.JavaSeqLiteral(elems, elemtpt), elems, elemtpt).asInstanceOf[JavaSeqLiteral] - - def Inlined(call: Tree, bindings: List[MemberDef], expansion: Tree)(using Context): Inlined = - ta.assignType(untpd.Inlined(call, bindings, expansion), 
bindings, expansion) - - def TypeTree(tp: Type, inferred: Boolean = false)(using Context): TypeTree = - (if inferred then untpd.InferredTypeTree() else untpd.TypeTree()).withType(tp) - - def SingletonTypeTree(ref: Tree)(using Context): SingletonTypeTree = - ta.assignType(untpd.SingletonTypeTree(ref), ref) - - def RefinedTypeTree(parent: Tree, refinements: List[Tree], refineCls: ClassSymbol)(using Context): Tree = - ta.assignType(untpd.RefinedTypeTree(parent, refinements), parent, refinements, refineCls) - - def AppliedTypeTree(tycon: Tree, args: List[Tree])(using Context): AppliedTypeTree = - ta.assignType(untpd.AppliedTypeTree(tycon, args), tycon, args) - - def ByNameTypeTree(result: Tree)(using Context): ByNameTypeTree = - ta.assignType(untpd.ByNameTypeTree(result), result) - - def LambdaTypeTree(tparams: List[TypeDef], body: Tree)(using Context): LambdaTypeTree = - ta.assignType(untpd.LambdaTypeTree(tparams, body), tparams, body) - - def MatchTypeTree(bound: Tree, selector: Tree, cases: List[CaseDef])(using Context): MatchTypeTree = - ta.assignType(untpd.MatchTypeTree(bound, selector, cases), bound, selector, cases) - - def TypeBoundsTree(lo: Tree, hi: Tree, alias: Tree = EmptyTree)(using Context): TypeBoundsTree = - ta.assignType(untpd.TypeBoundsTree(lo, hi, alias), lo, hi, alias) - - def Bind(sym: Symbol, body: Tree)(using Context): Bind = - ta.assignType(untpd.Bind(sym.name, body), sym) - - /** A pattern corresponding to `sym: tpe` */ - def BindTyped(sym: TermSymbol, tpe: Type)(using Context): Bind = - Bind(sym, Typed(Underscore(tpe), TypeTree(tpe))) - - def Alternative(trees: List[Tree])(using Context): Alternative = - ta.assignType(untpd.Alternative(trees), trees) - - def UnApply(fun: Tree, implicits: List[Tree], patterns: List[Tree], proto: Type)(using Context): UnApply = { - assert(fun.isInstanceOf[RefTree] || fun.isInstanceOf[GenericApply]) - ta.assignType(untpd.UnApply(fun, implicits, patterns), proto) - } - - def ValDef(sym: TermSymbol, rhs: LazyTree = EmptyTree, inferred: Boolean = false)(using Context): ValDef = - ta.assignType(untpd.ValDef(sym.name, TypeTree(sym.info, inferred), rhs), sym) - - def SyntheticValDef(name: TermName, rhs: Tree, flags: FlagSet = EmptyFlags)(using Context): ValDef = - ValDef(newSymbol(ctx.owner, name, Synthetic | flags, rhs.tpe.widen, coord = rhs.span), rhs) - - def DefDef(sym: TermSymbol, paramss: List[List[Symbol]], - resultType: Type, rhs: Tree)(using Context): DefDef = - sym.setParamss(paramss) - ta.assignType( - untpd.DefDef( - sym.name, - paramss.map { - case TypeSymbols(params) => params.map(param => TypeDef(param).withSpan(param.span)) - case TermSymbols(params) => params.map(param => ValDef(param).withSpan(param.span)) - case _ => unreachable() - }, - TypeTree(resultType), - rhs), - sym) - - def DefDef(sym: TermSymbol, rhs: Tree = EmptyTree)(using Context): DefDef = - ta.assignType(DefDef(sym, Function.const(rhs) _), sym) - - /** A DefDef with given method symbol `sym`. - * @rhsFn A function from parameter references - * to the method's right-hand side. - * Parameter symbols are taken from the `rawParamss` field of `sym`, or - * are freshly generated if `rawParamss` is empty. 
- */ - def DefDef(sym: TermSymbol, rhsFn: List[List[Tree]] => Tree)(using Context): DefDef = - - // Map method type `tp` with remaining parameters stored in rawParamss to - // final result type and all (given or synthesized) parameters - def recur(tp: Type, remaining: List[List[Symbol]]): (Type, List[List[Symbol]]) = tp match - case tp: PolyType => - val (tparams: List[TypeSymbol], remaining1) = remaining match - case tparams :: remaining1 => - assert(tparams.hasSameLengthAs(tp.paramNames) && tparams.head.isType) - (tparams.asInstanceOf[List[TypeSymbol]], remaining1) - case nil => - (newTypeParams(sym, tp.paramNames, EmptyFlags, tp.instantiateParamInfos(_)), Nil) - val (rtp, paramss) = recur(tp.instantiate(tparams.map(_.typeRef)), remaining1) - (rtp, tparams :: paramss) - case tp: MethodType => - val isParamDependent = tp.isParamDependent - val previousParamRefs: ListBuffer[TermRef] = - // It is ok to assign `null` here. - // If `isParamDependent == false`, the value of `previousParamRefs` is not used. - if isParamDependent then mutable.ListBuffer[TermRef]() else (null: ListBuffer[TermRef] | Null).uncheckedNN - - def valueParam(name: TermName, origInfo: Type): TermSymbol = - val maybeImplicit = - if tp.isContextualMethod then Given - else if tp.isImplicitMethod then Implicit - else EmptyFlags - val maybeErased = if tp.isErasedMethod then Erased else EmptyFlags - - def makeSym(info: Type) = newSymbol(sym, name, TermParam | maybeImplicit | maybeErased, info, coord = sym.coord) - - if isParamDependent then - val sym = makeSym(origInfo.substParams(tp, previousParamRefs.toList)) - previousParamRefs += sym.termRef - sym - else makeSym(origInfo) - end valueParam - - val (vparams: List[TermSymbol], remaining1) = - if tp.paramNames.isEmpty then (Nil, remaining) - else remaining match - case vparams :: remaining1 => - assert(vparams.hasSameLengthAs(tp.paramNames) && vparams.head.isTerm) - (vparams.asInstanceOf[List[TermSymbol]], remaining1) - case nil => - (tp.paramNames.lazyZip(tp.paramInfos).map(valueParam), Nil) - val (rtp, paramss) = recur(tp.instantiate(vparams.map(_.termRef)), remaining1) - (rtp, vparams :: paramss) - case _ => - assert(remaining.isEmpty) - (tp.widenExpr, Nil) - end recur - - val (rtp, paramss) = recur(sym.info, sym.rawParamss) - DefDef(sym, paramss, rtp, rhsFn(paramss.nestedMap(ref))) - end DefDef - - def TypeDef(sym: TypeSymbol)(using Context): TypeDef = - ta.assignType(untpd.TypeDef(sym.name, TypeTree(sym.info)), sym) - - def ClassDef(cls: ClassSymbol, constr: DefDef, body: List[Tree], superArgs: List[Tree] = Nil)(using Context): TypeDef = { - val firstParent :: otherParents = cls.info.parents: @unchecked - val superRef = - if (cls.is(Trait)) TypeTree(firstParent) - else { - def isApplicable(ctpe: Type): Boolean = ctpe match { - case ctpe: PolyType => - isApplicable(ctpe.instantiate(firstParent.argTypes)) - case ctpe: MethodType => - (superArgs corresponds ctpe.paramInfos)(_.tpe <:< _) - case _ => - false - } - val constr = firstParent.decl(nme.CONSTRUCTOR).suchThat(constr => isApplicable(constr.info)) - New(firstParent, constr.symbol.asTerm, superArgs) - } - ClassDefWithParents(cls, constr, superRef :: otherParents.map(TypeTree(_)), body) - } - - def ClassDefWithParents(cls: ClassSymbol, constr: DefDef, parents: List[Tree], body: List[Tree])(using Context): TypeDef = { - val selfType = - if (cls.classInfo.selfInfo ne NoType) ValDef(newSelfSym(cls)) - else EmptyValDef - def isOwnTypeParam(stat: Tree) = - stat.symbol.is(TypeParam) && stat.symbol.owner == cls - val 
bodyTypeParams = body filter isOwnTypeParam map (_.symbol) - val newTypeParams = - for (tparam <- cls.typeParams if !(bodyTypeParams contains tparam)) - yield TypeDef(tparam) - val findLocalDummy = FindLocalDummyAccumulator(cls) - val localDummy = body.foldLeft(NoSymbol: Symbol)(findLocalDummy.apply) - .orElse(newLocalDummy(cls)) - val impl = untpd.Template(constr, parents, Nil, selfType, newTypeParams ++ body) - .withType(localDummy.termRef) - ta.assignType(untpd.TypeDef(cls.name, impl), cls) - } - - /** An anonymous class - * - * new parents { forwarders } - * - * where `forwarders` contains forwarders for all functions in `fns`. - * @param parents a non-empty list of class types - * @param fns a non-empty of functions for which forwarders should be defined in the class. - * The class has the same owner as the first function in `fns`. - * Its position is the union of all functions in `fns`. - */ - def AnonClass(parents: List[Type], fns: List[TermSymbol], methNames: List[TermName])(using Context): Block = { - AnonClass(fns.head.owner, parents, fns.map(_.span).reduceLeft(_ union _)) { cls => - def forwarder(fn: TermSymbol, name: TermName) = { - val fwdMeth = fn.copy(cls, name, Synthetic | Method | Final).entered.asTerm - for overridden <- fwdMeth.allOverriddenSymbols do - if overridden.is(Extension) then fwdMeth.setFlag(Extension) - if !overridden.is(Deferred) then fwdMeth.setFlag(Override) - DefDef(fwdMeth, ref(fn).appliedToArgss(_)) - } - fns.lazyZip(methNames).map(forwarder) - } - } - - /** An anonymous class - * - * new parents { body } - * - * with the specified owner and position. - */ - def AnonClass(owner: Symbol, parents: List[Type], coord: Coord)(body: ClassSymbol => List[Tree])(using Context): Block = - val parents1 = - if (parents.head.classSymbol.is(Trait)) { - val head = parents.head.parents.head - if (head.isRef(defn.AnyClass)) defn.AnyRefType :: parents else head :: parents - } - else parents - val cls = newNormalizedClassSymbol(owner, tpnme.ANON_CLASS, Synthetic | Final, parents1, coord = coord) - val constr = newConstructor(cls, Synthetic, Nil, Nil).entered - val cdef = ClassDef(cls, DefDef(constr), body(cls)) - Block(cdef :: Nil, New(cls.typeRef, Nil)) - - def Import(expr: Tree, selectors: List[untpd.ImportSelector])(using Context): Import = - ta.assignType(untpd.Import(expr, selectors), newImportSymbol(ctx.owner, expr)) - - def Export(expr: Tree, selectors: List[untpd.ImportSelector])(using Context): Export = - ta.assignType(untpd.Export(expr, selectors)) - - def PackageDef(pid: RefTree, stats: List[Tree])(using Context): PackageDef = - ta.assignType(untpd.PackageDef(pid, stats), pid) - - def Annotated(arg: Tree, annot: Tree)(using Context): Annotated = - ta.assignType(untpd.Annotated(arg, annot), arg, annot) - - def Throw(expr: Tree)(using Context): Tree = - ref(defn.throwMethod).appliedTo(expr) - - def Hole(isTermHole: Boolean, idx: Int, args: List[Tree], content: Tree, tpt: Tree)(using Context): Hole = - ta.assignType(untpd.Hole(isTermHole, idx, args, content, tpt), tpt) - - // ------ Making references ------------------------------------------------------ - - def prefixIsElidable(tp: NamedType)(using Context): Boolean = { - val typeIsElidable = tp.prefix match { - case pre: ThisType => - tp.isType || - pre.cls.isStaticOwner || - tp.symbol.isParamOrAccessor && !pre.cls.is(Trait) && ctx.owner.enclosingClass == pre.cls - // was ctx.owner.enclosingClass.derivesFrom(pre.cls) which was not tight enough - // and was spuriously triggered in case inner class would inherit 
from outer one - // eg anonymous TypeMap inside TypeMap.andThen - case pre: TermRef => - pre.symbol.is(Module) && pre.symbol.isStatic - case pre => - pre `eq` NoPrefix - } - typeIsElidable || - tp.symbol.is(JavaStatic) || - tp.symbol.hasAnnotation(defn.ScalaStaticAnnot) - } - - def needsSelect(tp: Type)(using Context): Boolean = tp match { - case tp: TermRef => !prefixIsElidable(tp) - case _ => false - } - - /** A tree representing the same reference as the given type */ - def ref(tp: NamedType, needLoad: Boolean = true)(using Context): Tree = - if (tp.isType) TypeTree(tp) - else if (prefixIsElidable(tp)) Ident(tp) - else if (tp.symbol.is(Module) && ctx.owner.isContainedIn(tp.symbol.moduleClass)) - followOuterLinks(This(tp.symbol.moduleClass.asClass)) - else if (tp.symbol hasAnnotation defn.ScalaStaticAnnot) - Ident(tp) - else - val pre = tp.prefix - if (pre.isSingleton) followOuterLinks(singleton(pre.dealias, needLoad)).select(tp) - else - val res = Select(TypeTree(pre), tp) - if needLoad && !res.symbol.isStatic then - throw TypeError(em"cannot establish a reference to $res") - res - - def ref(sym: Symbol)(using Context): Tree = - ref(NamedType(sym.owner.thisType, sym.name, sym.denot)) - - private def followOuterLinks(t: Tree)(using Context) = t match { - case t: This if ctx.erasedTypes && !(t.symbol == ctx.owner.enclosingClass || t.symbol.isStaticOwner) => - // after erasure outer paths should be respected - ExplicitOuter.OuterOps(ctx.detach).path(toCls = t.tpe.classSymbol) - case t => - t - } - - def singleton(tp: Type, needLoad: Boolean = true)(using Context): Tree = tp.dealias match { - case tp: TermRef => ref(tp, needLoad) - case tp: ThisType => This(tp.cls) - case tp: SkolemType => singleton(tp.narrow, needLoad) - case SuperType(qual, _) => singleton(qual, needLoad) - case ConstantType(value) => Literal(value) - } - - /** A path that corresponds to the given type `tp`. Error if `tp` is not a refinement - * of an addressable singleton type. - */ - def pathFor(tp: Type)(using Context): Tree = { - def recur(tp: Type): Tree = tp match { - case tp: NamedType => - tp.info match { - case TypeAlias(alias) => recur(alias) - case _: TypeBounds => EmptyTree - case _ => singleton(tp) - } - case tp: TypeProxy => recur(tp.superType) - case _ => EmptyTree - } - recur(tp).orElse { - report.error(em"$tp is not an addressable singleton type") - TypeTree(tp) - } - } - - /** A tree representing a `newXYZArray` operation of the right - * kind for the given element type in `elemTpe`. No type arguments or - * `length` arguments are given. - */ - def newArray(elemTpe: Type, returnTpe: Type, span: Span, dims: JavaSeqLiteral)(using Context): Tree = { - val elemClass = elemTpe.classSymbol - def newArr = - ref(defn.DottyArraysModule).select(defn.newArrayMethod).withSpan(span) - - if (!ctx.erasedTypes) { - assert(!TypeErasure.isGeneric(elemTpe), elemTpe) //needs to be done during typer. 
See Applications.convertNewGenericArray - newArr.appliedToTypeTrees(TypeTree(returnTpe) :: Nil).appliedToTermArgs(clsOf(elemTpe) :: clsOf(returnTpe) :: dims :: Nil).withSpan(span) - } - else // after erasure - newArr.appliedToTermArgs(clsOf(elemTpe) :: clsOf(returnTpe) :: dims :: Nil).withSpan(span) - } - - /** The wrapped array method name for an array of type elemtp */ - def wrapArrayMethodName(elemtp: Type)(using Context): TermName = { - val elemCls = elemtp.classSymbol - if (elemCls.isPrimitiveValueClass) nme.wrapXArray(elemCls.name) - else if (elemCls.derivesFrom(defn.ObjectClass) && !elemCls.isNotRuntimeClass) nme.wrapRefArray - else nme.genericWrapArray - } - - /** A tree representing a `wrapXYZArray(tree)` operation of the right - * kind for the given element type in `elemTpe`. - */ - def wrapArray(tree: Tree, elemtp: Type)(using Context): Tree = - val wrapper = ref(defn.getWrapVarargsArrayModule) - .select(wrapArrayMethodName(elemtp)) - .appliedToTypes(if (elemtp.isPrimitiveValueType) Nil else elemtp :: Nil) - val actualElem = wrapper.tpe.widen.firstParamTypes.head - wrapper.appliedTo(tree.ensureConforms(actualElem)) - - // ------ Creating typed equivalents of trees that exist only in untyped form ------- - - /** new C(args), calling the primary constructor of C */ - def New(tp: Type, args: List[Tree])(using Context): Apply = - New(tp, tp.dealias.typeSymbol.primaryConstructor.asTerm, args) - - /** new C(args), calling given constructor `constr` of C */ - def New(tp: Type, constr: TermSymbol, args: List[Tree])(using Context): Apply = { - val targs = tp.argTypes - val tycon = tp.typeConstructor - New(tycon) - .select(TermRef(tycon, constr)) - .appliedToTypes(targs) - .appliedToTermArgs(args) - } - - /** An object def - * - * object obj extends parents { decls } - * - * gets expanded to - * - * val obj = new obj$ - * class obj$ extends parents { this: obj.type => decls } - * - * (The following no longer applies: - * What's interesting here is that the block is well typed - * (because class obj$ is hoistable), but the type of the `obj` val is - * not expressible. What needs to happen in general when - * inferring the type of a val from its RHS, is: if the type contains - * a class that has the val itself as owner, then that class - * is remapped to have the val's owner as owner. Remapping could be - * done by cloning the class with the new owner and substituting - * everywhere in the tree. We know that remapping is safe - * because the only way a local class can appear in the RHS of a val is - * by being hoisted outside of a block, and the necessary checks are - * done at this point already. - * - * On the other hand, for method result type inference, if the type of - * the RHS of a method contains a class owned by the method, this would be - * an error.)
- */ - def ModuleDef(sym: TermSymbol, body: List[Tree])(using Context): tpd.Thicket = { - val modcls = sym.moduleClass.asClass - val constrSym = modcls.primaryConstructor orElse newDefaultConstructor(modcls).entered - val constr = DefDef(constrSym.asTerm, EmptyTree) - val clsdef = ClassDef(modcls, constr, body) - val valdef = ValDef(sym, New(modcls.typeRef).select(constrSym).appliedToNone) - Thicket(valdef, clsdef) - } - - /** A `_` with given type */ - def Underscore(tp: Type)(using Context): Ident = untpd.Ident(nme.WILDCARD).withType(tp) - - def defaultValue(tpe: Type)(using Context): Tree = { - val tpw = tpe.widen - - if (tpw isRef defn.IntClass) Literal(Constant(0)) - else if (tpw isRef defn.LongClass) Literal(Constant(0L)) - else if (tpw isRef defn.BooleanClass) Literal(Constant(false)) - else if (tpw isRef defn.CharClass) Literal(Constant('\u0000')) - else if (tpw isRef defn.FloatClass) Literal(Constant(0f)) - else if (tpw isRef defn.DoubleClass) Literal(Constant(0d)) - else if (tpw isRef defn.ByteClass) Literal(Constant(0.toByte)) - else if (tpw isRef defn.ShortClass) Literal(Constant(0.toShort)) - else nullLiteral.select(defn.Any_asInstanceOf).appliedToType(tpe) - } - - private class FindLocalDummyAccumulator(cls: ClassSymbol)(using Context) extends TreeAccumulator[Symbol] { - def apply(sym: Symbol, tree: Tree)(using Context) = - if (sym.exists) sym - else if (tree.isDef) { - val owner = tree.symbol.owner - if (owner.isLocalDummy && owner.owner == cls) owner - else if (owner == cls) foldOver(sym, tree) - else sym - } - else foldOver(sym, tree) - } - - /** The owner to be used in a local context when traversing a tree */ - def localOwner(tree: Tree)(using Context): Symbol = - val sym = tree.symbol - (if sym.is(PackageVal) then sym.moduleClass else sym).orElse(ctx.owner) - - /** The local context to use when traversing trees */ - def localCtx(tree: Tree)(using Context): Context = ctx.withOwner(localOwner(tree)) - - override val cpy: TypedTreeCopier = // Type ascription needed to pick up any new members in TreeCopier (currently there are none) - TypedTreeCopier() - - val cpyBetweenPhases: TimeTravellingTreeCopier = TimeTravellingTreeCopier() - - class TypedTreeCopier extends TreeCopier { - def postProcess(tree: Tree, copied: untpd.Tree): copied.ThisTree[Type] = - copied.withTypeUnchecked(tree.tpe) - def postProcess(tree: Tree, copied: untpd.MemberDef): copied.ThisTree[Type] = - copied.withTypeUnchecked(tree.tpe) - - protected val untpdCpy = untpd.cpy - - override def Select(tree: Tree)(qualifier: Tree, name: Name)(using Context): Select = { - val tree1 = untpdCpy.Select(tree)(qualifier, name) - tree match { - case tree: Select if qualifier.tpe eq tree.qualifier.tpe => - tree1.withTypeUnchecked(tree.tpe) - case _ => - val tree2: Select = tree.tpe match { - case tpe: NamedType => - val qualType = qualifier.tpe.widenIfUnstable - if qualType.isExactlyNothing then tree1.withTypeUnchecked(tree.tpe) - else tree1.withType(tpe.derivedSelect(qualType)) - case _ => tree1.withTypeUnchecked(tree.tpe) - } - ConstFold.Select(tree2) - } - } - - override def Apply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): Apply = { - val tree1 = untpdCpy.Apply(tree)(fun, args) - tree match { - case tree: Apply - if (fun.tpe eq tree.fun.tpe) && sameTypes(args, tree.args) => - tree1.withTypeUnchecked(tree.tpe) - case _ => ta.assignType(tree1, fun, args) - } - } - - override def TypeApply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): TypeApply = { - val tree1 = untpdCpy.TypeApply(tree)(fun, 
args) - tree match { - case tree: TypeApply - if (fun.tpe eq tree.fun.tpe) && sameTypes(args, tree.args) => - tree1.withTypeUnchecked(tree.tpe) - case _ => ta.assignType(tree1, fun, args) - } - } - - override def Literal(tree: Tree)(const: Constant)(using Context): Literal = - ta.assignType(untpdCpy.Literal(tree)(const)) - - override def New(tree: Tree)(tpt: Tree)(using Context): New = - ta.assignType(untpdCpy.New(tree)(tpt), tpt) - - override def Typed(tree: Tree)(expr: Tree, tpt: Tree)(using Context): Typed = - ta.assignType(untpdCpy.Typed(tree)(expr, tpt), tpt) - - override def NamedArg(tree: Tree)(name: Name, arg: Tree)(using Context): NamedArg = - ta.assignType(untpdCpy.NamedArg(tree)(name, arg), arg) - - override def Assign(tree: Tree)(lhs: Tree, rhs: Tree)(using Context): Assign = - ta.assignType(untpdCpy.Assign(tree)(lhs, rhs)) - - override def Block(tree: Tree)(stats: List[Tree], expr: Tree)(using Context): Block = { - val tree1 = untpdCpy.Block(tree)(stats, expr) - tree match { - case tree: Block if (expr.tpe eq tree.expr.tpe) && (expr.tpe eq tree.tpe) => - // The last guard is a conservative check: if `tree.tpe` is different from `expr.tpe`, then - // it was computed from widening `expr.tpe`, and tree transforms might cause `expr.tpe.widen` - // to change even if `expr.tpe` itself didn't change, e.g: - // { val s = ...; s } - // If the type of `s` changed, then the type of the block might have changed, even though `expr.tpe` - // will still be `TermRef(NoPrefix, s)` - tree1.withTypeUnchecked(tree.tpe) - case _ => ta.assignType(tree1, stats, expr) - } - } - - override def If(tree: Tree)(cond: Tree, thenp: Tree, elsep: Tree)(using Context): If = { - val tree1 = untpdCpy.If(tree)(cond, thenp, elsep) - tree match { - case tree: If if (thenp.tpe eq tree.thenp.tpe) && (elsep.tpe eq tree.elsep.tpe) && - ((tree.tpe eq thenp.tpe) || (tree.tpe eq elsep.tpe)) => - // The last guard is a conservative check similar to the one done in `Block` above, - // if `tree.tpe` is not identical to the type of one of its branch, it might have been - // computed from the widened type of the branches, so the same reasoning than - // in `Block` applies. 
- tree1.withTypeUnchecked(tree.tpe) - case _ => ta.assignType(tree1, thenp, elsep) - } - } - - override def Closure(tree: Tree)(env: List[Tree], meth: Tree, tpt: Tree)(using Context): Closure = { - val tree1 = untpdCpy.Closure(tree)(env, meth, tpt) - tree match { - case tree: Closure if sameTypes(env, tree.env) && (meth.tpe eq tree.meth.tpe) && (tpt.tpe eq tree.tpt.tpe) => - tree1.withTypeUnchecked(tree.tpe) - case _ => ta.assignType(tree1, meth, tpt) - } - } - - override def Match(tree: Tree)(selector: Tree, cases: List[CaseDef])(using Context): Match = { - val tree1 = untpdCpy.Match(tree)(selector, cases) - tree match { - case tree: Match if sameTypes(cases, tree.cases) => tree1.withTypeUnchecked(tree.tpe) - case _ => ta.assignType(tree1, selector, cases) - } - } - - override def CaseDef(tree: Tree)(pat: Tree, guard: Tree, body: Tree)(using Context): CaseDef = { - val tree1 = untpdCpy.CaseDef(tree)(pat, guard, body) - tree match { - case tree: CaseDef if body.tpe eq tree.body.tpe => tree1.withTypeUnchecked(tree.tpe) - case _ => ta.assignType(tree1, pat, body) - } - } - - override def Labeled(tree: Tree)(bind: Bind, expr: Tree)(using Context): Labeled = - ta.assignType(untpdCpy.Labeled(tree)(bind, expr)) - - override def Return(tree: Tree)(expr: Tree, from: Tree)(using Context): Return = - ta.assignType(untpdCpy.Return(tree)(expr, from)) - - override def WhileDo(tree: Tree)(cond: Tree, body: Tree)(using Context): WhileDo = - ta.assignType(untpdCpy.WhileDo(tree)(cond, body)) - - override def Try(tree: Tree)(expr: Tree, cases: List[CaseDef], finalizer: Tree)(using Context): Try = { - val tree1 = untpdCpy.Try(tree)(expr, cases, finalizer) - tree match { - case tree: Try if (expr.tpe eq tree.expr.tpe) && sameTypes(cases, tree.cases) => tree1.withTypeUnchecked(tree.tpe) - case _ => ta.assignType(tree1, expr, cases) - } - } - - override def Inlined(tree: Tree)(call: Tree, bindings: List[MemberDef], expansion: Tree)(using Context): Inlined = { - val tree1 = untpdCpy.Inlined(tree)(call, bindings, expansion) - tree match { - case tree: Inlined if sameTypes(bindings, tree.bindings) && (expansion.tpe eq tree.expansion.tpe) => - tree1.withTypeUnchecked(tree.tpe) - case _ => ta.assignType(tree1, bindings, expansion) - } - } - - override def SeqLiteral(tree: Tree)(elems: List[Tree], elemtpt: Tree)(using Context): SeqLiteral = { - val tree1 = untpdCpy.SeqLiteral(tree)(elems, elemtpt) - tree match { - case tree: SeqLiteral - if sameTypes(elems, tree.elems) && (elemtpt.tpe eq tree.elemtpt.tpe) => - tree1.withTypeUnchecked(tree.tpe) - case _ => - ta.assignType(tree1, elems, elemtpt) - } - } - - override def Annotated(tree: Tree)(arg: Tree, annot: Tree)(using Context): Annotated = { - val tree1 = untpdCpy.Annotated(tree)(arg, annot) - tree match { - case tree: Annotated if (arg.tpe eq tree.arg.tpe) && (annot eq tree.annot) => tree1.withTypeUnchecked(tree.tpe) - case _ => ta.assignType(tree1, arg, annot) - } - } - - override def If(tree: If)(cond: Tree = tree.cond, thenp: Tree = tree.thenp, elsep: Tree = tree.elsep)(using Context): If = - If(tree: Tree)(cond, thenp, elsep) - override def Closure(tree: Closure)(env: List[Tree] = tree.env, meth: Tree = tree.meth, tpt: Tree = tree.tpt)(using Context): Closure = - Closure(tree: Tree)(env, meth, tpt) - override def CaseDef(tree: CaseDef)(pat: Tree = tree.pat, guard: Tree = tree.guard, body: Tree = tree.body)(using Context): CaseDef = - CaseDef(tree: Tree)(pat, guard, body) - override def Try(tree: Try)(expr: Tree = tree.expr, cases: List[CaseDef] = tree.cases, 
finalizer: Tree = tree.finalizer)(using Context): Try = - Try(tree: Tree)(expr, cases, finalizer) - } - - class TimeTravellingTreeCopier extends TypedTreeCopier { - override def Apply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): Apply = - tree match - case tree: Apply - if (tree.fun eq fun) && (tree.args eq args) - && tree.tpe.isInstanceOf[ConstantType] - && isPureExpr(tree) => tree - case _ => - ta.assignType(untpdCpy.Apply(tree)(fun, args), fun, args) - // Note: Reassigning the original type if `fun` and `args` have the same types as before - // does not work here in general: The computed type depends on the widened function type, not - // the function type itself. A tree transform may keep the function type the - // same but its widened type might change. - // However, we keep constant types of pure expressions. This uses the underlying assumptions - // that pure functions yielding a constant will not change in later phases. - - override def TypeApply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): TypeApply = - ta.assignType(untpdCpy.TypeApply(tree)(fun, args), fun, args) - // Same remark as for Apply - - override def Closure(tree: Tree)(env: List[Tree], meth: Tree, tpt: Tree)(using Context): Closure = - ta.assignType(untpdCpy.Closure(tree)(env, meth, tpt), meth, tpt) - - override def Closure(tree: Closure)(env: List[Tree] = tree.env, meth: Tree = tree.meth, tpt: Tree = tree.tpt)(using Context): Closure = - Closure(tree: Tree)(env, meth, tpt) - } - - override def skipTransform(tree: Tree)(using Context): Boolean = tree.tpe.isError - - implicit class TreeOps[ThisTree <: tpd.Tree](private val tree: ThisTree) extends AnyVal { - - def isValue(using Context): Boolean = - tree.isTerm && tree.tpe.widen.isValueType - - def isValueOrPattern(using Context): Boolean = - tree.isValue || tree.isPattern - - def isValueType: Boolean = - tree.isType && tree.tpe.isValueType - - def isInstantiation: Boolean = tree match { - case Apply(Select(New(_), nme.CONSTRUCTOR), _) => true - case _ => false - } - - def shallowFold[T](z: T)(op: (T, tpd.Tree) => T)(using Context): T = - ShallowFolder(op).apply(z, tree) - - def deepFold[T](z: T)(op: (T, tpd.Tree) => T)(using Context): T = - DeepFolder(op).apply(z, tree) - - def find[T](pred: (tpd.Tree) => Boolean)(using Context): Option[tpd.Tree] = - shallowFold[Option[tpd.Tree]](None)((accum, tree) => if (pred(tree)) Some(tree) else accum) - - def subst(from: List[Symbol], to: List[Symbol])(using Context): ThisTree = - TreeTypeMap(substFrom = from, substTo = to).apply(tree) - - /** Change owner from `from` to `to`. If `from` is a weak owner, also change its - * owner to `to`, and continue until a non-weak owner is reached. 
- */ - def changeOwner(from: Symbol, to: Symbol)(using Context): ThisTree = { - @tailrec def loop(from: Symbol, froms: List[Symbol], tos: List[Symbol]): ThisTree = - if (from.isWeakOwner && !from.owner.isClass) - loop(from.owner, from :: froms, to :: tos) - else - //println(i"change owner ${from :: froms}%, % ==> $tos of $tree") - TreeTypeMap(oldOwners = from :: froms, newOwners = tos).apply(tree) - if (from == to) tree else loop(from, Nil, to :: Nil) - } - - /** - * Set the owner of every definition in this tree which is not itself contained in this - * tree to be `newOwner` - */ - def changeNonLocalOwners(newOwner: Symbol)(using Context): Tree = { - val ownerAcc = new TreeAccumulator[immutable.Set[Symbol]] { - def apply(ss: immutable.Set[Symbol], tree: Tree)(using Context) = tree match { - case tree: DefTree => - val sym = tree.symbol - if sym.exists && !sym.owner.is(Package) then ss + sym.owner else ss - case _ => - foldOver(ss, tree) - } - } - val owners = ownerAcc(immutable.Set.empty[Symbol], tree).toList - val newOwners = List.fill(owners.size)(newOwner) - TreeTypeMap(oldOwners = owners, newOwners = newOwners).apply(tree) - } - - /** After phase `trans`, set the owner of every definition in this tree that was formerly - * owned by `from` to `to`. - */ - def changeOwnerAfter(from: Symbol, to: Symbol, trans: DenotTransformer)(using Context): ThisTree = - if (ctx.phase == trans.next) { - val traverser = new TreeTraverser { - def traverse(tree: Tree)(using Context) = tree match { - case tree: DefTree => - val sym = tree.symbol - val prevDenot = atPhase(trans)(sym.denot) - if (prevDenot.effectiveOwner == from.skipWeakOwner) { - val d = sym.copySymDenotation(owner = to) - d.installAfter(trans) - d.transformAfter(trans, d => if (d.owner eq from) d.copySymDenotation(owner = to) else d) - } - if (sym.isWeakOwner) traverseChildren(tree) - case _ => - traverseChildren(tree) - } - } - traverser.traverse(tree) - tree - } - else atPhase(trans.next)(changeOwnerAfter(from, to, trans)) - - /** A select node with the given selector name and a computed type */ - def select(name: Name)(using Context): Select = - Select(tree, name) - - /** A select node with the given selector name such that the designated - * member satisfies predicate `p`. Useful for disambiguating overloaded members. - */ - def select(name: Name, p: Symbol => Boolean)(using Context): Select = - select(tree.tpe.member(name).suchThat(p).symbol) - - /** A select node with the given type */ - def select(tp: NamedType)(using Context): Select = - untpd.Select(tree, tp.name).withType(tp) - - /** A select node that selects the given symbol. Note: Need to make sure this - * is in fact the symbol you would get when you select with the symbol's name, - * otherwise a data race may occur which would be flagged by -Yno-double-bindings. - */ - def select(sym: Symbol)(using Context): Select = { - val tp = - if (sym.isType) { - assert(!sym.is(TypeParam)) - TypeRef(tree.tpe, sym.asType) - } - else - TermRef(tree.tpe, sym.name.asTermName, sym.denot.asSeenFrom(tree.tpe)) - untpd.Select(tree, sym.name).withType(tp) - } - - /** A select node with the given selector name and signature and a computed type */ - def selectWithSig(name: Name, sig: Signature, target: Name)(using Context): Tree = - untpd.SelectWithSig(tree, name, sig).withType(tree.tpe.select(name.asTermName, sig, target)) - - /** A select node with selector name and signature taken from `sym`.
- * Note: Use this method instead of select(sym) if the referenced symbol - * might be overridden in the type of the qualifier prefix. See note - * on select(sym: Symbol). - */ - def selectWithSig(sym: Symbol)(using Context): Tree = - selectWithSig(sym.name, sym.signature, sym.targetName) - - /** A unary apply node with given argument: `tree(arg)` */ - def appliedTo(arg: Tree)(using Context): Apply = - appliedToTermArgs(arg :: Nil) - - /** An apply node with given arguments: `tree(arg, args0, ..., argsN)` */ - def appliedTo(arg: Tree, args: Tree*)(using Context): Apply = - appliedToTermArgs(arg :: args.toList) - - /** An apply node with given argument list `tree(args(0), ..., args(args.length - 1))` */ - def appliedToTermArgs(args: List[Tree])(using Context): Apply = - Apply(tree, args) - - /** An applied node that accepts only varargs as arguments */ - def appliedToVarargs(args: List[Tree], tpt: Tree)(using Context): Apply = - appliedTo(repeated(args, tpt)) - - /** An apply or type apply node with given argument list */ - def appliedToArgs(args: List[Tree])(using Context): GenericApply = args match - case arg :: args1 if arg.isType => TypeApply(tree, args) - case _ => Apply(tree, args) - - /** The current tree applied to given argument lists: - * `tree (argss(0)) ... (argss(argss.length -1))` - */ - def appliedToArgss(argss: List[List[Tree]])(using Context): Tree = - argss.foldLeft(tree: Tree)(_.appliedToArgs(_)) - - /** The current tree applied to (): `tree()` */ - def appliedToNone(using Context): Apply = Apply(tree, Nil) - - /** The current tree applied to given type argument: `tree[targ]` */ - def appliedToType(targ: Type)(using Context): Tree = - appliedToTypes(targ :: Nil) - - /** The current tree applied to given type arguments: `tree[targ0, ..., targN]` */ - def appliedToTypes(targs: List[Type])(using Context): Tree = - appliedToTypeTrees(targs map (TypeTree(_))) - - /** The current tree applied to given type argument: `tree[targ]` */ - def appliedToTypeTree(targ: Tree)(using Context): Tree = - appliedToTypeTrees(targ :: Nil) - - /** The current tree applied to given type argument list: `tree[targs(0), ..., targs(targs.length - 1)]` */ - def appliedToTypeTrees(targs: List[Tree])(using Context): Tree = - if targs.isEmpty then tree else TypeApply(tree, targs) - - /** Apply to `()` unless tree's widened type is parameterless */ - def ensureApplied(using Context): Tree = - if (tree.tpe.widen.isParameterless) tree else tree.appliedToNone - - /** `tree == that` */ - def equal(that: Tree)(using Context): Tree = - if (that.tpe.widen.isRef(defn.NothingClass)) - Literal(Constant(false)) - else - applyOverloaded(tree, nme.EQ, that :: Nil, Nil, defn.BooleanType) - - /** `tree.isInstanceOf[tp]`, with special treatment of singleton types */ - def isInstance(tp: Type)(using Context): Tree = tp.dealias match { - case ConstantType(c) if c.tag == StringTag => - singleton(tp).equal(tree) - case tp: SingletonType => - if tp.widen.derivesFrom(defn.ObjectClass) then - tree.ensureConforms(defn.ObjectType).select(defn.Object_eq).appliedTo(singleton(tp)) - else - singleton(tp).equal(tree) - case _ => - tree.select(defn.Any_isInstanceOf).appliedToType(tp) - } - - /** tree.asInstanceOf[`tp`] */ - def asInstance(tp: Type)(using Context): Tree = { - assert(tp.isValueType, i"bad cast: $tree.asInstanceOf[$tp]") - tree.select(defn.Any_asInstanceOf).appliedToType(tp) - } - - /** cast tree to `tp`, assuming no exception is raised, i.e the operation is pure */ - def cast(tp: Type)(using Context): Tree = 
cast(TypeTree(tp)) - - /** cast tree to `tp`, assuming no exception is raised, i.e. the operation is pure */ - def cast(tpt: TypeTree)(using Context): Tree = - assert(tpt.tpe.isValueType, i"bad cast: $tree.asInstanceOf[$tpt]") - tree.select(if (ctx.erasedTypes) defn.Any_asInstanceOf else defn.Any_typeCast) - .appliedToTypeTree(tpt) - - /** cast `tree` to `tp` (or its box/unbox/cast equivalent when after - * erasure and value and non-value types are mixed), - * unless tree's type already conforms to `tp`. - */ - def ensureConforms(tp: Type)(using Context): Tree = - if (tree.tpe <:< tp) tree - else if (!ctx.erasedTypes) cast(tp) - else Erasure.Boxing.adaptToType(tree, tp) - - /** `tree ne null` (might need a cast to be type correct) */ - def testNotNull(using Context): Tree = { - // If the receiver is of type `Nothing` or `Null`, add an ascription or cast - // so that the selection succeeds. - // e.g. `null.ne(null)` doesn't type, but `(null: AnyRef).ne(null)` does. - val receiver = - if tree.tpe.isBottomType then - if ctx.explicitNulls then tree.cast(defn.AnyRefType) - else Typed(tree, TypeTree(defn.AnyRefType)) - else tree.ensureConforms(defn.ObjectType) - // also need to cast the null literal to AnyRef in explicit nulls - val nullLit = if ctx.explicitNulls then nullLiteral.cast(defn.AnyRefType) else nullLiteral - receiver.select(defn.Object_ne).appliedTo(nullLit).withSpan(tree.span) - } - - /** If the initializer tree is `_`, the default value of its type, - * otherwise the tree itself. - */ - def wildcardToDefault(using Context): Tree = - if (isWildcardArg(tree)) defaultValue(tree.tpe) else tree - - /** `this && that`, for boolean trees `this`, `that` */ - def and(that: Tree)(using Context): Tree = - tree.select(defn.Boolean_&&).appliedTo(that) - - /** `this || that`, for boolean trees `this`, `that` */ - def or(that: Tree)(using Context): Tree = - tree.select(defn.Boolean_||).appliedTo(that) - - /** The translation of `tree = rhs`. - * This is either the tree as an assignment, or a setter call. - */ - def becomes(rhs: Tree)(using Context): Tree = { - val sym = tree.symbol - if (sym.is(Method)) { - val setter = sym.setter.orElse { - assert(sym.name.isSetterName && sym.info.firstParamTypes.nonEmpty, sym) - sym - } - val qual = tree match { - case id: Ident => desugarIdentPrefix(id) - case Select(qual, _) => qual - } - qual.select(setter).appliedTo(rhs) - } - else Assign(tree, rhs) - } - - /** tree @annot - * - * works differently for type trees and term trees - */ - def annotated(annot: Tree)(using Context): Tree = - if (tree.isTerm) - Typed(tree, TypeTree(AnnotatedType(tree.tpe.widenIfUnstable, Annotation(annot)))) - else - Annotated(tree, annot) - - /** A synthetic select that will be turned into an outer path by ExplicitOuter. - * @param levels How many outer levels to select - * @param tp The type of the destination of the outer path.
- */ - def outerSelect(levels: Int, tp: Type)(using Context): Tree = - untpd.Select(tree, OuterSelectName(EmptyTermName, levels)).withType(SkolemType(tp)) - - /** Replace Inlined nodes and InlineProxy references to underlying arguments */ - def underlyingArgument(using Context): Tree = { - val mapToUnderlying = new MapToUnderlying { - /** Should get the rhs of this binding - * Returns true if the symbol is a val or def generated by eta-expansion/inline - */ - override protected def skipLocal(sym: Symbol): Boolean = - sym.isOneOf(InlineProxy | Synthetic) - } - mapToUnderlying.transform(tree) - } - - /** Replace Ident nodes references to the underlying tree that defined them */ - def underlying(using Context): Tree = MapToUnderlying().transform(tree) - - // --- Higher order traversal methods ------------------------------- - - /** Apply `f` to each subtree of this tree */ - def foreachSubTree(f: Tree => Unit)(using Context): Unit = { - val traverser = new TreeTraverser { - def traverse(tree: Tree)(using Context) = foldOver(f(tree), tree) - } - traverser.traverse(tree) - } - - /** Is there a subtree of this tree that satisfies predicate `p`? */ - def existsSubTree(p: Tree => Boolean)(using Context): Boolean = { - val acc = new TreeAccumulator[Boolean] { - def apply(x: Boolean, t: Tree)(using Context) = x || p(t) || foldOver(x, t) - } - acc(false, tree) - } - - /** All subtrees of this tree that satisfy predicate `p`. */ - def filterSubTrees(f: Tree => Boolean)(using Context): List[Tree] = { - val buf = mutable.ListBuffer[Tree]() - foreachSubTree { tree => if (f(tree)) buf += tree } - buf.toList - } - - /** Set this tree as the `defTree` of its symbol and return this tree */ - def setDefTree(using Context): ThisTree = { - val sym = tree.symbol - if (sym.exists) sym.defTree = tree - tree - } - - def etaExpandCFT(using Context): Tree = - def expand(target: Tree, tp: Type)(using Context): Tree = tp match - case defn.ContextFunctionType(argTypes, resType, isErased) => - val anonFun = newAnonFun( - ctx.owner, - MethodType.companion(isContextual = true, isErased = isErased)(argTypes, resType), - coord = ctx.owner.coord) - def lambdaBody(refss: List[List[Tree]]) = - expand(target.select(nme.apply).appliedToArgss(refss), resType)( - using ctx.withOwner(anonFun)) - Closure(anonFun, lambdaBody) - case _ => - target - expand(tree, tree.tpe.widen) - } - - inline val MapRecursionLimit = 10 - - extension (trees: List[Tree]) - - /** A map that expands to a recursive function. It's equivalent to - * - * flatten(trees.mapConserve(op)) - * - * and falls back to it after `MaxRecursionLimit` recursions. - * Before that it uses a simpler method that uses stackspace - * instead of heap. - * Note `op` is duplicated in the generated code, so it should be - * kept small. - */ - inline def mapInline(inline op: Tree => Tree): List[Tree] = - def recur(trees: List[Tree], count: Int): List[Tree] = - if count > MapRecursionLimit then - // use a slower implementation that avoids stack overflows - flatten(trees.mapConserve(op)) - else trees match - case tree :: rest => - val tree1 = op(tree) - val rest1 = recur(rest, count + 1) - if (tree1 eq tree) && (rest1 eq rest) then trees - else tree1 match - case Thicket(elems1) => elems1 ::: rest1 - case _ => tree1 :: rest1 - case nil => nil - recur(trees, 0) - - /** Transform statements while maintaining import contexts and expression contexts - * in the same way as Typer does. 
The code addresses additional concerns: - * - be tail-recursive where possible - * - don't re-allocate trees where nothing has changed - */ - inline def mapStatements[T]( - exprOwner: Symbol, - inline op: Tree => Context ?=> Tree, - inline wrapResult: List[Tree] => Context ?=> T)(using Context): T = - @tailrec - def loop(mapped: mutable.ListBuffer[Tree] | Null, unchanged: List[Tree], pending: List[Tree])(using Context): T = - pending match - case stat :: rest => - val statCtx = stat match - case _: DefTree | _: ImportOrExport => ctx - case _ => ctx.exprContext(stat, exprOwner) - val stat1 = op(stat)(using statCtx) - val restCtx = stat match - case stat: Import => ctx.importContext(stat, stat.symbol) - case _ => ctx - if stat1 eq stat then - loop(mapped, unchanged, rest)(using restCtx) - else - val buf = if mapped == null then new mutable.ListBuffer[Tree] else mapped - var xc = unchanged - while xc ne pending do - buf += xc.head - xc = xc.tail - stat1 match - case Thicket(stats1) => buf ++= stats1 - case _ => buf += stat1 - loop(buf, rest, rest)(using restCtx) - case nil => - wrapResult( - if mapped == null then unchanged - else mapped.prependToList(unchanged)) - - loop(null, trees, trees) - end mapStatements - end extension - - /** A treemap that generates the same contexts as the original typer for statements. - * This means: - * - statements that are not definitions get the exprOwner as owner - * - imports are reflected in the contexts of subsequent statements - */ - class TreeMapWithPreciseStatContexts(cpy: TreeCopier = tpd.cpy) extends TreeMap(cpy): - def transformStats[T](trees: List[Tree], exprOwner: Symbol, wrapResult: List[Tree] => Context ?=> T)(using Context): T = - trees.mapStatements(exprOwner, transform(_), wrapResult) - final override def transformStats(trees: List[Tree], exprOwner: Symbol)(using Context): List[Tree] = - transformStats(trees, exprOwner, sameStats) - override def transformBlock(blk: Block)(using Context) = - transformStats(blk.stats, ctx.owner, - stats1 => ctx ?=> cpy.Block(blk)(stats1, transform(blk.expr))) - - val sameStats: List[Tree] => Context ?=> List[Tree] = stats => stats - - /** Map Inlined nodes, NamedArgs, Blocks with no statements and local references to underlying arguments. - * Also drops Inline and Block with no statements. - */ - private class MapToUnderlying extends TreeMap { - override def transform(tree: Tree)(using Context): Tree = tree match { - case tree: Ident if isBinding(tree.symbol) && skipLocal(tree.symbol) => - tree.symbol.defTree match { - case defTree: ValOrDefDef => - val rhs = defTree.rhs - assert(!rhs.isEmpty) - transform(rhs) - case _ => tree - } - case Inlined(_, Nil, arg) => transform(arg) - case Block(Nil, arg) => transform(arg) - case NamedArg(_, arg) => transform(arg) - case tree => super.transform(tree) - } - - /** Should get the rhs of this binding */ - protected def skipLocal(sym: Symbol): Boolean = true - - /** Is this a symbol that of a local val or parameterless def for which we could get the rhs */ - private def isBinding(sym: Symbol)(using Context): Boolean = - sym.isTerm && !sym.is(Param) && !sym.owner.isClass && - !(sym.is(Method) && sym.info.isInstanceOf[MethodOrPoly]) // if is a method it is parameterless - } - - extension (xs: List[tpd.Tree]) - def tpes: List[Type] = xs match { - case x :: xs1 => x.tpe :: xs1.tpes - case nil => Nil - } - - /** A trait for loaders that compute trees. Currently implemented just by DottyUnpickler. 
*/ - trait TreeProvider { - protected def computeRootTrees(using Context): List[Tree] - - private var myTrees: List[Tree] | Null = _ - - /** Get trees defined by this provider. Cache them if -Yretain-trees is set. */ - def rootTrees(using Context): List[Tree] = - if (ctx.settings.YretainTrees.value) { - if (myTrees == null) myTrees = computeRootTrees - myTrees.uncheckedNN - } - else computeRootTrees - - /** Get first tree defined by this provider, or EmptyTree if none exists */ - def tree(using Context): Tree = - rootTrees.headOption.getOrElse(EmptyTree) - - /** Is it possible that the tree to load contains a definition of or reference to `id`? */ - def mightContain(id: String)(using Context): Boolean = true - } - - // convert a numeric with a toXXX method - def primitiveConversion(tree: Tree, numericCls: Symbol)(using Context): Tree = { - val mname = "to".concat(numericCls.name) - val conversion = tree.tpe member(mname) - if (conversion.symbol.exists) - tree.select(conversion.symbol.termRef).ensureApplied - else if (tree.tpe.widen isRef numericCls) - tree - else { - report.warning(em"conversion from ${tree.tpe.widen} to ${numericCls.typeRef} will always fail at runtime.") - Throw(New(defn.ClassCastExceptionClass.typeRef, Nil)).withSpan(tree.span) - } - } - - /** A tree that corresponds to `Predef.classOf[$tp]` in source */ - def clsOf(tp: Type)(using Context): Tree = - if ctx.erasedTypes && !tp.isRef(defn.UnitClass) then - Literal(Constant(TypeErasure.erasure(tp))) - else - Literal(Constant(tp)) - - @tailrec - def sameTypes(trees: List[tpd.Tree], trees1: List[tpd.Tree]): Boolean = - if (trees.isEmpty) trees1.isEmpty - else if (trees1.isEmpty) trees.isEmpty - else (trees.head.tpe eq trees1.head.tpe) && sameTypes(trees.tail, trees1.tail) - - /** If `tree`'s purity level is less than `level`, let-bind it so that it gets evaluated - * only once. I.e.
produce a - * - * { val x = 'tree ; ~within('x) } - * - * instead of otherwise - * - * ~within('tree) - */ - def letBindUnless(level: TreeInfo.PurityLevel, tree: Tree)(within: Tree => Tree)(using Context): Tree = - if (exprPurity(tree) >= level) within(tree) - else { - val vdef = SyntheticValDef(TempResultName.fresh(), tree) - Block(vdef :: Nil, within(Ident(vdef.namedType))) - } - - /** Let bind `tree` unless `tree` is at least idempotent */ - def evalOnce(tree: Tree)(within: Tree => Tree)(using Context): Tree = - letBindUnless(TreeInfo.Idempotent, tree)(within) - - def runtimeCall(name: TermName, args: List[Tree])(using Context): Tree = - Ident(defn.ScalaRuntimeModule.requiredMethod(name).termRef).appliedToTermArgs(args) - - /** An extractor that pulls out type arguments */ - object MaybePoly: - def unapply(tree: Tree): Option[(Tree, List[Tree])] = tree match - case TypeApply(tree, targs) => Some(tree, targs) - case _ => Some(tree, Nil) - - object TypeArgs: - def unapply(ts: List[Tree]): Option[List[Tree]] = - if ts.nonEmpty && ts.head.isType then Some(ts) else None - - /** Split argument clauses into a leading type argument clause if it exists and - * remaining clauses - */ - def splitArgs(argss: List[List[Tree]]): (List[Tree], List[List[Tree]]) = argss match - case TypeArgs(targs) :: argss1 => (targs, argss1) - case _ => (Nil, argss) - - def joinArgs(targs: List[Tree], argss: List[List[Tree]]): List[List[Tree]] = - if targs.isEmpty then argss else targs :: argss - - /** A key to be used in a context property that tracks enclosing inlined calls */ - private val InlinedCalls = Property.Key[List[Tree]]() - - /** A key to be used in a context property that tracks the number of inlined trees */ - private val InlinedTrees = Property.Key[Counter]() - final class Counter { - var count: Int = 0 - } - - /** Record an enclosing inlined call. - * EmptyTree calls (for parameters) cancel the next-enclosing call in the list instead of being added to it. - * We assume parameters are never nested inside parameters. - */ - override def inlineContext(call: Tree)(using Context): Context = { - // We assume enclosingInlineds is already normalized, and only process the new call with the head. - val oldIC = enclosingInlineds - - val newIC = - if call.isEmpty then - oldIC match - case t1 :: ts2 => ts2 - case _ => oldIC - else - call :: oldIC - - val ctx1 = ctx.fresh.setProperty(InlinedCalls, newIC) - if oldIC.isEmpty then ctx1.setProperty(InlinedTrees, new Counter) else ctx1 - } - - /** All enclosing calls that are currently inlined, from innermost to outermost. - */ - def enclosingInlineds(using Context): List[Tree] = - ctx.property(InlinedCalls).getOrElse(Nil) - - /** Record inlined trees */ - def addInlinedTrees(n: Int)(using Context): Unit = - ctx.property(InlinedTrees).foreach(_.count += n) - - /** Check if the limit on the number of inlined trees has been reached */ - def reachedInlinedTreesLimit(using Context): Boolean = - ctx.property(InlinedTrees) match - case Some(c) => c.count > ctx.settings.XmaxInlinedTrees.value - case None => false - - /** The source file where the symbol of the `inline` method referred to by `call` - * is defined - */ - def sourceFile(call: Tree)(using Context): SourceFile = call.symbol.source - - /** Desugar identifier into a select node. Return the tree itself if not possible */ - def desugarIdent(tree: Ident)(using Context): RefTree = { - val qual = desugarIdentPrefix(tree) - if (qual.isEmpty) tree - else qual.select(tree.symbol) - } - - /** Recover identifier prefix (e.g. 
this) if it exists */ - def desugarIdentPrefix(tree: Ident)(using Context): Tree = tree.tpe match { - case TermRef(prefix: TermRef, _) => - prefix.info match - case mt: MethodType if mt.paramInfos.isEmpty && mt.resultType.typeSymbol.is(Module) => - ref(mt.resultType.typeSymbol.sourceModule) - case _ => - ref(prefix) - case TermRef(prefix: ThisType, _) => - This(prefix.cls) - case _ => - EmptyTree - } - - /** - * The symbols that are imported with `expr.name` - * - * @param expr The base of the import statement - * @param name The name that is being imported. - * @return All the symbols that would be imported with `expr.name`. - */ - def importedSymbols(expr: Tree, name: Name)(using Context): List[Symbol] = { - def lookup(name: Name): Symbol = expr.tpe.member(name).symbol - val symbols = - List(lookup(name.toTermName), - lookup(name.toTypeName), - lookup(name.moduleClassName), - lookup(name.sourceModuleName)) - - symbols.map(_.sourceSymbol).filter(_.exists).distinct - } - - /** - * All the symbols that are imported by the first selector of `imp` that matches - * `selectorPredicate`. - * - * @param imp The import statement to analyze - * @param selectorPredicate A test to find the selector to use. - * @return The symbols imported. - */ - def importedSymbols(imp: Import, - selectorPredicate: untpd.ImportSelector -> Boolean = util.common.alwaysTrue) - (using Context): List[Symbol] = - imp.selectors.find(selectorPredicate) match - case Some(sel) => importedSymbols(imp.expr, sel.name) - case _ => Nil - - /** - * The list of select trees that resolve to the same symbols as the ones that are imported - * by `imp`. - */ - def importSelections(imp: Import)(using Context): List[Select] = { - def imported(sym: Symbol, id: untpd.Ident, rename: Option[untpd.Ident]): List[Select] = { - // Give a zero-extent position to the qualifier to prevent it from being included several - // times in results in the language server. - val noPosExpr = focusPositions(imp.expr) - val selectTree = Select(noPosExpr, sym.name).withSpan(id.span) - rename match { - case None => - selectTree :: Nil - case Some(rename) => - // Get the type of the symbol that is actually selected, and construct a select - // node with the new name and the type of the real symbol. 
- val name = if (sym.name.isTypeName) rename.name.toTypeName else rename.name - val actual = Select(noPosExpr, sym.name) - val renameTree = Select(noPosExpr, name).withSpan(rename.span).withType(actual.tpe) - selectTree :: renameTree :: Nil - } - } - - imp.selectors.flatMap { sel => - if sel.isWildcard then Nil - else - val renamedOpt = sel.renamed match - case renamed: untpd.Ident => Some(renamed) - case untpd.EmptyTree => None - importedSymbols(imp.expr, sel.name).flatMap { sym => - imported(sym, sel.imported, renamedOpt) - } - } - } - - /** Creates the tuple type tree representation of the type trees in `ts` */ - def tupleTypeTree(elems: List[Tree])(using Context): Tree = { - val arity = elems.length - if arity <= Definitions.MaxTupleArity then - val tupleTp = defn.TupleType(arity) - if tupleTp != null then - AppliedTypeTree(TypeTree(tupleTp), elems) - else nestedPairsTypeTree(elems) - else nestedPairsTypeTree(elems) - } - - /** Creates the nested pairs type tree representation of the type trees in `ts` */ - def nestedPairsTypeTree(ts: List[Tree])(using Context): Tree = - ts.foldRight[Tree](TypeTree(defn.EmptyTupleModule.termRef))((x, acc) => AppliedTypeTree(TypeTree(defn.PairClass.typeRef), x :: acc :: Nil)) - - /** Replaces all positions in `tree` with zero-extent positions */ - private def focusPositions(tree: Tree)(using Context): Tree = { - val transformer = new tpd.TreeMap { - override def transform(tree: Tree)(using Context): Tree = - super.transform(tree).withSpan(tree.span.focus) - } - transformer.transform(tree) - } - - /** Convert a list of trees to a vararg-compatible tree. - * Used to make arguments for methods that accept varargs. - */ - def repeated(trees: List[Tree], tpt: Tree)(using Context): Tree = - ctx.typeAssigner.arrayToRepeated(JavaSeqLiteral(trees, tpt)) - - /** Create a tree representing a list containing all - * the elements of the argument list. A "list of tree to - * tree of list" conversion. - * - * @param trees the elements the list represented by - * the resulting tree should contain. - * @param tpt the type of the elements of the resulting list.
- * - */ - def mkList(trees: List[Tree], tpt: Tree)(using Context): Tree = - ref(defn.ListModule).select(nme.apply) - .appliedToTypeTree(tpt) - .appliedToVarargs(trees, tpt) - - - protected def FunProto(args: List[Tree], resType: Type)(using Context) = - ProtoTypes.FunProtoTyped(args, resType)(ctx.typer, ApplyKind.Regular) -} diff --git a/tests/pos-with-compiler-cc/dotc/ast/untpd.scala b/tests/pos-with-compiler-cc/dotc/ast/untpd.scala deleted file mode 100644 index a6d3bc5a072c..000000000000 --- a/tests/pos-with-compiler-cc/dotc/ast/untpd.scala +++ /dev/null @@ -1,829 +0,0 @@ -package dotty.tools -package dotc -package ast - -import core._ -import Types._, Contexts._, Constants._, Names._, Flags._ -import dotty.tools.dotc.typer.ProtoTypes -import Symbols._, StdNames._, Trees._ -import util.{Property, SourceFile, NoSource} -import util.Spans.Span -import annotation.constructorOnly -import annotation.internal.sharable -import Decorators._ -import annotation.retains -import language.experimental.pureFunctions - -object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { - - // ----- Tree cases that exist in untyped form only ------------------ - - abstract class OpTree(implicit @constructorOnly src: SourceFile) extends Tree { - def op: Ident - override def isTerm: Boolean = op.isTerm - override def isType: Boolean = op.isType - } - - /** A typed subtree of an untyped tree needs to be wrapped in a TypedSplice - * @param owner The current owner at the time the tree was defined - * @param isExtensionReceiver The splice was created from the receiver `e` in an extension - * method call `e.f(...)` - */ - abstract case class TypedSplice(splice: tpd.Tree)(val owner: Symbol, val isExtensionReceiver: Boolean)(implicit @constructorOnly src: SourceFile) extends ProxyTree { - def forwardTo: tpd.Tree = splice - override def toString = - def ext = if isExtensionReceiver then ", isExtensionReceiver = true" else "" - s"TypedSplice($splice$ext)" - } - - object TypedSplice { - def apply(tree: tpd.Tree, isExtensionReceiver: Boolean = false)(using Context): TypedSplice = - val owner = ctx.owner - given SourceFile = ctx.source - new TypedSplice(tree)(owner, isExtensionReceiver) {} - } - - /** mods object name impl */ - case class ModuleDef(name: TermName, impl: Template)(implicit @constructorOnly src: SourceFile) - extends MemberDef { - type ThisTree[+T <: Untyped] <: Trees.NameTree[T] with Trees.MemberDef[T] with ModuleDef - def withName(name: Name)(using Context): ModuleDef = cpy.ModuleDef(this)(name.toTermName, impl) - } - - /** An untyped template with a derives clause. Derived parents are added to the end - * of the `parents` list. `derivedCount` keeps track of how many there are. 
- * This representation was chosen because it balances two concerns: - * - maximize overlap between DerivingTemplate and Template for code streamlining - * - keep invariant that elements of untyped trees align with source positions - */ - class DerivingTemplate(constr: DefDef, parentsOrDerived: List[Tree], self: ValDef, preBody: LazyTreeList, derivedCount: Int)(implicit @constructorOnly src: SourceFile) - extends Template(constr, parentsOrDerived, self, preBody) { - override val parents = parentsOrDerived.dropRight(derivedCount) - override val derived = parentsOrDerived.takeRight(derivedCount) - } - - case class ParsedTry(expr: Tree, handler: Tree, finalizer: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree - - case class SymbolLit(str: String)(implicit @constructorOnly src: SourceFile) extends TermTree - - /** An interpolated string - * @param segments a list of two element tickets consisting of string literal and argument tree, - * possibly with a simple string literal as last element of the list - */ - case class InterpolatedString(id: TermName, segments: List[Tree])(implicit @constructorOnly src: SourceFile) - extends TermTree - - /** A function type or closure */ - case class Function(args: List[Tree], body: Tree)(implicit @constructorOnly src: SourceFile) extends Tree { - override def isTerm: Boolean = body.isTerm - override def isType: Boolean = body.isType - } - - /** A function type or closure with `implicit`, `erased`, or `given` modifiers */ - class FunctionWithMods(args: List[Tree], body: Tree, val mods: Modifiers)(implicit @constructorOnly src: SourceFile) - extends Function(args, body) - - /** A polymorphic function type */ - case class PolyFunction(targs: List[Tree], body: Tree)(implicit @constructorOnly src: SourceFile) extends Tree { - override def isTerm = body.isTerm - override def isType = body.isType - } - - /** A function created from a wildcard expression - * @param placeholderParams a list of definitions of synthetic parameters. - * @param body the function body where wildcards are replaced by - * references to synthetic parameters. - * This is equivalent to Function, except that forms a special case for the overlapping - * positions tests. 
- */ - class WildcardFunction(placeholderParams: List[ValDef], body: Tree)(implicit @constructorOnly src: SourceFile) - extends Function(placeholderParams, body) - - case class InfixOp(left: Tree, op: Ident, right: Tree)(implicit @constructorOnly src: SourceFile) extends OpTree - case class PostfixOp(od: Tree, op: Ident)(implicit @constructorOnly src: SourceFile) extends OpTree - case class PrefixOp(op: Ident, od: Tree)(implicit @constructorOnly src: SourceFile) extends OpTree - case class Parens(t: Tree)(implicit @constructorOnly src: SourceFile) extends ProxyTree { - def forwardTo: Tree = t - } - case class Tuple(trees: List[Tree])(implicit @constructorOnly src: SourceFile) extends Tree { - override def isTerm: Boolean = trees.isEmpty || trees.head.isTerm - override def isType: Boolean = !isTerm - } - case class Throw(expr: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree - case class Quote(quoted: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree - case class Splice(expr: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree { - def isInBraces: Boolean = span.end != expr.span.end - } - case class ForYield(enums: List[Tree], expr: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree - case class ForDo(enums: List[Tree], body: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree - case class GenFrom(pat: Tree, expr: Tree, checkMode: GenCheckMode)(implicit @constructorOnly src: SourceFile) extends Tree - case class GenAlias(pat: Tree, expr: Tree)(implicit @constructorOnly src: SourceFile) extends Tree - case class ContextBounds(bounds: TypeBoundsTree, cxBounds: List[Tree])(implicit @constructorOnly src: SourceFile) extends TypTree - case class PatDef(mods: Modifiers, pats: List[Tree], tpt: Tree, rhs: Tree)(implicit @constructorOnly src: SourceFile) extends DefTree - case class ExtMethods(paramss: List[ParamClause], methods: List[Tree])(implicit @constructorOnly src: SourceFile) extends Tree - case class Into(tpt: Tree)(implicit @constructorOnly src: SourceFile) extends Tree - case class MacroTree(expr: Tree)(implicit @constructorOnly src: SourceFile) extends Tree - - case class ImportSelector(imported: Ident, renamed: Tree = EmptyTree, bound: Tree = EmptyTree)(implicit @constructorOnly src: SourceFile) extends Tree { - // TODO: Make bound a typed tree? 
- - /** It's a `given` selector */ - val isGiven: Boolean = imported.name.isEmpty - - /** It's a `given` or `_` selector */ - val isWildcard: Boolean = isGiven || imported.name == nme.WILDCARD - - /** The imported name, EmptyTermName if it's a given selector */ - val name: TermName = imported.name.asInstanceOf[TermName] - - /** The renamed part (which might be `_`), if present, or `name`, if missing */ - val rename: TermName = renamed match - case Ident(rename: TermName) => rename - case _ => name - } - - case class Number(digits: String, kind: NumberKind)(implicit @constructorOnly src: SourceFile) extends TermTree - - enum NumberKind { - case Whole(radix: Int) - case Decimal - case Floating - } - - /** {x1, ..., xN} T (only relevant under captureChecking) */ - case class CapturingTypeTree(refs: List[Tree], parent: Tree)(implicit @constructorOnly src: SourceFile) extends TypTree - - /** Short-lived usage in typer, does not need copy/transform/fold infrastructure */ - case class DependentTypeTree(tp: List[Symbol] -> Context ?-> Type)(implicit @constructorOnly src: SourceFile) extends Tree - - @sharable object EmptyTypeIdent extends Ident(tpnme.EMPTY)(NoSource) with WithoutTypeOrPos[Untyped] { - override def isEmpty: Boolean = true - } - - def WildcardTypeBoundsTree()(using src: SourceFile): TypeBoundsTree = TypeBoundsTree(EmptyTree, EmptyTree, EmptyTree) - object WildcardTypeBoundsTree: - def unapply(tree: untpd.Tree): Boolean = tree match - case TypeBoundsTree(EmptyTree, EmptyTree, _) => true - case _ => false - - - /** A block generated by the XML parser, only treated specially by - * `Positioned#checkPos` */ - class XMLBlock(stats: List[Tree], expr: Tree)(implicit @constructorOnly src: SourceFile) extends Block(stats, expr) - - /** An enum to control checking or filtering of patterns in GenFrom trees */ - enum GenCheckMode { - case Ignore // neither filter nor check since filtering was done before - case Check // check that pattern is irrefutable - case CheckAndFilter // both check and filter (transitional period starting with 3.2) - case FilterNow // filter out non-matching elements if we are not in 3.2 or later - case FilterAlways // filter out non-matching elements since pattern is prefixed by `case` - } - - // ----- Modifiers ----------------------------------------------------- - /** Mod is intended to record syntactic information about modifiers, it's - * NOT a replacement of FlagSet. - * - * For any query about semantic information, check `flags` instead. 
- */ - sealed abstract class Mod(val flags: FlagSet)(implicit @constructorOnly src: SourceFile) - extends Positioned - - object Mod { - case class Private()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Private) - - case class Protected()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Protected) - - case class Var()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Mutable) - - case class Implicit()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Implicit) - - case class Given()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Given) - - case class Erased()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Erased) - - case class Final()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Final) - - case class Sealed()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Sealed) - - case class Opaque()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Opaque) - - case class Open()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Open) - - case class Override()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Override) - - case class Abstract()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Abstract) - - case class Lazy()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Lazy) - - case class Inline()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Inline) - - case class Transparent()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Transparent) - - case class Infix()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Infix) - - /** Used under pureFunctions to mark impure function types `A => B` in `FunctionWithMods` */ - case class Impure()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Impure) - } - - /** Modifiers and annotations for definitions - * - * @param flags The set flags - * @param privateWithin If a private or protected has is followed by a - * qualifier [q], the name q, "" as a typename otherwise. 
- * @param annotations The annotations preceding the modifiers - */ - case class Modifiers ( - flags: FlagSet = EmptyFlags, - privateWithin: TypeName = tpnme.EMPTY, - annotations: List[Tree] = Nil, - mods: List[Mod] = Nil) { - - def is(flag: Flag): Boolean = flags.is(flag) - def is(flag: Flag, butNot: FlagSet): Boolean = flags.is(flag, butNot = butNot) - def isOneOf(fs: FlagSet): Boolean = flags.isOneOf(fs) - def isOneOf(fs: FlagSet, butNot: FlagSet): Boolean = flags.isOneOf(fs, butNot = butNot) - def isAllOf(fc: FlagSet): Boolean = flags.isAllOf(fc) - - def | (fs: FlagSet): Modifiers = withFlags(flags | fs) - def & (fs: FlagSet): Modifiers = withFlags(flags & fs) - def &~(fs: FlagSet): Modifiers = withFlags(flags &~ fs) - - def toTypeFlags: Modifiers = withFlags(flags.toTypeFlags) - def toTermFlags: Modifiers = withFlags(flags.toTermFlags) - - def withFlags(flags: FlagSet): Modifiers = - if (this.flags == flags) this - else copy(flags = flags) - - def withoutFlags(flags: FlagSet): Modifiers = - if (this.isOneOf(flags)) - Modifiers(this.flags &~ flags, this.privateWithin, this.annotations, this.mods.filterNot(_.flags.isOneOf(flags))) - else this - - def withAddedMod(mod: Mod): Modifiers = - if (mods.exists(_ eq mod)) this - else withMods(mods :+ mod) - - private def compatible(flags1: FlagSet, flags2: FlagSet): Boolean = - flags1.isEmpty || flags2.isEmpty - || flags1.isTermFlags && flags2.isTermFlags - || flags1.isTypeFlags && flags2.isTypeFlags - - /** Add `flags` to this modifier set, checking that there are no type/term conflicts. - * If there are conflicts, issue an error and return the modifiers consisting of - * the added flags only. The reason to do it this way is that the added flags usually - * describe the core of a construct whereas the existing set is the modifiers - * given in the source. - */ - def withAddedFlags(flags: FlagSet, span: Span)(using Context): Modifiers = - if this.flags.isAllOf(flags) then this - else if compatible(this.flags, flags) then this | flags - else - val what = if flags.isTermFlags then "values" else "types" - report.error(em"${(flags & ModifierFlags).flagsString} $what cannot be ${this.flags.flagsString}", ctx.source.atSpan(span)) - Modifiers(flags) - - /** Modifiers with given list of Mods. It is checked that - * all modifiers are already accounted for in `flags` and `privateWithin`.
- */ - def withMods(ms: List[Mod]): Modifiers = - if (mods eq ms) this - else { - if (ms.nonEmpty) - for (m <- ms) - assert(flags.isAllOf(m.flags) - || m.isInstanceOf[Mod.Private] && !privateWithin.isEmpty - || (m.isInstanceOf[Mod.Abstract] || m.isInstanceOf[Mod.Override]) && flags.is(AbsOverride), - s"unaccounted modifier: $m in $this with flags ${flags.flagsString} when adding $ms") - copy(mods = ms) - } - - def withAddedAnnotation(annot: Tree): Modifiers = - if (annotations.exists(_ eq annot)) this - else withAnnotations(annotations :+ annot) - - def withAnnotations(annots: List[Tree]): Modifiers = - if (annots eq annotations) this - else copy(annotations = annots) - - def withPrivateWithin(pw: TypeName): Modifiers = - if (pw.isEmpty) this - else copy(privateWithin = pw) - - def hasFlags: Boolean = flags != EmptyFlags - def hasAnnotations: Boolean = annotations.nonEmpty - def hasPrivateWithin: Boolean = privateWithin != tpnme.EMPTY - def hasMod(cls: Class[?]) = mods.exists(_.getClass == cls) - - private def isEnum = is(Enum, butNot = JavaDefined) - - def isEnumCase: Boolean = isEnum && is(Case) - def isEnumClass: Boolean = isEnum && !is(Case) - } - - @sharable val EmptyModifiers: Modifiers = Modifiers() - - // ----- TypeTrees that refer to other tree's symbols ------------------- - - /** A type tree that gets its type from some other tree's symbol. Enters the - * type tree in the References attachment of the `from` tree as a side effect. - */ - abstract class DerivedTypeTree(implicit @constructorOnly src: SourceFile) extends TypeTree { - - private var myWatched: Tree = EmptyTree - - /** The watched tree; used only for printing */ - def watched: Tree = myWatched - - /** Install the derived type tree as a dependency on `original` */ - def watching(original: DefTree): this.type = { - myWatched = original - val existing = original.attachmentOrElse(References, Nil) - original.putAttachment(References, this :: existing) - this - } - - /** Install the derived type tree as a dependency on `sym` */ - def watching(sym: Symbol): this.type = withAttachment(OriginalSymbol, sym) - - /** A hook to ensure that all necessary symbols are completed so that - * OriginalSymbol attachments are propagated to this tree - */ - def ensureCompletions(using Context): Unit = () - - /** The method that computes the tree with the derived type */ - def derivedTree(originalSym: Symbol)(using Context): tpd.Tree - } - - /** Property key containing TypeTrees whose type is computed - * from the symbol in this type. These type trees have marker trees - * TypeRefOfSym or InfoOfSym as their originals. - */ - val References: Property.Key[List[DerivedTypeTree]] = Property.Key() - - /** Property key for TypeTrees marked with TypeRefOfSym or InfoOfSym - * which contains the symbol of the original tree from which this - * TypeTree is derived. 
- */ - val OriginalSymbol: Property.Key[Symbol] = Property.Key() - - /** Property key for contextual Apply trees of the form `fn given arg` */ - val KindOfApply: Property.StickyKey[ApplyKind] = Property.StickyKey() - - // ------ Creation methods for untyped only ----------------- - - def Ident(name: Name)(implicit src: SourceFile): Ident = new Ident(name) - def SearchFailureIdent(name: Name, explanation: -> String)(implicit src: SourceFile): SearchFailureIdent = new SearchFailureIdent(name, explanation) - def Select(qualifier: Tree, name: Name)(implicit src: SourceFile): Select = new Select(qualifier, name) - def SelectWithSig(qualifier: Tree, name: Name, sig: Signature)(implicit src: SourceFile): Select = new SelectWithSig(qualifier, name, sig) - def This(qual: Ident)(implicit src: SourceFile): This = new This(qual) - def Super(qual: Tree, mix: Ident)(implicit src: SourceFile): Super = new Super(qual, mix) - def Apply(fun: Tree, args: List[Tree])(implicit src: SourceFile): Apply = new Apply(fun, args) - def TypeApply(fun: Tree, args: List[Tree])(implicit src: SourceFile): TypeApply = new TypeApply(fun, args) - def Literal(const: Constant)(implicit src: SourceFile): Literal = new Literal(const) - def New(tpt: Tree)(implicit src: SourceFile): New = new New(tpt) - def Typed(expr: Tree, tpt: Tree)(implicit src: SourceFile): Typed = new Typed(expr, tpt) - def NamedArg(name: Name, arg: Tree)(implicit src: SourceFile): NamedArg = new NamedArg(name, arg) - def Assign(lhs: Tree, rhs: Tree)(implicit src: SourceFile): Assign = new Assign(lhs, rhs) - def Block(stats: List[Tree], expr: Tree)(implicit src: SourceFile): Block = new Block(stats, expr) - def If(cond: Tree, thenp: Tree, elsep: Tree)(implicit src: SourceFile): If = new If(cond, thenp, elsep) - def InlineIf(cond: Tree, thenp: Tree, elsep: Tree)(implicit src: SourceFile): If = new InlineIf(cond, thenp, elsep) - def Closure(env: List[Tree], meth: Tree, tpt: Tree)(implicit src: SourceFile): Closure = new Closure(env, meth, tpt) - def Match(selector: Tree, cases: List[CaseDef])(implicit src: SourceFile): Match = new Match(selector, cases) - def InlineMatch(selector: Tree, cases: List[CaseDef])(implicit src: SourceFile): Match = new InlineMatch(selector, cases) - def CaseDef(pat: Tree, guard: Tree, body: Tree)(implicit src: SourceFile): CaseDef = new CaseDef(pat, guard, body) - def Labeled(bind: Bind, expr: Tree)(implicit src: SourceFile): Labeled = new Labeled(bind, expr) - def Return(expr: Tree, from: Tree)(implicit src: SourceFile): Return = new Return(expr, from) - def WhileDo(cond: Tree, body: Tree)(implicit src: SourceFile): WhileDo = new WhileDo(cond, body) - def Try(expr: Tree, cases: List[CaseDef], finalizer: Tree)(implicit src: SourceFile): Try = new Try(expr, cases, finalizer) - def SeqLiteral(elems: List[Tree], elemtpt: Tree)(implicit src: SourceFile): SeqLiteral = new SeqLiteral(elems, elemtpt) - def JavaSeqLiteral(elems: List[Tree], elemtpt: Tree)(implicit src: SourceFile): JavaSeqLiteral = new JavaSeqLiteral(elems, elemtpt) - def Inlined(call: tpd.Tree, bindings: List[MemberDef], expansion: Tree)(implicit src: SourceFile): Inlined = new Inlined(call, bindings, expansion) - def TypeTree()(implicit src: SourceFile): TypeTree = new TypeTree() - def InferredTypeTree()(implicit src: SourceFile): TypeTree = new InferredTypeTree() - def SingletonTypeTree(ref: Tree)(implicit src: SourceFile): SingletonTypeTree = new SingletonTypeTree(ref) - def RefinedTypeTree(tpt: Tree, refinements: List[Tree])(implicit src: SourceFile): RefinedTypeTree = 
new RefinedTypeTree(tpt, refinements) - def AppliedTypeTree(tpt: Tree, args: List[Tree])(implicit src: SourceFile): AppliedTypeTree = new AppliedTypeTree(tpt, args) - def LambdaTypeTree(tparams: List[TypeDef], body: Tree)(implicit src: SourceFile): LambdaTypeTree = new LambdaTypeTree(tparams, body) - def TermLambdaTypeTree(params: List[ValDef], body: Tree)(implicit src: SourceFile): TermLambdaTypeTree = new TermLambdaTypeTree(params, body) - def MatchTypeTree(bound: Tree, selector: Tree, cases: List[CaseDef])(implicit src: SourceFile): MatchTypeTree = new MatchTypeTree(bound, selector, cases) - def ByNameTypeTree(result: Tree)(implicit src: SourceFile): ByNameTypeTree = new ByNameTypeTree(result) - def TypeBoundsTree(lo: Tree, hi: Tree, alias: Tree = EmptyTree)(implicit src: SourceFile): TypeBoundsTree = new TypeBoundsTree(lo, hi, alias) - def Bind(name: Name, body: Tree)(implicit src: SourceFile): Bind = new Bind(name, body) - def Alternative(trees: List[Tree])(implicit src: SourceFile): Alternative = new Alternative(trees) - def UnApply(fun: Tree, implicits: List[Tree], patterns: List[Tree])(implicit src: SourceFile): UnApply = new UnApply(fun, implicits, patterns) - def ValDef(name: TermName, tpt: Tree, rhs: LazyTree)(implicit src: SourceFile): ValDef = new ValDef(name, tpt, rhs) - def DefDef(name: TermName, paramss: List[ParamClause], tpt: Tree, rhs: LazyTree)(implicit src: SourceFile): DefDef = new DefDef(name, paramss, tpt, rhs) - def TypeDef(name: TypeName, rhs: Tree)(implicit src: SourceFile): TypeDef = new TypeDef(name, rhs) - def Template(constr: DefDef, parents: List[Tree], derived: List[Tree], self: ValDef, body: LazyTreeList)(implicit src: SourceFile): Template = - if (derived.isEmpty) new Template(constr, parents, self, body) - else new DerivingTemplate(constr, parents ++ derived, self, body, derived.length) - def Import(expr: Tree, selectors: List[ImportSelector])(implicit src: SourceFile): Import = new Import(expr, selectors) - def Export(expr: Tree, selectors: List[ImportSelector])(implicit src: SourceFile): Export = new Export(expr, selectors) - def PackageDef(pid: RefTree, stats: List[Tree])(implicit src: SourceFile): PackageDef = new PackageDef(pid, stats) - def Annotated(arg: Tree, annot: Tree)(implicit src: SourceFile): Annotated = new Annotated(arg, annot) - def Hole(isTermHole: Boolean, idx: Int, args: List[Tree], content: Tree, tpt: Tree)(implicit src: SourceFile): Hole = new Hole(isTermHole, idx, args, content, tpt) - - // ------ Additional creation methods for untyped only ----------------- - - /** new T(args1)...(args_n) - * ==> - * new T.[Ts](args1)...(args_n) - * - * where `Ts` are the class type arguments of `T` or its class type alias. - * Note: we also keep any type arguments as parts of `T`. This is necessary to allow - * navigation into these arguments from the IDE, and to do the right thing in - * PrepareInlineable. - */ - def New(tpt: Tree, argss: List[List[Tree]])(using Context): Tree = - ensureApplied(argss.foldLeft(makeNew(tpt))(Apply(_, _))) - - /** A new expression with constructor and possibly type arguments. See - * `New(tpt, argss)` for details.
- */ - def makeNew(tpt: Tree)(using Context): Tree = { - val (tycon, targs) = tpt match { - case AppliedTypeTree(tycon, targs) => - (tycon, targs) - case TypedSplice(tpt1: tpd.Tree) => - val argTypes = tpt1.tpe.dealias.argTypesLo - def wrap(tpe: Type) = TypeTree(tpe).withSpan(tpt.span) - (tpt, argTypes.map(wrap)) - case _ => - (tpt, Nil) - } - val nu: Tree = Select(New(tycon), nme.CONSTRUCTOR) - if (targs.nonEmpty) TypeApply(nu, targs) else nu - } - - def Block(stat: Tree, expr: Tree)(implicit src: SourceFile): Block = - Block(stat :: Nil, expr) - - def Apply(fn: Tree, arg: Tree)(implicit src: SourceFile): Apply = - Apply(fn, arg :: Nil) - - def ensureApplied(tpt: Tree)(implicit src: SourceFile): Tree = tpt match { - case _: Apply => tpt - case _ => Apply(tpt, Nil) - } - - def AppliedTypeTree(tpt: Tree, arg: Tree)(implicit src: SourceFile): AppliedTypeTree = - AppliedTypeTree(tpt, arg :: Nil) - - def TypeTree(tpe: Type)(using Context): TypedSplice = - TypedSplice(TypeTree().withTypeUnchecked(tpe)) - - def InferredTypeTree(tpe: Type)(using Context): TypedSplice = - TypedSplice(new InferredTypeTree().withTypeUnchecked(tpe)) - - def unitLiteral(implicit src: SourceFile): Literal = Literal(Constant(())) - - def ref(tp: NamedType)(using Context): Tree = - TypedSplice(tpd.ref(tp)) - - def ref(sym: Symbol)(using Context): Tree = - TypedSplice(tpd.ref(sym)) - - def rawRef(tp: NamedType)(using Context): Tree = - if tp.typeParams.isEmpty then ref(tp) - else AppliedTypeTree(ref(tp), tp.typeParams.map(_ => WildcardTypeBoundsTree())) - - def rootDot(name: Name)(implicit src: SourceFile): Select = Select(Ident(nme.ROOTPKG), name) - def scalaDot(name: Name)(implicit src: SourceFile): Select = Select(rootDot(nme.scala), name) - def scalaAnnotationDot(name: Name)(using SourceFile): Select = Select(scalaDot(nme.annotation), name) - def scalaRuntimeDot(name: Name)(using SourceFile): Select = Select(scalaDot(nme.runtime), name) - def scalaUnit(implicit src: SourceFile): Select = scalaDot(tpnme.Unit) - def scalaAny(implicit src: SourceFile): Select = scalaDot(tpnme.Any) - def javaDotLangDot(name: Name)(implicit src: SourceFile): Select = Select(Select(Ident(nme.java), nme.lang), name) - - def captureRoot(using Context): Select = - Select(scalaDot(nme.caps), nme.CAPTURE_ROOT) - - def makeConstructor(tparams: List[TypeDef], vparamss: List[List[ValDef]], rhs: Tree = EmptyTree)(using Context): DefDef = - DefDef(nme.CONSTRUCTOR, joinParams(tparams, vparamss), TypeTree(), rhs) - - def emptyConstructor(using Context): DefDef = - makeConstructor(Nil, Nil) - - def makeSelfDef(name: TermName, tpt: Tree)(using Context): ValDef = - ValDef(name, tpt, EmptyTree).withFlags(PrivateLocal) - - def makeTupleOrParens(ts: List[Tree])(using Context): Tree = ts match { - case t :: Nil => Parens(t) - case _ => Tuple(ts) - } - - def makeTuple(ts: List[Tree])(using Context): Tree = ts match { - case t :: Nil => t - case _ => Tuple(ts) - } - - def makeAndType(left: Tree, right: Tree)(using Context): AppliedTypeTree = - AppliedTypeTree(ref(defn.andType.typeRef), left :: right :: Nil) - - def makeParameter(pname: TermName, tpe: Tree, mods: Modifiers, isBackquoted: Boolean = false)(using Context): ValDef = { - val vdef = ValDef(pname, tpe, EmptyTree) - if (isBackquoted) vdef.pushAttachment(Backquoted, ()) - vdef.withMods(mods | Param) - } - - def makeSyntheticParameter(n: Int = 1, tpt: Tree | Null = null, flags: FlagSet = SyntheticTermParam)(using Context): ValDef = - ValDef(nme.syntheticParamName(n), if (tpt == null) TypeTree() else tpt, 
EmptyTree) - .withFlags(flags) - - def lambdaAbstract(params: List[ValDef] | List[TypeDef], tpt: Tree)(using Context): Tree = - params match - case Nil => tpt - case (vd: ValDef) :: _ => TermLambdaTypeTree(params.asInstanceOf[List[ValDef]], tpt) - case _ => LambdaTypeTree(params.asInstanceOf[List[TypeDef]], tpt) - - def lambdaAbstractAll(paramss: List[List[ValDef] | List[TypeDef]], tpt: Tree)(using Context): Tree = - paramss.foldRight(tpt)(lambdaAbstract) - - /** A reference to given definition. If definition is a repeated - * parameter, the reference will be a repeated argument. - */ - def refOfDef(tree: MemberDef)(using Context): Tree = tree match { - case ValDef(_, PostfixOp(_, Ident(tpnme.raw.STAR)), _) => repeated(Ident(tree.name)) - case _ => Ident(tree.name) - } - - /** A repeated argument such as `arg: _*` */ - def repeated(arg: Tree)(using Context): Typed = Typed(arg, Ident(tpnme.WILDCARD_STAR)) - - -// --------- Copier/Transformer/Accumulator classes for untyped trees ----- - - def localCtx(tree: Tree)(using Context): Context = ctx - - override val cpy: UntypedTreeCopier = UntypedTreeCopier() - - class UntypedTreeCopier extends TreeCopier { - - def postProcess(tree: Tree, copied: Tree): copied.ThisTree[Untyped] = - copied.asInstanceOf[copied.ThisTree[Untyped]] - - def postProcess(tree: Tree, copied: MemberDef): copied.ThisTree[Untyped] = { - tree match { - case tree: MemberDef => copied.withMods(tree.rawMods) - case _ => copied - } - }.asInstanceOf[copied.ThisTree[Untyped]] - - def ModuleDef(tree: Tree)(name: TermName, impl: Template)(using Context): ModuleDef = tree match { - case tree: ModuleDef if (name eq tree.name) && (impl eq tree.impl) => tree - case _ => finalize(tree, untpd.ModuleDef(name, impl)(tree.source)) - } - def ParsedTry(tree: Tree)(expr: Tree, handler: Tree, finalizer: Tree)(using Context): TermTree = tree match { - case tree: ParsedTry if (expr eq tree.expr) && (handler eq tree.handler) && (finalizer eq tree.finalizer) => tree - case _ => finalize(tree, untpd.ParsedTry(expr, handler, finalizer)(tree.source)) - } - def SymbolLit(tree: Tree)(str: String)(using Context): TermTree = tree match { - case tree: SymbolLit if str == tree.str => tree - case _ => finalize(tree, untpd.SymbolLit(str)(tree.source)) - } - def InterpolatedString(tree: Tree)(id: TermName, segments: List[Tree])(using Context): TermTree = tree match { - case tree: InterpolatedString if (id eq tree.id) && (segments eq tree.segments) => tree - case _ => finalize(tree, untpd.InterpolatedString(id, segments)(tree.source)) - } - def Function(tree: Tree)(args: List[Tree], body: Tree)(using Context): Tree = tree match { - case tree: Function if (args eq tree.args) && (body eq tree.body) => tree - case _ => finalize(tree, untpd.Function(args, body)(tree.source)) - } - def PolyFunction(tree: Tree)(targs: List[Tree], body: Tree)(using Context): Tree = tree match { - case tree: PolyFunction if (targs eq tree.targs) && (body eq tree.body) => tree - case _ => finalize(tree, untpd.PolyFunction(targs, body)(tree.source)) - } - def InfixOp(tree: Tree)(left: Tree, op: Ident, right: Tree)(using Context): Tree = tree match { - case tree: InfixOp if (left eq tree.left) && (op eq tree.op) && (right eq tree.right) => tree - case _ => finalize(tree, untpd.InfixOp(left, op, right)(tree.source)) - } - def PostfixOp(tree: Tree)(od: Tree, op: Ident)(using Context): Tree = tree match { - case tree: PostfixOp if (od eq tree.od) && (op eq tree.op) => tree - case _ => finalize(tree, untpd.PostfixOp(od, op)(tree.source)) - } - 
def PrefixOp(tree: Tree)(op: Ident, od: Tree)(using Context): Tree = tree match { - case tree: PrefixOp if (op eq tree.op) && (od eq tree.od) => tree - case _ => finalize(tree, untpd.PrefixOp(op, od)(tree.source)) - } - def Parens(tree: Tree)(t: Tree)(using Context): ProxyTree = tree match { - case tree: Parens if t eq tree.t => tree - case _ => finalize(tree, untpd.Parens(t)(tree.source)) - } - def Tuple(tree: Tree)(trees: List[Tree])(using Context): Tree = tree match { - case tree: Tuple if trees eq tree.trees => tree - case _ => finalize(tree, untpd.Tuple(trees)(tree.source)) - } - def Throw(tree: Tree)(expr: Tree)(using Context): TermTree = tree match { - case tree: Throw if expr eq tree.expr => tree - case _ => finalize(tree, untpd.Throw(expr)(tree.source)) - } - def Quote(tree: Tree)(quoted: Tree)(using Context): Tree = tree match { - case tree: Quote if quoted eq tree.quoted => tree - case _ => finalize(tree, untpd.Quote(quoted)(tree.source)) - } - def Splice(tree: Tree)(expr: Tree)(using Context): Tree = tree match { - case tree: Splice if expr eq tree.expr => tree - case _ => finalize(tree, untpd.Splice(expr)(tree.source)) - } - def ForYield(tree: Tree)(enums: List[Tree], expr: Tree)(using Context): TermTree = tree match { - case tree: ForYield if (enums eq tree.enums) && (expr eq tree.expr) => tree - case _ => finalize(tree, untpd.ForYield(enums, expr)(tree.source)) - } - def ForDo(tree: Tree)(enums: List[Tree], body: Tree)(using Context): TermTree = tree match { - case tree: ForDo if (enums eq tree.enums) && (body eq tree.body) => tree - case _ => finalize(tree, untpd.ForDo(enums, body)(tree.source)) - } - def GenFrom(tree: Tree)(pat: Tree, expr: Tree, checkMode: GenCheckMode)(using Context): Tree = tree match { - case tree: GenFrom if (pat eq tree.pat) && (expr eq tree.expr) && (checkMode == tree.checkMode) => tree - case _ => finalize(tree, untpd.GenFrom(pat, expr, checkMode)(tree.source)) - } - def GenAlias(tree: Tree)(pat: Tree, expr: Tree)(using Context): Tree = tree match { - case tree: GenAlias if (pat eq tree.pat) && (expr eq tree.expr) => tree - case _ => finalize(tree, untpd.GenAlias(pat, expr)(tree.source)) - } - def ContextBounds(tree: Tree)(bounds: TypeBoundsTree, cxBounds: List[Tree])(using Context): TypTree = tree match { - case tree: ContextBounds if (bounds eq tree.bounds) && (cxBounds eq tree.cxBounds) => tree - case _ => finalize(tree, untpd.ContextBounds(bounds, cxBounds)(tree.source)) - } - def PatDef(tree: Tree)(mods: Modifiers, pats: List[Tree], tpt: Tree, rhs: Tree)(using Context): Tree = tree match { - case tree: PatDef if (mods eq tree.mods) && (pats eq tree.pats) && (tpt eq tree.tpt) && (rhs eq tree.rhs) => tree - case _ => finalize(tree, untpd.PatDef(mods, pats, tpt, rhs)(tree.source)) - } - def ExtMethods(tree: Tree)(paramss: List[ParamClause], methods: List[Tree])(using Context): Tree = tree match - case tree: ExtMethods if (paramss eq tree.paramss) && (methods == tree.methods) => tree - case _ => finalize(tree, untpd.ExtMethods(paramss, methods)(tree.source)) - def Into(tree: Tree)(tpt: Tree)(using Context): Tree = tree match - case tree: Into if tpt eq tree.tpt => tree - case _ => finalize(tree, untpd.Into(tpt)(tree.source)) - def ImportSelector(tree: Tree)(imported: Ident, renamed: Tree, bound: Tree)(using Context): Tree = tree match { - case tree: ImportSelector if (imported eq tree.imported) && (renamed eq tree.renamed) && (bound eq tree.bound) => tree - case _ => finalize(tree, untpd.ImportSelector(imported, renamed, bound)(tree.source)) - } - 
def Number(tree: Tree)(digits: String, kind: NumberKind)(using Context): Tree = tree match { - case tree: Number if (digits == tree.digits) && (kind == tree.kind) => tree - case _ => finalize(tree, untpd.Number(digits, kind)) - } - def CapturingTypeTree(tree: Tree)(refs: List[Tree], parent: Tree)(using Context): Tree = tree match - case tree: CapturingTypeTree if (refs eq tree.refs) && (parent eq tree.parent) => tree - case _ => finalize(tree, untpd.CapturingTypeTree(refs, parent)) - - def TypedSplice(tree: Tree)(splice: tpd.Tree)(using Context): ProxyTree = tree match { - case tree: TypedSplice if splice `eq` tree.splice => tree - case _ => finalize(tree, untpd.TypedSplice(splice)(using ctx)) - } - def MacroTree(tree: Tree)(expr: Tree)(using Context): Tree = tree match { - case tree: MacroTree if expr `eq` tree.expr => tree - case _ => finalize(tree, untpd.MacroTree(expr)(tree.source)) - } - } - - abstract class UntypedTreeMap(cpy: UntypedTreeCopier = untpd.cpy) extends TreeMap(cpy) { - override def transformMoreCases(tree: Tree)(using Context): Tree = tree match { - case ModuleDef(name, impl) => - cpy.ModuleDef(tree)(name, transformSub(impl)) - case tree: DerivingTemplate => - cpy.Template(tree)(transformSub(tree.constr), transform(tree.parents), - transform(tree.derived), transformSub(tree.self), transformStats(tree.body, tree.symbol)) - case ParsedTry(expr, handler, finalizer) => - cpy.ParsedTry(tree)(transform(expr), transform(handler), transform(finalizer)) - case SymbolLit(str) => - cpy.SymbolLit(tree)(str) - case InterpolatedString(id, segments) => - cpy.InterpolatedString(tree)(id, segments.mapConserve(transform)) - case Function(args, body) => - cpy.Function(tree)(transform(args), transform(body)) - case PolyFunction(targs, body) => - cpy.PolyFunction(tree)(transform(targs), transform(body)) - case InfixOp(left, op, right) => - cpy.InfixOp(tree)(transform(left), op, transform(right)) - case PostfixOp(od, op) => - cpy.PostfixOp(tree)(transform(od), op) - case PrefixOp(op, od) => - cpy.PrefixOp(tree)(op, transform(od)) - case Parens(t) => - cpy.Parens(tree)(transform(t)) - case Tuple(trees) => - cpy.Tuple(tree)(transform(trees)) - case Throw(expr) => - cpy.Throw(tree)(transform(expr)) - case Quote(t) => - cpy.Quote(tree)(transform(t)) - case Splice(expr) => - cpy.Splice(tree)(transform(expr)) - case ForYield(enums, expr) => - cpy.ForYield(tree)(transform(enums), transform(expr)) - case ForDo(enums, body) => - cpy.ForDo(tree)(transform(enums), transform(body)) - case GenFrom(pat, expr, checkMode) => - cpy.GenFrom(tree)(transform(pat), transform(expr), checkMode) - case GenAlias(pat, expr) => - cpy.GenAlias(tree)(transform(pat), transform(expr)) - case ContextBounds(bounds, cxBounds) => - cpy.ContextBounds(tree)(transformSub(bounds), transform(cxBounds)) - case PatDef(mods, pats, tpt, rhs) => - cpy.PatDef(tree)(mods, transform(pats), transform(tpt), transform(rhs)) - case ExtMethods(paramss, methods) => - cpy.ExtMethods(tree)(transformParamss(paramss), transformSub(methods)) - case Into(tpt) => - cpy.Into(tree)(transform(tpt)) - case ImportSelector(imported, renamed, bound) => - cpy.ImportSelector(tree)(transformSub(imported), transform(renamed), transform(bound)) - case Number(_, _) | TypedSplice(_) => - tree - case MacroTree(expr) => - cpy.MacroTree(tree)(transform(expr)) - case CapturingTypeTree(refs, parent) => - cpy.CapturingTypeTree(tree)(transform(refs), transform(parent)) - case _ => - super.transformMoreCases(tree) - } - } - - abstract class UntypedTreeAccumulator[X] extends 
TreeAccumulator[X] { - self: UntypedTreeAccumulator[X] @retains(caps.cap) => - override def foldMoreCases(x: X, tree: Tree)(using Context): X = tree match { - case ModuleDef(name, impl) => - this(x, impl) - case tree: DerivingTemplate => - this(this(this(this(this(x, tree.constr), tree.parents), tree.derived), tree.self), tree.body) - case ParsedTry(expr, handler, finalizer) => - this(this(this(x, expr), handler), finalizer) - case SymbolLit(str) => - x - case InterpolatedString(id, segments) => - this(x, segments) - case Function(args, body) => - this(this(x, args), body) - case PolyFunction(targs, body) => - this(this(x, targs), body) - case InfixOp(left, op, right) => - this(this(this(x, left), op), right) - case PostfixOp(od, op) => - this(this(x, od), op) - case PrefixOp(op, od) => - this(this(x, op), od) - case Parens(t) => - this(x, t) - case Tuple(trees) => - this(x, trees) - case Throw(expr) => - this(x, expr) - case Quote(t) => - this(x, t) - case Splice(expr) => - this(x, expr) - case ForYield(enums, expr) => - this(this(x, enums), expr) - case ForDo(enums, body) => - this(this(x, enums), body) - case GenFrom(pat, expr, _) => - this(this(x, pat), expr) - case GenAlias(pat, expr) => - this(this(x, pat), expr) - case ContextBounds(bounds, cxBounds) => - this(this(x, bounds), cxBounds) - case PatDef(mods, pats, tpt, rhs) => - this(this(this(x, pats), tpt), rhs) - case ExtMethods(paramss, methods) => - this(paramss.foldLeft(x)(apply), methods) - case Into(tpt) => - this(x, tpt) - case ImportSelector(imported, renamed, bound) => - this(this(this(x, imported), renamed), bound) - case Number(_, _) => - x - case TypedSplice(splice) => - this(x, splice) - case MacroTree(expr) => - this(x, expr) - case CapturingTypeTree(refs, parent) => - this(this(x, refs), parent) - case _ => - super.foldMoreCases(x, tree) - } - } - - abstract class UntypedTreeTraverser extends UntypedTreeAccumulator[Unit] { - def traverse(tree: Tree)(using Context): Unit - def apply(x: Unit, tree: Tree)(using Context): Unit = traverse(tree) - protected def traverseChildren(tree: Tree)(using Context): Unit = foldOver((), tree) - } - - /** Fold `f` over all tree nodes, in depth-first, prefix order */ - class UntypedDeepFolder[X](f: (X, Tree) => X) extends UntypedTreeAccumulator[X] { - def apply(x: X, tree: Tree)(using Context): X = foldOver(f(x, tree), tree) - } - - /** Is there a subtree of this tree that satisfies predicate `p`? 
*/ - extension (tree: Tree) def existsSubTree(p: Tree => Boolean)(using Context): Boolean = { - val acc = new UntypedTreeAccumulator[Boolean] { - def apply(x: Boolean, t: Tree)(using Context) = x || p(t) || foldOver(x, t) - } - acc(false, tree) - } - - protected def FunProto(args: List[Tree], resType: Type)(using Context) = - ProtoTypes.FunProto(args, resType)(ctx.typer, ApplyKind.Regular) -} diff --git a/tests/pos-with-compiler-cc/dotc/cc/BoxedTypeCache.scala b/tests/pos-with-compiler-cc/dotc/cc/BoxedTypeCache.scala deleted file mode 100644 index 56b3f5ba5047..000000000000 --- a/tests/pos-with-compiler-cc/dotc/cc/BoxedTypeCache.scala +++ /dev/null @@ -1,19 +0,0 @@ -package dotty.tools -package dotc -package cc - -import core.* -import Types.*, Symbols.*, Contexts.* - -/** A one-element cache for the boxed version of an unboxed capturing type */ -class BoxedTypeCache: - private var boxed: Type = compiletime.uninitialized - private var unboxed: Type = NoType - - def apply(tp: AnnotatedType)(using Context): Type = - if tp ne unboxed then - unboxed = tp - val CapturingType(parent, refs) = tp: @unchecked - boxed = CapturingType(parent, refs, boxed = true) - boxed -end BoxedTypeCache \ No newline at end of file diff --git a/tests/pos-with-compiler-cc/dotc/cc/CaptureAnnotation.scala b/tests/pos-with-compiler-cc/dotc/cc/CaptureAnnotation.scala deleted file mode 100644 index 67222f07efbb..000000000000 --- a/tests/pos-with-compiler-cc/dotc/cc/CaptureAnnotation.scala +++ /dev/null @@ -1,77 +0,0 @@ -package dotty.tools -package dotc -package cc - -import core.* -import Types.*, Symbols.*, Contexts.*, Annotations.* -import ast.Trees.* -import ast.{tpd, untpd} -import Decorators.* -import config.Printers.capt -import printing.Printer -import printing.Texts.Text -import annotation.retains - -/** An annotation representing a capture set and whether it is boxed. - * It simulates a normal @retains annotation except that it is more efficient, - * supports variables as capture sets, and adds a `boxed` flag. - * These annotations are created during capture checking. Before that - * there are only regular @retains and @retainsByName annotations. 
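The BoxedTypeCache deleted just above (and reused by CaptureAnnotation.boxedType below) is a one-element cache keyed by reference identity: it recomputes the boxed form only when asked about a different unboxed type than last time. A minimal self-contained sketch of the same idiom, with hypothetical names (OneSlotCache) and a plain function instead of the compiler's CapturingType constructor; illustrative only, not part of this patch:

// One-slot cache: remembers the last key (compared by reference identity)
// and the value computed for it, mirroring the shape of BoxedTypeCache.
final class OneSlotCache[K <: AnyRef, V](compute: K => V):
  private var lastKey: K | Null = null
  private var lastValue: Option[V] = None

  def apply(key: K): V =
    if (lastKey ne key) || lastValue.isEmpty then
      lastKey = key
      lastValue = Some(compute(key))
    lastValue.get

@main def oneSlotCacheDemo(): Unit =
  var calls = 0
  val cache = OneSlotCache[String, Int](s => { calls += 1; s.length })
  val word = "capture"
  println(cache(word))   // computes
  println(cache(word))   // served from the single slot
  println(s"compute ran $calls time(s)")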
- * @param refs the capture set - * @param boxed whether the type carrying the annotation is boxed - * @param cls the underlying class (either annotation.retains or annotation.retainsByName) - */ -case class CaptureAnnotation(refs: CaptureSet, boxed: Boolean)(cls: Symbol) extends Annotation: - import CaptureAnnotation.* - import tpd.* - - /** A cache for boxed version of a capturing type with this annotation */ - val boxedType = BoxedTypeCache() - - /** Reconstitute annotation tree from capture set */ - override def tree(using Context) = - val elems = refs.elems.toList.map { - case cr: TermRef => ref(cr) - case cr: TermParamRef => untpd.Ident(cr.paramName).withType(cr) - case cr: ThisType => This(cr.cls) - } - val arg = repeated(elems, TypeTree(defn.AnyType)) - New(symbol.typeRef, arg :: Nil) - - override def symbol(using Context) = cls - - override def derivedAnnotation(tree: Tree)(using Context): Annotation = this - - def derivedAnnotation(refs: CaptureSet, boxed: Boolean)(using Context): Annotation = - if (this.refs eq refs) && (this.boxed == boxed) then this - else CaptureAnnotation(refs, boxed)(cls) - - override def sameAnnotation(that: Annotation)(using Context): Boolean = that match - case CaptureAnnotation(refs, boxed) => - this.refs == refs && this.boxed == boxed && this.symbol == that.symbol - case _ => false - - override def mapWith(tm: TypeMap @retains(caps.cap))(using Context) = - val elems = refs.elems.toList - val elems1 = elems.mapConserve(tm) - if elems1 eq elems then this - else if elems1.forall(_.isInstanceOf[CaptureRef]) - then derivedAnnotation(CaptureSet(elems1.asInstanceOf[List[CaptureRef]]*), boxed) - else EmptyAnnotation - - override def refersToParamOf(tl: TermLambda)(using Context): Boolean = - refs.elems.exists { - case TermParamRef(tl1, _) => tl eq tl1 - case _ => false - } - - override def toText(printer: Printer): Text = refs.toText(printer) - - override def hash: Int = - (refs.hashCode << 1) | (if boxed then 1 else 0) - - override def eql(that: Annotation) = that match - case that: CaptureAnnotation => (this.refs eq that.refs) && (this.boxed == that.boxed) - case _ => false - -end CaptureAnnotation diff --git a/tests/pos-with-compiler-cc/dotc/cc/CaptureOps.scala b/tests/pos-with-compiler-cc/dotc/cc/CaptureOps.scala deleted file mode 100644 index 0ede1825e611..000000000000 --- a/tests/pos-with-compiler-cc/dotc/cc/CaptureOps.scala +++ /dev/null @@ -1,256 +0,0 @@ -package dotty.tools -package dotc -package cc - -import core.* -import Types.*, Symbols.*, Contexts.*, Annotations.*, Flags.* -import ast.{tpd, untpd} -import Decorators.*, NameOps.* -import config.Printers.capt -import util.Property.Key -import tpd.* -import config.Feature - -private val Captures: Key[CaptureSet] = Key() -private val BoxedType: Key[BoxedTypeCache] = Key() - -/** The arguments of a @retains or @retainsByName annotation */ -private[cc] def retainedElems(tree: Tree)(using Context): List[Tree] = tree match - case Apply(_, Typed(SeqLiteral(elems, _), _) :: Nil) => elems - case _ => Nil - -/** An exception thrown if a @retains argument is not syntactically a CaptureRef */ -class IllegalCaptureRef(tpe: Type) extends Exception - -extension (tree: Tree) - - /** Map tree with CaptureRef type to its type, throw IllegalCaptureRef otherwise */ - def toCaptureRef(using Context): CaptureRef = tree.tpe match - case ref: CaptureRef => ref - case tpe => throw IllegalCaptureRef(tpe) - - /** Convert a @retains or @retainsByName annotation tree to the capture set it represents. 
- * For efficience, the result is cached as an Attachment on the tree. - */ - def toCaptureSet(using Context): CaptureSet = - tree.getAttachment(Captures) match - case Some(refs) => refs - case None => - val refs = CaptureSet(retainedElems(tree).map(_.toCaptureRef)*) - .showing(i"toCaptureSet $tree --> $result", capt) - tree.putAttachment(Captures, refs) - refs - - /** Under pureFunctions, add a @retainsByName(*)` annotation to the argument of - * a by name parameter type, turning the latter into an impure by name parameter type. - */ - def adaptByNameArgUnderPureFuns(using Context): Tree = - if Feature.pureFunsEnabledSomewhere then - val rbn = defn.RetainsByNameAnnot - Annotated(tree, - New(rbn.typeRef).select(rbn.primaryConstructor).appliedTo( - Typed( - SeqLiteral(ref(defn.captureRoot) :: Nil, TypeTree(defn.AnyType)), - TypeTree(defn.RepeatedParamType.appliedTo(defn.AnyType)) - ) - ) - ) - else tree - -extension (tp: Type) - - /** @pre `tp` is a CapturingType */ - def derivedCapturingType(parent: Type, refs: CaptureSet)(using Context): Type = tp match - case tp @ CapturingType(p, r) => - if (parent eq p) && (refs eq r) then tp - else CapturingType(parent, refs, tp.isBoxed) - - /** If this is a unboxed capturing type with nonempty capture set, its boxed version. - * Or, if type is a TypeBounds of capturing types, the version where the bounds are boxed. - * The identity for all other types. - */ - def boxed(using Context): Type = tp.dealias match - case tp @ CapturingType(parent, refs) if !tp.isBoxed && !refs.isAlwaysEmpty => - tp.annot match - case ann: CaptureAnnotation => - ann.boxedType(tp) - case ann => - ann.tree.getAttachment(BoxedType) match - case None => ann.tree.putAttachment(BoxedType, BoxedTypeCache()) - case _ => - ann.tree.attachment(BoxedType)(tp) - case tp: RealTypeBounds => - tp.derivedTypeBounds(tp.lo.boxed, tp.hi.boxed) - case _ => - tp - - /** If `sym` is a type parameter, the boxed version of `tp`, otherwise `tp` */ - def boxedIfTypeParam(sym: Symbol)(using Context) = - if sym.is(TypeParam) then tp.boxed else tp - - /** The boxed version of `tp`, unless `tycon` is a function symbol */ - def boxedUnlessFun(tycon: Type)(using Context) = - if ctx.phase != Phases.checkCapturesPhase || defn.isFunctionSymbol(tycon.typeSymbol) - then tp - else tp.boxed - - /** The capture set consisting of all top-level captures of `tp` that appear under a box. - * Unlike for `boxed` this also considers parents of capture types, unions and - * intersections, and type proxies other than abstract types. - */ - def boxedCaptureSet(using Context): CaptureSet = - def getBoxed(tp: Type): CaptureSet = tp match - case tp @ CapturingType(parent, refs) => - val pcs = getBoxed(parent) - if tp.isBoxed then refs ++ pcs else pcs - case tp: TypeRef if tp.symbol.isAbstractType => CaptureSet.empty - case tp: TypeProxy => getBoxed(tp.superType) - case tp: AndType => getBoxed(tp.tp1) ** getBoxed(tp.tp2) - case tp: OrType => getBoxed(tp.tp1) ++ getBoxed(tp.tp2) - case _ => CaptureSet.empty - getBoxed(tp) - - /** Is the boxedCaptureSet of this type nonempty? */ - def isBoxedCapturing(using Context) = !tp.boxedCaptureSet.isAlwaysEmpty - - /** If this type is a capturing type, the version with boxed statues as given by `boxed`. - * If it is a TermRef of a capturing type, and the box status flips, widen to a capturing - * type that captures the TermRef. 
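Both the tree copiers at the top of this hunk and derivedCapturingType above follow the same "derived copy" idiom: if the replacement components are reference-identical to the old ones, return the original node unchanged, otherwise rebuild it. A minimal standalone sketch of that idiom on a hypothetical Node type (not part of this patch):

// A tiny immutable node with a "derived" copy method in the style of
// derivedCapturingType: reuse `this` when nothing changed (by identity),
// otherwise allocate a fresh node.
final case class Node(label: String, children: List[Node]):
  def derivedNode(label1: String, children1: List[Node]): Node =
    if (label1 eq label) && (children1 eq children) then this
    else Node(label1, children1)

@main def derivedCopyDemo(): Unit =
  val leaf = Node("leaf", Nil)
  val tree = Node("root", List(leaf))
  println(tree.derivedNode(tree.label, tree.children) eq tree)   // true: same instance reused
  println(tree.derivedNode("root2", tree.children) eq tree)      // false: a fresh copy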
- */ - def forceBoxStatus(boxed: Boolean)(using Context): Type = tp.widenDealias match - case tp @ CapturingType(parent, refs) if tp.isBoxed != boxed => - val refs1 = tp match - case ref: CaptureRef if ref.isTracked => ref.singletonCaptureSet - case _ => refs - CapturingType(parent, refs1, boxed) - case _ => - tp - - /** Map capturing type to their parents. Capturing types accessible - * via dealising are also stripped. - */ - def stripCapturing(using Context): Type = tp.dealiasKeepAnnots match - case CapturingType(parent, _) => - parent.stripCapturing - case atd @ AnnotatedType(parent, annot) => - atd.derivedAnnotatedType(parent.stripCapturing, annot) - case _ => - tp - - /** Under pureFunctions, map regular function type to impure function type - */ - def adaptFunctionTypeUnderPureFuns(using Context): Type = tp match - case AppliedType(fn, args) - if Feature.pureFunsEnabledSomewhere && defn.isFunctionClass(fn.typeSymbol) => - val fname = fn.typeSymbol.name - defn.FunctionType( - fname.functionArity, - isContextual = fname.isContextFunction, - isErased = fname.isErasedFunction, - isImpure = true).appliedTo(args) - case _ => - tp - - /** Under pureFunctions, add a @retainsByName(*)` annotation to the argument of - * a by name parameter type, turning the latter into an impure by name parameter type. - */ - def adaptByNameArgUnderPureFuns(using Context): Type = - if Feature.pureFunsEnabledSomewhere then - AnnotatedType(tp, - CaptureAnnotation(CaptureSet.universal, boxed = false)(defn.RetainsByNameAnnot)) - else - tp - - def isCapturingType(using Context): Boolean = - tp match - case CapturingType(_, _) => true - case _ => false - - /** Is type known to be always pure by its class structure, - * so that adding a capture set to it would not make sense? - */ - def isAlwaysPure(using Context): Boolean = tp.dealias match - case tp: (TypeRef | AppliedType) => - val sym = tp.typeSymbol - if sym.isClass then sym.isPureClass - else tp.superType.isAlwaysPure - case CapturingType(parent, refs) => - parent.isAlwaysPure || refs.isAlwaysEmpty - case tp: TypeProxy => - tp.superType.isAlwaysPure - case tp: AndType => - tp.tp1.isAlwaysPure || tp.tp2.isAlwaysPure - case tp: OrType => - tp.tp1.isAlwaysPure && tp.tp2.isAlwaysPure - case _ => - false - -extension (cls: ClassSymbol) - - def pureBaseClass(using Context): Option[Symbol] = - cls.baseClasses.find(bc => - defn.pureBaseClasses.contains(bc) - || { - val selfType = bc.givenSelfType - selfType.exists && selfType.captureSet.isAlwaysEmpty - }) - -extension (sym: Symbol) - - /** A class is pure if: - * - one its base types has an explicitly declared self type with an empty capture set - * - or it is a value class - * - or it is an exception - * - or it is one of Nothing, Null, or String - */ - def isPureClass(using Context): Boolean = sym match - case cls: ClassSymbol => - cls.pureBaseClass.isDefined || defn.pureSimpleClasses.contains(cls) - case _ => - false - - /** Does this symbol allow results carrying the universal capability? - * Currently this is true only for function type applies (since their - * results are unboxed) and `erasedValue` since this function is magic in - * that is allows to conjure global capabilies from nothing (aside: can we find a - * more controlled way to achieve this?). - * But it could be generalized to other functions that so that they can take capability - * classes as arguments. 
- */ - def allowsRootCapture(using Context): Boolean = - sym == defn.Compiletime_erasedValue - || defn.isFunctionClass(sym.maybeOwner) - - /** When applying `sym`, would the result type be unboxed? - * This is the case if the result type contains a top-level reference to an enclosing - * class or method type parameter and the method does not allow root capture. - * If the type parameter is instantiated to a boxed type, that type would - * have to be unboxed in the method's result. - */ - def unboxesResult(using Context): Boolean = - def containsEnclTypeParam(tp: Type): Boolean = tp.strippedDealias match - case tp @ TypeRef(pre: ThisType, _) => tp.symbol.is(Param) - case tp: TypeParamRef => true - case tp: AndOrType => containsEnclTypeParam(tp.tp1) || containsEnclTypeParam(tp.tp2) - case tp: RefinedType => containsEnclTypeParam(tp.parent) || containsEnclTypeParam(tp.refinedInfo) - case _ => false - containsEnclTypeParam(sym.info.finalResultType) - && !sym.allowsRootCapture - && sym != defn.Caps_unsafeBox - && sym != defn.Caps_unsafeUnbox - -extension (tp: AnnotatedType) - /** Is this a boxed capturing type? */ - def isBoxed(using Context): Boolean = tp.annot match - case ann: CaptureAnnotation => ann.boxed - case _ => false - -extension (ts: List[Type]) - /** Equivalent to ts.mapconserve(_.boxedUnlessFun(tycon)) but more efficient where - * it is the identity. - */ - def boxedUnlessFun(tycon: Type)(using Context) = - if ctx.phase != Phases.checkCapturesPhase || defn.isFunctionClass(tycon.typeSymbol) - then ts - else ts.mapconserve(_.boxed) - diff --git a/tests/pos-with-compiler-cc/dotc/cc/CaptureSet.scala b/tests/pos-with-compiler-cc/dotc/cc/CaptureSet.scala deleted file mode 100644 index 2072b43089fb..000000000000 --- a/tests/pos-with-compiler-cc/dotc/cc/CaptureSet.scala +++ /dev/null @@ -1,902 +0,0 @@ -package dotty.tools -package dotc -package cc - -import core.* -import Types.*, Symbols.*, Flags.*, Contexts.*, Decorators.* -import config.Printers.capt -import Annotations.Annotation -import annotation.threadUnsafe -import annotation.constructorOnly -import annotation.internal.sharable -import reporting.trace -import printing.{Showable, Printer} -import printing.Texts.* -import util.{SimpleIdentitySet, Property} -import util.common.alwaysTrue -import scala.collection.mutable -import config.Config.ccAllowUnsoundMaps -import language.experimental.pureFunctions -import annotation.retains - -/** A class for capture sets. Capture sets can be constants or variables. - * Capture sets support inclusion constraints <:< where <:< is subcapturing. - * - * They also allow - * - mapping with functions from elements to capture sets - * - filtering with predicates on elements - * - intersecting wo capture sets - * - * That is, constraints can be of the forms - * - * cs1 <:< cs2 - * cs1 = ∪ {f(x) | x ∈ cs2} where f is a function from capture references to capture sets. - * cs1 = ∪ {x | x ∈ cs2, p(x)} where p is a predicate on capture references - * cs1 = cs2 ∩ cs2 - * - * We call the resulting constraint system "monadic set constraints". - * To support capture propagation across maps, mappings are supported only - * if the mapped function is either a bijection or if it is idempotent - * on capture references (c.f. doc comment on `map` below). - */ -sealed abstract class CaptureSet extends Showable, Pure: - import CaptureSet.* - - /** The elements of this capture set. For capture variables, - * the elements known so far. - */ - def elems: Refs - - /** Is this capture set constant (i.e. 
not an unsolved capture variable)? - * Solved capture variables count as constant. - */ - def isConst: Boolean - - /** Is this capture set always empty? For unsolved capture veriables, returns - * always false. - */ - def isAlwaysEmpty: Boolean - - /** Is this capture set definitely non-empty? */ - final def isNotEmpty: Boolean = !elems.isEmpty - - /** Convert to Const. @pre: isConst */ - def asConst: Const = this match - case c: Const => c - case v: Var => - assert(v.isConst) - Const(v.elems) - - /** Cast to variable. @pre: !isConst */ - def asVar: Var = - assert(!isConst) - asInstanceOf[Var] - - /** Does this capture set contain the root reference `*` as element? */ - final def isUniversal(using Context) = - elems.exists { - case ref: TermRef => ref.symbol == defn.captureRoot - case _ => false - } - - /** Add new elements to this capture set if allowed. - * @pre `newElems` is not empty and does not overlap with `this.elems`. - * Constant capture sets never allow to add new elements. - * Variables allow it if and only if the new elements can be included - * in all their dependent sets. - * @param origin The set where the elements come from, or `empty` if not known. - * @return CompareResult.OK if elements were added, or a conflicting - * capture set that prevents addition otherwise. - */ - protected def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult - - /** If this is a variable, add `cs` as a dependent set */ - protected def addDependent(cs: CaptureSet)(using Context, VarState): CompareResult - - /** If `cs` is a variable, add this capture set as one of its dependent sets */ - protected def addAsDependentTo(cs: CaptureSet)(using Context): this.type = - cs.addDependent(this)(using ctx, UnrecordedState) - this - - /** Try to include all references of `elems` that are not yet accounted for by this - * capture set. Inclusion is via `addNewElems`. - * @param origin The set where the elements come from, or `empty` if not known. - * @return CompareResult.OK if all unaccounted elements could be added, - * capture set that prevents addition otherwise. - */ - protected final def tryInclude(elems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = - val unaccounted = elems.filter(!accountsFor(_)) - if unaccounted.isEmpty then CompareResult.OK - else addNewElems(unaccounted, origin) - - /** Equivalent to `tryInclude({elem}, origin)`, but more efficient */ - protected final def tryInclude(elem: CaptureRef, origin: CaptureSet)(using Context, VarState): CompareResult = - if accountsFor(elem) then CompareResult.OK - else addNewElems(elem.singletonCaptureSet.elems, origin) - - /* x subsumes y if x is the same as y, or x is a this reference and y refers to a field of x */ - extension (x: CaptureRef) private def subsumes(y: CaptureRef) = - (x eq y) - || y.match - case y: TermRef => y.prefix eq x - case _ => false - - /** {x} <:< this where <:< is subcapturing, but treating all variables - * as frozen. - */ - def accountsFor(x: CaptureRef)(using Context): Boolean = - reporting.trace(i"$this accountsFor $x, ${x.captureSetOfInfo}?", show = true) { - elems.exists(_.subsumes(x)) - || !x.isRootCapability && x.captureSetOfInfo.subCaptures(this, frozen = true).isOK - } - - /** A more optimistic version of accountsFor, which does not take variable supersets - * of the `x` reference into account. A set might account for `x` if it accounts - * for `x` in a state where we assume all supersets of `x` have just the elements - * known at this point. 
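accountsFor and tryInclude above are the core of the inclusion check: an element is added only if no existing element already subsumes it, constant sets refuse genuinely new elements, and variables accept and remember them. A much simplified, self-contained model of that behaviour, with hypothetical SimpleSet/ConstSet/VarSet names and strings standing in for capture references; the real code additionally propagates to dependent sets and supports rollback:

// Simplified model: references are plain strings, "x.f" is subsumed by "x".
def subsumes(x: String, y: String): Boolean =
  x == y || y.startsWith(x + ".")

sealed trait SimpleSet:
  def elems: Set[String]
  // Does some existing element already cover `x`?
  def accountsFor(x: String): Boolean = elems.exists(subsumes(_, x))
  // Try to add the elements not yet accounted for; report success.
  def tryInclude(xs: Set[String]): Boolean

final class ConstSet(val elems: Set[String]) extends SimpleSet:
  def tryInclude(xs: Set[String]): Boolean =
    xs.forall(accountsFor)                    // constants never grow

final class VarSet(initial: Set[String]) extends SimpleSet:
  private var current = initial
  def elems: Set[String] = current
  def tryInclude(xs: Set[String]): Boolean =
    current ++= xs.filterNot(accountsFor)     // variables grow as needed
    true

@main def tryIncludeDemo(): Unit =
  println(ConstSet(Set("io")).tryInclude(Set("io.read", "net")))  // false: "net" not covered
  val v = VarSet(Set("io"))
  println(v.tryInclude(Set("io.read", "net")))                    // true: the variable absorbs "net"
  println(v.elems)                                                // now contains io and net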
On the other hand if x's capture set has no known elements, - * a set `cs` might account for `x` only if it subsumes `x` or it contains the - * root capability `*`. - */ - def mightAccountFor(x: CaptureRef)(using Context): Boolean = - reporting.trace(i"$this mightAccountFor $x, ${x.captureSetOfInfo}?", show = true) { - elems.exists(elem => elem.subsumes(x) || elem.isRootCapability) - || !x.isRootCapability - && { - val elems = x.captureSetOfInfo.elems - !elems.isEmpty && elems.forall(mightAccountFor) - } - } - - /** A more optimistic version of subCaptures used to choose one of two typing rules - * for selections and applications. `cs1 mightSubcapture cs2` if `cs2` might account for - * every element currently known to be in `cs1`. - */ - def mightSubcapture(that: CaptureSet)(using Context): Boolean = - elems.forall(that.mightAccountFor) - - /** The subcapturing test. - * @param frozen if true, no new variables or dependent sets are allowed to - * be added when making this test. An attempt to add either - * will result in failure. - */ - final def subCaptures(that: CaptureSet, frozen: Boolean)(using Context): CompareResult = - subCaptures(that)(using ctx, if frozen then FrozenState else VarState()) - - /** The subcapturing test, using a given VarState */ - private def subCaptures(that: CaptureSet)(using Context, VarState): CompareResult = - def recur(elems: List[CaptureRef]): CompareResult = elems match - case elem :: elems1 => - var result = that.tryInclude(elem, this) - if !result.isOK && !elem.isRootCapability && summon[VarState] != FrozenState then - result = elem.captureSetOfInfo.subCaptures(that) - if result.isOK then - recur(elems1) - else - varState.rollBack() - result - case Nil => - addDependent(that) - recur(elems.toList) - .showing(i"subcaptures $this <:< $that = $result", capt)(using null) - - /** Two capture sets are considered =:= equal if they mutually subcapture each other - * in a frozen state. - */ - def =:= (that: CaptureSet)(using Context): Boolean = - this.subCaptures(that, frozen = true).isOK - && that.subCaptures(this, frozen = true).isOK - - /** The smallest capture set (via <:<) that is a superset of both - * `this` and `that` - */ - def ++ (that: CaptureSet)(using Context): CaptureSet = - if this.subCaptures(that, frozen = true).isOK then that - else if that.subCaptures(this, frozen = true).isOK then this - else if this.isConst && that.isConst then Const(this.elems ++ that.elems) - else Var(this.elems ++ that.elems).addAsDependentTo(this).addAsDependentTo(that) - - /** The smallest superset (via <:<) of this capture set that also contains `ref`. 
- */ - def + (ref: CaptureRef)(using Context): CaptureSet = - this ++ ref.singletonCaptureSet - - /** The largest capture set (via <:<) that is a subset of both `this` and `that` - */ - def **(that: CaptureSet)(using Context): CaptureSet = - if this.subCaptures(that, frozen = true).isOK then this - else if that.subCaptures(this, frozen = true).isOK then that - else if this.isConst && that.isConst then Const(elemIntersection(this, that)) - else Intersected(this, that) - - /** The largest subset (via <:<) of this capture set that does not account for - * any of the elements in the constant capture set `that` - */ - def -- (that: CaptureSet.Const)(using Context): CaptureSet = - val elems1 = elems.filter(!that.accountsFor(_)) - if elems1.size == elems.size then this - else if this.isConst then Const(elems1) - else Diff(asVar, that) - - /** The largest subset (via <:<) of this capture set that does not account for `ref` */ - def - (ref: CaptureRef)(using Context): CaptureSet = - this -- ref.singletonCaptureSet - - /** The largest subset (via <:<) of this capture set that only contains elements - * for which `p` is true. - */ - def filter(p: (c: Context) ?-> (CaptureRef -> Boolean) @retains(c))(using Context): CaptureSet = - if this.isConst then - val elems1 = elems.filter(p) - if elems1 == elems then this - else Const(elems.filter(p)) - else Filtered(asVar, p) - - /** Capture set obtained by applying `tm` to all elements of the current capture set - * and joining the results. If the current capture set is a variable, the same - * transformation is applied to all future additions of new elements. - * - * Note: We have a problem how we handle the situation where we have a mapped set - * - * cs2 = tm(cs1) - * - * and then the propagation solver adds a new element `x` to `cs2`. What do we - * know in this case about `cs1`? We can answer this question in a sound way only - * if `tm` is a bijection on capture references or it is idempotent on capture references. - * (see definition in IdempotentCapRefMap). - * If `tm` is a bijection we know that `tm^-1(x)` must be in `cs1`. If `tm` is idempotent - * one possible solution is that `x` is in `cs1`, which is what we assume in this case. - * That strategy is sound but not complete. - * - * If `tm` is some other map, we don't know how to handle this case. For now, - * we simply refuse to handle other maps. If they do need to be handled, - * `OtherMapped` provides some approximation to a solution, but it is neither - * sound nor complete. - */ - def map(tm: TypeMap)(using Context): CaptureSet = tm match - case tm: BiTypeMap => - val mappedElems = elems.map(tm.forward) - if isConst then - if mappedElems == elems then this - else Const(mappedElems) - else BiMapped(asVar, tm, mappedElems) - case tm: IdentityCaptRefMap => - this - case _ => - val mapped = mapRefs(elems, tm, tm.variance) - if isConst then - if mapped.isConst && mapped.elems == elems then this - else mapped - else Mapped(asVar, tm, tm.variance, mapped) - - /** A mapping resulting from substituting parameters of a BindingType to a list of types */ - def substParams(tl: BindingType, to: List[Type])(using Context) = - map(Substituters.SubstParamsMap(tl, to).detach) - - /** Invoke handler if this set has (or later aquires) the root capability `*` */ - def disallowRootCapability(handler: () -> Context ?-> Unit)(using Context): this.type = - if isUniversal then handler() - this - - /** An upper approximation of this capture set, i.e. a constant set that is - * subcaptured by this set. 
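The ++ and ** operations above avoid allocating when they can: if one operand already subcaptures the other, an existing operand is returned as-is, and a fresh (possibly derived) set is built only otherwise. The same reuse pattern, sketched on ordinary Sets with a hypothetical unionReusing helper (constants only, not part of this patch):

// Union that returns an existing operand whenever one already covers the other,
// in the spirit of CaptureSet.++ above.
def unionReusing[A](a: Set[A], b: Set[A]): Set[A] =
  if a.subsetOf(b) then b
  else if b.subsetOf(a) then a
  else a ++ b

@main def unionReusingDemo(): Unit =
  val small = Set("io")
  val big = Set("io", "net")
  println(unionReusing(small, big) eq big)   // true: the larger operand is reused
  println(unionReusing(Set("fs"), big))      // a fresh set containing fs, io, net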
If the current set is a variable - * it is the intersection of all upper approximations of known supersets - * of the variable. - * The upper approximation is meaningful only if it is constant. If not, - * `upperApprox` can return an arbitrary capture set variable. - * `upperApprox` is used in `solve`. - */ - protected def upperApprox(origin: CaptureSet)(using Context): CaptureSet - - /** Assuming set this set dependds on was just solved to be constant, propagate this info - * to this set. This might result in the set being solved to be constant - * itself. - */ - protected def propagateSolved()(using Context): Unit = () - - /** This capture set with a description that tells where it comes from */ - def withDescription(description: String): CaptureSet - - /** The provided description (using `withDescription`) for this capture set or else "" */ - def description: String - - /** A regular @retains or @retainsByName annotation with the elements of this set as arguments. */ - def toRegularAnnotation(cls: Symbol)(using Context): Annotation = - Annotation(CaptureAnnotation(this, boxed = false)(cls).tree) - - override def toText(printer: Printer): Text = - Str("{") ~ Text(elems.toList.map(printer.toTextCaptureRef), ", ") ~ Str("}") ~~ description - -object CaptureSet: - type Refs = SimpleIdentitySet[CaptureRef] - type Vars = SimpleIdentitySet[Var] - type Deps = SimpleIdentitySet[CaptureSet] - - @sharable private var varId = 0 - - /** If set to `true`, capture stack traces that tell us where sets are created */ - private final val debugSets = false - - private val emptySet = SimpleIdentitySet.empty - - /** The empty capture set `{}` */ - val empty: CaptureSet.Const = Const(emptySet) - - /** The universal capture set `{*}` */ - def universal(using Context): CaptureSet = - defn.captureRoot.termRef.singletonCaptureSet - - /** Used as a recursion brake */ - @sharable private[dotc] val Pending = Const(SimpleIdentitySet.empty) - - def apply(elems: CaptureRef*)(using Context): CaptureSet.Const = - if elems.isEmpty then empty - else Const(SimpleIdentitySet(elems.map(_.normalizedRef)*)) - - def apply(elems: Refs)(using Context): CaptureSet.Const = - if elems.isEmpty then empty else Const(elems) - - /** The subclass of constant capture sets with given elements `elems` */ - class Const private[CaptureSet] (val elems: Refs, val description: String = "") extends CaptureSet: - def isConst = true - def isAlwaysEmpty = elems.isEmpty - - def addNewElems(elems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = - CompareResult.fail(this) - - def addDependent(cs: CaptureSet)(using Context, VarState) = CompareResult.OK - - def upperApprox(origin: CaptureSet)(using Context): CaptureSet = this - - def withDescription(description: String): Const = Const(elems, description) - - override def toString = elems.toString - end Const - - /** The subclass of captureset variables with given initial elements */ - class Var(initialElems: Refs = emptySet) extends CaptureSet: - - /** A unique identification number for diagnostics */ - val id = - varId += 1 - varId - - /** A variable is solved if it is aproximated to a from-then-on constant set. */ - private var isSolved: Boolean = false - - /** The elements currently known to be in the set */ - var elems: Refs = initialElems - - /** The sets currently known to be dependent sets (i.e. new additions to this set - * are propagated to these dependent sets.) 
- */ - var deps: Deps = emptySet - - def isConst = isSolved - def isAlwaysEmpty = false - - /** A handler to be invoked if the root reference `*` is added to this set - * The handler is pure in the sense that it will only output diagnostics. - */ - var rootAddedHandler: () -> Context ?-> Unit = () => () - - var description: String = "" - - /** Record current elements in given VarState provided it does not yet - * contain an entry for this variable. - */ - private def recordElemsState()(using VarState): Boolean = - varState.getElems(this) match - case None => varState.putElems(this, elems) - case _ => true - - /** Record current dependent sets in given VarState provided it does not yet - * contain an entry for this variable. - */ - private[CaptureSet] def recordDepsState()(using VarState): Boolean = - varState.getDeps(this) match - case None => varState.putDeps(this, deps) - case _ => true - - /** Reset elements to what was recorded in `state` */ - def resetElems()(using state: VarState): Unit = - elems = state.elems(this) - - /** Reset dependent sets to what was recorded in `state` */ - def resetDeps()(using state: VarState): Unit = - deps = state.deps(this) - - def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = - if !isConst && recordElemsState() then - elems ++= newElems - if isUniversal then rootAddedHandler() - // assert(id != 2 || elems.size != 2, this) - (CompareResult.OK /: deps) { (r, dep) => - r.andAlso(dep.tryInclude(newElems, this)) - } - else // fail if variable is solved or given VarState is frozen - CompareResult.fail(this) - - def addDependent(cs: CaptureSet)(using Context, VarState): CompareResult = - if (cs eq this) || cs.isUniversal || isConst then - CompareResult.OK - else if recordDepsState() then - deps += cs - CompareResult.OK - else - CompareResult.fail(this) - - override def disallowRootCapability(handler: () -> Context ?-> Unit)(using Context): this.type = - rootAddedHandler = handler - super.disallowRootCapability(handler) - - private var computingApprox = false - - /** Roughly: the intersection of all constant known supersets of this set. - * The aim is to find an as-good-as-possible constant set that is a superset - * of this set. The universal set {*} is a sound fallback. - */ - final def upperApprox(origin: CaptureSet)(using Context): CaptureSet = - if computingApprox then universal - else if isConst then this - else - computingApprox = true - try computeApprox(origin).ensuring(_.isConst) - finally computingApprox = false - - /** The intersection of all upper approximations of dependent sets */ - protected def computeApprox(origin: CaptureSet)(using Context): CaptureSet = - (universal /: deps) { (acc, sup) => acc ** sup.upperApprox(this) } - - /** Widen the variable's elements to its upper approximation and - * mark it as constant from now on. This is used for contra-variant type variables - * in the results of defs and vals. 
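Var.addNewElems above shows the propagation step of the solver: new elements are first recorded locally and then pushed into every dependent set, and the whole addition fails if any dependent set rejects them. A compact standalone sketch of that propagation with a hypothetical MiniVar (dependents modelled as acceptance predicates, no rollback; not part of this patch):

final class MiniVar(var elems: Set[String]):
  // Each dependent is modelled as a predicate: "do I accept these new elements?"
  private var deps: List[Set[String] => Boolean] = Nil

  def addDependent(accept: Set[String] => Boolean): Unit =
    deps = accept :: deps

  // Record the new elements, then push them to every dependent set;
  // the addition succeeds only if all dependents accept it.
  def addNewElems(newElems: Set[String]): Boolean =
    elems ++= newElems
    deps.forall(_(newElems))

@main def propagationDemo(): Unit =
  val allowed = Set("io", "net")
  val v = MiniVar(Set("io"))
  v.addDependent(xs => xs.subsetOf(allowed))  // plays the role of a constant superset
  println(v.addNewElems(Set("net")))          // true: accepted by the dependent
  println(v.addNewElems(Set("fs")))           // false: the dependent rejects "fs"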
- */ - def solve()(using Context): Unit = - if !isConst then - val approx = upperApprox(empty) - //println(i"solving var $this $approx ${approx.isConst} deps = ${deps.toList}") - val newElems = approx.elems -- elems - if newElems.isEmpty || addNewElems(newElems, empty)(using ctx, VarState()).isOK then - markSolved() - - /** Mark set as solved and propagate this info to all dependent sets */ - def markSolved()(using Context): Unit = - isSolved = true - deps.foreach(_.propagateSolved()) - - def withDescription(description: String): this.type = - this.description = - if this.description.isEmpty then description - else s"${this.description} and $description" - this - - /** Used for diagnostics and debugging: A string that traces the creation - * history of a variable by following source links. Each variable on the - * path is characterized by the variable's id and the first letter of the - * variable's class name. The path ends in a plain variable with letter `V` that - * is not derived from some other variable. - */ - protected def ids(using Context): String = - val trail = this.match - case dv: DerivedVar => dv.source.ids - case _ => "" - s"$id${getClass.getSimpleName.nn.take(1)}$trail" - - /** Adds variables to the ShownVars context property if that exists, which - * establishes a record of all variables printed in an error message. - * Prints variables wih ids under -Ycc-debug. - */ - override def toText(printer: Printer): Text = inContext(printer.printerContext) { - for vars <- ctx.property(ShownVars) do vars += this - super.toText(printer) ~ (Str(ids) provided !isConst && ctx.settings.YccDebug.value) - } - - override def toString = s"Var$id$elems" - end Var - - /** A variable that is derived from some other variable via a map or filter. */ - abstract class DerivedVar(initialElems: Refs)(using @constructorOnly ctx: Context) - extends Var(initialElems): - - // For debugging: A trace where a set was created. Note that logically it would make more - // sense to place this variable in Mapped, but that runs afoul of the initializatuon checker. - val stack = if debugSets && this.isInstanceOf[Mapped] then (new Throwable).getStackTrace().nn.take(20) else null - - /** The variable from which this variable is derived */ - def source: Var - - addAsDependentTo(source) - - override def propagateSolved()(using Context) = - if source.isConst && !isConst then markSolved() - end DerivedVar - - /** A variable that changes when `source` changes, where all additional new elements are mapped - * using ∪ { tm(x) | x <- source.elems }. - * @param source the original set that is mapped - * @param tm the type map, which is assumed to be idempotent on capture refs - * (except if ccUnsoundMaps is enabled) - * @param variance the assumed variance with which types with capturesets of size >= 2 are approximated - * (i.e. co: full capture set, contra: empty set, nonvariant is not allowed.) - * @param initial The initial mappings of source's elements at the point the Mapped set is created. 
- */ - class Mapped private[CaptureSet] - (val source: Var, tm: TypeMap, variance: Int, initial: CaptureSet)(using @constructorOnly ctx: Context) - extends DerivedVar(initial.elems): - addAsDependentTo(initial) // initial mappings could change by propagation - - private def mapIsIdempotent = tm.isInstanceOf[IdempotentCaptRefMap] - - assert(ccAllowUnsoundMaps || mapIsIdempotent, tm.getClass) - - private def whereCreated(using Context): String = - if stack == null then "" - else i""" - |Stack trace of variable creation:" - |${stack.mkString("\n")}""" - - override def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = - val added = - if origin eq source then // elements have to be mapped - mapRefs(newElems, tm, variance) - else - // elements are added by subcapturing propagation with this Mapped set - // as superset; no mapping is necessary or allowed. - Const(newElems) - super.addNewElems(added.elems, origin) - .andAlso { - if added.isConst then CompareResult.OK - else if added.asVar.recordDepsState() then { addAsDependentTo(added); CompareResult.OK } - else CompareResult.fail(this) - } - .andAlso { - if (origin ne source) && (origin ne initial) && mapIsIdempotent then - // `tm` is idempotent, propagate back elems from image set. - // This is sound, since we know that for `r in newElems: tm(r) = r`, hence - // `r` is _one_ possible solution in `source` that would make an `r` appear in this set. - // It's not necessarily the only possible solution, so the scheme is incomplete. - source.tryInclude(newElems, this) - else if !mapIsIdempotent && variance <= 0 && !origin.isConst && (origin ne initial) && (origin ne source) then - // The map is neither a BiTypeMap nor an idempotent type map. - // In that case there's no much we can do. - // The scheme then does not propagate added elements back to source and rejects adding - // elements from variable sources in contra- and non-variant positions. In essence, - // we approximate types resulting from such maps by returning a possible super type - // from the actual type. But this is neither sound nor complete. - report.warning(em"trying to add elems ${CaptureSet(newElems)} from unrecognized source $origin of mapped set $this$whereCreated") - CompareResult.fail(this) - else - CompareResult.OK - } - - override def computeApprox(origin: CaptureSet)(using Context): CaptureSet = - if source eq origin then - // it's a mapping of origin, so not a superset of `origin`, - // therefore don't contribute to the intersection. - universal - else - source.upperApprox(this).map(tm) - - override def propagateSolved()(using Context) = - if initial.isConst then super.propagateSolved() - - override def toString = s"Mapped$id($source, elems = $elems)" - end Mapped - - /** A mapping where the type map is required to be a bijection. - * Parameters as in Mapped. 
- */ - final class BiMapped private[CaptureSet] - (val source: Var, bimap: BiTypeMap, initialElems: Refs)(using @constructorOnly ctx: Context) - extends DerivedVar(initialElems): - - override def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = - if origin eq source then - super.addNewElems(newElems.map(bimap.forward), origin) - else - super.addNewElems(newElems, origin) - .andAlso { - source.tryInclude(newElems.map(bimap.backward), this) - .showing(i"propagating new elems ${CaptureSet(newElems)} backward from $this to $source", capt)(using null) - } - - /** For a BiTypeMap, supertypes of the mapped type also constrain - * the source via the inverse type mapping and vice versa. That is, if - * B = f(A) and B <: C, then A <: f^-1(C), so C should flow into - * the upper approximation of A. - * Conversely if A <: C2, then we also know that B <: f(C2). - * These situations are modeled by the two branches of the conditional below. - */ - override def computeApprox(origin: CaptureSet)(using Context): CaptureSet = - val supApprox = super.computeApprox(this) - if source eq origin then supApprox.map(bimap.inverseTypeMap.detach) - else source.upperApprox(this).map(bimap) ** supApprox - - override def toString = s"BiMapped$id($source, elems = $elems)" - end BiMapped - - /** A variable with elements given at any time as { x <- source.elems | p(x) } */ - class Filtered private[CaptureSet] - (val source: Var, p: (c: Context) ?-> (CaptureRef -> Boolean) @retains(c))(using @constructorOnly ctx: Context) - extends DerivedVar(source.elems.filter(p)): - - override def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = - val filtered = newElems.filter(p) - if origin eq source then - super.addNewElems(filtered, origin) - else - // Filtered elements have to be back-propagated to source. - // Elements that don't satisfy `p` are not allowed. - super.addNewElems(newElems, origin) - .andAlso { - if filtered.size == newElems.size then source.tryInclude(newElems, this) - else CompareResult.fail(this) - } - - override def computeApprox(origin: CaptureSet)(using Context): CaptureSet = - if source eq origin then - // it's a filter of origin, so not a superset of `origin`, - // therefore don't contribute to the intersection. - universal - else - source.upperApprox(this).filter(p) - - override def toString = s"${getClass.getSimpleName}$id($source, elems = $elems)" - end Filtered - - /** A variable with elements given at any time as { x <- source.elems | !other.accountsFor(x) } */ - class Diff(source: Var, other: Const)(using @constructorOnly ctx: Context) - extends Filtered(source, !other.accountsFor(_)) - - class Intersected(cs1: CaptureSet, cs2: CaptureSet)(using @constructorOnly ctx: Context) - extends Var(elemIntersection(cs1, cs2)): - addAsDependentTo(cs1) - addAsDependentTo(cs2) - deps += cs1 - deps += cs2 - - override def addNewElems(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = - val added = - if origin eq cs1 then newElems.filter(cs2.accountsFor) - else if origin eq cs2 then newElems.filter(cs1.accountsFor) - else newElems - // If origin is not cs1 or cs2, then newElems will be propagated to - // cs1, cs2 since they are in deps. 
- super.addNewElems(added, origin) - - override def computeApprox(origin: CaptureSet)(using Context): CaptureSet = - if (origin eq cs1) || (origin eq cs2) then - // it's a combination of origin with some other set, so not a superset of `origin`, - // therefore don't contribute to the intersection. - universal - else - CaptureSet(elemIntersection(cs1.upperApprox(this), cs2.upperApprox(this))) - - override def propagateSolved()(using Context) = - if cs1.isConst && cs2.isConst && !isConst then markSolved() - end Intersected - - def elemIntersection(cs1: CaptureSet, cs2: CaptureSet)(using Context): Refs = - cs1.elems.filter(cs2.mightAccountFor) ++ cs2.elems.filter(cs1.mightAccountFor) - - /** Extrapolate tm(r) according to `variance`. Let r1 be the result of tm(r). - * - If r1 is a tracked CaptureRef, return {r1} - * - If r1 has an empty capture set, return {} - * - Otherwise, - * - if the variance is covariant, return r1's capture set - * - if the variance is contravariant, return {} - * - Otherwise assertion failure - */ - def extrapolateCaptureRef(r: CaptureRef, tm: TypeMap, variance: Int)(using Context): CaptureSet = - val r1 = tm(r) - val upper = r1.captureSet - def isExact = - upper.isAlwaysEmpty || upper.isConst && upper.elems.size == 1 && upper.elems.contains(r1) - if variance > 0 || isExact then upper - else if variance < 0 then CaptureSet.empty - else assert(false, i"trying to add $upper from $r via ${tm.getClass} in a non-variant setting") - - /** Apply `f` to each element in `xs`, and join result sets with `++` */ - def mapRefs(xs: Refs, f: CaptureRef => CaptureSet)(using Context): CaptureSet = - ((empty: CaptureSet) /: xs)((cs, x) => cs ++ f(x)) - - /** Apply extrapolated `tm` to each element in `xs`, and join result sets with `++` */ - def mapRefs(xs: Refs, tm: TypeMap, variance: Int)(using Context): CaptureSet = - mapRefs(xs, extrapolateCaptureRef(_, tm, variance)) - - /** Return true iff - * - arg1 is a TypeBounds >: CL T <: CH T of two capturing types with equal parents. - * - arg2 is a capturing type CA U - * - CH <: CA <: CL - * In other words, we can unify CL, CH and CA. - */ - def subCapturesRange(arg1: TypeBounds, arg2: Type)(using Context): Boolean = arg1 match - case TypeBounds(CapturingType(lo, loRefs), CapturingType(hi, hiRefs)) if lo =:= hi => - given VarState = VarState() - val cs2 = arg2.captureSet - hiRefs.subCaptures(cs2).isOK && cs2.subCaptures(loRefs).isOK - case _ => - false - - /** A TypeMap with the property that every capture reference in the image - * of the map is mapped to itself. I.e. for all capture references r1, r2, - * if M(r1) == r2 then M(r2) == r2. - */ - trait IdempotentCaptRefMap extends TypeMap - - /** A TypeMap that is the identity on capture references */ - trait IdentityCaptRefMap extends TypeMap - - type CompareResult = CompareResult.TYPE - - /** The result of subcapturing comparisons is an opaque type CompareResult.TYPE. - * This is either OK, indicating success, or - * another capture set, indicating failure. The failure capture set - * is the one that did not allow propagaton of elements into it. 
- */ - object CompareResult: - opaque type TYPE = CaptureSet - val OK: TYPE = Const(emptySet) - def fail(cs: CaptureSet): TYPE = cs - - extension (result: TYPE) - /** The result is OK */ - def isOK: Boolean = result eq OK - /** If not isOK, the blocking capture set */ - def blocking: CaptureSet = result - inline def andAlso(op: Context ?=> TYPE)(using Context): TYPE = if result.isOK then op else result - def show(using Context): String = if result.isOK then "OK" else i"$result" - end CompareResult - - /** A VarState serves as a snapshot mechanism that can undo - * additions of elements or super sets if an operation fails - */ - class VarState: - - /** A map from captureset variables to their elements at the time of the snapshot. */ - private val elemsMap: util.EqHashMap[Var, Refs] = new util.EqHashMap - - /** A map from captureset variables to their dependent sets at the time of the snapshot. */ - private val depsMap: util.EqHashMap[Var, Deps] = new util.EqHashMap - - /** The recorded elements of `v` (it's required that a recording was made) */ - def elems(v: Var): Refs = elemsMap(v) - - /** Optionally the recorded elements of `v`, None if nothing was recorded for `v` */ - def getElems(v: Var): Option[Refs] = elemsMap.get(v) - - /** Record elements, return whether this was allowed. - * By default, recording is allowed but the special state FrozenState - * overrides this. - */ - def putElems(v: Var, elems: Refs): Boolean = { elemsMap(v) = elems; true } - - /** The recorded dependent sets of `v` (it's required that a recording was made) */ - def deps(v: Var): Deps = depsMap(v) - - /** Optionally the recorded dependent sets of `v`, None if nothing was recorded for `v` */ - def getDeps(v: Var): Option[Deps] = depsMap.get(v) - - /** Record dependent sets, return whether this was allowed. - * By default, recording is allowed but the special state FrozenState - * overrides this. - */ - def putDeps(v: Var, deps: Deps): Boolean = { depsMap(v) = deps; true } - - /** Roll back global state to what was recorded in this VarState */ - def rollBack(): Unit = - elemsMap.keysIterator.foreach(_.resetElems()(using this)) - depsMap.keysIterator.foreach(_.resetDeps()(using this)) - end VarState - - /** A special state that does not allow to record elements or dependent sets. - * In effect this means that no new elements or dependent sets can be added - * in this state (since the previous state cannot be recorded in a snapshot) - */ - @sharable - object FrozenState extends VarState: - override def putElems(v: Var, refs: Refs) = false - override def putDeps(v: Var, deps: Deps) = false - override def rollBack(): Unit = () - - @sharable - /** A special state that turns off recording of elements. Used only - * in `addSub` to prevent cycles in recordings. 
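VarState above is a snapshot mechanism: before a speculative subcapturing test, each touched variable records its current elements at most once, and on failure rollBack restores exactly those recordings (FrozenState refuses to record anything, which makes any growth fail). A self-contained sketch of the same record-once / roll-back idiom over plain mutable cells, with hypothetical Cell/Snapshot names (not part of this patch):

import scala.collection.mutable

// A mutable cell whose value is saved in a snapshot the first time it is
// touched during a speculative operation, so the snapshot can undo it later.
final class Cell(var value: Set[String])

final class Snapshot:
  private val saved = mutable.Map.empty[Cell, Set[String]]
  def record(c: Cell): Unit =
    if !saved.contains(c) then saved(c) = c.value   // record only once
  def rollBack(): Unit =
    for (c, old) <- saved do c.value = old

@main def snapshotDemo(): Unit =
  val c = Cell(Set("io"))
  val snap = Snapshot()
  snap.record(c)
  c.value += "net"                 // speculative growth
  println(c.value)                 // io and net
  snap.rollBack()                  // the speculation failed: undo it
  println(c.value)                 // back to io only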
- */ - private object UnrecordedState extends VarState: - override def putElems(v: Var, refs: Refs) = true - override def putDeps(v: Var, deps: Deps) = true - override def rollBack(): Unit = () - - /** The current VarState, as passed by the implicit context */ - def varState(using state: VarState): VarState = state - - /* Not needed: - def ofClass(cinfo: ClassInfo, argTypes: List[Type])(using Context): CaptureSet = - CaptureSet.empty - def captureSetOf(tp: Type): CaptureSet = tp match - case tp: TypeRef if tp.symbol.is(ParamAccessor) => - def mapArg(accs: List[Symbol], tps: List[Type]): CaptureSet = accs match - case acc :: accs1 if tps.nonEmpty => - if acc == tp.symbol then tps.head.captureSet - else mapArg(accs1, tps.tail) - case _ => - empty - mapArg(cinfo.cls.paramAccessors, argTypes) - case _ => - tp.captureSet - val css = - for - parent <- cinfo.parents if parent.classSymbol == defn.RetainingClass - arg <- parent.argInfos - yield captureSetOf(arg) - css.foldLeft(empty)(_ ++ _) - */ - - /** The capture set of the type underlying a CaptureRef */ - def ofInfo(ref: CaptureRef)(using Context): CaptureSet = ref match - case ref: TermRef if ref.isRootCapability => ref.singletonCaptureSet - case _ => ofType(ref.underlying) - - /** Capture set of a type */ - def ofType(tp: Type)(using Context): CaptureSet = - def recur(tp: Type): CaptureSet = tp.dealias match - case tp: TermRef => - tp.captureSet - case tp: TermParamRef => - tp.captureSet - case _: TypeRef => - if tp.classSymbol.hasAnnotation(defn.CapabilityAnnot) then universal else empty - case _: TypeParamRef => - empty - case CapturingType(parent, refs) => - recur(parent) ++ refs - case AppliedType(tycon, args) => - val cs = recur(tycon) - tycon.typeParams match - case tparams @ (LambdaParam(tl, _) :: _) => cs.substParams(tl, args) - case _ => cs - case tp: TypeProxy => - recur(tp.underlying) - case AndType(tp1, tp2) => - recur(tp1) ** recur(tp2) - case OrType(tp1, tp2) => - recur(tp1) ++ recur(tp2) - case _ => - empty - recur(tp) - .showing(i"capture set of $tp = $result", capt) - - private val ShownVars: Property.Key[mutable.Set[Var]] = Property.Key() - - /** Perform `op`. Under -Ycc-debug, collect and print info about all variables reachable - * via `(_.deps)*` from the variables that were shown in `op`. - */ - def withCaptureSetsExplained[T](op: Context ?=> T)(using ctx: Context): T = - if ctx.settings.YccDebug.value then - val shownVars = mutable.Set[Var]() - inContext(ctx.withProperty(ShownVars, Some(shownVars))) { - try op - finally - val reachable = mutable.Set[Var]() - val todo = mutable.Queue[Var]() ++= shownVars - def incl(cv: Var): Unit = - if !reachable.contains(cv) then todo += cv - while todo.nonEmpty do - val cv = todo.dequeue() - if !reachable.contains(cv) then - reachable += cv - cv.deps.foreach { - case cv: Var => incl(cv) - case _ => - } - cv match - case cv: DerivedVar => incl(cv.source) - case _ => - val allVars = reachable.toArray.sortBy(_.id) - println(i"Capture set dependencies:") - for cv <- allVars do - println(i" ${cv.show.padTo(20, ' ')} :: ${cv.deps.toList}%, %") - } - else op -end CaptureSet diff --git a/tests/pos-with-compiler-cc/dotc/cc/CapturingType.scala b/tests/pos-with-compiler-cc/dotc/cc/CapturingType.scala deleted file mode 100644 index e9862f1f20b8..000000000000 --- a/tests/pos-with-compiler-cc/dotc/cc/CapturingType.scala +++ /dev/null @@ -1,72 +0,0 @@ -package dotty.tools -package dotc -package cc - -import core.* -import Types.*, Symbols.*, Contexts.* - -/** A (possibly boxed) capturing type. 
This is internally represented as an annotated type with a @retains - * or @retainsByName annotation, but the extractor will succeed only at phase CheckCaptures. - * That way, we can ignore caturing information until phase CheckCaptures since it is - * wrapped in a plain annotation. - * - * The same trick does not work for the boxing information. Boxing is context dependent, so - * we have to add that information in the Setup step preceding CheckCaptures. Boxes are - * added for all type arguments of methods. For type arguments of applied types a different - * strategy is used where we box arguments of applied types that are not functions when - * accessing the argument. - * - * An alternative strategy would add boxes also to arguments of applied types during setup. - * But this would have to be done for all possibly accessibly types from the compiled units - * as well as their dependencies. It's difficult to do this in a DenotationTransformer without - * accidentally forcing symbol infos. That's why this alternative was not implemented. - * If we would go back on this it would make sense to also treat captuyring types different - * from annotations and to generate them all during Setup and in DenotationTransformers. - */ -object CapturingType: - - /** Smart constructor that drops empty capture sets and fuses compatible capturiong types. - * An outer type capturing type A can be fused with an inner capturing type B if their - * boxing status is the same or if A is boxed. - */ - def apply(parent: Type, refs: CaptureSet, boxed: Boolean = false)(using Context): Type = - if refs.isAlwaysEmpty then parent - else parent match - case parent @ CapturingType(parent1, refs1) if boxed || !parent.isBoxed => - apply(parent1, refs ++ refs1, boxed) - case _ => - AnnotatedType(parent, CaptureAnnotation(refs, boxed)(defn.RetainsAnnot)) - - /** An extractor that succeeds only during CheckCapturingPhase. Boxing statis is - * returned separately by CaptureOps.isBoxed. - */ - def unapply(tp: AnnotatedType)(using Context): Option[(Type, CaptureSet)] = - if ctx.phase == Phases.checkCapturesPhase - && tp.annot.symbol == defn.RetainsAnnot - && !ctx.mode.is(Mode.IgnoreCaptures) - then - EventuallyCapturingType.unapply(tp) - else None - -end CapturingType - -/** An extractor for types that will be capturing types at phase CheckCaptures. Also - * included are types that indicate captures on enclosing call-by-name parameters - * before phase ElimByName. 
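The CapturingType.apply smart constructor above normalises as it builds: an empty capture set yields the parent unchanged, and a capturing type wrapped directly around another one is fused into a single node when the boxing status allows it. The same shape, sketched on a toy type representation with hypothetical Tpe/Named/Capturing.make names rather than the compiler's Types (not part of this patch):

// Toy types: either a plain named type or a capturing wrapper around one.
sealed trait Tpe
final case class Named(name: String) extends Tpe
final case class Capturing(parent: Tpe, refs: Set[String], boxed: Boolean) extends Tpe

object Capturing:
  // Smart constructor: drop empty sets, fuse directly nested wrappers.
  def make(parent: Tpe, refs: Set[String], boxed: Boolean = false): Tpe =
    if refs.isEmpty then parent
    else parent match
      case Capturing(p1, refs1, innerBoxed) if boxed || !innerBoxed =>
        make(p1, refs ++ refs1, boxed)
      case _ =>
        Capturing(parent, refs, boxed)

@main def smartConstructorDemo(): Unit =
  val base = Named("A")
  println(Capturing.make(base, Set.empty))     // Named(A): the empty set is dropped
  val inner = Capturing.make(base, Set("x"))
  println(Capturing.make(inner, Set("y")))     // one fused node capturing both x and y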
- */ -object EventuallyCapturingType: - - def unapply(tp: AnnotatedType)(using Context): Option[(Type, CaptureSet)] = - val sym = tp.annot.symbol - if sym == defn.RetainsAnnot || sym == defn.RetainsByNameAnnot then - tp.annot match - case ann: CaptureAnnotation => - Some((tp.parent, ann.refs)) - case ann => - try Some((tp.parent, ann.tree.toCaptureSet)) - catch case ex: IllegalCaptureRef => None - else None - -end EventuallyCapturingType - - diff --git a/tests/pos-with-compiler-cc/dotc/cc/CheckCaptures.scala b/tests/pos-with-compiler-cc/dotc/cc/CheckCaptures.scala deleted file mode 100644 index ce3f788202b6..000000000000 --- a/tests/pos-with-compiler-cc/dotc/cc/CheckCaptures.scala +++ /dev/null @@ -1,1039 +0,0 @@ -package dotty.tools -package dotc -package cc - -import core.* -import Phases.*, DenotTransformers.*, SymDenotations.* -import Contexts.*, Names.*, Flags.*, Symbols.*, Decorators.* -import Types.*, StdNames.*, Denotations.* -import config.Printers.{capt, recheckr} -import config.{Config, Feature} -import ast.{tpd, untpd, Trees} -import Trees.* -import typer.RefChecks.{checkAllOverrides, checkSelfAgainstParents} -import typer.Checking.{checkBounds, checkAppliedTypesIn} -import util.{SimpleIdentitySet, EqHashMap, SrcPos} -import transform.SymUtils.* -import transform.{Recheck, PreRecheck} -import Recheck.* -import scala.collection.mutable -import CaptureSet.{withCaptureSetsExplained, IdempotentCaptRefMap} -import StdNames.nme -import NameKinds.DefaultGetterName -import reporting.trace -import language.experimental.pureFunctions - -/** The capture checker */ -object CheckCaptures: - import ast.tpd.* - - class Pre extends PreRecheck, SymTransformer: - - override def isEnabled(using Context) = true - - /** Reset `private` flags of parameter accessors so that we can refine them - * in Setup if they have non-empty capture sets. Special handling of some - * symbols defined for case classes. - */ - def transformSym(sym: SymDenotation)(using Context): SymDenotation = - if sym.isAllOf(PrivateParamAccessor) && !sym.hasAnnotation(defn.ConstructorOnlyAnnot) then - sym.copySymDenotation(initFlags = sym.flags &~ Private | Recheck.ResetPrivate) - else if Synthetics.needsTransform(sym) then - Synthetics.transformToCC(sym) - else - sym - end Pre - - /** A class describing environments. - * @param owner the current owner - * @param nestedInOwner true if the environment is a temporary one nested in the owner's environment, - * and does not have a different actual owner symbol (this happens when doing box adaptation). - * @param captured the caputure set containing all references to tracked free variables outside of boxes - * @param isBoxed true if the environment is inside a box (in which case references are not counted) - * @param outer0 the next enclosing environment - */ - case class Env( - owner: Symbol, - nestedInOwner: Boolean, - captured: CaptureSet, - isBoxed: Boolean, - outer0: Env | Null - ): - def outer = outer0.nn - - def isOutermost = outer0 == null - - /** If an environment is open it tracks free references */ - def isOpen = !captured.isAlwaysEmpty && !isBoxed - end Env - - /** Similar normal substParams, but this is an approximating type map that - * maps parameters in contravariant capture sets to the empty set. - * TODO: check what happens with non-variant. 
- */ - final class SubstParamsMap(from: BindingType, to: List[Type])(using DetachedContext) - extends ApproximatingTypeMap, IdempotentCaptRefMap: - def apply(tp: Type): Type = tp match - case tp: ParamRef => - if tp.binder == from then to(tp.paramNum) else tp - case tp: NamedType => - if tp.prefix `eq` NoPrefix then tp - else tp.derivedSelect(apply(tp.prefix)) - case _: ThisType => - tp - case _ => - mapOver(tp) - - /** Check that a @retains annotation only mentions references that can be tracked. - * This check is performed at Typer. - */ - def checkWellformed(ann: Tree)(using Context): Unit = - for elem <- retainedElems(ann) do - elem.tpe match - case ref: CaptureRef => - if !ref.canBeTracked then - report.error(em"$elem cannot be tracked since it is not a parameter or local value", elem.srcPos) - case tpe => - report.error(em"$elem: $tpe is not a legal element of a capture set", elem.srcPos) - - /** If `tp` is a capturing type, check that all references it mentions have non-empty - * capture sets. Also: warn about redundant capture annotations. - * This check is performed after capture sets are computed in phase cc. - */ - def checkWellformedPost(tp: Type, pos: SrcPos)(using Context): Unit = tp match - case CapturingType(parent, refs) => - for ref <- refs.elems do - if ref.captureSetOfInfo.elems.isEmpty then - report.error(em"$ref cannot be tracked since its capture set is empty", pos) - else if parent.captureSet.accountsFor(ref) then - report.warning(em"redundant capture: $parent already accounts for $ref", pos) - case _ => - - /** Warn if `ann`, which is a tree of a @retains annotation, defines some elements that - * are already accounted for by other elements of the same annotation. - * Note: We need to perform the check on the original annotation rather than its - * capture set since the conversion to a capture set already eliminates redundant elements. - */ - def warnIfRedundantCaptureSet(ann: Tree)(using Context): Unit = - // The lists `elems(i) :: prev.reverse :: elems(0),...,elems(i-1),elems(i+1),elems(n)` - // where `n == elems.length-1`, i <- 0..n`. - // I.e. - // choices(Nil, elems) = [[elems(i), elems(0), ..., elems(i-1), elems(i+1), .... elems(n)] | i <- 0..n] - def choices(prev: List[Tree], elems: List[Tree]): List[List[Tree]] = elems match - case Nil => Nil - case elem :: elems => - List(elem :: (prev reverse_::: elems)) ++ choices(elem :: prev, elems) - for case first :: others <- choices(Nil, retainedElems(ann)) do - val firstRef = first.toCaptureRef - val remaining = CaptureSet(others.map(_.toCaptureRef)*) - if remaining.accountsFor(firstRef) then - report.warning(em"redundant capture: $remaining already accounts for $firstRef", ann.srcPos) - -class CheckCaptures extends Recheck, SymTransformer: - thisPhase => - - import ast.tpd.* - import CheckCaptures.* - - def phaseName: String = "cc" - override def isEnabled(using Context) = true - - def newRechecker()(using Context) = CaptureChecker(ctx.detach) - - override def run(using Context): Unit = - if Feature.ccEnabled then - checkOverrides.traverse(ctx.compilationUnit.tpdTree) - super.run - - override def transformSym(sym: SymDenotation)(using Context): SymDenotation = - if Synthetics.needsTransform(sym) then Synthetics.transformFromCC(sym) - else super.transformSym(sym) - - /** Check overrides again, taking capture sets into account. - * TODO: Can we avoid doing overrides checks twice? 
- * We need to do them here since only at this phase CaptureTypes are relevant - * But maybe we can then elide the check during the RefChecks phase under captureChecking? - */ - def checkOverrides = new TreeTraverser: - def traverse(t: Tree)(using Context) = - t match - case t: Template => checkAllOverrides(ctx.owner.asClass) - case _ => - traverseChildren(t) - - class CaptureChecker(ictx: DetachedContext) extends Rechecker(ictx): - import ast.tpd.* - - override def keepType(tree: Tree) = - super.keepType(tree) - || tree.isInstanceOf[Try] // type of `try` needs tp be checked for * escapes - - /** Instantiate capture set variables appearing contra-variantly to their - * upper approximation. - */ - private def interpolator(startingVariance: Int = 1)(using Context) = new TypeTraverser: - variance = startingVariance - override def traverse(t: Type) = - t match - case CapturingType(parent, refs: CaptureSet.Var) => - if variance < 0 then - capt.println(i"solving $t") - refs.solve() - traverse(parent) - case t @ RefinedType(_, nme.apply, rinfo) if defn.isFunctionOrPolyType(t) => - traverse(rinfo) - case tp: TypeVar => - case tp: TypeRef => - traverse(tp.prefix) - case _ => - traverseChildren(t) - - /** If `tpt` is an inferred type, interpolate capture set variables appearing contra- - * variantly in it. - */ - private def interpolateVarsIn(tpt: Tree)(using Context): Unit = - if tpt.isInstanceOf[InferredTypeTree] then - interpolator().traverse(tpt.knownType) - .showing(i"solved vars in ${tpt.knownType}", capt)(using null) - - /** Assert subcapturing `cs1 <: cs2` */ - def assertSub(cs1: CaptureSet, cs2: CaptureSet)(using Context) = - assert(cs1.subCaptures(cs2, frozen = false).isOK, i"$cs1 is not a subset of $cs2") - - /** Check subcapturing `{elem} <: cs`, report error on failure */ - def checkElem(elem: CaptureRef, cs: CaptureSet, pos: SrcPos)(using Context) = - val res = elem.singletonCaptureSet.subCaptures(cs, frozen = false) - if !res.isOK then - report.error(em"$elem cannot be referenced here; it is not included in the allowed capture set ${res.blocking}", pos) - - /** Check subcapturing `cs1 <: cs2`, report error on failure */ - def checkSubset(cs1: CaptureSet, cs2: CaptureSet, pos: SrcPos)(using Context) = - val res = cs1.subCaptures(cs2, frozen = false) - if !res.isOK then - def header = - if cs1.elems.size == 1 then i"reference ${cs1.elems.toList}%, % is not" - else i"references $cs1 are not all" - report.error(em"$header included in allowed capture set ${res.blocking}", pos) - - /** The current environment */ - private var curEnv: Env = Env(NoSymbol, nestedInOwner = false, CaptureSet.empty, isBoxed = false, null) - - private val myCapturedVars: util.EqHashMap[Symbol, CaptureSet] = EqHashMap() - - /** If `sym` is a class or method nested inside a term, a capture set variable representing - * the captured variables of the environment associated with `sym`. 
- */ - def capturedVars(sym: Symbol)(using Context) = - myCapturedVars.getOrElseUpdate(sym, - if sym.ownersIterator.exists(_.isTerm) then CaptureSet.Var() - else CaptureSet.empty) - - /** For all nested environments up to `limit` perform `op` */ - def forallOuterEnvsUpTo(limit: Symbol)(op: Env => Unit)(using Context): Unit = - def recur(env: Env): Unit = - if env.isOpen && env.owner != limit then - op(env) - if !env.isOutermost then - var nextEnv = env.outer - if env.owner.isConstructor then - if nextEnv.owner != limit && !nextEnv.isOutermost then - recur(nextEnv.outer) - else recur(nextEnv) - recur(curEnv) - - /** Include `sym` in the capture sets of all enclosing environments nested in the - * the environment in which `sym` is defined. - */ - def markFree(sym: Symbol, pos: SrcPos)(using Context): Unit = - if sym.exists then - val ref = sym.termRef - if ref.isTracked then - forallOuterEnvsUpTo(sym.enclosure) { env => - capt.println(i"Mark $sym with cs ${ref.captureSet} free in ${env.owner}") - checkElem(ref, env.captured, pos) - } - - /** Make sure (projected) `cs` is a subset of the capture sets of all enclosing - * environments. At each stage, only include references from `cs` that are outside - * the environment's owner - */ - def markFree(cs: CaptureSet, pos: SrcPos)(using Context): Unit = - if !cs.isAlwaysEmpty then - forallOuterEnvsUpTo(ctx.owner.topLevelClass) { env => - val included = cs.filter { - case ref: TermRef => - (env.nestedInOwner || env.owner != ref.symbol.owner) - && env.owner.isContainedIn(ref.symbol.owner) - case ref: ThisType => - (env.nestedInOwner || env.owner != ref.cls) - && env.owner.isContainedIn(ref.cls) - case _ => false - } - capt.println(i"Include call capture $included in ${env.owner}") - checkSubset(included, env.captured, pos) - } - - /** Include references captured by the called method in the current environment stack */ - def includeCallCaptures(sym: Symbol, pos: SrcPos)(using Context): Unit = - if sym.exists && curEnv.isOpen then markFree(capturedVars(sym), pos) - - override def recheckIdent(tree: Ident)(using Context): Type = - if tree.symbol.is(Method) then includeCallCaptures(tree.symbol, tree.srcPos) - else markFree(tree.symbol, tree.srcPos) - super.recheckIdent(tree) - - /** A specialized implementation of the selection rule. - * - * E |- f: Cf f { m: Cr R } - * ------------------------ - * E |- f.m: C R - * - * The implementation picks as `C` one of `{f}` or `Cr`, depending on the - * outcome of a `mightSubcapture` test. It picks `{f}` if this might subcapture Cr - * and Cr otherwise. - */ - override def recheckSelection(tree: Select, qualType: Type, name: Name, pt: Type)(using Context) = { - def disambiguate(denot: Denotation): Denotation = denot match - case MultiDenotation(denot1, denot2) => - // This case can arise when we try to merge multiple types that have different - // capture sets on some part. For instance an asSeenFrom might produce - // a bi-mapped capture set arising from a substition. Applying the same substitution - // to the same type twice will nevertheless produce different capture setsw which can - // lead to a failure in disambiguation since neither alternative is better than the - // other in a frozen constraint. An example test case is disambiguate-select.scala. - // We address the problem by disambiguating while ignoring all capture sets as a fallback. 
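The outward walk performed by forallOuterEnvsUpTo and markFree above can be pictured with a standalone sketch; the Env chain with string owners and mutable Set[String] capture sets is an assumed simplification of the checker's Env, Symbol and CaptureSet.Var.

import scala.collection.mutable

object MarkFreeSketch:
  // A chain of environments, innermost first; each owner has a mutable capture set.
  final case class Env(owner: String, captured: mutable.Set[String], outer: Option[Env])

  // Record `ref` as captured by every environment from the innermost one outwards,
  // stopping at the environment that owns the definition of `ref`.
  def markFree(ref: String, definedIn: String, innermost: Env): Unit =
    var env: Option[Env] = Some(innermost)
    while env.exists(_.owner != definedIn) do
      val e = env.get
      e.captured += ref
      env = e.outer

  @main def markFreeDemo(): Unit =
    val top    = Env("topLevel", mutable.Set.empty, None)
    val method = Env("method",   mutable.Set.empty, Some(top))
    val lambda = Env("lambda",   mutable.Set.empty, Some(method))
    markFree("x", definedIn = "topLevel", innermost = lambda)
    println(lambda.captured)  // HashSet(x)
    println(method.captured)  // HashSet(x)
    println(top.captured)     // HashSet(): `x` is local to its defining environment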
- withMode(Mode.IgnoreCaptures) { - disambiguate(denot1).meet(disambiguate(denot2), qualType) - } - case _ => denot - - val selType = recheckSelection(tree, qualType, name, disambiguate) - val selCs = selType.widen.captureSet - if selCs.isAlwaysEmpty || selType.widen.isBoxedCapturing || qualType.isBoxedCapturing then - selType - else - val qualCs = qualType.captureSet - capt.println(i"intersect $qualType, ${selType.widen}, $qualCs, $selCs in $tree") - if qualCs.mightSubcapture(selCs) - && !selCs.mightSubcapture(qualCs) - && !pt.stripCapturing.isInstanceOf[SingletonType] - then - selType.widen.stripCapturing.capturing(qualCs) - .showing(i"alternate type for select $tree: $selType --> $result, $qualCs / $selCs", capt) - else - selType - }//.showing(i"recheck sel $tree, $qualType = $result") - - /** A specialized implementation of the apply rule. - * - * E |- f: Cf (Ra -> Cr Rr) - * E |- a: Ca Ra - * ------------------------ - * E |- f a: C Rr - * - * The implementation picks as `C` one of `{f, a}` or `Cr`, depending on the - * outcome of a `mightSubcapture` test. It picks `{f, a}` if this might subcapture Cr - * and Cr otherwise. - */ - override def recheckApply(tree: Apply, pt: Type)(using Context): Type = - val meth = tree.fun.symbol - includeCallCaptures(meth, tree.srcPos) - def mapArgUsing(f: Type => Type) = - val arg :: Nil = tree.args: @unchecked - val argType0 = f(recheckStart(arg, pt)) - val argType = super.recheckFinish(argType0, arg, pt) - super.recheckFinish(argType, tree, pt) - - if meth == defn.Caps_unsafeBox then - mapArgUsing(_.forceBoxStatus(true)) - else if meth == defn.Caps_unsafeUnbox then - mapArgUsing(_.forceBoxStatus(false)) - else if meth == defn.Caps_unsafeBoxFunArg then - mapArgUsing { - case defn.FunctionOf(paramtpe :: Nil, restpe, isContectual, isErased) => - defn.FunctionOf(paramtpe.forceBoxStatus(true) :: Nil, restpe, isContectual, isErased) - } - else - super.recheckApply(tree, pt) match - case appType @ CapturingType(appType1, refs) => - tree.fun match - case Select(qual, _) - if !tree.fun.symbol.isConstructor - && !qual.tpe.isBoxedCapturing - && !tree.args.exists(_.tpe.isBoxedCapturing) - && qual.tpe.captureSet.mightSubcapture(refs) - && tree.args.forall(_.tpe.captureSet.mightSubcapture(refs)) - => - val callCaptures = tree.args.foldLeft(qual.tpe.captureSet)((cs, arg) => - cs ++ arg.tpe.captureSet) - appType.derivedCapturingType(appType1, callCaptures) - .showing(i"narrow $tree: $appType, refs = $refs, qual = ${qual.tpe.captureSet} --> $result", capt) - case _ => appType - case appType => appType - end recheckApply - - /** Handle an application of method `sym` with type `mt` to arguments of types `argTypes`. - * This means: - * - Instantiate result type with actual arguments - * - If call is to a constructor: - * - remember types of arguments corresponding to tracked - * parameters in refinements. - * - add capture set of instantiated class to capture set of result type. - */ - override def instantiate(mt: MethodType, argTypes: List[Type], sym: Symbol)(using Context): Type = - val ownType = - if mt.isResultDependent then SubstParamsMap(mt, argTypes)(mt.resType) - else mt.resType - - if sym.isConstructor then - val cls = sym.owner.asClass - - /** First half of result pair: - * Refine the type of a constructor call `new C(t_1, ..., t_n)` - * to C{val x_1: T_1, ..., x_m: T_m} where x_1, ..., x_m are the tracked - * parameters of C and T_1, ..., T_m are the types of the corresponding arguments. 
- * - * Second half: union of all capture sets of arguments to tracked parameters. - */ - def addParamArgRefinements(core: Type, initCs: CaptureSet): (Type, CaptureSet) = - mt.paramNames.lazyZip(argTypes).foldLeft((core, initCs)) { (acc, refine) => - val (core, allCaptures) = acc - val (getterName, argType) = refine - val getter = cls.info.member(getterName).suchThat(_.is(ParamAccessor)).symbol - if getter.termRef.isTracked && !getter.is(Private) - then (RefinedType(core, getterName, argType), allCaptures ++ argType.captureSet) - else (core, allCaptures) - } - - def augmentConstructorType(core: Type, initCs: CaptureSet): Type = core match - case core: MethodType => - // more parameters to follow; augment result type - core.derivedLambdaType(resType = augmentConstructorType(core.resType, initCs)) - case CapturingType(parent, refs) => - // can happen for curried constructors if instantiate of a previous step - // added capture set to result. - augmentConstructorType(parent, initCs ++ refs) - case _ => - val (refined, cs) = addParamArgRefinements(core, initCs) - refined.capturing(cs) - - augmentConstructorType(ownType, CaptureSet.empty) match - case augmented: MethodType => - augmented - case augmented => - // add capture sets of class and constructor to final result of constructor call - augmented.capturing(capturedVars(cls) ++ capturedVars(sym)) - .showing(i"constr type $mt with $argTypes%, % in $cls = $result", capt) - else ownType - end instantiate - - override def recheckClosure(tree: Closure, pt: Type)(using Context): Type = - val cs = capturedVars(tree.meth.symbol) - capt.println(i"typing closure $tree with cvs $cs") - super.recheckClosure(tree, pt).capturing(cs) - .showing(i"rechecked $tree / $pt = $result", capt) - - /** Additionally to normal processing, update types of closures if the expected type - * is a function with only pure parameters. In that case, make the anonymous function - * also have the same parameters as the prototype. - * TODO: Develop a clearer rationale for this. - * TODO: Can we generalize this to arbitrary parameters? - * Currently some tests fail if we do this. (e.g. neg.../stackAlloc.scala, others) - */ - override def recheckBlock(block: Block, pt: Type)(using Context): Type = - block match - case closureDef(mdef) => - pt.dealias match - case defn.FunctionOf(ptformals, _, _, _) - if ptformals.nonEmpty && ptformals.forall(_.captureSet.isAlwaysEmpty) => - // Redo setup of the anonymous function so that formal parameters don't - // get capture sets. This is important to avoid false widenings to `*` - // when taking the base type of the actual closures's dependent function - // type so that it conforms to the expected non-dependent function type. - // See withLogFile.scala for a test case. - val meth = mdef.symbol - // First, undo the previous setup which installed a completer for `meth`. 
- atPhase(preRecheckPhase.prev)(meth.denot.copySymDenotation()) - .installAfter(preRecheckPhase) - - // Next, update all parameter symbols to match expected formals - meth.paramSymss.head.lazyZip(ptformals).foreach { (psym, pformal) => - psym.updateInfoBetween(preRecheckPhase, thisPhase, pformal.mapExprType) - } - // Next, update types of parameter ValDefs - mdef.paramss.head.lazyZip(ptformals).foreach { (param, pformal) => - val ValDef(_, tpt, _) = param: @unchecked - tpt.rememberTypeAlways(pformal) - } - // Next, install a new completer reflecting the new parameters for the anonymous method - val mt = meth.info.asInstanceOf[MethodType] - val completer = new LazyType: - def complete(denot: SymDenotation)(using Context) = - denot.info = mt.companion(ptformals, mdef.tpt.knownType) - .showing(i"simplify info of $meth to $result", capt) - recheckDef(mdef, meth) - meth.updateInfoBetween(preRecheckPhase, thisPhase, completer) - case _ => - case _ => - super.recheckBlock(block, pt) - - override def recheckValDef(tree: ValDef, sym: Symbol)(using Context): Unit = - try - if !sym.is(Module) then // Modules are checked by checking the module class - super.recheckValDef(tree, sym) - finally - if !sym.is(Param) then - // Parameters with inferred types belong to anonymous methods. We need to wait - // for more info from the context, so we cannot interpolate. Note that we cannot - // expect to have all necessary info available at the point where the anonymous - // function is compiled since we do not propagate expected types into blocks. - interpolateVarsIn(tree.tpt) - - override def recheckDefDef(tree: DefDef, sym: Symbol)(using Context): Unit = - if !Synthetics.isExcluded(sym) then - val saved = curEnv - val localSet = capturedVars(sym) - if !localSet.isAlwaysEmpty then curEnv = Env(sym, nestedInOwner = false, localSet, isBoxed = false, curEnv) - try super.recheckDefDef(tree, sym) - finally - interpolateVarsIn(tree.tpt) - curEnv = saved - - /** Class-specific capture set relations: - * 1. The capture set of a class includes the capture sets of its parents. - * 2. The capture set of the self type of a class includes the capture set of the class. - * 3. The capture set of the self type of a class includes the capture set of every class parameter, - * unless the parameter is marked @constructorOnly. - */ - override def recheckClassDef(tree: TypeDef, impl: Template, cls: ClassSymbol)(using Context): Type = - val saved = curEnv - val localSet = capturedVars(cls) - for parent <- impl.parents do // (1) - checkSubset(capturedVars(parent.tpe.classSymbol), localSet, parent.srcPos) - if !localSet.isAlwaysEmpty then curEnv = Env(cls, nestedInOwner = false, localSet, isBoxed = false, curEnv) - try - val thisSet = cls.classInfo.selfType.captureSet.withDescription(i"of the self type of $cls") - checkSubset(localSet, thisSet, tree.srcPos) // (2) - for param <- cls.paramGetters do - if !param.hasAnnotation(defn.ConstructorOnlyAnnot) then - checkSubset(param.termRef.captureSet, thisSet, param.srcPos) // (3) - for pureBase <- cls.pureBaseClass do - checkSubset(thisSet, - CaptureSet.empty.withDescription(i"of pure base class $pureBase"), - tree.srcPos) - super.recheckClassDef(tree, impl, cls) - finally - curEnv = saved - - /** If type is of the form `T @requiresCapability(x)`, - * mark `x` as free in the current environment. This is used to require the - * correct `CanThrow` capability when encountering a `throw`. 
- */ - override def recheckTyped(tree: Typed)(using Context): Type = - tree.tpt.tpe match - case AnnotatedType(_, annot) if annot.symbol == defn.RequiresCapabilityAnnot => - annot.tree match - case Apply(_, cap :: Nil) => - markFree(cap.symbol, tree.srcPos) - case _ => - case _ => - super.recheckTyped(tree) - - /* Currently not needed, since capture checking takes place after ElimByName. - * Keep around in case we need to get back to it - def recheckByNameArg(tree: Tree, pt: Type)(using Context): Type = - val closureDef(mdef) = tree: @unchecked - val arg = mdef.rhs - val localSet = CaptureSet.Var() - curEnv = Env(mdef.symbol, localSet, isBoxed = false, curEnv) - val result = - try - inContext(ctx.withOwner(mdef.symbol)) { - recheckStart(arg, pt).capturing(localSet) - } - finally curEnv = curEnv.outer - recheckFinish(result, arg, pt) - */ - - /** If expected type `pt` is boxed and the tree is a function or a reference, - * don't propagate free variables. - * Otherwise, if the result type is boxed, simulate an unboxing by - * adding all references in the boxed capture set to the current environment. - */ - override def recheck(tree: Tree, pt: Type = WildcardType)(using Context): Type = - if tree.isTerm && pt.isBoxedCapturing then - val saved = curEnv - - tree match - case _: RefTree | closureDef(_) => - curEnv = Env(curEnv.owner, nestedInOwner = false, CaptureSet.Var(), isBoxed = true, curEnv) - case _ => - - try super.recheck(tree, pt) - finally curEnv = saved - else - val res = super.recheck(tree, pt) - if tree.isTerm then markFree(res.boxedCaptureSet, tree.srcPos) - res - - /** If `tree` is a reference or an application where the result type refers - * to an enclosing class or method parameter of the reference, check that the result type - * does not capture the universal capability. This is justified since the - * result type would have to be implicitly unboxed. - * TODO: Can we find a cleaner way to achieve this? Logically, this should be part - * of simulated boxing and unboxing. - */ - override def recheckFinish(tpe: Type, tree: Tree, pt: Type)(using Context): Type = - val typeToCheck = tree match - case _: Ident | _: Select | _: Apply | _: TypeApply if tree.symbol.unboxesResult => - tpe - case _: Try => - tpe - case _ => - NoType - def checkNotUniversal(tp: Type): Unit = tp.widenDealias match - case wtp @ CapturingType(parent, refs) => - refs.disallowRootCapability { () => - val kind = if tree.isInstanceOf[ValDef] then "mutable variable" else "expression" - report.error( - em"""The $kind's type $wtp is not allowed to capture the root capability `*`. 
- |This usually means that a capability persists longer than its allowed lifetime.""", - tree.srcPos) - } - checkNotUniversal(parent) - case _ => - checkNotUniversal(typeToCheck) - super.recheckFinish(tpe, tree, pt) - - /** Massage `actual` and `expected` types using the methods below before checking conformance */ - override def checkConformsExpr(actual: Type, expected: Type, tree: Tree)(using Context): Unit = - val expected1 = alignDependentFunction(addOuterRefs(expected, actual), actual.stripCapturing) - val actual1 = adaptBoxed(actual, expected1, tree.srcPos) - //println(i"check conforms $actual1 <<< $expected1") - super.checkConformsExpr(actual1, expected1, tree) - - private def toDepFun(args: List[Type], resultType: Type, isContextual: Boolean, isErased: Boolean)(using Context): Type = - MethodType.companion(isContextual = isContextual, isErased = isErased)(args, resultType) - .toFunctionType(isJava = false, alwaysDependent = true) - - /** Turn `expected` into a dependent function when `actual` is dependent. */ - private def alignDependentFunction(expected: Type, actual: Type)(using Context): Type = - def recur(expected: Type): Type = expected.dealias match - case expected @ CapturingType(eparent, refs) => - CapturingType(recur(eparent), refs, boxed = expected.isBoxed) - case expected @ defn.FunctionOf(args, resultType, isContextual, isErased) - if defn.isNonRefinedFunction(expected) && defn.isFunctionType(actual) && !defn.isNonRefinedFunction(actual) => - val expected1 = toDepFun(args, resultType, isContextual, isErased) - expected1 - case _ => - expected - recur(expected) - - /** For the expected type, implement the rule outlined in #14390: - * - when checking an expression `a: Ca Ta` against an expected type `Ce Te`, - * - where the capture set `Ce` contains Cls.this, - * - and where and all method definitions enclosing `a` inside class `Cls` - * have only pure parameters, - * - add to `Ce` all references to variables or this-references in `Ca` - * that are outside `Cls`. These are all accessed through `Cls.this`, - * so we can assume they are already accounted for by `Ce` and adding - * them explicitly to `Ce` changes nothing. 
- */ - private def addOuterRefs(expected: Type, actual: Type)(using Context): Type = - def isPure(info: Type): Boolean = info match - case info: PolyType => isPure(info.resType) - case info: MethodType => info.paramInfos.forall(_.captureSet.isAlwaysEmpty) && isPure(info.resType) - case _ => true - def isPureContext(owner: Symbol, limit: Symbol): Boolean = - if owner == limit then true - else if !owner.exists then false - else isPure(owner.info) && isPureContext(owner.owner, limit) - def augment(erefs: CaptureSet, arefs: CaptureSet): CaptureSet = - (erefs /: erefs.elems) { (erefs, eref) => - eref match - case eref: ThisType if isPureContext(ctx.owner, eref.cls) => - erefs ++ arefs.filter { - case aref: TermRef => eref.cls.isProperlyContainedIn(aref.symbol.owner) - case aref: ThisType => eref.cls.isProperlyContainedIn(aref.cls) - case _ => false - } - case _ => - erefs - } - expected match - case CapturingType(ecore, erefs) => - val erefs1 = augment(erefs, actual.captureSet) - if erefs1 ne erefs then - capt.println(i"augmented $expected from ${actual.captureSet} --> $erefs1") - expected.derivedCapturingType(ecore, erefs1) - case _ => - expected - - /** Adapt `actual` type to `expected` type by inserting boxing and unboxing conversions */ - def adaptBoxed(actual: Type, expected: Type, pos: SrcPos)(using Context): Type = - - /** Adapt function type `actual`, which is `aargs -> ares` (possibly with dependencies) - * to `expected` type. - * It returns the adapted type along with the additionally captured variable - * during adaptation. - * @param reconstruct how to rebuild the adapted function type - */ - def adaptFun(actual: Type, aargs: List[Type], ares: Type, expected: Type, - covariant: Boolean, boxed: Boolean, - reconstruct: (List[Type], Type) => Type): (Type, CaptureSet) = - val saved = curEnv - curEnv = Env(curEnv.owner, nestedInOwner = true, CaptureSet.Var(), isBoxed = false, if boxed then null else curEnv) - - try - val (eargs, eres) = expected.dealias.stripCapturing match - case defn.FunctionOf(eargs, eres, _, _) => (eargs, eres) - case expected: MethodType => (expected.paramInfos, expected.resType) - case expected @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionType(expected) => (rinfo.paramInfos, rinfo.resType) - case _ => (aargs.map(_ => WildcardType), WildcardType) - val aargs1 = aargs.zipWithConserve(eargs) { (aarg, earg) => adapt(aarg, earg, !covariant) } - val ares1 = adapt(ares, eres, covariant) - - val resTp = - if (ares1 eq ares) && (aargs1 eq aargs) then actual - else reconstruct(aargs1, ares1) - - (resTp, curEnv.captured) - finally - curEnv = saved - - /** Adapt type function type `actual` to the expected type. 
- * @see [[adaptFun]] - */ - def adaptTypeFun( - actual: Type, ares: Type, expected: Type, - covariant: Boolean, boxed: Boolean, - reconstruct: Type => Type): (Type, CaptureSet) = - val saved = curEnv - curEnv = Env(curEnv.owner, nestedInOwner = true, CaptureSet.Var(), isBoxed = false, if boxed then null else curEnv) - - try - val eres = expected.dealias.stripCapturing match - case RefinedType(_, _, rinfo: PolyType) => rinfo.resType - case expected: PolyType => expected.resType - case _ => WildcardType - - val ares1 = adapt(ares, eres, covariant) - - val resTp = - if ares1 eq ares then actual - else reconstruct(ares1) - - (resTp, curEnv.captured) - finally - curEnv = saved - end adaptTypeFun - - def adaptInfo(actual: Type, expected: Type, covariant: Boolean): String = - val arrow = if covariant then "~~>" else "<~~" - i"adapting $actual $arrow $expected" - - /** Destruct a capturing type `tp` to a tuple (cs, tp0, boxed), - * where `tp0` is not a capturing type. - * - * If `tp` is a nested capturing type, the return tuple always represents - * the innermost capturing type. The outer capture annotations can be - * reconstructed with the returned function. - */ - def destructCapturingType(tp: Type, reconstruct: Type -> Context ?-> Type = (x: Type) => x) // !cc! need monomorphic default argument - : (Type, CaptureSet, Boolean, Type -> Context ?-> Type) = - tp.dealias match - case tp @ CapturingType(parent, cs) => - if parent.dealias.isCapturingType then - destructCapturingType(parent, res => reconstruct(tp.derivedCapturingType(res, cs))) - else - (parent, cs, tp.isBoxed, reconstruct) - case actual => - (actual, CaptureSet(), false, reconstruct) - - def adapt(actual: Type, expected: Type, covariant: Boolean): Type = trace(adaptInfo(actual, expected, covariant), recheckr, show = true) { - if expected.isInstanceOf[WildcardType] then actual - else - val (parent, cs, actualIsBoxed, recon: (Type -> Context ?-> Type)) = destructCapturingType(actual) - - val needsAdaptation = actualIsBoxed != expected.isBoxedCapturing - val insertBox = needsAdaptation && covariant != actualIsBoxed - - val (parent1, cs1) = parent match { - case actual @ AppliedType(tycon, args) if defn.isNonRefinedFunction(actual) => - val (parent1, leaked) = adaptFun(parent, args.init, args.last, expected, covariant, insertBox, - (aargs1, ares1) => actual.derivedAppliedType(tycon, aargs1 :+ ares1)) - (parent1, leaked ++ cs) - case actual @ RefinedType(_, _, rinfo: MethodType) if defn.isFunctionType(actual) => - // TODO Find a way to combine handling of generic and dependent function types (here and elsewhere) - val (parent1, leaked) = adaptFun(parent, rinfo.paramInfos, rinfo.resType, expected, covariant, insertBox, - (aargs1, ares1) => - rinfo.derivedLambdaType(paramInfos = aargs1, resType = ares1) - .toFunctionType(isJava = false, alwaysDependent = true)) - (parent1, leaked ++ cs) - case actual: MethodType => - val (parent1, leaked) = adaptFun(parent, actual.paramInfos, actual.resType, expected, covariant, insertBox, - (aargs1, ares1) => - actual.derivedLambdaType(paramInfos = aargs1, resType = ares1)) - (parent1, leaked ++ cs) - case actual @ RefinedType(p, nme, rinfo: PolyType) if defn.isFunctionOrPolyType(actual) => - val (parent1, leaked) = adaptTypeFun(parent, rinfo.resType, expected, covariant, insertBox, - ares1 => - val rinfo1 = rinfo.derivedLambdaType(rinfo.paramNames, rinfo.paramInfos, ares1) - val actual1 = actual.derivedRefinedType(p, nme, rinfo1) - actual1 - ) - (parent1, leaked ++ cs) - case _ => - (parent, cs) - } - - 
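The two booleans computed above, needsAdaptation and insertBox, drive the box/unbox branch that follows; the sketch below tabulates that decision in isolation, with BoxAdaptation being a hypothetical enum introduced only for illustration.

object BoxAdaptationSketch:
  enum BoxAdaptation:
    case NoChange, InsertBox, RemoveBox

  // Adaptation is needed when the boxing status of actual and expected differ;
  // whether a box is inserted or removed depends on the variance of the position.
  def decide(actualIsBoxed: Boolean, expectedIsBoxed: Boolean, covariant: Boolean): BoxAdaptation =
    val needsAdaptation = actualIsBoxed != expectedIsBoxed
    val insertBox = needsAdaptation && covariant != actualIsBoxed
    if !needsAdaptation then BoxAdaptation.NoChange
    else if insertBox then BoxAdaptation.InsertBox
    else BoxAdaptation.RemoveBox

  @main def boxDemo(): Unit =
    for
      actualIsBoxed   <- List(false, true)
      expectedIsBoxed <- List(false, true)
      covariant       <- List(true, false)
    do println(s"actual boxed: $actualIsBoxed, expected boxed: $expectedIsBoxed, " +
         s"covariant: $covariant -> ${decide(actualIsBoxed, expectedIsBoxed, covariant)}")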
if needsAdaptation then - val criticalSet = // the set which is not allowed to have `*` - if covariant then cs1 // can't box with `*` - else expected.captureSet // can't unbox with `*` - if criticalSet.isUniversal && expected.isValueType then - // We can't box/unbox the universal capability. Leave `actual` as it is - // so we get an error in checkConforms. This tends to give better error - // messages than disallowing the root capability in `criticalSet`. - if ctx.settings.YccDebug.value then - println(i"cannot box/unbox $actual vs $expected") - actual - else - // Disallow future addition of `*` to `criticalSet`. - criticalSet.disallowRootCapability { () => - report.error( - em"""$actual cannot be box-converted to $expected - |since one of their capture sets contains the root capability `*`""", - pos) - } - if !insertBox then // unboxing - markFree(criticalSet, pos) - recon(CapturingType(parent1, cs1, !actualIsBoxed)) - else - recon(CapturingType(parent1, cs1, actualIsBoxed)) - } - - var actualw = actual.widenDealias - actual match - case ref: CaptureRef if ref.isTracked => - actualw match - case CapturingType(p, refs) => - actualw = actualw.derivedCapturingType(p, ref.singletonCaptureSet) - // given `a: C T`, improve `C T` to `{a} T` - case _ => - case _ => - val adapted = adapt(actualw, expected, covariant = true) - if adapted ne actualw then - capt.println(i"adapt boxed $actual vs $expected ===> $adapted") - adapted - else actual - end adaptBoxed - - override def checkUnit(unit: CompilationUnit)(using Context): Unit = - Setup(preRecheckPhase, thisPhase, recheckDef) - .traverse(ctx.compilationUnit.tpdTree) - //println(i"SETUP:\n${Recheck.addRecheckedTypes.transform(ctx.compilationUnit.tpdTree)}") - withCaptureSetsExplained { - super.checkUnit(unit) - checkSelfTypes(unit.tpdTree) - postCheck(unit.tpdTree) - if ctx.settings.YccDebug.value then - show(unit.tpdTree) // this does not print tree, but makes its variables visible for dependency printing - } - - /** Check that self types of subclasses conform to self types of super classes. - * (See comment below how this is achieved). The check assumes that classes - * without an explicit self type have the universal capture set `{*}` on the - * self type. If a class without explicit self type is not `effectivelyFinal` - * it is checked that the inferred self type is universal, in order to assure - * that joint and separate compilation give the same result. - */ - def checkSelfTypes(unit: tpd.Tree)(using Context): Unit = - val parentTrees = mutable.HashMap[Symbol, List[Tree]]() - unit.foreachSubTree { - case cdef @ TypeDef(_, impl: Template) => parentTrees(cdef.symbol) = impl.parents - case _ => - } - // Perform self type checking. The problem here is that `checkParents` compares a - // self type of a subclass with the result of an asSeenFrom of the self type of the - // superclass. That's no good. We need to constrain the original superclass self type - // capture set, not the set mapped by asSeenFrom. - // - // Instead, we proceed from parent classes to child classes. For every class - // we first check its parents, and then interpolate the self type to an - // upper approximation that satisfies all constraints on its capture set. - // That means all capture sets of parent self types are constants, so mapping - // them with asSeenFrom is OK. 
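The parents-before-children processing order described in the comment above can be illustrated with a small sketch; class names are plain strings and the `parents` map is an assumed stand-in for the `parentTrees` map built from the compilation unit.

object ParentsFirstSketch:
  // In each round the "roots" are the classes none of whose recorded parents are
  // still pending, so every class is handled only after all of its parents.
  def parentsFirst(parents: Map[String, List[String]]): List[String] =
    var pending = parents
    val order = List.newBuilder[String]
    while pending.nonEmpty do
      val roots = pending.keys.filter(cls => !pending(cls).exists(pending.contains)).toList
      assert(roots.nonEmpty, "cyclic parent graph")
      order ++= roots
      pending --= roots
    order.result()

  @main def parentsFirstDemo(): Unit =
    val hierarchy = Map("C" -> List("B"), "B" -> List("A"), "A" -> Nil)
    println(parentsFirst(hierarchy))  // List(A, B, C)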
- while parentTrees.nonEmpty do - val roots = parentTrees.keysIterator.filter { - cls => !parentTrees(cls).exists(ptree => parentTrees.contains(ptree.tpe.classSymbol)) - } - assert(roots.nonEmpty) - for case root: ClassSymbol <- roots do - checkSelfAgainstParents(root, root.baseClasses) - val selfType = root.asClass.classInfo.selfType - interpolator(startingVariance = -1).traverse(selfType) - if !root.isEffectivelySealed then - def matchesExplicitRefsInBaseClass(refs: CaptureSet, cls: ClassSymbol): Boolean = - cls.baseClasses.tail.exists { psym => - val selfType = psym.asClass.givenSelfType - selfType.exists && selfType.captureSet.elems == refs.elems - } - selfType match - case CapturingType(_, refs: CaptureSet.Var) - if !refs.isUniversal && !matchesExplicitRefsInBaseClass(refs, root) => - // Forbid inferred self types unless they are already implied by an explicit - // self type in a parent. - report.error( - em"""$root needs an explicitly declared self type since its - |inferred self type $selfType - |is not visible in other compilation units that define subclasses.""", - root.srcPos) - case _ => - parentTrees -= root - capt.println(i"checked $root with $selfType") - end checkSelfTypes - - /** Heal ill-formed capture sets in the type parameter. - * - * We can push parameter refs into a capture set in type parameters - * that this type parameter can't see. - * For example, when capture checking the following expression: - * - * def usingLogFile[T](op: (f: {*} File) => T): T = ... - * - * usingLogFile[box ?1 () -> Unit] { (f: {*} File) => () => { f.write(0) } } - * - * We may propagate `f` into ?1, making ?1 ill-formed. - * This also causes soundness issues, since `f` in ?1 should be widened to `*`, - * giving rise to an error that `*` cannot be included in a boxed capture set. - * - * To solve this, we still allow ?1 to capture parameter refs like `f`, but - * compensate this by pushing the widened capture set of `f` into ?1. - * This solves the soundness issue caused by the ill-formness of ?1. - */ - private def healTypeParam(tree: Tree)(using Context): Unit = - val checker = new TypeTraverser: - private def isAllowed(ref: CaptureRef): Boolean = ref match - case ref: TermParamRef => allowed.contains(ref) - case _ => true - - // Widen the given term parameter refs x₁ : C₁ S₁ , ⋯ , xₙ : Cₙ Sₙ to their capture sets C₁ , ⋯ , Cₙ. - // - // If in these capture sets there are any capture references that are term parameter references we should avoid, - // we will widen them recursively. 
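A minimal sketch of the recursive widening just described, assuming a toy `infos` map from parameters to the capture sets of their declared types (a stand-in for `captureSetOfInfo`) and plain string references instead of TermParamRefs.

object WidenParamRefsSketch:
  // Replace disallowed parameter references by the capture sets of their declared
  // types, repeating the step until only allowed references remain.
  def widen(refs: Set[String], allowed: Set[String], infos: Map[String, Set[String]]): Set[String] =
    @annotation.tailrec
    def loop(todo: List[String], acc: Set[String]): Set[String] = todo match
      case Nil => acc
      case ref :: rest =>
        if allowed(ref) then loop(rest, acc + ref)
        else loop(infos.getOrElse(ref, Set.empty).toList ::: rest, acc)
    loop(refs.toList, Set.empty)

  @main def widenDemo(): Unit =
    val infos = Map("f" -> Set("io"), "g" -> Set("f"))
    // `g` is disallowed and widens to {f}; `f` is also disallowed and widens to {io}.
    println(widen(Set("g", "x"), allowed = Set("x", "io"), infos = infos))  // Set(io, x)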
- private def widenParamRefs(refs: List[TermParamRef]): List[CaptureSet] = - @scala.annotation.tailrec - def recur(todos: List[TermParamRef], acc: List[CaptureSet]): List[CaptureSet] = - todos match - case Nil => acc - case ref :: rem => - val cs = ref.captureSetOfInfo - val nextAcc = cs.filter(isAllowed(_)) :: acc - val nextRem: List[TermParamRef] = (cs.elems.toList.filter(!isAllowed(_)) ++ rem).asInstanceOf - recur(nextRem, nextAcc) - recur(refs, Nil) - - private def healCaptureSet(cs: CaptureSet): Unit = - val toInclude = widenParamRefs(cs.elems.toList.filter(!isAllowed(_)).asInstanceOf) - toInclude.foreach(checkSubset(_, cs, tree.srcPos)) - - private var allowed: SimpleIdentitySet[TermParamRef] = SimpleIdentitySet.empty - - def traverse(tp: Type) = - tp match - case CapturingType(parent, refs) => - healCaptureSet(refs) - traverse(parent) - case tp @ RefinedType(parent, rname, rinfo: MethodType) if defn.isFunctionType(tp) => - traverse(rinfo) - case tp: TermLambda => - val saved = allowed - try - tp.paramRefs.foreach(allowed += _) - traverseChildren(tp) - finally allowed = saved - case _ => - traverseChildren(tp) - - if tree.isInstanceOf[InferredTypeTree] then - checker.traverse(tree.knownType) - end healTypeParam - - /** Perform the following kinds of checks - * - Check all explicitly written capturing types for well-formedness using `checkWellFormedPost`. - * - Check that externally visible `val`s or `def`s have empty capture sets. If not, - * suggest an explicit type. This is so that separate compilation (where external - * symbols have empty capture sets) gives the same results as joint compilation. - * - Check that arguments of TypeApplys and AppliedTypes conform to their bounds. - * - Heal ill-formed capture sets of type parameters. See `healTypeParam`. - */ - def postCheck(unit: tpd.Tree)(using Context): Unit = - unit.foreachSubTree { - case _: InferredTypeTree => - case tree: TypeTree if !tree.span.isZeroExtent => - tree.knownType.foreachPart { tp => - checkWellformedPost(tp, tree.srcPos) - tp match - case AnnotatedType(_, annot) if annot.symbol == defn.RetainsAnnot => - warnIfRedundantCaptureSet(annot.tree) - case _ => - } - case t: ValOrDefDef - if t.tpt.isInstanceOf[InferredTypeTree] && !Synthetics.isExcluded(t.symbol) => - val sym = t.symbol - val isLocal = - sym.owner.ownersIterator.exists(_.isTerm) - || sym.accessBoundary(defn.RootClass).isContainedIn(sym.topLevelClass) - def canUseInferred = // If canUseInferred is false, all capturing types in the type of `sym` need to be given explicitly - sym.is(Private) // private symbols can always have inferred types - || sym.name.is(DefaultGetterName) // default getters are exempted since otherwise it would be - // too annoying. This is a hole since a defualt getter's result type - // might leak into a type variable. 
- || // non-local symbols cannot have inferred types since external capture types are not inferred - isLocal // local symbols still need explicit types if - && !sym.owner.is(Trait) // they are defined in a trait, since we do OverridingPairs checking before capture inference - def isNotPureThis(ref: CaptureRef) = ref match { - case ref: ThisType => !ref.cls.isPureClass - case _ => true - } - if !canUseInferred then - val inferred = t.tpt.knownType - def checkPure(tp: Type) = tp match - case CapturingType(_, refs) - if !refs.elems.filter(isNotPureThis).isEmpty => - val resultStr = if t.isInstanceOf[DefDef] then " result" else "" - report.error( - em"""Non-local $sym cannot have an inferred$resultStr type - |$inferred - |with non-empty capture set $refs. - |The type needs to be declared explicitly.""".withoutDisambiguation(), - t.srcPos) - case _ => - inferred.foreachPart(checkPure, StopAt.Static) - case t @ TypeApply(fun, args) => - fun.knownType.widen match - case tl: PolyType => - val normArgs = args.lazyZip(tl.paramInfos).map { (arg, bounds) => - arg.withType(arg.knownType.forceBoxStatus( - bounds.hi.isBoxedCapturing | bounds.lo.isBoxedCapturing)) - } - checkBounds(normArgs, tl) - case _ => - - args.foreach(healTypeParam(_)) - case _ => - } - if !ctx.reporter.errorsReported then - // We dont report errors here if previous errors were reported, because other - // errors often result in bad applied types, but flagging these bad types gives - // often worse error messages than the original errors. - val checkApplied = new TreeTraverser: - def traverse(t: Tree)(using Context) = t match - case tree: InferredTypeTree => - case tree: New => - case tree: TypeTree => checkAppliedTypesIn(tree.withKnownType) - case _ => traverseChildren(t) - checkApplied.traverse(unit) - end CaptureChecker -end CheckCaptures diff --git a/tests/pos-with-compiler-cc/dotc/cc/Setup.scala b/tests/pos-with-compiler-cc/dotc/cc/Setup.scala deleted file mode 100644 index a91831022984..000000000000 --- a/tests/pos-with-compiler-cc/dotc/cc/Setup.scala +++ /dev/null @@ -1,482 +0,0 @@ -package dotty.tools -package dotc -package cc - -import core._ -import Phases.*, DenotTransformers.*, SymDenotations.* -import Contexts.*, Names.*, Flags.*, Symbols.*, Decorators.* -import Types.*, StdNames.* -import config.Printers.capt -import ast.tpd -import transform.Recheck.* -import CaptureSet.IdentityCaptRefMap -import Synthetics.isExcluded - -/** A tree traverser that prepares a compilation unit to be capture checked. - * It does the following: - * - For every inferred type, drop any retains annotations, - * add capture sets to all its parts, add refinements to class types and function types. - * (c.f. mapInferred) - * - For explicit capturing types, expand throws aliases to the underlying (pure) function, - * and add some implied capture sets to curried functions (c.f. expandThrowsAlias, expandAbbreviations). - * - Add capture sets to self types of classes and objects, unless the self type was written explicitly. - * - Box the types of mutable variables and type arguments to methods (type arguments of types - * are boxed on access). - * - Link the external types of val and def symbols with the inferred types based on their parameter symbols. 
- */ -class Setup( - preRecheckPhase: DenotTransformer, - thisPhase: DenotTransformer, - recheckDef: (tpd.ValOrDefDef, Symbol) => Context ?=> Unit) -extends tpd.TreeTraverser: - import tpd.* - - /** Create dependent function with underlying function class `tycon` and given - * arguments `argTypes` and result `resType`. - */ - private def depFun(tycon: Type, argTypes: List[Type], resType: Type)(using Context): Type = - MethodType.companion( - isContextual = defn.isContextFunctionClass(tycon.classSymbol), - isErased = defn.isErasedFunctionClass(tycon.classSymbol) - )(argTypes, resType) - .toFunctionType(isJava = false, alwaysDependent = true) - - /** If `tp` is an unboxed capturing type or a function returning an unboxed capturing type, - * convert it to be boxed. - */ - private def box(tp: Type)(using Context): Type = - def recur(tp: Type): Type = tp.dealias match - case tp @ CapturingType(parent, refs) if !tp.isBoxed => - tp.boxed - case tp1 @ AppliedType(tycon, args) if defn.isNonRefinedFunction(tp1) => - val res = args.last - val boxedRes = recur(res) - if boxedRes eq res then tp - else tp1.derivedAppliedType(tycon, args.init :+ boxedRes) - case tp1 @ RefinedType(_, _, rinfo) if defn.isFunctionType(tp1) => - val boxedRinfo = recur(rinfo) - if boxedRinfo eq rinfo then tp - else boxedRinfo.toFunctionType(isJava = false, alwaysDependent = true) - case tp1: MethodOrPoly => - val res = tp1.resType - val boxedRes = recur(res) - if boxedRes eq res then tp - else tp1.derivedLambdaType(resType = boxedRes) - case _ => tp - tp match - case tp: MethodOrPoly => tp // don't box results of methods outside refinements - case _ => recur(tp) - - /** Perform the following transformation steps everywhere in a type: - * 1. Drop retains annotations - * 2. Turn plain function types into dependent function types, so that - * we can refer to their parameters in capture sets. Currently this is - * only done at the toplevel, i.e. for function types that are not - * themselves argument types of other function types. Without this restriction - * pos.../lists.scala and pos/...curried-shorthands.scala fail. - * Need to figure out why. - * 3. Refine other class types C by adding capture set variables to their parameter getters - * (see addCaptureRefinements) - * 4. Add capture set variables to all types that can be tracked - * - * Polytype bounds are only cleaned using step 1, but not otherwise transformed. - */ - private def mapInferred(using DetachedContext) = new TypeMap: - - /** Drop @retains annotations everywhere */ - object cleanup extends TypeMap: - def apply(t: Type) = t match - case AnnotatedType(parent, annot) if annot.symbol == defn.RetainsAnnot => - apply(parent) - case _ => - mapOver(t) - - /** Refine a possibly applied class type C where the class has tracked parameters - * x_1: T_1, ..., x_n: T_n to C { val x_1: CV_1 T_1, ..., val x_n: CV_n T_n } - * where CV_1, ..., CV_n are fresh capture sets. - */ - def addCaptureRefinements(tp: Type): Type = tp match - case _: TypeRef | _: AppliedType if tp.typeParams.isEmpty => - tp.typeSymbol match - case cls: ClassSymbol - if !defn.isFunctionClass(cls) && !cls.is(JavaDefined) => - // We assume that Java classes can refer to capturing Scala types only indirectly, - // using type parameters. Hence, no need to refine them. 
- cls.paramGetters.foldLeft(tp) { (core, getter) => - if getter.termRef.isTracked then - val getterType = tp.memberInfo(getter).strippedDealias - RefinedType(core, getter.name, CapturingType(getterType, CaptureSet.Var())) - .showing(i"add capture refinement $tp --> $result", capt) - else - core - } - case _ => tp - case _ => tp - - private def superTypeIsImpure(tp: Type): Boolean = { - tp.dealias match - case CapturingType(_, refs) => - !refs.isAlwaysEmpty - case tp: (TypeRef | AppliedType) => - val sym = tp.typeSymbol - if sym.isClass then - sym == defn.AnyClass - // we assume Any is a shorthand of {*} Any, so if Any is an upper - // bound, the type is taken to be impure. - else superTypeIsImpure(tp.superType) - case tp: (RefinedOrRecType | MatchType) => - superTypeIsImpure(tp.underlying) - case tp: AndType => - superTypeIsImpure(tp.tp1) || needsVariable(tp.tp2) - case tp: OrType => - superTypeIsImpure(tp.tp1) && superTypeIsImpure(tp.tp2) - case _ => - false - }.showing(i"super type is impure $tp = $result", capt) - - /** Should a capture set variable be added on type `tp`? */ - def needsVariable(tp: Type): Boolean = { - tp.typeParams.isEmpty && tp.match - case tp: (TypeRef | AppliedType) => - val tp1 = tp.dealias - if tp1 ne tp then needsVariable(tp1) - else - val sym = tp1.typeSymbol - if sym.isClass then - !sym.isPureClass && sym != defn.AnyClass - else superTypeIsImpure(tp1) - case tp: (RefinedOrRecType | MatchType) => - needsVariable(tp.underlying) - case tp: AndType => - needsVariable(tp.tp1) && needsVariable(tp.tp2) - case tp: OrType => - needsVariable(tp.tp1) || needsVariable(tp.tp2) - case CapturingType(parent, refs) => - needsVariable(parent) - && refs.isConst // if refs is a variable, no need to add another - && !refs.isUniversal // if refs is {*}, an added variable would not change anything - case _ => - false - }.showing(i"can have inferred capture $tp = $result", capt) - - /** Add a capture set variable to `tp` if necessary, or maybe pull out - * an embedded capture set variable from a part of `tp`. - */ - def addVar(tp: Type) = tp match - case tp @ RefinedType(parent @ CapturingType(parent1, refs), rname, rinfo) => - CapturingType(tp.derivedRefinedType(parent1, rname, rinfo), refs, parent.isBoxed) - case tp: RecType => - tp.parent match - case parent @ CapturingType(parent1, refs) => - CapturingType(tp.derivedRecType(parent1), refs, parent.isBoxed) - case _ => - tp // can return `tp` here since unlike RefinedTypes, RecTypes are never created - // by `mapInferred`. Hence if the underlying type admits capture variables - // a variable was already added, and the first case above would apply. 
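The and/or structure of the `needsVariable` test above can be shown on a toy type model; `Cls`, `And` and `Or` below are simplified stand-ins for class references, intersection types and union types, assumed only for this sketch.

object NeedsVariableSketch:
  sealed trait Ty
  case class Cls(pure: Boolean) extends Ty
  case class And(tp1: Ty, tp2: Ty) extends Ty
  case class Or(tp1: Ty, tp2: Ty) extends Ty

  // A class reference needs a capture variable if it is not pure; an intersection
  // needs one only if both sides do, a union if either side does.
  def needsVariable(tp: Ty): Boolean = tp match
    case Cls(pure)   => !pure
    case And(t1, t2) => needsVariable(t1) && needsVariable(t2)
    case Or(t1, t2)  => needsVariable(t1) || needsVariable(t2)

  @main def needsVariableDemo(): Unit =
    val impure = Cls(pure = false)
    val pure   = Cls(pure = true)
    println(needsVariable(And(impure, pure)))  // false: the pure side already bounds the intersection
    println(needsVariable(Or(impure, pure)))   // true: the impure side can show through the union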
- case AndType(tp1 @ CapturingType(parent1, refs1), tp2 @ CapturingType(parent2, refs2)) => - assert(refs1.asVar.elems.isEmpty) - assert(refs2.asVar.elems.isEmpty) - assert(tp1.isBoxed == tp2.isBoxed) - CapturingType(AndType(parent1, parent2), refs1 ** refs2, tp1.isBoxed) - case tp @ OrType(tp1 @ CapturingType(parent1, refs1), tp2 @ CapturingType(parent2, refs2)) => - assert(refs1.asVar.elems.isEmpty) - assert(refs2.asVar.elems.isEmpty) - assert(tp1.isBoxed == tp2.isBoxed) - CapturingType(OrType(parent1, parent2, tp.isSoft), refs1 ++ refs2, tp1.isBoxed) - case tp @ OrType(tp1 @ CapturingType(parent1, refs1), tp2) => - CapturingType(OrType(parent1, tp2, tp.isSoft), refs1, tp1.isBoxed) - case tp @ OrType(tp1, tp2 @ CapturingType(parent2, refs2)) => - CapturingType(OrType(tp1, parent2, tp.isSoft), refs2, tp2.isBoxed) - case _ if needsVariable(tp) => - val cs = tp.dealias match - case CapturingType(_, refs) => CaptureSet.Var(refs.elems) - case _ => CaptureSet.Var() - CapturingType(tp, cs) - case _ => - tp - - private var isTopLevel = true - - private def mapNested(ts: List[Type]): List[Type] = - val saved = isTopLevel - isTopLevel = false - try ts.mapConserve(this) finally isTopLevel = saved - - def apply(t: Type) = - val tp = expandThrowsAlias(t) - val tp1 = tp match - case AnnotatedType(parent, annot) if annot.symbol == defn.RetainsAnnot => - // Drop explicit retains annotations - apply(parent) - case tp @ AppliedType(tycon, args) => - val tycon1 = this(tycon) - if defn.isNonRefinedFunction(tp) then - // Convert toplevel generic function types to dependent functions - val args0 = args.init - var res0 = args.last - val args1 = mapNested(args0) - val res1 = this(res0) - if isTopLevel then - depFun(tycon1, args1, res1) - .showing(i"add function refinement $tp --> $result", capt) - else if (tycon1 eq tycon) && (args1 eq args0) && (res1 eq res0) then - tp - else - tp.derivedAppliedType(tycon1, args1 :+ res1) - else - tp.derivedAppliedType(tycon1, args.mapConserve(arg => this(arg))) - case tp @ RefinedType(core, rname, rinfo) if defn.isFunctionType(tp) => - val rinfo1 = apply(rinfo) - if rinfo1 ne rinfo then rinfo1.toFunctionType(isJava = false, alwaysDependent = true) - else tp - case tp: MethodType => - tp.derivedLambdaType( - paramInfos = mapNested(tp.paramInfos), - resType = this(tp.resType)) - case tp: TypeLambda => - // Don't recurse into parameter bounds, just cleanup any stray retains annotations - tp.derivedLambdaType( - paramInfos = tp.paramInfos.mapConserve(cleanup(_).bounds), - resType = this(tp.resType)) - case _ => - mapOver(tp) - addVar(addCaptureRefinements(tp1)) - end apply - end mapInferred - - private def transformInferredType(tp: Type, boxed: Boolean)(using Context): Type = - val tp1 = mapInferred(tp) - if boxed then box(tp1) else tp1 - - /** Expand some aliases of function types to the underlying functions. - * Right now, these are only $throws aliases, but this could be generalized. 
- */ - private def expandThrowsAlias(tp: Type)(using Context) = tp match - case AppliedType(tycon, res :: exc :: Nil) if tycon.typeSymbol == defn.throwsAlias => - // hard-coded expansion since $throws aliases in stdlib are defined with `?=>` rather than `?->` - defn.FunctionOf(defn.CanThrowClass.typeRef.appliedTo(exc) :: Nil, res, isContextual = true, isErased = true) - case _ => tp - - private def expandThrowsAliases(using DetachedContext) = new TypeMap: - def apply(t: Type) = t match - case _: AppliedType => - val t1 = expandThrowsAlias(t) - if t1 ne t then apply(t1) else mapOver(t) - case _: LazyRef => - t - case t @ AnnotatedType(t1, ann) => - // Don't map capture sets, since that would implicitly normalize sets that - // are not well-formed. - t.derivedAnnotatedType(apply(t1), ann) - case _ => - mapOver(t) - - /** Fill in capture sets of curried function types from left to right, using - * a combination of the following two rules: - * - * 1. Expand `{c} (x: A) -> (y: B) -> C` - * to `{c} (x: A) -> {c} (y: B) -> C` - * 2. Expand `(x: A) -> (y: B) -> C` where `x` is tracked - * to `(x: A) -> {x} (y: B) -> C` - * - * TODO: Should we also propagate capture sets to the left? - */ - private def expandAbbreviations(using DetachedContext) = new TypeMap: - - /** Propagate `outerCs` as well as all tracked parameters as capture set to the result type - * of the dependent function type `tp`. - */ - def propagateDepFunctionResult(tp: Type, outerCs: CaptureSet): Type = tp match - case RefinedType(parent, nme.apply, rinfo: MethodType) => - val localCs = CaptureSet(rinfo.paramRefs.filter(_.isTracked)*) - val rinfo1 = rinfo.derivedLambdaType( - resType = propagateEnclosing(rinfo.resType, CaptureSet.empty, outerCs ++ localCs)) - if rinfo1 ne rinfo then rinfo1.toFunctionType(isJava = false, alwaysDependent = true) - else tp - - /** If `tp` is a function type: - * - add `outerCs` as its capture set, - * - propagate `currentCs`, `outerCs`, and all tracked parameters of `tp` to the right. 
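The two expansion rules for curried function types listed above can be mimicked on a toy model; the `Fun`/`Res` case classes and string capture sets are assumptions made only for this sketch and stand in for dependent function types with tracked parameters.

object CurriedPropagationSketch:
  sealed trait Ty
  case class Res(name: String) extends Ty
  case class Fun(cs: Set[String], param: String, tracked: Boolean, res: Ty) extends Ty

  // Push capture sets to the right: every nested function picks up the capture
  // set of the enclosing function plus any tracked parameters bound further out.
  def propagate(tp: Ty, fromLeft: Set[String]): Ty = tp match
    case Fun(cs, param, tracked, res) =>
      val cs1 = cs ++ fromLeft
      val passedOn = if tracked then cs1 + param else cs1
      Fun(cs1, param, tracked, propagate(res, passedOn))
    case other => other

  @main def propagationDemo(): Unit =
    // {c} (x: A) -> (y: B) -> C, with `x` tracked
    val tp = Fun(Set("c"), "x", tracked = true, Fun(Set.empty, "y", tracked = false, Res("C")))
    println(propagate(tp, Set.empty))
    // Fun(Set(c),x,true,Fun(Set(c, x),y,false,Res(C)))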
- */ - def propagateEnclosing(tp: Type, currentCs: CaptureSet, outerCs: CaptureSet): Type = tp match - case tp @ AppliedType(tycon, args) if defn.isFunctionClass(tycon.typeSymbol) => - val tycon1 = this(tycon) - val args1 = args.init.mapConserve(this) - val tp1 = - if args1.exists(!_.captureSet.isAlwaysEmpty) then - val propagated = propagateDepFunctionResult( - depFun(tycon, args1, args.last), currentCs ++ outerCs) - propagated match - case RefinedType(_, _, mt: MethodType) => - if mt.isCaptureDependent then propagated - else - // No need to introduce dependent type, switch back to generic function type - tp.derivedAppliedType(tycon1, args1 :+ mt.resType) - else - val resType1 = propagateEnclosing( - args.last, CaptureSet.empty, currentCs ++ outerCs) - tp.derivedAppliedType(tycon1, args1 :+ resType1) - tp1.capturing(outerCs) - case tp @ RefinedType(parent, nme.apply, rinfo: MethodType) if defn.isFunctionType(tp) => - propagateDepFunctionResult(mapOver(tp), currentCs ++ outerCs) - .capturing(outerCs) - case _ => - mapOver(tp) - - def apply(tp: Type): Type = tp match - case CapturingType(parent, cs) => - tp.derivedCapturingType(propagateEnclosing(parent, cs, CaptureSet.empty), cs) - case _ => - propagateEnclosing(tp, CaptureSet.empty, CaptureSet.empty) - end expandAbbreviations - - private def transformExplicitType(tp: Type, boxed: Boolean)(using Context): Type = - val tp1 = expandThrowsAliases(if boxed then box(tp) else tp) - if tp1 ne tp then capt.println(i"expanded: $tp --> $tp1") - if ctx.settings.YccNoAbbrev.value then tp1 - else expandAbbreviations(tp1) - - /** Transform type of type tree, and remember the transformed type as the type the tree */ - private def transformTT(tree: TypeTree, boxed: Boolean, exact: Boolean)(using Context): Unit = - if !tree.hasRememberedType then - tree.rememberType( - if tree.isInstanceOf[InferredTypeTree] && !exact - then transformInferredType(tree.tpe, boxed) - else transformExplicitType(tree.tpe, boxed)) - - /** Substitute parameter symbols in `from` to paramRefs in corresponding - * method or poly types `to`. We use a single BiTypeMap to do everything. 
- * @param from a list of lists of type or term parameter symbols of a curried method - * @param to a list of method or poly types corresponding one-to-one to the parameter lists - */ - private class SubstParams(from: List[List[Symbol]], to: List[LambdaType])(using DetachedContext) - extends DeepTypeMap, BiTypeMap: - - def apply(t: Type): Type = t match - case t: NamedType => - val sym = t.symbol - def outer(froms: List[List[Symbol]], tos: List[LambdaType]): Type = - def inner(from: List[Symbol], to: List[ParamRef]): Type = - if from.isEmpty then outer(froms.tail, tos.tail) - else if sym eq from.head then to.head - else inner(from.tail, to.tail) - if tos.isEmpty then t - else inner(froms.head, tos.head.paramRefs) - outer(from, to) - case _ => - mapOver(t) - - def inverse(t: Type): Type = t match - case t: ParamRef => - def recur(from: List[LambdaType], to: List[List[Symbol]]): Type = - if from.isEmpty then t - else if t.binder eq from.head then to.head(t.paramNum).namedType - else recur(from.tail, to.tail) - recur(to, from) - case _ => - mapOver(t) - end SubstParams - - /** Update info of `sym` for CheckCaptures phase only */ - private def updateInfo(sym: Symbol, info: Type)(using Context) = - sym.updateInfoBetween(preRecheckPhase, thisPhase, info) - - def traverse(tree: Tree)(using Context): Unit = - tree match - case tree: DefDef => - if isExcluded(tree.symbol) then - return - tree.tpt match - case tpt: TypeTree if tree.symbol.allOverriddenSymbols.hasNext => - tree.paramss.foreach(traverse) - transformTT(tpt, boxed = false, exact = true) - traverse(tree.rhs) - //println(i"TYPE of ${tree.symbol.showLocated} = ${tpt.knownType}") - case _ => - traverseChildren(tree) - case tree @ ValDef(_, tpt: TypeTree, _) => - transformTT(tpt, - boxed = tree.symbol.is(Mutable), // types of mutable variables are boxed - exact = tree.symbol.allOverriddenSymbols.hasNext // types of symbols that override a parent don't get a capture set - ) - traverse(tree.rhs) - case tree @ TypeApply(fn, args) => - traverse(fn) - for case arg: TypeTree <- args do - transformTT(arg, boxed = true, exact = false) // type arguments in type applications are boxed - case _ => - traverseChildren(tree) - tree match - case tree: TypeTree => - transformTT(tree, boxed = false, exact = false) // other types are not boxed - case tree: ValOrDefDef => - val sym = tree.symbol - - // replace an existing symbol info with inferred types where capture sets of - // TypeParamRefs and TermParamRefs put in correspondence by BiTypeMaps with the - // capture sets of the types of the method's parameter symbols and result type. 
- def integrateRT( - info: Type, // symbol info to replace - psymss: List[List[Symbol]], // the local (type and term) parameter symbols corresponding to `info` - prevPsymss: List[List[Symbol]], // the local parameter symbols seen previously in reverse order - prevLambdas: List[LambdaType] // the outer method and polytypes generated previously in reverse order - ): Type = - info match - case mt: MethodOrPoly => - val psyms = psymss.head - mt.companion(mt.paramNames)( - mt1 => - if !psyms.exists(_.isUpdatedAfter(preRecheckPhase)) && !mt.isParamDependent && prevLambdas.isEmpty then - mt.paramInfos - else - val subst = SubstParams(psyms :: prevPsymss, mt1 :: prevLambdas) - psyms.map(psym => subst(psym.info).asInstanceOf[mt.PInfo]), - mt1 => - integrateRT(mt.resType, psymss.tail, psyms :: prevPsymss, mt1 :: prevLambdas) - ) - case info: ExprType => - info.derivedExprType(resType = - integrateRT(info.resType, psymss, prevPsymss, prevLambdas)) - case _ => - val restp = tree.tpt.knownType - if prevLambdas.isEmpty then restp - else SubstParams(prevPsymss, prevLambdas)(restp) - - if tree.tpt.hasRememberedType && !sym.isConstructor then - val newInfo = integrateRT(sym.info, sym.paramSymss, Nil, Nil) - .showing(i"update info $sym: ${sym.info} --> $result", capt) - if newInfo ne sym.info then - val completer = new LazyType: - def complete(denot: SymDenotation)(using Context) = - denot.info = newInfo - recheckDef(tree, sym) - updateInfo(sym, completer) - case tree: Bind => - val sym = tree.symbol - updateInfo(sym, transformInferredType(sym.info, boxed = false)) - case tree: TypeDef => - tree.symbol match - case cls: ClassSymbol => - val cinfo @ ClassInfo(prefix, _, ps, decls, selfInfo) = cls.classInfo - if (selfInfo eq NoType) || cls.is(ModuleClass) && !cls.isStatic then - // add capture set to self type of nested classes if no self type is given explicitly - val localRefs = CaptureSet.Var() - val newInfo = ClassInfo(prefix, cls, ps, decls, - CapturingType(cinfo.selfType, localRefs) - .showing(i"inferred self type for $cls: $result", capt)) - updateInfo(cls, newInfo) - cls.thisType.asInstanceOf[ThisType].invalidateCaches() - if cls.is(ModuleClass) then - // if it's a module, the capture set of the module reference is the capture set of the self type - val modul = cls.sourceModule - updateInfo(modul, CapturingType(modul.info, localRefs)) - modul.termRef.invalidateCaches() - case _ => - val info = atPhase(preRecheckPhase)(tree.symbol.info) - val newInfo = transformExplicitType(info, boxed = false) - if newInfo ne info then - updateInfo(tree.symbol, newInfo) - capt.println(i"update info of ${tree.symbol} from $info to $newInfo") - case _ => - end traverse -end Setup diff --git a/tests/pos-with-compiler-cc/dotc/cc/Synthetics.scala b/tests/pos-with-compiler-cc/dotc/cc/Synthetics.scala deleted file mode 100644 index dacbd27e0f35..000000000000 --- a/tests/pos-with-compiler-cc/dotc/cc/Synthetics.scala +++ /dev/null @@ -1,189 +0,0 @@ -package dotty.tools -package dotc -package cc - -import core.* -import Symbols.*, SymDenotations.*, Contexts.*, Flags.*, Types.*, Decorators.* -import StdNames.nme -import Names.Name -import NameKinds.DefaultGetterName -import Phases.checkCapturesPhase -import config.Printers.capt - -/** Classification and transformation methods for synthetic - * case class methods that need to be treated specially. - * In particular, compute capturing types for some of these methods which - * have inferred (result-)types that need to be established under separate - * compilation. 
- */ -object Synthetics: - private def isSyntheticCopyMethod(sym: SymDenotation)(using Context) = - sym.name == nme.copy && sym.is(Synthetic) && sym.owner.isClass && sym.owner.is(Case) - - private def isSyntheticCompanionMethod(sym: SymDenotation, names: Name*)(using Context): Boolean = - names.contains(sym.name) && sym.is(Synthetic) && sym.owner.is(Module) && sym.owner.companionClass.is(Case) - - private def isSyntheticCopyDefaultGetterMethod(sym: SymDenotation)(using Context) = sym.name match - case DefaultGetterName(nme.copy, _) => sym.is(Synthetic) && sym.owner.isClass && sym.owner.is(Case) - case _ => false - - /** Is `sym` a synthetic apply, copy, or copy default getter method? - * The types of these symbols are transformed in a special way without - * looking at the definitions's RHS - */ - def needsTransform(symd: SymDenotation)(using Context): Boolean = - isSyntheticCopyMethod(symd) - || isSyntheticCompanionMethod(symd, nme.apply, nme.unapply) - || isSyntheticCopyDefaultGetterMethod(symd) - || (symd.symbol eq defn.Object_eq) - || (symd.symbol eq defn.Object_ne) - - /** Method is excluded from regular capture checking. - * Excluded are synthetic class members - * - that override a synthesized case class symbol, or - * - the fromProduct method, or - * - members transformed specially as indicated by `needsTransform`. - */ - def isExcluded(sym: Symbol)(using Context): Boolean = - sym.is(Synthetic) - && sym.owner.isClass - && ( defn.caseClassSynthesized.exists( - ccsym => sym.overriddenSymbol(ccsym.owner.asClass) == ccsym) - || isSyntheticCompanionMethod(sym, nme.fromProduct) - || needsTransform(sym)) - - /** Add capture dependencies to the type of the `apply` or `copy` method of a case class. - * An apply method in a case class like this: - * case class CC(a: {d} A, b: B, {*} c: C) - * would get type - * def apply(a': {d} A, b: B, {*} c': C): {a', c'} CC { val a = {a'} A, val c = {c'} C } - * where `'` is used to indicate the difference between parameter symbol and refinement name. - * Analogous for the copy method. 
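Schematically, the augmentation sketched in the example above is a fold over the tracked parameters, where each parameter adds one refinement and the whole result is then wrapped in a capturing type naming all of them. A tiny standalone model of that shape (the `Tpe`, `Base`, `Capturing` and `Refined` names below are invented for this sketch and are not the compiler's `Types` API):

// Toy stand-in for a type representation, only to show the fold shape.
sealed trait Tpe
case class Base(name: String)                             extends Tpe
case class Capturing(parent: Tpe, refs: Set[String])      extends Tpe
case class Refined(parent: Tpe, field: String, info: Tpe) extends Tpe

// One refinement per tracked parameter, then a capturing type naming them all,
// analogous to the result augmentation performed for apply/copy.
def augment(result: Tpe, tracked: List[(String, Tpe)]): Tpe = {
  val refined = tracked.foldLeft(result) { case (parent, (param, info)) =>
    Refined(parent, param, Capturing(info, Set(param)))
  }
  Capturing(refined, tracked.map(_._1).toSet)
}

// augment(Base("CC"), List("a" -> Base("A"), "c" -> Base("C"))) models
// {a, c} CC { val a: {a} A; val c: {c} C } from the example above.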
- */ - private def addCaptureDeps(info: Type)(using Context): Type = info match - case info: MethodType => - val trackedParams = info.paramRefs.filter(atPhase(checkCapturesPhase)(_.isTracked)) - def augmentResult(tp: Type): Type = tp match - case tp: MethodOrPoly => - tp.derivedLambdaType(resType = augmentResult(tp.resType)) - case _ => - val refined = trackedParams.foldLeft(tp) { (parent, pref) => - RefinedType(parent, pref.paramName, - CapturingType( - atPhase(ctx.phase.next)(pref.underlying.stripCapturing), - CaptureSet(pref))) - } - CapturingType(refined, CaptureSet(trackedParams*)) - if trackedParams.isEmpty then info - else augmentResult(info).showing(i"augment apply/copy type $info to $result", capt) - case info: PolyType => - info.derivedLambdaType(resType = addCaptureDeps(info.resType)) - case _ => - info - - /** Drop capture dependencies from the type of `apply` or `copy` method of a case class */ - private def dropCaptureDeps(tp: Type)(using Context): Type = tp match - case tp: MethodOrPoly => - tp.derivedLambdaType(resType = dropCaptureDeps(tp.resType)) - case CapturingType(parent, _) => - dropCaptureDeps(parent) - case RefinedType(parent, _, _) => - dropCaptureDeps(parent) - case _ => - tp - - /** Add capture information to the type of the default getter of a case class copy method */ - private def addDefaultGetterCapture(info: Type, owner: Symbol, idx: Int)(using Context): Type = info match - case info: MethodOrPoly => - info.derivedLambdaType(resType = addDefaultGetterCapture(info.resType, owner, idx)) - case info: ExprType => - info.derivedExprType(addDefaultGetterCapture(info.resType, owner, idx)) - case EventuallyCapturingType(parent, _) => - addDefaultGetterCapture(parent, owner, idx) - case info @ AnnotatedType(parent, annot) => - info.derivedAnnotatedType(addDefaultGetterCapture(parent, owner, idx), annot) - case _ if idx < owner.asClass.paramGetters.length => - val param = owner.asClass.paramGetters(idx) - val pinfo = param.info - atPhase(ctx.phase.next) { - if pinfo.captureSet.isAlwaysEmpty then info - else CapturingType(pinfo.stripCapturing, CaptureSet(param.termRef)) - } - case _ => - info - - /** Drop capture information from the type of the default getter of a case class copy method */ - private def dropDefaultGetterCapture(info: Type)(using Context): Type = info match - case info: MethodOrPoly => - info.derivedLambdaType(resType = dropDefaultGetterCapture(info.resType)) - case CapturingType(parent, _) => - parent - case info @ AnnotatedType(parent, annot) => - info.derivedAnnotatedType(dropDefaultGetterCapture(parent), annot) - case _ => - info - - /** Augment an unapply of type `(x: C): D` to `(x: {*} C): {x} D` */ - private def addUnapplyCaptures(info: Type)(using Context): Type = info match - case info: MethodType => - val paramInfo :: Nil = info.paramInfos: @unchecked - val newParamInfo = - CapturingType(paramInfo, CaptureSet.universal) - val trackedParam = info.paramRefs.head - def newResult(tp: Type): Type = tp match - case tp: MethodOrPoly => - tp.derivedLambdaType(resType = newResult(tp.resType)) - case _ => - CapturingType(tp, CaptureSet(trackedParam)) - info.derivedLambdaType(paramInfos = newParamInfo :: Nil, resType = newResult(info.resType)) - .showing(i"augment unapply type $info to $result", capt) - case info: PolyType => - info.derivedLambdaType(resType = addUnapplyCaptures(info.resType)) - - /** Drop added capture information from the type of an `unapply` */ - private def dropUnapplyCaptures(info: Type)(using Context): Type = info match - case 
info: MethodType => - info.paramInfos match - case CapturingType(oldParamInfo, _) :: Nil => - def oldResult(tp: Type): Type = tp match - case tp: MethodOrPoly => - tp.derivedLambdaType(resType = oldResult(tp.resType)) - case CapturingType(tp, _) => - tp - info.derivedLambdaType(paramInfos = oldParamInfo :: Nil, resType = oldResult(info.resType)) - case _ => - info - case info: PolyType => - info.derivedLambdaType(resType = dropUnapplyCaptures(info.resType)) - - /** If `sym` refers to a synthetic apply, unapply, copy, or copy default getter method - * of a case class, transform it to account for capture information. - * The method is run in phase CheckCaptures.Pre - * @pre needsTransform(sym) - */ - def transformToCC(sym: SymDenotation)(using Context): SymDenotation = sym.name match - case DefaultGetterName(nme.copy, n) => - sym.copySymDenotation(info = addDefaultGetterCapture(sym.info, sym.owner, n)) - case nme.unapply => - sym.copySymDenotation(info = addUnapplyCaptures(sym.info)) - case nme.apply | nme.copy => - sym.copySymDenotation(info = addCaptureDeps(sym.info)) - case n if n == nme.eq || n == nme.ne => - sym.copySymDenotation(info = - MethodType(defn.ObjectType.capturing(CaptureSet.universal) :: Nil, defn.BooleanType)) - - /** If `sym` refers to a synthetic apply, unapply, copy, or copy default getter method - * of a case class, transform it back to what it was before the CC phase. - * @pre needsTransform(sym) - */ - def transformFromCC(sym: SymDenotation)(using Context): SymDenotation = sym.name match - case DefaultGetterName(nme.copy, n) => - sym.copySymDenotation(info = dropDefaultGetterCapture(sym.info)) - case nme.unapply => - sym.copySymDenotation(info = dropUnapplyCaptures(sym.info)) - case nme.apply | nme.copy => - sym.copySymDenotation(info = dropCaptureDeps(sym.info)) - case n if n == nme.eq || n == nme.ne => - sym.copySymDenotation(info = defn.methOfAnyRef(defn.BooleanType)) - -end Synthetics \ No newline at end of file diff --git a/tests/pos-with-compiler-cc/dotc/classpath/AggregateClassPath.scala b/tests/pos-with-compiler-cc/dotc/classpath/AggregateClassPath.scala deleted file mode 100644 index 51b261583feb..000000000000 --- a/tests/pos-with-compiler-cc/dotc/classpath/AggregateClassPath.scala +++ /dev/null @@ -1,162 +0,0 @@ -/* - * Copyright (c) 2014 Contributor. All rights reserved. - */ -package dotty.tools -package dotc.classpath - -import scala.language.unsafeNulls - -import java.net.URL -import scala.collection.mutable.ArrayBuffer -import scala.collection.immutable.ArraySeq -import dotc.util - -import dotty.tools.io.{ AbstractFile, ClassPath, ClassRepresentation, EfficientClassPath } - -/** - * A classpath unifying multiple class- and sourcepath entries. - * The Classpath can obtain entries for classes and sources independently - * so it tries to do operations quite optimally - iterating only these collections - * which are needed in the given moment and only as far as it's necessary. 
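The first-hit behaviour this classpath implements can be sketched in isolation (the `Lookup` and `Aggregate` names below are invented for the sketch and are not the real `ClassPath` API): queries walk the aggregated parts lazily, and the first part that can answer wins.

trait Lookup {
  def find(className: String): Option[String] // e.g. the path of a .class file
}

// First-hit aggregation, mirroring the way findClassFile queries its aggregates:
// parts are queried lazily and the first successful lookup wins.
class Aggregate(parts: Seq[Lookup]) extends Lookup {
  def find(className: String): Option[String] =
    parts.iterator.map(_.find(className)).collectFirst { case Some(hit) => hit }
}

@main def aggregateDemo(): Unit = {
  val a: Lookup = c => if (c == "Foo") Some("a/Foo.class") else None
  val b: Lookup = c => Some(s"b/$c.class")
  println(Aggregate(Seq(a, b)).find("Foo")) // Some(a/Foo.class): the first entry shadows the second
  println(Aggregate(Seq(a, b)).find("Bar")) // Some(b/Bar.class)
}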
- * - * @param aggregates classpath instances containing entries which this class processes - */ -case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { - override def findClassFile(className: String): Option[AbstractFile] = { - val (pkg, _) = PackageNameUtils.separatePkgAndClassNames(className) - aggregatesForPackage(PackageName(pkg)).iterator.map(_.findClassFile(className)).collectFirst { - case Some(x) => x - } - } - private val packageIndex: collection.mutable.Map[String, Seq[ClassPath]] = collection.mutable.Map() - private def aggregatesForPackage(pkg: PackageName): Seq[ClassPath] = packageIndex.synchronized { - packageIndex.getOrElseUpdate(pkg.dottedString, aggregates.filter(_.hasPackage(pkg))) - } - - override def findClass(className: String): Option[ClassRepresentation] = { - val (pkg, _) = PackageNameUtils.separatePkgAndClassNames(className) - - def findEntry(isSource: Boolean): Option[ClassRepresentation] = - aggregatesForPackage(PackageName(pkg)).iterator.map(_.findClass(className)).collectFirst { - case Some(s: SourceFileEntry) if isSource => s - case Some(s: ClassFileEntry) if !isSource => s - } - - val classEntry = findEntry(isSource = false) - val sourceEntry = findEntry(isSource = true) - - (classEntry, sourceEntry) match { - case (Some(c: ClassFileEntry), Some(s: SourceFileEntry)) => Some(ClassAndSourceFilesEntry(c.file, s.file)) - case (c @ Some(_), _) => c - case (_, s) => s - } - } - - override def asURLs: Seq[URL] = aggregates.flatMap(_.asURLs) - - override def asClassPathStrings: Seq[String] = aggregates.map(_.asClassPathString).distinct - - override def asSourcePathString: String = ClassPath.join(aggregates map (_.asSourcePathString): _*) - - override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = { - val aggregatedPackages = aggregates.flatMap(_.packages(inPackage)).distinct - aggregatedPackages - } - - override private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = - getDistinctEntries(_.classes(inPackage)) - - override private[dotty] def sources(inPackage: PackageName): Seq[SourceFileEntry] = - getDistinctEntries(_.sources(inPackage)) - - override private[dotty] def hasPackage(pkg: PackageName): Boolean = aggregates.exists(_.hasPackage(pkg)) - override private[dotty] def list(inPackage: PackageName): ClassPathEntries = { - val packages: java.util.HashSet[PackageEntry] = new java.util.HashSet[PackageEntry]() - val classesAndSourcesBuffer = collection.mutable.ArrayBuffer[ClassRepresentation]() - val onPackage: PackageEntry => Unit = packages.add(_) - val onClassesAndSources: ClassRepresentation => Unit = classesAndSourcesBuffer += _ - - aggregates.foreach { cp => - try { - cp match { - case ecp: EfficientClassPath => - ecp.list(inPackage, onPackage, onClassesAndSources) - case _ => - val entries = cp.list(inPackage) - entries._1.foreach(entry => packages.add(entry)) - classesAndSourcesBuffer ++= entries._2 - } - } catch { - case ex: java.io.IOException => - val e = FatalError(ex.getMessage) - e.initCause(ex) - throw e - } - } - - val distinctPackages: Seq[PackageEntry] = { - val arr = packages.toArray(new Array[PackageEntry](packages.size())) - ArraySeq.unsafeWrapArray(arr) - } - val distinctClassesAndSources = mergeClassesAndSources(classesAndSourcesBuffer) - ClassPathEntries(distinctPackages, distinctClassesAndSources) - } - - /** - * Returns only one entry for each name. If there's both a source and a class entry, it - * creates an entry containing both of them. 
If there would be more than one class or source - * entries for the same class it always would use the first entry of each type found on a classpath. - */ - private def mergeClassesAndSources(entries: scala.collection.Seq[ClassRepresentation]): Seq[ClassRepresentation] = { - // based on the implementation from MergedClassPath - var count = 0 - val indices = util.HashMap[String, Int]() - val mergedEntries = new ArrayBuffer[ClassRepresentation](entries.size) - for { - entry <- entries - } { - val name = entry.name - if (indices.contains(name)) { - val index = indices(name) - val existing = mergedEntries(index) - - if (existing.binary.isEmpty && entry.binary.isDefined) - mergedEntries(index) = ClassAndSourceFilesEntry(entry.binary.get, existing.source.get) - if (existing.source.isEmpty && entry.source.isDefined) - mergedEntries(index) = ClassAndSourceFilesEntry(existing.binary.get, entry.source.get) - } - else { - indices(name) = count - mergedEntries += entry - count += 1 - } - } - if (mergedEntries.isEmpty) Nil else mergedEntries.toIndexedSeq - } - - private def getDistinctEntries[EntryType <: ClassRepresentation](getEntries: ClassPath => Seq[EntryType]): Seq[EntryType] = { - val seenNames = util.HashSet[String]() - val entriesBuffer = new ArrayBuffer[EntryType](1024) - for { - cp <- aggregates - entry <- getEntries(cp) if !seenNames.contains(entry.name) - } - { - entriesBuffer += entry - seenNames += entry.name - } - entriesBuffer.toIndexedSeq - } -} - -object AggregateClassPath { - def createAggregate(parts: ClassPath*): ClassPath = { - val elems = new ArrayBuffer[ClassPath]() - parts foreach { - case AggregateClassPath(ps) => elems ++= ps - case p => elems += p - } - if (elems.size == 1) elems.head - else AggregateClassPath(elems.toIndexedSeq) - } -} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/ClassPath.scala b/tests/pos-with-compiler-cc/dotc/classpath/ClassPath.scala deleted file mode 100644 index 176b6acf9c6c..000000000000 --- a/tests/pos-with-compiler-cc/dotc/classpath/ClassPath.scala +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright (c) 2014 Contributor. All rights reserved. 
- */ -package dotty.tools.dotc.classpath - -import dotty.tools.io.AbstractFile -import dotty.tools.io.ClassRepresentation - -case class ClassPathEntries(packages: scala.collection.Seq[PackageEntry], classesAndSources: scala.collection.Seq[ClassRepresentation]) { - def toTuple: (scala.collection.Seq[PackageEntry], scala.collection.Seq[ClassRepresentation]) = (packages, classesAndSources) -} - -object ClassPathEntries { - val empty = ClassPathEntries(Seq.empty, Seq.empty) -} - -trait ClassFileEntry extends ClassRepresentation { - def file: AbstractFile -} - -trait SourceFileEntry extends ClassRepresentation { - def file: AbstractFile -} - -case class PackageName(dottedString: String) { - val dirPathTrailingSlashJar: String = FileUtils.dirPathInJar(dottedString) + "/" - - val dirPathTrailingSlash: String = - if (java.io.File.separatorChar == '/') - dirPathTrailingSlashJar - else - FileUtils.dirPath(dottedString) + java.io.File.separator - - def isRoot: Boolean = dottedString.isEmpty - - def entryName(entry: String): String = { - if (isRoot) entry else { - val builder = new java.lang.StringBuilder(dottedString.length + 1 + entry.length) - builder.append(dottedString) - builder.append('.') - builder.append(entry) - builder.toString - } - } -} - -trait PackageEntry { - def name: String -} - -private[dotty] case class ClassFileEntryImpl(file: AbstractFile) extends ClassFileEntry { - final def fileName: String = file.name - def name: String = FileUtils.stripClassExtension(file.name) // class name - - def binary: Option[AbstractFile] = Some(file) - def source: Option[AbstractFile] = None -} - -private[dotty] case class SourceFileEntryImpl(file: AbstractFile) extends SourceFileEntry { - final def fileName: String = file.name - def name: String = FileUtils.stripSourceExtension(file.name) - - def binary: Option[AbstractFile] = None - def source: Option[AbstractFile] = Some(file) -} - -private[dotty] case class ClassAndSourceFilesEntry(classFile: AbstractFile, srcFile: AbstractFile) extends ClassRepresentation { - final def fileName: String = classFile.name - def name: String = FileUtils.stripClassExtension(classFile.name) - - def binary: Option[AbstractFile] = Some(classFile) - def source: Option[AbstractFile] = Some(srcFile) -} - -private[dotty] case class PackageEntryImpl(name: String) extends PackageEntry - -private[dotty] trait NoSourcePaths { - def asSourcePathString: String = "" - private[dotty] def sources(inPackage: PackageName): Seq[SourceFileEntry] = Seq.empty -} - -private[dotty] trait NoClassPaths { - def findClassFile(className: String): Option[AbstractFile] = None - private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = Seq.empty -} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/ClassPathFactory.scala b/tests/pos-with-compiler-cc/dotc/classpath/ClassPathFactory.scala deleted file mode 100644 index ac8b69381938..000000000000 --- a/tests/pos-with-compiler-cc/dotc/classpath/ClassPathFactory.scala +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright (c) 2014 Contributor. All rights reserved. - */ -package dotty.tools.dotc.classpath - -import dotty.tools.io.{AbstractFile, VirtualDirectory} -import FileUtils._ -import dotty.tools.io.ClassPath -import dotty.tools.dotc.core.Contexts._ - -/** - * Provides factory methods for classpath. When creating classpath instances for a given path, - * it uses proper type of classpath depending on a types of particular files containing sources or classes. 
- */ -class ClassPathFactory { - /** - * Create a new classpath based on the abstract file. - */ - def newClassPath(file: AbstractFile)(using Context): ClassPath = ClassPathFactory.newClassPath(file) - - /** - * Creators for sub classpaths which preserve this context. - */ - def sourcesInPath(path: String)(using Context): List[ClassPath] = - for { - file <- expandPath(path, expandStar = false) - dir <- Option(AbstractFile getDirectory file) - } - yield createSourcePath(dir) - - - def expandPath(path: String, expandStar: Boolean = true): List[String] = dotty.tools.io.ClassPath.expandPath(path, expandStar) - - def expandDir(extdir: String): List[String] = dotty.tools.io.ClassPath.expandDir(extdir) - - def contentsOfDirsInPath(path: String)(using Context): List[ClassPath] = - for { - dir <- expandPath(path, expandStar = false) - name <- expandDir(dir) - entry <- Option(AbstractFile.getDirectory(name)) - } - yield newClassPath(entry) - - def classesInExpandedPath(path: String)(using Context): IndexedSeq[ClassPath] = - classesInPathImpl(path, expand = true).toIndexedSeq - - def classesInPath(path: String)(using Context): List[ClassPath] = classesInPathImpl(path, expand = false) - - def classesInManifest(useManifestClassPath: Boolean)(using Context): List[ClassPath] = - if (useManifestClassPath) dotty.tools.io.ClassPath.manifests.map(url => newClassPath(AbstractFile getResources url)) - else Nil - - // Internal - protected def classesInPathImpl(path: String, expand: Boolean)(using Context): List[ClassPath] = - for { - file <- expandPath(path, expand) - dir <- { - def asImage = if (file.endsWith(".jimage")) Some(AbstractFile.getFile(file)) else None - Option(AbstractFile.getDirectory(file)).orElse(asImage) - } - } - yield newClassPath(dir) - - private def createSourcePath(file: AbstractFile)(using Context): ClassPath = - if (file.isJarOrZip) - ZipAndJarSourcePathFactory.create(file) - else if (file.isDirectory) - new DirectorySourcePath(file.file) - else - sys.error(s"Unsupported sourcepath element: $file") -} - -object ClassPathFactory { - def newClassPath(file: AbstractFile)(using Context): ClassPath = file match { - case vd: VirtualDirectory => VirtualDirectoryClassPath(vd) - case _ => - if (file.isJarOrZip) - ZipAndJarClassPathFactory.create(file) - else if (file.isDirectory) - new DirectoryClassPath(file.file) - else - sys.error(s"Unsupported classpath element: $file") - } -} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/DirectoryClassPath.scala b/tests/pos-with-compiler-cc/dotc/classpath/DirectoryClassPath.scala deleted file mode 100644 index a5678970411b..000000000000 --- a/tests/pos-with-compiler-cc/dotc/classpath/DirectoryClassPath.scala +++ /dev/null @@ -1,313 +0,0 @@ -/* - * Copyright (c) 2014 Contributor. All rights reserved. - */ -package dotty.tools.dotc.classpath - -import scala.language.unsafeNulls - -import java.io.{File => JFile} -import java.net.URL -import java.nio.file.{FileSystems, Files} - -import dotty.tools.dotc.classpath.PackageNameUtils.{packageContains, separatePkgAndClassNames} -import dotty.tools.io.{AbstractFile, PlainFile, ClassPath, ClassRepresentation, EfficientClassPath, JDK9Reflectors} -import FileUtils._ -import PlainFile.toPlainFile - -import scala.jdk.CollectionConverters._ -import scala.collection.immutable.ArraySeq -import scala.util.control.NonFatal -import language.experimental.pureFunctions - -/** - * A trait allowing to look for classpath entries in directories. It provides common logic for - * classes handling class and source files. 
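Concretely, the lookup described here amounts to "dotted package name, relative directory, list matching files"; a rough standalone sketch with `java.nio.file` (the paths and directory layout are assumptions of the example, not of the compiler):

import java.nio.file.{Files, Path, Paths}
import scala.jdk.CollectionConverters.*
import scala.util.Using

// List the .class files of one package under a classpath root directory,
// e.g. classesIn(Paths.get("out/classes"), "scala.collection").
def classesIn(root: Path, pkg: String): List[Path] = {
  val dir = if (pkg.isEmpty) root else root.resolve(pkg.replace('.', '/'))
  if (!Files.isDirectory(dir)) Nil
  else Using.resource(Files.list(dir)) { stream =>
    stream.iterator.asScala.filter(_.getFileName.toString.endsWith(".class")).toList
  }
}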
- * It makes use of the fact that in the case of nested directories it's easy to find a file - * when we have a name of a package. - * It abstracts over the file representation to work with both JFile and AbstractFile. - */ -trait DirectoryLookup[FileEntryType <: ClassRepresentation] extends EfficientClassPath { - type F - - val dir: F - - protected def emptyFiles: Array[F] // avoids reifying ClassTag[F] - protected def getSubDir(dirName: String): Option[F] - protected def listChildren(dir: F, filter: Option[F -> Boolean] = (None: Option[F -> Boolean])): Array[F] // !cc! need explicit typing of default argument - protected def getName(f: F): String - protected def toAbstractFile(f: F): AbstractFile - protected def isPackage(f: F): Boolean - - protected def createFileEntry(file: AbstractFile): FileEntryType - protected def isMatchingFile(f: F): Boolean - - private def getDirectory(forPackage: PackageName): Option[F] = - if (forPackage.isRoot) - Some(dir) - else - getSubDir(forPackage.dirPathTrailingSlash) - - override private[dotty] def hasPackage(pkg: PackageName): Boolean = getDirectory(pkg).isDefined - - private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = { - val dirForPackage = getDirectory(inPackage) - val nestedDirs: Array[F] = dirForPackage match { - case None => emptyFiles - case Some(directory) => listChildren(directory, Some(isPackage)) - } - ArraySeq.unsafeWrapArray(nestedDirs).map(f => PackageEntryImpl(inPackage.entryName(getName(f)))) - } - - protected def files(inPackage: PackageName): Seq[FileEntryType] = { - val dirForPackage = getDirectory(inPackage) - val files: Array[F] = dirForPackage match { - case None => emptyFiles - case Some(directory) => listChildren(directory, Some(isMatchingFile)) - } - files.iterator.map(f => createFileEntry(toAbstractFile(f))).toSeq - } - - override def list(inPackage: PackageName, onPackageEntry: PackageEntry => Unit, onClassesAndSources: ClassRepresentation => Unit): Unit = { - val dirForPackage = getDirectory(inPackage) - dirForPackage match { - case None => - case Some(directory) => - for (file <- listChildren(directory)) { - if (isPackage(file)) - onPackageEntry(PackageEntryImpl(inPackage.entryName(getName(file)))) - else if (isMatchingFile(file)) - onClassesAndSources(createFileEntry(toAbstractFile(file))) - } - } - } -} - -trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends DirectoryLookup[FileEntryType] { - type F = JFile - - protected def emptyFiles: Array[JFile] = Array.empty - protected def getSubDir(packageDirName: String): Option[JFile] = { - val packageDir = new JFile(dir, packageDirName) - if (packageDir.exists && packageDir.isDirectory) Some(packageDir) - else None - } - protected def listChildren(dir: JFile, filter: Option[JFile -> Boolean]): Array[JFile] = { - val listing = filter match { - case Some(f) => dir.listFiles(mkFileFilter(f)) - case None => dir.listFiles() - } - - if (listing != null) { - // Sort by file name for stable order of directory .class entries in package scope. - // This gives stable results ordering of base type sequences for unrelated classes - // with the same base type depth. - // - // Notably, this will stably infer`Product with Serializable` - // as the type of `case class C(); case class D(); List(C(), D()).head`, rather than the opposite order. - // On Mac, the HFS performs this sorting transparently, but on Linux the order is unspecified. 
- // - // Note this behaviour can be enabled in javac with `javac -XDsortfiles`, but that's only - // intended to improve determinism of the compiler for compiler hackers. - java.util.Arrays.sort(listing, - new java.util.Comparator[JFile] { - def compare(o1: JFile, o2: JFile) = o1.getName.compareTo(o2.getName) - }) - listing - } - else Array() - } - protected def getName(f: JFile): String = f.getName - protected def toAbstractFile(f: JFile): AbstractFile = f.toPath.toPlainFile - protected def isPackage(f: JFile): Boolean = f.isPackage - - assert(dir != null, "Directory file in DirectoryFileLookup cannot be null") - - def asURLs: Seq[URL] = Seq(dir.toURI.toURL) - def asClassPathStrings: Seq[String] = Seq(dir.getPath) -} - -object JrtClassPath { - import java.nio.file._, java.net.URI - def apply(release: Option[String]): Option[ClassPath] = { - import scala.util.Properties._ - if (!isJavaAtLeast("9")) None - else { - // Longer term we'd like an official API for this in the JDK - // Discussion: http://mail.openjdk.java.net/pipermail/compiler-dev/2018-March/thread.html#11738 - - val currentMajorVersion: Int = JDK9Reflectors.runtimeVersionMajor(JDK9Reflectors.runtimeVersion()).intValue() - release match { - case Some(v) if v.toInt < currentMajorVersion => - try { - val ctSym = Paths.get(javaHome).resolve("lib").resolve("ct.sym") - if (Files.notExists(ctSym)) None - else Some(new CtSymClassPath(ctSym, v.toInt)) - } catch { - case NonFatal(_) => None - } - case _ => - try { - val fs = FileSystems.getFileSystem(URI.create("jrt:/")) - Some(new JrtClassPath(fs)) - } catch { - case _: ProviderNotFoundException | _: FileSystemNotFoundException => None - } - } - } - } -} - -/** - * Implementation `ClassPath` based on the JDK 9 encapsulated runtime modules (JEP-220) - * - * https://bugs.openjdk.java.net/browse/JDK-8066492 is the most up to date reference - * for the structure of the jrt:// filesystem. - * - * The implementation assumes that no classes exist in the empty package. - */ -final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with NoSourcePaths { - import java.nio.file.Path, java.nio.file._ - type F = Path - private val dir: Path = fs.getPath("/packages") - - // e.g. 
"java.lang" -> Seq("/modules/java.base") - private val packageToModuleBases: Map[String, Seq[Path]] = { - val ps = Files.newDirectoryStream(dir).iterator().asScala - def lookup(pack: Path): Seq[Path] = - Files.list(pack).iterator().asScala.map(l => if (Files.isSymbolicLink(l)) Files.readSymbolicLink(l) else l).toList - ps.map(p => (p.toString.stripPrefix("/packages/"), lookup(p))).toMap - } - - /** Empty string represents root package */ - override private[dotty] def hasPackage(pkg: PackageName): Boolean = packageToModuleBases.contains(pkg.dottedString) - - override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = - packageToModuleBases.keysIterator.filter(pack => packageContains(inPackage.dottedString, pack)).map(PackageEntryImpl(_)).toVector - - private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = - if (inPackage.isRoot) Nil - else - packageToModuleBases.getOrElse(inPackage.dottedString, Nil).flatMap(x => - Files.list(x.resolve(inPackage.dirPathTrailingSlash)).iterator().asScala.filter(_.getFileName.toString.endsWith(".class"))).map(x => - ClassFileEntryImpl(x.toPlainFile)).toVector - - override private[dotty] def list(inPackage: PackageName): ClassPathEntries = - if (inPackage.isRoot) ClassPathEntries(packages(inPackage), Nil) - else ClassPathEntries(packages(inPackage), classes(inPackage)) - - def asURLs: Seq[URL] = Seq(new URL("jrt:/")) - // We don't yet have a scheme to represent the JDK modules in our `-classpath`. - // java models them as entries in the new "module path", we'll probably need to follow this. - def asClassPathStrings: Seq[String] = Nil - - def findClassFile(className: String): Option[AbstractFile] = - if (!className.contains(".")) None - else { - val (inPackage, _) = separatePkgAndClassNames(className) - packageToModuleBases.getOrElse(inPackage, Nil).iterator.flatMap{ x => - val file = x.resolve(FileUtils.dirPath(className) + ".class") - if (Files.exists(file)) file.toPlainFile :: Nil else Nil - }.take(1).toList.headOption - } -} - -/** - * Implementation `ClassPath` based on the \$JAVA_HOME/lib/ct.sym backing http://openjdk.java.net/jeps/247 - */ -final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends ClassPath with NoSourcePaths { - import java.nio.file.Path, java.nio.file._ - - private val fileSystem: FileSystem = FileSystems.newFileSystem(ctSym, null: ClassLoader) - private val root: Path = fileSystem.getRootDirectories.iterator.next - private val roots = Files.newDirectoryStream(root).iterator.asScala.toList - - // http://mail.openjdk.java.net/pipermail/compiler-dev/2018-March/011737.html - private def codeFor(major: Int): String = if (major < 10) major.toString else ('A' + (major - 10)).toChar.toString - - private val releaseCode: String = codeFor(release) - private def fileNameMatchesRelease(fileName: String) = !fileName.contains("-") && fileName.contains(releaseCode) // exclude `9-modules` - private val rootsForRelease: List[Path] = roots.filter(root => fileNameMatchesRelease(root.getFileName.toString)) - - // e.g. 
"java.lang" -> Seq(/876/java/lang, /87/java/lang, /8/java/lang)) - private val packageIndex: scala.collection.Map[String, scala.collection.Seq[Path]] = { - val index = collection.mutable.AnyRefMap[String, collection.mutable.ListBuffer[Path]]() - val isJava12OrHigher = scala.util.Properties.isJavaAtLeast("12") - rootsForRelease.foreach(root => Files.walk(root).iterator().asScala.filter(Files.isDirectory(_)).foreach { p => - val moduleNamePathElementCount = if (isJava12OrHigher) 1 else 0 - if (p.getNameCount > root.getNameCount + moduleNamePathElementCount) { - val packageDotted = p.subpath(moduleNamePathElementCount + root.getNameCount, p.getNameCount).toString.replace('/', '.') - index.getOrElseUpdate(packageDotted, new collection.mutable.ListBuffer) += p - } - }) - index - } - - /** Empty string represents root package */ - override private[dotty] def hasPackage(pkg: PackageName) = packageIndex.contains(pkg.dottedString) - override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = { - packageIndex.keysIterator.filter(pack => packageContains(inPackage.dottedString, pack)).map(PackageEntryImpl(_)).toVector - } - private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = { - if (inPackage.isRoot) Nil - else { - val sigFiles = packageIndex.getOrElse(inPackage.dottedString, Nil).iterator.flatMap(p => - Files.list(p).iterator.asScala.filter(_.getFileName.toString.endsWith(".sig"))) - sigFiles.map(f => ClassFileEntryImpl(f.toPlainFile)).toVector - } - } - - override private[dotty] def list(inPackage: PackageName): ClassPathEntries = - if (inPackage.isRoot) ClassPathEntries(packages(inPackage), Nil) - else ClassPathEntries(packages(inPackage), classes(inPackage)) - - def asURLs: Seq[URL] = Nil - def asClassPathStrings: Seq[String] = Nil - def findClassFile(className: String): Option[AbstractFile] = { - if (!className.contains(".")) None - else { - val (inPackage, classSimpleName) = separatePkgAndClassNames(className) - packageIndex.getOrElse(inPackage, Nil).iterator.flatMap { p => - val path = p.resolve(classSimpleName + ".sig") - if (Files.exists(path)) path.toPlainFile :: Nil else Nil - }.take(1).toList.headOption - } - } -} - -case class DirectoryClassPath(dir: JFile) extends JFileDirectoryLookup[ClassFileEntryImpl] with NoSourcePaths { - override def findClass(className: String): Option[ClassRepresentation] = findClassFile(className) map ClassFileEntryImpl.apply - - def findClassFile(className: String): Option[AbstractFile] = { - val relativePath = FileUtils.dirPath(className) - val classFile = new JFile(dir, relativePath + ".class") - if (classFile.exists) { - Some(classFile.toPath.toPlainFile) - } - else None - } - - protected def createFileEntry(file: AbstractFile): ClassFileEntryImpl = ClassFileEntryImpl(file) - protected def isMatchingFile(f: JFile): Boolean = f.isClass - - private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = files(inPackage) -} - -case class DirectorySourcePath(dir: JFile) extends JFileDirectoryLookup[SourceFileEntryImpl] with NoClassPaths { - def asSourcePathString: String = asClassPathString - - protected def createFileEntry(file: AbstractFile): SourceFileEntryImpl = SourceFileEntryImpl(file) - protected def isMatchingFile(f: JFile): Boolean = endsScalaOrJava(f.getName) - - override def findClass(className: String): Option[ClassRepresentation] = findSourceFile(className) map SourceFileEntryImpl.apply - - private def findSourceFile(className: String): Option[AbstractFile] = { - val relativePath = 
FileUtils.dirPath(className) - val sourceFile = LazyList("scala", "java") - .map(ext => new JFile(dir, relativePath + "." + ext)) - .collectFirst { case file if file.exists() => file } - - sourceFile.map(_.toPath.toPlainFile) - } - - private[dotty] def sources(inPackage: PackageName): Seq[SourceFileEntry] = files(inPackage) -} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/FileUtils.scala b/tests/pos-with-compiler-cc/dotc/classpath/FileUtils.scala deleted file mode 100644 index 0f5ac16b40bf..000000000000 --- a/tests/pos-with-compiler-cc/dotc/classpath/FileUtils.scala +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright (c) 2014 Contributor. All rights reserved. - */ -package dotty.tools -package dotc.classpath - -import scala.language.unsafeNulls - -import java.io.{File => JFile, FileFilter} -import java.net.URL -import dotty.tools.io.AbstractFile -import language.experimental.pureFunctions - -/** - * Common methods related to Java files and abstract files used in the context of classpath - */ -object FileUtils { - extension (file: AbstractFile) { - def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.name) - - def isClass: Boolean = !file.isDirectory && file.hasExtension("class") && !file.name.endsWith("$class.class") - // FIXME: drop last condition when we stop being compatible with Scala 2.11 - - def isScalaOrJavaSource: Boolean = !file.isDirectory && (file.hasExtension("scala") || file.hasExtension("java")) - - // TODO do we need to check also other files using ZipMagicNumber like in scala.tools.nsc.io.Jar.isJarOrZip? - def isJarOrZip: Boolean = file.hasExtension("jar") || file.hasExtension("zip") - - /** - * Safe method returning a sequence containing one URL representing this file, when underlying file exists, - * and returning given default value in other case - */ - def toURLs(default: => Seq[URL] = Seq.empty): Seq[URL] = if (file.file == null) default else Seq(file.toURL) - } - - extension (file: JFile) { - def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.getName) - - def isClass: Boolean = file.isFile && file.getName.endsWith(".class") && !file.getName.endsWith("$class.class") - // FIXME: drop last condition when we stop being compatible with Scala 2.11 - } - - private val SUFFIX_CLASS = ".class" - private val SUFFIX_SCALA = ".scala" - private val SUFFIX_JAVA = ".java" - private val SUFFIX_SIG = ".sig" - - def stripSourceExtension(fileName: String): String = - if (endsScala(fileName)) stripClassExtension(fileName) - else if (endsJava(fileName)) stripJavaExtension(fileName) - else throw new FatalError("Unexpected source file ending: " + fileName) - - def dirPath(forPackage: String): String = forPackage.replace('.', JFile.separatorChar) - - def dirPathInJar(forPackage: String): String = forPackage.replace('.', '/') - - inline private def ends (filename:String, suffix:String) = filename.endsWith(suffix) && filename.length > suffix.length - - def endsClass(fileName: String): Boolean = - ends (fileName, SUFFIX_CLASS) || fileName.endsWith(SUFFIX_SIG) - - def endsScalaOrJava(fileName: String): Boolean = - endsScala(fileName) || endsJava(fileName) - - def endsJava(fileName: String): Boolean = - ends (fileName, SUFFIX_JAVA) - - def endsScala(fileName: String): Boolean = - ends (fileName, SUFFIX_SCALA) - - def stripClassExtension(fileName: String): String = - fileName.substring(0, fileName.lastIndexOf('.')) - - def stripJavaExtension(fileName: String): String = - fileName.substring(0, fileName.length - 5) // equivalent of fileName.length - 
SUFFIX_JAVA.length - - // probably it should match a pattern like [a-z_]{1}[a-z0-9_]* but it cannot be changed - // because then some tests in partest don't pass - def mayBeValidPackage(dirName: String): Boolean = - (dirName != "META-INF") && (dirName != "") && (dirName.charAt(0) != '.') - - def mkFileFilter(f: JFile -> Boolean): FileFilter = new FileFilter { - def accept(pathname: JFile): Boolean = f(pathname) - } -} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/PackageNameUtils.scala b/tests/pos-with-compiler-cc/dotc/classpath/PackageNameUtils.scala deleted file mode 100644 index ea7412f15d8a..000000000000 --- a/tests/pos-with-compiler-cc/dotc/classpath/PackageNameUtils.scala +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) 2014 Contributor. All rights reserved. - */ -package dotty.tools.dotc.classpath - -import dotty.tools.io.ClassPath.RootPackage - -/** - * Common methods related to package names represented as String - */ -object PackageNameUtils { - - /** - * @param fullClassName full class name with package - * @return (package, simple class name) - */ - inline def separatePkgAndClassNames(fullClassName: String): (String, String) = { - val lastDotIndex = fullClassName.lastIndexOf('.') - if (lastDotIndex == -1) - (RootPackage, fullClassName) - else - (fullClassName.substring(0, lastDotIndex).nn, fullClassName.substring(lastDotIndex + 1).nn) - } - - def packagePrefix(inPackage: String): String = if (inPackage == RootPackage) "" else inPackage + "." - - /** - * `true` if `packageDottedName` is a package directly nested in `inPackage`, for example: - * - `packageContains("scala", "scala.collection")` - * - `packageContains("", "scala")` - */ - def packageContains(inPackage: String, packageDottedName: String) = { - if (packageDottedName.contains(".")) - packageDottedName.startsWith(inPackage) && packageDottedName.lastIndexOf('.') == inPackage.length - else inPackage == "" - } -} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/VirtualDirectoryClassPath.scala b/tests/pos-with-compiler-cc/dotc/classpath/VirtualDirectoryClassPath.scala deleted file mode 100644 index ac80d543b539..000000000000 --- a/tests/pos-with-compiler-cc/dotc/classpath/VirtualDirectoryClassPath.scala +++ /dev/null @@ -1,55 +0,0 @@ -package dotty.tools.dotc.classpath - -import scala.language.unsafeNulls - -import dotty.tools.io.ClassRepresentation -import dotty.tools.io.{AbstractFile, VirtualDirectory} -import FileUtils._ -import java.net.URL - -import dotty.tools.io.ClassPath -import language.experimental.pureFunctions - -case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath with DirectoryLookup[ClassFileEntryImpl] with NoSourcePaths { - type F = AbstractFile - - // From AbstractFileClassLoader - private final def lookupPath(base: AbstractFile)(pathParts: Seq[String], directory: Boolean): AbstractFile = { - var file: AbstractFile = base - val dirParts = pathParts.init.iterator - while (dirParts.hasNext) { - val dirPart = dirParts.next - file = file.lookupName(dirPart, directory = true) - if (file == null) - return null - } - file.lookupName(pathParts.last, directory = directory) - } - - protected def emptyFiles: Array[AbstractFile] = Array.empty - protected def getSubDir(packageDirName: String): Option[AbstractFile] = - Option(lookupPath(dir)(packageDirName.split(java.io.File.separator).toIndexedSeq, directory = true)) - protected def listChildren(dir: AbstractFile, filter: Option[AbstractFile -> Boolean]): Array[F] = filter match { - case Some(f) => 
dir.iterator.filter(f).toArray - case _ => dir.toArray - } - def getName(f: AbstractFile): String = f.name - def toAbstractFile(f: AbstractFile): AbstractFile = f - def isPackage(f: AbstractFile): Boolean = f.isPackage - - // mimic the behavior of the old nsc.util.DirectoryClassPath - def asURLs: Seq[URL] = Seq(new URL(dir.name)) - def asClassPathStrings: Seq[String] = Seq(dir.path) - - override def findClass(className: String): Option[ClassRepresentation] = findClassFile(className) map ClassFileEntryImpl.apply - - def findClassFile(className: String): Option[AbstractFile] = { - val relativePath = FileUtils.dirPath(className) + ".class" - Option(lookupPath(dir)(relativePath.split(java.io.File.separator).toIndexedSeq, directory = false)) - } - - private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = files(inPackage) - - protected def createFileEntry(file: AbstractFile): ClassFileEntryImpl = ClassFileEntryImpl(file) - protected def isMatchingFile(f: AbstractFile): Boolean = f.isClass -} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/ZipAndJarFileLookupFactory.scala b/tests/pos-with-compiler-cc/dotc/classpath/ZipAndJarFileLookupFactory.scala deleted file mode 100644 index 865f95551a0b..000000000000 --- a/tests/pos-with-compiler-cc/dotc/classpath/ZipAndJarFileLookupFactory.scala +++ /dev/null @@ -1,205 +0,0 @@ -/* - * Copyright (c) 2014 Contributor. All rights reserved. - */ -package dotty.tools.dotc -package classpath - -import scala.language.unsafeNulls - -import java.io.File -import java.net.URL -import java.nio.file.Files -import java.nio.file.attribute.{BasicFileAttributes, FileTime} - -import scala.annotation.tailrec -import dotty.tools.io.{AbstractFile, ClassPath, ClassRepresentation, FileZipArchive, ManifestResources} -import dotty.tools.dotc.core.Contexts._ -import FileUtils._ - -/** - * A trait providing an optional cache for classpath entries obtained from zip and jar files. - * It allows us to e.g. reduce significantly memory used by PresentationCompilers in Scala IDE - * when there are a lot of projects having a lot of common dependencies. - */ -sealed trait ZipAndJarFileLookupFactory { - private val cache = new FileBasedCache[ClassPath] - - def create(zipFile: AbstractFile)(using Context): ClassPath = - val release = Option(ctx.settings.javaOutputVersion.value).filter(_.nonEmpty) - if (ctx.settings.YdisableFlatCpCaching.value || zipFile.file == null) createForZipFile(zipFile, release) - else createUsingCache(zipFile, release) - - protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath - - private def createUsingCache(zipFile: AbstractFile, release: Option[String]): ClassPath = - cache.getOrCreate(zipFile.file.toPath, () => createForZipFile(zipFile, release)) -} - -/** - * Manages creation of classpath for class files placed in zip and jar files. - * It should be the only way of creating them as it provides caching. - */ -object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { - private case class ZipArchiveClassPath(zipFile: File, override val release: Option[String]) - extends ZipArchiveFileLookup[ClassFileEntryImpl] - with NoSourcePaths { - - override def findClassFile(className: String): Option[AbstractFile] = { - val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) - file(PackageName(pkg), simpleClassName + ".class").map(_.file) - } - - // This method is performance sensitive as it is used by SBT's ExtractDependencies phase. 
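The first step of both `findClassFile` above and `findClass` below is the package/simple-name split; as a standalone sketch (simplified, ignoring the `RootPackage` constant):

// Simplified stand-in for PackageNameUtils.separatePkgAndClassNames.
def splitPkgAndClass(fullClassName: String): (String, String) = {
  val i = fullClassName.lastIndexOf('.')
  if (i == -1) ("", fullClassName) // top-level class in the root package
  else (fullClassName.substring(0, i), fullClassName.substring(i + 1))
}
// splitPkgAndClass("scala.collection.List") == ("scala.collection", "List")
// splitPkgAndClass("Toplevel")              == ("", "Toplevel")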
- override def findClass(className: String): Option[ClassRepresentation] = { - val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) - file(PackageName(pkg), simpleClassName + ".class") - } - - override private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = files(inPackage) - - override protected def createFileEntry(file: FileZipArchive#Entry): ClassFileEntryImpl = ClassFileEntryImpl(file) - override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isClass - } - - /** - * This type of classpath is closely related to the support for JSR-223. - * Its usage can be observed e.g. when running: - * jrunscript -classpath scala-compiler.jar;scala-reflect.jar;scala-library.jar -l scala - * with a particularly prepared scala-library.jar. It should have all classes listed in the manifest like e.g. this entry: - * Name: scala/Function2$mcFJD$sp.class - */ - private case class ManifestResourcesClassPath(file: ManifestResources) extends ClassPath with NoSourcePaths { - override def findClassFile(className: String): Option[AbstractFile] = { - val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) - classes(PackageName(pkg)).find(_.name == simpleClassName).map(_.file) - } - - override def asClassPathStrings: Seq[String] = Seq(file.path) - - override def asURLs: Seq[URL] = file.toURLs() - - import ManifestResourcesClassPath.PackageFileInfo - import ManifestResourcesClassPath.PackageInfo - - /** - * A cache mapping package name to abstract file for package directory and subpackages of given package. - * - * ManifestResources can iterate through the collections of entries from e.g. remote jar file. - * We can't just specify the path to the concrete directory etc. so we can't just 'jump' into - * given package, when it's needed. On the other hand we can iterate over entries to get - * AbstractFiles, iterate over entries of these files etc. - * - * Instead of traversing a tree of AbstractFiles once and caching all entries or traversing each time, - * when we need subpackages of a given package or its classes, we traverse once and cache only packages. - * Classes for given package can be then easily loaded when they are needed. - */ - private lazy val cachedPackages: util.HashMap[String, PackageFileInfo] = { - val packages = util.HashMap[String, PackageFileInfo]() - - def getSubpackages(dir: AbstractFile): List[AbstractFile] = - (for (file <- dir if file.isPackage) yield file).toList - - @tailrec - def traverse(packagePrefix: String, - filesForPrefix: List[AbstractFile], - subpackagesQueue: collection.mutable.Queue[PackageInfo]): Unit = filesForPrefix match { - case pkgFile :: remainingFiles => - val subpackages = getSubpackages(pkgFile) - val fullPkgName = packagePrefix + pkgFile.name - packages(fullPkgName) = PackageFileInfo(pkgFile, subpackages) - val newPackagePrefix = fullPkgName + "." 
- subpackagesQueue.enqueue(PackageInfo(newPackagePrefix, subpackages)) - traverse(packagePrefix, remainingFiles, subpackagesQueue) - case Nil if subpackagesQueue.nonEmpty => - val PackageInfo(packagePrefix, filesForPrefix) = subpackagesQueue.dequeue() - traverse(packagePrefix, filesForPrefix, subpackagesQueue) - case _ => - } - - val subpackages = getSubpackages(file) - packages(ClassPath.RootPackage) = PackageFileInfo(file, subpackages) - traverse(ClassPath.RootPackage, subpackages, collection.mutable.Queue()) - packages - } - - override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = cachedPackages.get(inPackage.dottedString) match { - case None => Seq.empty - case Some(PackageFileInfo(_, subpackages)) => - subpackages.map(packageFile => PackageEntryImpl(inPackage.entryName(packageFile.name))) - } - - override private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = cachedPackages.get(inPackage.dottedString) match { - case None => Seq.empty - case Some(PackageFileInfo(pkg, _)) => - (for (file <- pkg if file.isClass) yield ClassFileEntryImpl(file)).toSeq - } - - override private[dotty] def hasPackage(pkg: PackageName) = cachedPackages.contains(pkg.dottedString) - override private[dotty] def list(inPackage: PackageName): ClassPathEntries = ClassPathEntries(packages(inPackage), classes(inPackage)) - } - - private object ManifestResourcesClassPath { - case class PackageFileInfo(packageFile: AbstractFile, subpackages: Seq[AbstractFile]) - case class PackageInfo(packageName: String, subpackages: List[AbstractFile]) - } - - override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath = - if (zipFile.file == null) createWithoutUnderlyingFile(zipFile) - else ZipArchiveClassPath(zipFile.file, release) - - private def createWithoutUnderlyingFile(zipFile: AbstractFile) = zipFile match { - case manifestRes: ManifestResources => - ManifestResourcesClassPath(manifestRes) - case _ => - val errorMsg = s"Abstract files which don't have an underlying file and are not ManifestResources are not supported. There was $zipFile" - throw new IllegalArgumentException(errorMsg) - } -} - -/** - * Manages creation of classpath for source files placed in zip and jar files. - * It should be the only way of creating them as it provides caching. 
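The queue-driven `traverse` above is a breadth-first walk that records each package prefix exactly once, so sub-package queries can later be answered from the cache. The same shape on a toy tree (the data type and names are invented for this sketch):

import scala.collection.mutable

// A toy package tree: a package name plus its direct sub-packages.
final case class Pkg(name: String, subs: List[Pkg])

// Breadth-first indexing of fully-qualified package names, mirroring the
// queue-driven traversal used to build cachedPackages.
def indexPackages(roots: List[Pkg]): Map[String, List[String]] = {
  val index = mutable.Map.empty[String, List[String]]
  val queue = mutable.Queue(("", roots))
  while (queue.nonEmpty) {
    val (prefix, pkgs) = queue.dequeue()
    for (pkg <- pkgs) {
      val full = prefix + pkg.name
      index(full) = pkg.subs.map(sub => full + "." + sub.name)
      queue.enqueue((full + ".", pkg.subs))
    }
  }
  index.toMap
}

// indexPackages(List(Pkg("scala", List(Pkg("collection", Nil), Pkg("util", Nil)))))
// yields entries for "scala", "scala.collection" and "scala.util",
// with "scala" mapped to its two sub-package names.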
- */ -object ZipAndJarSourcePathFactory extends ZipAndJarFileLookupFactory { - private case class ZipArchiveSourcePath(zipFile: File) - extends ZipArchiveFileLookup[SourceFileEntryImpl] - with NoClassPaths { - - def release: Option[String] = None - - override def asSourcePathString: String = asClassPathString - - override private[dotty] def sources(inPackage: PackageName): Seq[SourceFileEntry] = files(inPackage) - - override protected def createFileEntry(file: FileZipArchive#Entry): SourceFileEntryImpl = SourceFileEntryImpl(file) - override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isScalaOrJavaSource - } - - override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath = ZipArchiveSourcePath(zipFile.file) -} - -final class FileBasedCache[T] { - private case class Stamp(lastModified: FileTime, fileKey: Object) - private val cache = collection.mutable.Map.empty[java.nio.file.Path, (Stamp, T)] - - def getOrCreate(path: java.nio.file.Path, create: () => T): T = cache.synchronized { - val attrs = Files.readAttributes(path, classOf[BasicFileAttributes]) - val lastModified = attrs.lastModifiedTime() - // only null on some platforms, but that's okay, we just use the last modified timestamp as our stamp - val fileKey = attrs.fileKey() - val stamp = Stamp(lastModified, fileKey) - cache.get(path) match { - case Some((cachedStamp, cached)) if cachedStamp == stamp => cached - case _ => - val value = create() - cache.put(path, (stamp, value)) - value - } - } - - def clear(): Unit = cache.synchronized { - // TODO support closing - // cache.valuesIterator.foreach(_.close()) - cache.clear() - } -} diff --git a/tests/pos-with-compiler-cc/dotc/classpath/ZipArchiveFileLookup.scala b/tests/pos-with-compiler-cc/dotc/classpath/ZipArchiveFileLookup.scala deleted file mode 100644 index e241feee8244..000000000000 --- a/tests/pos-with-compiler-cc/dotc/classpath/ZipArchiveFileLookup.scala +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright (c) 2014 Contributor. All rights reserved. - */ -package dotty.tools.dotc.classpath - -import scala.language.unsafeNulls - -import java.io.File -import java.net.URL - -import dotty.tools.io.{ AbstractFile, FileZipArchive } -import FileUtils._ -import dotty.tools.io.{EfficientClassPath, ClassRepresentation} - -/** - * A trait allowing to look for classpath entries of given type in zip and jar files. - * It provides common logic for classes handling class and source files. - * It's aware of things like e.g. META-INF directory which is correctly skipped. 
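The `FileBasedCache` above keys each entry on a stamp of (last-modified time, file key), so a rebuilt or replaced jar naturally invalidates its cached classpath. The same idea as a trimmed-down standalone class (independent of the compiler's types):

import java.nio.file.{Files, Path}
import java.nio.file.attribute.{BasicFileAttributes, FileTime}

// Recompute a value only when the file's stamp changes, following the
// same approach as FileBasedCache.getOrCreate.
final class StampedCache[T] {
  private case class Stamp(lastModified: FileTime, fileKey: AnyRef)
  private val cache = collection.mutable.Map.empty[Path, (Stamp, T)]

  def getOrCreate(path: Path)(create: => T): T = cache.synchronized {
    val attrs = Files.readAttributes(path, classOf[BasicFileAttributes])
    // fileKey() may be null on some platforms; the timestamp alone then serves as the stamp
    val stamp = Stamp(attrs.lastModifiedTime(), attrs.fileKey())
    cache.get(path) match {
      case Some((cachedStamp, cached)) if cachedStamp == stamp => cached
      case _ =>
        val value = create
        cache.update(path, (stamp, value))
        value
    }
  }
}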
- */ -trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends EfficientClassPath { - val zipFile: File - def release: Option[String] - - assert(zipFile != null, "Zip file in ZipArchiveFileLookup cannot be null") - - override def asURLs: Seq[URL] = Seq(zipFile.toURI.toURL) - override def asClassPathStrings: Seq[String] = Seq(zipFile.getPath) - - private val archive = new FileZipArchive(zipFile.toPath, release) - - override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = { - for { - dirEntry <- findDirEntry(inPackage).toSeq - entry <- dirEntry.iterator if entry.isPackage - } - yield PackageEntryImpl(inPackage.entryName(entry.name)) - } - - protected def files(inPackage: PackageName): Seq[FileEntryType] = - for { - dirEntry <- findDirEntry(inPackage).toSeq - entry <- dirEntry.iterator if isRequiredFileType(entry) - } - yield createFileEntry(entry) - - protected def file(inPackage: PackageName, name: String): Option[FileEntryType] = - for { - dirEntry <- findDirEntry(inPackage) - entry <- Option(dirEntry.lookupName(name, directory = false)) - if isRequiredFileType(entry) - } - yield createFileEntry(entry) - - override def hasPackage(pkg: PackageName) = findDirEntry(pkg).isDefined - def list(inPackage: PackageName, onPackageEntry: PackageEntry => Unit, onClassesAndSources: ClassRepresentation => Unit): Unit = - findDirEntry(inPackage) match { - case Some(dirEntry) => - for (entry <- dirEntry.iterator) { - if (entry.isPackage) - onPackageEntry(PackageEntryImpl(inPackage.entryName(entry.name))) - else if (isRequiredFileType(entry)) - onClassesAndSources(createFileEntry(entry)) - } - case None => - } - - private def findDirEntry(pkg: PackageName): Option[archive.DirEntry] = - archive.allDirs.get(pkg.dirPathTrailingSlashJar) - - protected def createFileEntry(file: FileZipArchive#Entry): FileEntryType - protected def isRequiredFileType(file: AbstractFile): Boolean -} diff --git a/tests/pos-with-compiler-cc/dotc/config/CliCommand.scala b/tests/pos-with-compiler-cc/dotc/config/CliCommand.scala deleted file mode 100644 index 68c900e405da..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/CliCommand.scala +++ /dev/null @@ -1,198 +0,0 @@ -package dotty.tools.dotc -package config - -import scala.language.unsafeNulls - -import Settings._ -import core.Contexts._ -import printing.Highlighting - -import scala.util.chaining.given -import scala.PartialFunction.cond - -trait CliCommand: - - type ConcreteSettings <: CommonScalaSettings with Settings.SettingGroup - - def versionMsg: String - - def ifErrorsMsg: String - - /** The name of the command */ - def cmdName: String - - def isHelpFlag(using settings: ConcreteSettings)(using SettingsState): Boolean - - def helpMsg(using settings: ConcreteSettings)(using SettingsState, Context): String - - private def explainAdvanced = """ - |-- Notes on option parsing -- - |Boolean settings are always false unless set. - |Where multiple values are accepted, they should be comma-separated. - | example: -Xplugin:plugin1,plugin2 - | means one or a comma-separated list of: - | - (partial) phase names with an optional "+" suffix to include the next phase - | - the string "all" - | example: -Xprint:all prints all phases. - | example: -Xprint:typer,mixin prints the typer and mixin phases. - | example: -Ylog:erasure+ logs the erasure phase and the phase after the erasure phase. - | This is useful because during the tree transform of phase X, we often - | already are in phase X + 1. 
- """ - - /** Distill arguments into summary detailing settings, errors and files to main */ - def distill(args: Array[String], sg: Settings.SettingGroup)(ss: SettingsState = sg.defaultState)(using Context): ArgsSummary = - - // expand out @filename to the contents of that filename - def expandedArguments = args.toList flatMap { - case x if x startsWith "@" => CommandLineParser.expandArg(x) - case x => List(x) - } - - sg.processArguments(expandedArguments, processAll = true, settingsState = ss) - end distill - - /** Creates a help message for a subset of options based on cond */ - protected def availableOptionsMsg(p: Setting[?] => Boolean)(using settings: ConcreteSettings)(using SettingsState): String = - // result is (Option Name, descrption\ndefault: value\nchoices: x, y, z - def help(s: Setting[?]): (String, String) = - // For now, skip the default values that do not make sense for the end user, such as 'false' for the version command. - def defaultValue = s.default match - case _: Int | _: String => s.default.toString - case _ => "" - val info = List(shortHelp(s), if defaultValue.nonEmpty then s"Default $defaultValue" else "", if s.legalChoices.nonEmpty then s"Choices ${s.legalChoices}" else "") - (s.name, info.filter(_.nonEmpty).mkString("\n")) - end help - - val ss = settings.allSettings.filter(p).toList.sortBy(_.name) - val formatter = Columnator("", "", maxField = 30) - val fresh = ContextBase().initialCtx.fresh.setSettings(summon[SettingsState]) - formatter(List(ss.map(help) :+ ("@", "A text file containing compiler arguments (options and source files).")))(using fresh) - end availableOptionsMsg - - protected def shortUsage: String = s"Usage: $cmdName " - - protected def createUsageMsg(label: String, shouldExplain: Boolean, cond: Setting[?] => Boolean)(using settings: ConcreteSettings)(using SettingsState): String = - val prefix = List( - Some(shortUsage), - Some(explainAdvanced).filter(_ => shouldExplain), - Some(label + " options include:") - ).flatten.mkString("\n") - - prefix + "\n" + availableOptionsMsg(cond) - - protected def isStandard(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = - !isVerbose(s) && !isWarning(s) && !isAdvanced(s) && !isPrivate(s) || s.name == "-Werror" || s.name == "-Wconf" - protected def isVerbose(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = - s.name.startsWith("-V") && s.name != "-V" - protected def isWarning(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = - s.name.startsWith("-W") && s.name != "-W" || s.name == "-Xlint" - protected def isAdvanced(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = - s.name.startsWith("-X") && s.name != "-X" - protected def isPrivate(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = - s.name.startsWith("-Y") && s.name != "-Y" - protected def shortHelp(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): String = - s.description.linesIterator.next() - protected def isHelping(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = - cond(s.value) { - case ss: List[?] 
if s.isMultivalue => ss.contains("help") - case s: String => "help" == s - } - - /** Messages explaining usage and options */ - protected def usageMessage(using settings: ConcreteSettings)(using SettingsState) = - createUsageMsg("where possible standard", shouldExplain = false, isStandard) - protected def vusageMessage(using settings: ConcreteSettings)(using SettingsState) = - createUsageMsg("Possible verbose", shouldExplain = true, isVerbose) - protected def wusageMessage(using settings: ConcreteSettings)(using SettingsState) = - createUsageMsg("Possible warning", shouldExplain = true, isWarning) - protected def xusageMessage(using settings: ConcreteSettings)(using SettingsState) = - createUsageMsg("Possible advanced", shouldExplain = true, isAdvanced) - protected def yusageMessage(using settings: ConcreteSettings)(using SettingsState) = - createUsageMsg("Possible private", shouldExplain = true, isPrivate) - - /** Used for the formatted output of -Xshow-phases */ - protected def phasesMessage(using Context): String = - val phases = new Compiler().phases - val formatter = Columnator("phase name", "description", maxField = 25) - formatter(phases.map(mega => mega.map(p => (p.phaseName, p.description)))) - - /** Provide usage feedback on argument summary, assuming that all settings - * are already applied in context. - * @return Either Some list of files passed as arguments or None if further processing should be interrupted. - */ - def checkUsage(summary: ArgsSummary, sourcesRequired: Boolean)(using settings: ConcreteSettings)(using SettingsState, Context): Option[List[String]] = - // Print all warnings encountered during arguments parsing - summary.warnings.foreach(report.warning(_)) - - if summary.errors.nonEmpty then - summary.errors foreach (report.error(_)) - report.echo(ifErrorsMsg) - None - else if settings.version.value then - report.echo(versionMsg) - None - else if isHelpFlag then - report.echo(helpMsg) - None - else if (sourcesRequired && summary.arguments.isEmpty) - report.echo(usageMessage) - None - else - Some(summary.arguments) - - extension [T](setting: Setting[T]) - protected def value(using ss: SettingsState): T = setting.valueIn(ss) - - extension (s: String) - def padLeft(width: Int): String = String.format(s"%${width}s", s) - - // Formatting for -help and -Vphases in two columns, handling long field1 and wrapping long field2 - class Columnator(heading1: String, heading2: String, maxField: Int, separation: Int = 2): - def apply(texts: List[List[(String, String)]])(using Context): String = StringBuilder().tap(columnate(_, texts)).toString - - private def columnate(sb: StringBuilder, texts: List[List[(String, String)]])(using Context): Unit = - import Highlighting.* - val colors = Seq(Green(_), Yellow(_), Magenta(_), Cyan(_), Red(_)) - val nocolor = texts.length == 1 - def color(index: Int): String => Highlight = if nocolor then NoColor(_) else colors(index % colors.length) - val maxCol = ctx.settings.pageWidth.value - val field1 = maxField.min(texts.flatten.map(_._1.length).filter(_ < maxField).max) // widest field under maxField - val field2 = if field1 + separation + maxField < maxCol then maxCol - field1 - separation else 0 // skinny window -> terminal wrap - val separator = " " * separation - val EOL = "\n" - def formatField1(text: String): String = if text.length <= field1 then text.padLeft(field1) else text + EOL + "".padLeft(field1) - def formatField2(text: String): String = - def loopOverField2(fld: String): List[String] = - if field2 == 0 || fld.length <= field2 
then List(fld) - else - fld.lastIndexOf(" ", field2) match - case -1 => List(fld) - case i => val (prefix, rest) = fld.splitAt(i) ; prefix :: loopOverField2(rest.trim) - text.split("\n").toList.flatMap(loopOverField2).filter(_.nonEmpty).mkString(EOL + "".padLeft(field1) + separator) - end formatField2 - def format(first: String, second: String, index: Int, colorPicker: Int => String => Highlight) = - sb.append(colorPicker(index)(formatField1(first)).show) - .append(separator) - .append(formatField2(second)) - .append(EOL): Unit - def fancy(first: String, second: String, index: Int) = format(first, second, index, color) - def plain(first: String, second: String) = format(first, second, 0, _ => NoColor(_)) - - if heading1.nonEmpty then - plain(heading1, heading2) - plain("-" * heading1.length, "-" * heading2.length) - - def emit(index: Int)(textPair: (String, String)): Unit = fancy(textPair._1, textPair._2, index) - def group(index: Int)(body: Int => Unit): Unit = - if !ctx.useColors then plain(s"{", "") - body(index) - if !ctx.useColors then plain(s"}", "") - - texts.zipWithIndex.foreach { (text, index) => - text match - case List(single) => emit(index)(single) - case Nil => - case mega => group(index)(i => mega.foreach(emit(i))) - } - end Columnator diff --git a/tests/pos-with-compiler-cc/dotc/config/CommandLineParser.scala b/tests/pos-with-compiler-cc/dotc/config/CommandLineParser.scala deleted file mode 100644 index 2e76561c9913..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/CommandLineParser.scala +++ /dev/null @@ -1,125 +0,0 @@ -package dotty.tools.dotc.config - -import java.lang.Character.isWhitespace -import java.nio.file.{Files, Paths} -import scala.annotation.tailrec -import scala.collection.mutable.ArrayBuffer -import scala.jdk.CollectionConverters.* - -/** Split a line of text using shell conventions. - */ -object CommandLineParser: - inline private val DQ = '"' - inline private val SQ = '\'' - inline private val EOF = -1 - - /** Split the line into tokens separated by whitespace. - * - * Single or double quotes can be embedded to preserve internal whitespace: - * - * `""" echo "hello, world!" """` => "echo" :: "hello, world!" :: Nil - * `""" echo hello,' 'world! """` => "echo" :: "hello, world!" :: Nil - * `""" echo \"hello, world!\" """` => "echo" :: "\"hello," :: "world!\"" :: Nil - * - * The embedded quotes are stripped. Escaping backslash is not stripped. - * - * Invoke `errorFn` with a descriptive message if an end quote is missing. - */ - def tokenize(line: String, errorFn: String => Unit): List[String] = - - var accum: List[String] = Nil - - var pos = 0 - var start = 0 - val qpos = new ArrayBuffer[Int](16) // positions of paired quotes in current token - - inline def cur = if done then EOF else line.charAt(pos): Int - inline def bump() = pos += 1 - inline def done = pos >= line.length - - // Skip to the given unescaped end quote; false on no more input. - def skipToEndQuote(q: Int): Boolean = - var escaped = false - def terminal = cur match - case _ if escaped => escaped = false ; false - case '\\' => escaped = true ; false - case `q` | EOF => true - case _ => false - while !terminal do bump() - !done - - // Skip to the next whitespace word boundary; record unescaped embedded quotes; false on missing quote. 
- def skipToDelim(): Boolean = - var escaped = false - inline def quote() = { qpos += pos ; bump() } - @tailrec def advance(): Boolean = cur match - case _ if escaped => escaped = false ; bump() ; advance() - case '\\' => escaped = true ; bump() ; advance() - case q @ (DQ | SQ) => { quote() ; skipToEndQuote(q) } && { quote() ; advance() } - case EOF => true - case c if isWhitespace(c) => true - case _ => bump(); advance() - advance() - - def copyText(): String = - val buf = new java.lang.StringBuilder - var p = start - var i = 0 - while p < pos do - if i >= qpos.size then - buf.append(line, p, pos) - p = pos - else if p == qpos(i) then - buf.append(line, qpos(i)+1, qpos(i+1)) - p = qpos(i+1)+1 - i += 2 - else - buf.append(line, p, qpos(i)) - p = qpos(i) - buf.toString - - // the current token, stripped of any embedded quotes. - def text(): String = - val res = - if qpos.isEmpty then line.substring(start, pos) - else if qpos(0) == start && qpos(1) == pos then line.substring(start+1, pos-1) - else copyText() - qpos.clear() - res.nn - - inline def badquote() = errorFn(s"Unmatched quote [${qpos.last}](${line.charAt(qpos.last)})") - - inline def skipWhitespace() = while isWhitespace(cur) do bump() - - @tailrec def loop(): List[String] = - skipWhitespace() - start = pos - if done then - accum.reverse - else if !skipToDelim() then - badquote() - Nil - else - accum ::= text() - loop() - end loop - - loop() - end tokenize - - def tokenize(line: String): List[String] = tokenize(line, x => throw new ParseException(x)) - - /** Expands all arguments starting with @ to the contents of the file named like each argument. - */ - def expandArg(arg: String): List[String] = - val path = Paths.get(arg.stripPrefix("@")) - if !Files.exists(path) then - System.err.nn.println(s"Argument file ${path.nn.getFileName} could not be found") - Nil - else - def stripComment(s: String) = s.indexOf('#') match { case -1 => s case i => s.substring(0, i) } - val lines = Files.readAllLines(path).nn - val params = lines.asScala.map(stripComment).filter(!_.nn.isEmpty).mkString(" ") - tokenize(params) - - class ParseException(msg: String) extends RuntimeException(msg) diff --git a/tests/pos-with-compiler-cc/dotc/config/CompilerCommand.scala b/tests/pos-with-compiler-cc/dotc/config/CompilerCommand.scala deleted file mode 100644 index 41e123472a75..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/CompilerCommand.scala +++ /dev/null @@ -1,26 +0,0 @@ -package dotty.tools.dotc -package config - -import Settings._ -import core.Contexts._ - -abstract class CompilerCommand extends CliCommand: - type ConcreteSettings = ScalaSettings - - final def helpMsg(using settings: ScalaSettings)(using SettingsState, Context): String = - settings.allSettings.find(isHelping) match - case Some(s) => s.description - case _ => - if (settings.help.value) usageMessage - else if (settings.Vhelp.value) vusageMessage - else if (settings.Whelp.value) wusageMessage - else if (settings.Xhelp.value) xusageMessage - else if (settings.Yhelp.value) yusageMessage - else if (settings.showPlugins.value) ctx.base.pluginDescriptions - else if (settings.XshowPhases.value) phasesMessage - else "" - - final def isHelpFlag(using settings: ScalaSettings)(using SettingsState): Boolean = - import settings._ - val flags = Set(help, Vhelp, Whelp, Xhelp, Yhelp, showPlugins, XshowPhases) - flags.exists(_.value) || allSettings.exists(isHelping) diff --git a/tests/pos-with-compiler-cc/dotc/config/Config.scala b/tests/pos-with-compiler-cc/dotc/config/Config.scala deleted file 
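A brief sketch of the shell-style tokenizer deleted above; the expected results simply restate the examples from its doc comment, and the @-file name is hypothetical.

  CommandLineParser.tokenize(""" echo "hello, world!" """)  // List("echo", "hello, world!")
  CommandLineParser.tokenize(""" echo hello,' 'world! """)  // List("echo", "hello, world!")
  // Argument files expand the same way: expandArg("@opts.txt") reads opts.txt,
  // strips '#' comments, and tokenizes what remains.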
mode 100644 index cbd50429492e..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/Config.scala +++ /dev/null @@ -1,256 +0,0 @@ -package dotty.tools.dotc.config - -object Config { - - inline val cacheMembersNamed = true - inline val cacheAsSeenFrom = true - inline val cacheMemberNames = true - inline val cacheImplicitScopes = true - inline val cacheMatchReduced = true - - /** If true, the `runWithOwner` operation uses a re-usable context, - * similar to explore. This requires that the context does not escape - * the call. If false, `runWithOwner` runs its operation argument - * in a fresh context. - */ - inline val reuseOwnerContexts = true - - inline val checkCacheMembersNamed = false - - /** When updating a constraint bound, check that the constrained parameter - * does not appear at the top-level of either of its bounds. - */ - inline val checkConstraintsNonCyclic = false - - /** Check that reverse dependencies in constraints are correct and complete. - * Can also be enabled using -Ycheck-constraint-deps. - */ - inline val checkConstraintDeps = false - - /** Check that each constraint resulting from a subtype test - * is satisfiable. Also check that a type variable instantiation - * satisfies its constraints. - * Note that this can fail when bad bounds are in scope, like in - * tests/neg/i4721a.scala. - */ - inline val checkConstraintsSatisfiable = false - - /** Check that each constraint is fully propagated. i.e. - * If P <: Q then the upper bound of P is a subtype of the upper bound of Q - * and the lower bound of Q is a subtype of the lower bound of P. - */ - inline val checkConstraintsPropagated = false - - /** Check that constraint bounds do not contain wildcard types */ - inline val checkNoWildcardsInConstraint = false - - /** If a constraint is over a type lambda `tl` and `tvar` is one of - * the type variables associated with `tl` in the constraint, check - * that the origin of `tvar` is a parameter of `tl`. - */ - inline val checkConsistentVars = false - - /** Check that constraints of globally committable typer states are closed. - * NOTE: When enabled, the check can cause CyclicReference errors because - * it traverses all elements of a type. Such failures were observed when - * compiling all of dotty together (source seems to be in GenBCode which - * accesses javac's settings.) - * - * It is recommended to turn this option on only when chasing down - * a TypeParamRef instantiation error. See comment in Types.TypeVar.instantiate. - */ - inline val debugCheckConstraintsClosed = false - - /** Check that no type appearing as the info of a SymDenotation contains - * skolem types. - */ - inline val checkNoSkolemsInInfo = false - - /** Check that Name#toString is not called directly from backend by analyzing - * the stack trace of each toString call on names. This is very expensive, - * so not suitable for continuous testing. But it can be used to find a problem - * when running a specific test. - */ - inline val checkBackendNames = false - - /** Check that re-used type comparers are in their initialization state */ - inline val checkTypeComparerReset = false - - /** Type comparer will fail with an assert if the upper bound - * of a constrained parameter becomes Nothing. This should be turned - * on only for specific debugging as normally instantiation to Nothing - * is not an error condition. - */ - inline val failOnInstantiationToNothing = false - - /** Enable noDoubleDef checking if option "-YnoDoubleDefs" is set. 
- * The reason to have an option as well as the present global switch is - * that the noDoubleDef checking is done in a hotspot, and we do not - * want to incur the overhead of checking an option each time. - */ - inline val checkNoDoubleBindings = true - - /** Check positions for consistency after parsing */ - inline val checkPositions = true - - /** Check that typed trees don't point to untyped ones */ - inline val checkTreesConsistent = false - - /** Show subtype traces for all deep subtype recursions */ - inline val traceDeepSubTypeRecursions = false - - /** When explaining subtypes and this flag is set, also show the classes of the compared types. */ - inline val verboseExplainSubtype = false - - /** If this flag is set, take the fast path when comparing same-named type-aliases and types */ - inline val fastPathForRefinedSubtype = true - - /** If this flag is set, and we compute `T1[X1]` & `T2[X2]` as a new - * upper bound of a constrained parameter, try to align the arguments by computing - * `S1 =:= S2` (which might instantiate type parameters). - * This rule is contentious because it cuts the constraint set. - * - * For more info, see the comment in `TypeComparer#glbArgs`. - */ - inline val alignArgsInAnd = true - - /** If this flag is set, higher-kinded applications are checked for validity - */ - inline val checkHKApplications = false - - /** If this flag is set, method types are checked for valid parameter references - */ - inline val checkMethodTypes = false - - /** If this flag is set, it is checked that TypeRefs don't refer directly - * to themselves. - */ - inline val checkTypeRefCycles = false - - /** If this flag is set, we check that types assigned to trees are error types only - * if some error was already reported. There are complicicated scenarios where this - * is not true. An example is TestNonCyclic in posTwice. If we remove the - * first (unused) import `import dotty.tools.dotc.core.Types.Type` in `CompilationUnit`, - * we end up assigning a CyclicReference error type to an import expression `annotation` - * before the cyclic reference is reported. What happens is that the error was reported - * as a result of a completion in a not-yet committed typerstate. So we cannot enforce - * this in all circumstances. But since it is almost always true it is useful to - * keep the Config option for debugging. - */ - inline val checkUnreportedErrors = false - - /** If this flag is set, it is checked that class type parameters are - * only references with NoPrefix or ThisTypes as prefixes. This option - * is usually disabled, because there are still some legitimate cases where - * this can arise (e.g. for pos/Map.scala, in LambdaType.integrate). - */ - inline val checkTypeParamRefs = false - - /** The recursion depth for showing a summarized string */ - inline val summarizeDepth = 2 - - /** Check that variances of lambda arguments match the - * variance of the underlying lambda class. - */ - inline val checkLambdaVariance = false - - /** Check that certain types cannot be created in erasedTypes phases. - * Note: Turning this option on will get some false negatives, since it is - * possible that And/Or types are still created during erasure as the result - * of some operation on an existing type. - */ - inline val checkUnerased = false - - /** Check that atoms-based comparisons match regular comparisons that do not - * take atoms into account. The two have to give the same results, since - * atoms comparison is intended to be just an optimization. 
- */ - inline val checkAtomsComparisons = false - - /** In `derivedSelect`, rewrite - * - * (S & T)#A --> S#A & T#A - * (S | T)#A --> S#A | T#A - * - * Not sure whether this is useful. Preliminary measurements show a slowdown of about - * 7% for the build when this option is enabled. - */ - inline val splitProjections = false - - /** If this flag is on, always rewrite an application `S[Ts]` where `S` is an alias for - * `[Xs] -> U` to `[Xs := Ts]U`. - * Turning this flag on was observed to give a ~6% speedup on the JUnit test suite. - */ - inline val simplifyApplications = true - - /** Assume -indent by default */ - inline val defaultIndent = true - - /** If set, prints a trace of all symbol completions */ - inline val showCompletions = false - - /** If set, show variable/variable reverse dependencies when printing constraints. */ - inline val showConstraintDeps = true - - /** If set, method results that are context functions are flattened by adding - * the parameters of the context function results to the methods themselves. - * This is an optimization that reduces closure allocations. - */ - inline val flattenContextFunctionResults = true - - /** If set, enables tracing */ - inline val tracingEnabled = false - - /** Initial capacity of the uniques HashMap. - * Note: This should be a power of two to work with util.HashSet - */ - inline val initialUniquesCapacity = 0x8000 - - /** How many recursive calls to NamedType#underlying are performed before logging starts. */ - inline val LogPendingUnderlyingThreshold = 50 - - /** How many recursive calls to isSubType are performed before logging starts. */ - inline val LogPendingSubTypesThreshold = 50 - - /** How many recursive calls to findMember are performed before logging names starts - * Note: this threshold has to be chosen carefully. Too large, and programs - * like tests/pos/IterableSelfRec go into polynomial (or even exponential?) - * compile time slowdown. Too small and normal programs will cause the compiler to - * do inefficient operations on findMember. The current value is determined - * so that (1) IterableSelfRec still compiles in reasonable time (< 10sec) (2) Compiling - * dotty itself only causes small pending names lists to be generated (we measured - * at max 6 elements) and these lists are never searched with contains. - */ - inline val LogPendingFindMemberThreshold = 9 - - /** When in IDE, turn StaleSymbol errors into warnings instead of crashing */ - inline val ignoreStaleInIDE = true - - /** If true, `Denotation#asSeenFrom` is allowed to return an existing - * `SymDenotation` instead of allocating a new `SingleDenotation` if - * the two would only differ in their `prefix` (SymDenotation always - * have `NoPrefix` as their prefix). - * This is done for performance reasons: when compiling Dotty itself this - * reduces the number of allocated denotations by ~50%. - */ - inline val reuseSymDenotations = true - - /** If `checkLevelsOnConstraints` is true, check levels of type variables - * and create fresh ones as needed when bounds are first entered intot he constraint. - * If `checkLevelsOnInstantiation` is true, allow level-incorrect constraints but - * fix levels on type variable instantiation. - */ - inline val checkLevelsOnConstraints = false - inline val checkLevelsOnInstantiation = true - - /** If true, print capturing types in the form `{c} T`. - * If false, print them in the form `T @retains(c)`. 
- */ - inline val printCaptureSetsAsPrefix = true - - /** If true, allow mappping capture set variables under captureChecking with maps that are neither - * bijective nor idempotent. We currently do now know how to do this correctly in all - * cases, though. - */ - inline val ccAllowUnsoundMaps = false -} diff --git a/tests/pos-with-compiler-cc/dotc/config/Feature.scala b/tests/pos-with-compiler-cc/dotc/config/Feature.scala deleted file mode 100644 index 1637c9268e30..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/Feature.scala +++ /dev/null @@ -1,173 +0,0 @@ -package dotty.tools -package dotc -package config - -import core._ -import Contexts._, Symbols._, Names._ -import StdNames.nme -import Decorators.* -import util.{SrcPos, NoSourcePosition} -import SourceVersion._ -import reporting.Message -import NameKinds.QualifiedName -import language.experimental.pureFunctions - -object Feature: - - def experimental(str: PreName): TermName = - QualifiedName(nme.experimental, str.toTermName) - - private def deprecated(str: PreName): TermName = - QualifiedName(nme.deprecated, str.toTermName) - - private val namedTypeArguments = experimental("namedTypeArguments") - private val genericNumberLiterals = experimental("genericNumberLiterals") - val scala2macros = experimental("macros") - - val dependent = experimental("dependent") - val erasedDefinitions = experimental("erasedDefinitions") - val symbolLiterals = deprecated("symbolLiterals") - val fewerBraces = experimental("fewerBraces") - val saferExceptions = experimental("saferExceptions") - val pureFunctions = experimental("pureFunctions") - val captureChecking = experimental("captureChecking") - val into = experimental("into") - - val globalOnlyImports: Set[TermName] = Set(pureFunctions, captureChecking) - - /** Is `feature` enabled by by a command-line setting? The enabling setting is - * - * -language:feature - * - * where is the fully qualified name of `owner`, followed by a ".", - * but subtracting the prefix `scala.language.` at the front. - */ - def enabledBySetting(feature: TermName)(using Context): Boolean = - ctx.base.settings.language.value.contains(feature.toString) - - /** Is `feature` enabled by by an import? This is the case if the feature - * is imported by a named import - * - * import owner.feature - * - * and there is no visible nested import that excludes the feature, as in - * - * import owner.{ feature => _ } - */ - def enabledByImport(feature: TermName)(using Context): Boolean = - //atPhase(typerPhase) { - val info = ctx.importInfo - info != null && info.featureImported(feature) - //} - - /** Is `feature` enabled by either a command line setting or an import? - * @param feature The name of the feature - * @param owner The prefix symbol (nested in `scala.language`) where the - * feature is defined. - */ - def enabled(feature: TermName)(using Context): Boolean = - enabledBySetting(feature) || enabledByImport(feature) - - /** Is auto-tupling enabled? */ - def autoTuplingEnabled(using Context): Boolean = !enabled(nme.noAutoTupling) - - def dynamicsEnabled(using Context): Boolean = enabled(nme.dynamics) - - def dependentEnabled(using Context) = enabled(dependent) - - def namedTypeArgsEnabled(using Context) = enabled(namedTypeArguments) - - def genericNumberLiteralsEnabled(using Context) = enabled(genericNumberLiterals) - - def scala2ExperimentalMacroEnabled(using Context) = enabled(scala2macros) - - /** Is pureFunctions enabled for this compilation unit? 
*/ - def pureFunsEnabled(using Context) = - enabledBySetting(pureFunctions) - || ctx.compilationUnit.knowsPureFuns - || ccEnabled - - /** Is captureChecking enabled for this compilation unit? */ - def ccEnabled(using Context) = - enabledBySetting(captureChecking) - || ctx.compilationUnit.needsCaptureChecking - - /** Is pureFunctions enabled for any of the currently compiled compilation units? */ - def pureFunsEnabledSomewhere(using Context) = - enabledBySetting(pureFunctions) - || ctx.run != null && ctx.run.nn.pureFunsImportEncountered - || ccEnabledSomewhere - - /** Is captureChecking enabled for any of the currently compiled compilation units? */ - def ccEnabledSomewhere(using Context) = - enabledBySetting(captureChecking) - || ctx.run != null && ctx.run.nn.ccImportEncountered - - def sourceVersionSetting(using Context): SourceVersion = - SourceVersion.valueOf(ctx.settings.source.value) - - def sourceVersion(using Context): SourceVersion = - ctx.compilationUnit.sourceVersion match - case Some(v) => v - case none => sourceVersionSetting - - def migrateTo3(using Context): Boolean = - sourceVersion == `3.0-migration` - - def fewerBracesEnabled(using Context) = - sourceVersion.isAtLeast(`3.3`) || enabled(fewerBraces) - - /** If current source migrates to `version`, issue given warning message - * and return `true`, otherwise return `false`. - */ - def warnOnMigration(msg: Message, pos: SrcPos, version: SourceVersion)(using Context): Boolean = - if sourceVersion.isMigrating && sourceVersion.stable == version - || (version == `3.0` || version == `3.1`) && migrateTo3 - then - report.migrationWarning(msg, pos) - true - else - false - - def checkExperimentalFeature(which: String, srcPos: SrcPos, note: -> String = "")(using Context) = - if !isExperimentalEnabled then - report.error(em"Experimental $which may only be used with a nightly or snapshot version of the compiler$note", srcPos) - - def checkExperimentalDef(sym: Symbol, srcPos: SrcPos)(using Context) = - if !isExperimentalEnabled then - val symMsg = - if sym.hasAnnotation(defn.ExperimentalAnnot) then - i"$sym is marked @experimental" - else if sym.owner.hasAnnotation(defn.ExperimentalAnnot) then - i"${sym.owner} is marked @experimental" - else - i"$sym inherits @experimental" - report.error(em"$symMsg and therefore may only be used in an experimental scope.", srcPos) - - /** Check that experimental compiler options are only set for snapshot or nightly compiler versions. */ - def checkExperimentalSettings(using Context): Unit = - for setting <- ctx.settings.language.value - if setting.startsWith("experimental.") && setting != "experimental.macros" - do checkExperimentalFeature(s"feature $setting", NoSourcePosition) - - def isExperimentalEnabled(using Context): Boolean = - Properties.experimental && !ctx.settings.YnoExperimental.value - - /** Handle language import `import language..` if it is one - * of the global imports `pureFunctions` or `captureChecking`. In this case - * make the compilation unit's and current run's fields accordingly. 
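A sketch of how the two enabling routes in Feature combine; the compiler flag and the language import below are the usual ways to switch on this experimental feature, and the check itself runs against the current Context.

  // command line: -language:experimental.erasedDefinitions          (Feature.enabledBySetting)
  // in source:    import scala.language.experimental.erasedDefinitions   (Feature.enabledByImport)
  def erasedDefsAllowed(using Context): Boolean =
    Feature.enabled(Feature.erasedDefinitions)   // true if either route above is active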
- * @return true iff import that was handled - */ - def handleGlobalLanguageImport(prefix: TermName, imported: Name)(using Context): Boolean = - val fullFeatureName = QualifiedName(prefix, imported.asTermName) - if fullFeatureName == pureFunctions then - ctx.compilationUnit.knowsPureFuns = true - if ctx.run != null then ctx.run.nn.pureFunsImportEncountered = true - true - else if fullFeatureName == captureChecking then - ctx.compilationUnit.needsCaptureChecking = true - if ctx.run != null then ctx.run.nn.ccImportEncountered = true - true - else - false -end Feature diff --git a/tests/pos-with-compiler-cc/dotc/config/JavaPlatform.scala b/tests/pos-with-compiler-cc/dotc/config/JavaPlatform.scala deleted file mode 100644 index 2b2f35e49451..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/JavaPlatform.scala +++ /dev/null @@ -1,69 +0,0 @@ -package dotty.tools -package dotc -package config - -import io._ -import classpath.AggregateClassPath -import core._ -import Symbols._, Types._, Contexts._, StdNames._ -import Flags._ -import transform.ExplicitOuter, transform.SymUtils._ - -class JavaPlatform extends Platform { - - private var currentClassPath: Option[ClassPath] = None - - def classPath(using Context): ClassPath = { - if (currentClassPath.isEmpty) - currentClassPath = Some(new PathResolver().result) - val cp = currentClassPath.get - cp - } - - // The given symbol is a method with the right name and signature to be a runnable java program. - def isMainMethod(sym: Symbol)(using Context): Boolean = - (sym.name == nme.main) && (sym.info match { - case MethodTpe(_, defn.ArrayOf(el) :: Nil, restpe) => el =:= defn.StringType && (restpe isRef defn.UnitClass) - case _ => false - }) - - /** Update classpath with a substituted subentry */ - def updateClassPath(subst: Map[ClassPath, ClassPath]): Unit = currentClassPath.get match { - case AggregateClassPath(entries) => - currentClassPath = Some(AggregateClassPath(entries map (e => subst.getOrElse(e, e)))) - case cp: ClassPath => - currentClassPath = Some(subst.getOrElse(cp, cp)) - } - - def rootLoader(root: TermSymbol)(using Context): SymbolLoader = new SymbolLoaders.PackageLoader(root, classPath) - - /** Is the SAMType `cls` also a SAM under the rules of the JVM? */ - def isSam(cls: ClassSymbol)(using Context): Boolean = - cls.isAllOf(NoInitsTrait) && - cls.superClass == defn.ObjectClass && - cls.directlyInheritedTraits.forall(_.is(NoInits)) && - !ExplicitOuter.needsOuterIfReferenced(cls) && - cls.typeRef.fields.isEmpty // Superaccessors already show up as abstract methods here, so no test necessary - - /** We could get away with excluding BoxedBooleanClass for the - * purpose of equality testing since it need not compare equal - * to anything but other booleans, but it should be present in - * case this is put to other uses. 
- */ - def isMaybeBoxed(sym: ClassSymbol)(using Context): Boolean = { - val d = defn - import d._ - (sym == ObjectClass) || - (sym == JavaSerializableClass) || - (sym == ComparableClass) || - (sym derivesFrom BoxedNumberClass) || - (sym derivesFrom BoxedCharClass) || - (sym derivesFrom BoxedBooleanClass) - } - - def shouldReceiveJavaSerializationMethods(sym: ClassSymbol)(using Context): Boolean = - true - - def newClassLoader(bin: AbstractFile)(using Context): SymbolLoader = - new ClassfileLoader(bin) -} diff --git a/tests/pos-with-compiler-cc/dotc/config/OutputDirs.scala b/tests/pos-with-compiler-cc/dotc/config/OutputDirs.scala deleted file mode 100644 index 0411c5604768..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/OutputDirs.scala +++ /dev/null @@ -1,117 +0,0 @@ -package dotty.tools -package dotc -package config - -import scala.language.unsafeNulls - -import io._ - -/** A class for holding mappings from source directories to - * their output location. This functionality can be accessed - * only programmatically. The command line compiler uses a - * single output location, but tools may use this functionality - * to set output location per source directory. - */ -class OutputDirs { - /** Pairs of source directory - destination directory. */ - private var outputDirs: List[(AbstractFile, AbstractFile)] = Nil - - /** If this is not None, the output location where all - * classes should go. - */ - private var singleOutDir: Option[AbstractFile] = None - - /** Add a destination directory for sources found under srcdir. - * Both directories should exits. - */ - def add(srcDir: String, outDir: String): Unit = - add(checkDir(AbstractFile.getDirectory(srcDir), srcDir), - checkDir(AbstractFile.getDirectory(outDir), outDir)) - - /** Check that dir is exists and is a directory. */ - private def checkDir(dir: AbstractFile, name: String, allowJar: Boolean = false): AbstractFile = ( - if (dir != null && dir.isDirectory) - dir - // was: else if (allowJar && dir == null && Path.isJarOrZip(name, false)) - else if (allowJar && dir == null && Jar.isJarOrZip(File(name), false)) - new PlainFile(Path(name)) - else - throw new FatalError(name + " does not exist or is not a directory")) - - /** Set the single output directory. From now on, all files will - * be dumped in there, regardless of previous calls to 'add'. - */ - def setSingleOutput(outDir: String): Unit = { - val dst = AbstractFile.getDirectory(outDir) - setSingleOutput(checkDir(dst, outDir, true)) - } - - def getSingleOutput: Option[AbstractFile] = singleOutDir - - /** Set the single output directory. From now on, all files will - * be dumped in there, regardless of previous calls to 'add'. - */ - def setSingleOutput(dir: AbstractFile): Unit = - singleOutDir = Some(dir) - - def add(src: AbstractFile, dst: AbstractFile): Unit = { - singleOutDir = None - outputDirs ::= ((src, dst)) - } - - /** Return the list of source-destination directory pairs. */ - def outputs: List[(AbstractFile, AbstractFile)] = outputDirs - - /** Return the output directory for the given file. 
- */ - def outputDirFor(src: AbstractFile): AbstractFile = { - def isBelow(srcDir: AbstractFile, outDir: AbstractFile) = - src.path.startsWith(srcDir.path) - - singleOutDir match { - case Some(d) => d - case None => - (outputs find (isBelow _).tupled) match { - case Some((_, d)) => d - case _ => - throw new FatalError("Could not find an output directory for " - + src.path + " in " + outputs) - } - } - } - - /** Return the source file path(s) which correspond to the given - * classfile path and SourceFile attribute value, subject to the - * condition that source files are arranged in the filesystem - * according to Java package layout conventions. - * - * The given classfile path must be contained in at least one of - * the specified output directories. If it does not then this - * method returns Nil. - * - * Note that the source file is not required to exist, so assuming - * a valid classfile path this method will always return a list - * containing at least one element. - * - * Also that if two or more source path elements target the same - * output directory there will be two or more candidate source file - * paths. - */ - def srcFilesFor(classFile: AbstractFile, srcPath: String): List[AbstractFile] = { - def isBelow(srcDir: AbstractFile, outDir: AbstractFile) = - classFile.path.startsWith(outDir.path) - - singleOutDir match { - case Some(d) => - d match { - case _: VirtualDirectory | _: io.ZipArchive => Nil - case _ => List(d.lookupPathUnchecked(srcPath, false)) - } - case None => - (outputs filter (isBelow _).tupled) match { - case Nil => Nil - case matches => matches.map(_._1.lookupPathUnchecked(srcPath, false)) - } - } - } -} diff --git a/tests/pos-with-compiler-cc/dotc/config/PathResolver.scala b/tests/pos-with-compiler-cc/dotc/config/PathResolver.scala deleted file mode 100644 index afa30e38dc2a..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/PathResolver.scala +++ /dev/null @@ -1,268 +0,0 @@ -package dotty.tools -package dotc -package config - -import scala.language.unsafeNulls - -import WrappedProperties.AccessControl -import io.{ClassPath, Directory, Path} -import classpath.{AggregateClassPath, ClassPathFactory, JrtClassPath} -import ClassPath.split -import PartialFunction.condOpt -import core.Contexts._ -import Settings._ -import dotty.tools.io.File - -object PathResolver { - - // Imports property/environment functions which suppress - // security exceptions. - import AccessControl._ - - def firstNonEmpty(xs: String*): String = xs find (_ != "") getOrElse "" - - /** Map all classpath elements to absolute paths and reconstruct the classpath. - */ - def makeAbsolute(cp: String): String = ClassPath.map(cp, x => Path(x).toAbsolute.path) - - /** pretty print class path - */ - def ppcp(s: String): String = split(s) match { - case Nil => "" - case Seq(x) => x - case xs => xs.map("\n" + _).mkString - } - - /** Values found solely by inspecting environment or property variables. - */ - object Environment { - private def searchForBootClasspath = ( - systemProperties find (_._1 endsWith ".boot.class.path") map (_._2) getOrElse "" - ) - - /** Environment variables which java pays attention to so it - * seems we do as well. 
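An illustrative sketch of the OutputDirs API shown above; the directory names are hypothetical and, per checkDir, must already exist.

  val dirs = new OutputDirs
  dirs.add("src/main/scala", "out/classes")  // per-source-directory mapping
  dirs.setSingleOutput("out")                // or one destination for everything, overriding the pairs
  dirs.getSingleOutput                       // Some(out)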
- */ - def classPathEnv: String = envOrElse("CLASSPATH", "") - def sourcePathEnv: String = envOrElse("SOURCEPATH", "") - - def javaBootClassPath: String = propOrElse("sun.boot.class.path", searchForBootClasspath) - - def javaExtDirs: String = propOrEmpty("java.ext.dirs") - def scalaHome: String = propOrEmpty("scala.home") - def scalaExtDirs: String = propOrEmpty("scala.ext.dirs") - - /** The java classpath and whether to use it. - */ - def javaUserClassPath: String = propOrElse("java.class.path", "") - def useJavaClassPath: Boolean = propOrFalse("scala.usejavacp") - - override def toString: String = s""" - |object Environment { - | scalaHome = $scalaHome (useJavaClassPath = $useJavaClassPath) - | javaBootClassPath = <${javaBootClassPath.length} chars> - | javaExtDirs = ${ppcp(javaExtDirs)} - | javaUserClassPath = ${ppcp(javaUserClassPath)} - | scalaExtDirs = ${ppcp(scalaExtDirs)} - |}""".trim.stripMargin - } - - /** Default values based on those in Environment as interpreted according - * to the path resolution specification. - */ - object Defaults { - def scalaSourcePath: String = Environment.sourcePathEnv - def javaBootClassPath: String = Environment.javaBootClassPath - def javaUserClassPath: String = Environment.javaUserClassPath - def javaExtDirs: String = Environment.javaExtDirs - def useJavaClassPath: Boolean = Environment.useJavaClassPath - - def scalaHome: String = Environment.scalaHome - def scalaHomeDir: Directory = Directory(scalaHome) - def scalaHomeExists: Boolean = scalaHomeDir.isDirectory - def scalaLibDir: Directory = (scalaHomeDir / "lib").toDirectory - def scalaClassesDir: Directory = (scalaHomeDir / "classes").toDirectory - - def scalaLibAsJar: File = (scalaLibDir / "scala-library.jar").toFile - def scalaLibAsDir: Directory = (scalaClassesDir / "library").toDirectory - - def scalaLibDirFound: Option[Directory] = - if (scalaLibAsJar.isFile) Some(scalaLibDir) - else if (scalaLibAsDir.isDirectory) Some(scalaClassesDir) - else None - - def scalaLibFound: String = - if (scalaLibAsJar.isFile) scalaLibAsJar.path - else if (scalaLibAsDir.isDirectory) scalaLibAsDir.path - else "" - - // XXX It must be time for someone to figure out what all these things - // are intended to do. This is disabled here because it was causing all - // the scala jars to end up on the classpath twice: one on the boot - // classpath as set up by the runner (or regular classpath under -nobootcp) - // and then again here. - def scalaBootClassPath: String = "" - // scalaLibDirFound match { - // case Some(dir) if scalaHomeExists => - // val paths = ClassPath expandDir dir.path - // join(paths: _*) - // case _ => "" - // } - - def scalaExtDirs: String = Environment.scalaExtDirs - - def scalaPluginPath: String = (scalaHomeDir / "misc" / "scala-devel" / "plugins").path - - override def toString: String = """ - |object Defaults { - | scalaHome = %s - | javaBootClassPath = %s - | scalaLibDirFound = %s - | scalaLibFound = %s - | scalaBootClassPath = %s - | scalaPluginPath = %s - |}""".trim.stripMargin.format( - scalaHome, - ppcp(javaBootClassPath), - scalaLibDirFound, scalaLibFound, - ppcp(scalaBootClassPath), ppcp(scalaPluginPath) - ) - } - - def fromPathString(path: String)(using Context): ClassPath = { - val settings = ctx.settings.classpath.update(path) - inContext(ctx.fresh.setSettings(settings)) { - new PathResolver().result - } - } - - /** Show values in Environment and Defaults when no argument is provided. - * Otherwise, show values in Calculated as if those options had been given - * to a scala runner. 
- */ - def main(args: Array[String]): Unit = - if (args.isEmpty) { - println(Environment) - println(Defaults) - } - else inContext(ContextBase().initialCtx) { - val ArgsSummary(sstate, rest, errors, warnings) = - ctx.settings.processArguments(args.toList, true, ctx.settingsState) - errors.foreach(println) - val pr = inContext(ctx.fresh.setSettings(sstate)) { - new PathResolver() - } - println(" COMMAND: 'scala %s'".format(args.mkString(" "))) - println("RESIDUAL: 'scala %s'\n".format(rest.mkString(" "))) - - pr.result match { - case cp: AggregateClassPath => - println(s"ClassPath has ${cp.aggregates.size} entries and results in:\n${cp.asClassPathStrings}") - } - } -} - -import PathResolver.{Defaults, ppcp} - -class PathResolver(using c: Context) { - import c.base.settings - - private val classPathFactory = new ClassPathFactory - - private def cmdLineOrElse(name: String, alt: String) = - commandLineFor(name) match { - case Some("") | None => alt - case Some(x) => x - } - - private def commandLineFor(s: String): Option[String] = condOpt(s) { - case "javabootclasspath" => settings.javabootclasspath.value - case "javaextdirs" => settings.javaextdirs.value - case "bootclasspath" => settings.bootclasspath.value - case "extdirs" => settings.extdirs.value - case "classpath" | "cp" => settings.classpath.value - case "sourcepath" => settings.sourcepath.value - } - - /** Calculated values based on any given command line options, falling back on - * those in Defaults. - */ - object Calculated { - def scalaHome: String = Defaults.scalaHome - def useJavaClassPath: Boolean = settings.usejavacp.value || Defaults.useJavaClassPath - def javaBootClassPath: String = cmdLineOrElse("javabootclasspath", Defaults.javaBootClassPath) - def javaExtDirs: String = cmdLineOrElse("javaextdirs", Defaults.javaExtDirs) - def javaUserClassPath: String = if (useJavaClassPath) Defaults.javaUserClassPath else "" - def scalaBootClassPath: String = cmdLineOrElse("bootclasspath", Defaults.scalaBootClassPath) - def scalaExtDirs: String = cmdLineOrElse("extdirs", Defaults.scalaExtDirs) - /** Scaladoc doesn't need any bootstrapping, otherwise will create errors such as: - * [scaladoc] ../scala-trunk/src/reflect/scala/reflect/macros/Reifiers.scala:89: error: object api is not a member of package reflect - * [scaladoc] case class ReificationException(val pos: reflect.api.PositionApi, val msg: String) extends Throwable(msg) - * [scaladoc] ^ - * Because bootstrapping looks at the sourcepath and creates the package "reflect" in "" it will cause the - * typedIdentifier to pick .reflect instead of the .scala.reflect package. Thus, no bootstrapping for scaladoc! - */ - def sourcePath: String = cmdLineOrElse("sourcepath", Defaults.scalaSourcePath) - - def userClassPath: String = - if (!settings.classpath.isDefault) settings.classpath.value - else sys.env.getOrElse("CLASSPATH", ".") - - import classPathFactory._ - - // Assemble the elements! - def basis: List[Traversable[ClassPath]] = - val release = Option(ctx.settings.javaOutputVersion.value).filter(_.nonEmpty) - - List( - JrtClassPath(release), // 1. The Java 9+ classpath (backed by the jrt:/ virtual system, if available) - classesInPath(javaBootClassPath), // 2. The Java bootstrap class path. - contentsOfDirsInPath(javaExtDirs), // 3. The Java extension class path. - classesInExpandedPath(javaUserClassPath), // 4. The Java application class path. - classesInPath(scalaBootClassPath), // 5. The Scala boot class path. - contentsOfDirsInPath(scalaExtDirs), // 6. 
The Scala extension class path. - classesInExpandedPath(userClassPath), // 7. The Scala application class path. - sourcesInPath(sourcePath) // 8. The Scala source path. - ) - - lazy val containers: List[ClassPath] = basis.flatten.distinct - - override def toString: String = """ - |object Calculated { - | scalaHome = %s - | javaBootClassPath = %s - | javaExtDirs = %s - | javaUserClassPath = %s - | useJavaClassPath = %s - | scalaBootClassPath = %s - | scalaExtDirs = %s - | userClassPath = %s - | sourcePath = %s - |}""".trim.stripMargin.format( - scalaHome, - ppcp(javaBootClassPath), ppcp(javaExtDirs), ppcp(javaUserClassPath), - useJavaClassPath, - ppcp(scalaBootClassPath), ppcp(scalaExtDirs), ppcp(userClassPath), - ppcp(sourcePath) - ) - } - - def containers: List[ClassPath] = Calculated.containers - - lazy val result: ClassPath = { - val cp = AggregateClassPath(containers.toIndexedSeq) - - if (settings.YlogClasspath.value) { - Console.println("Classpath built from " + settings.toConciseString(ctx.settingsState)) - Console.println("Defaults: " + PathResolver.Defaults) - Console.println("Calculated: " + Calculated) - - val xs = (Calculated.basis drop 2).flatten.distinct - println("After java boot/extdirs classpath has %d entries:" format xs.size) - xs foreach (x => println(" " + x)) - } - cp - } - - def asURLs: Seq[java.net.URL] = result.asURLs -} diff --git a/tests/pos-with-compiler-cc/dotc/config/Platform.scala b/tests/pos-with-compiler-cc/dotc/config/Platform.scala deleted file mode 100644 index 0faacf1bcebb..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/Platform.scala +++ /dev/null @@ -1,46 +0,0 @@ -package dotty.tools -package dotc -package config - -import io.{ClassPath, AbstractFile} -import core.Contexts._, core.Symbols._ -import core.SymbolLoader -import core.StdNames.nme -import core.Flags.Module - -/** The platform dependent pieces of Global. - */ -abstract class Platform { - - /** The root symbol loader. */ - def rootLoader(root: TermSymbol)(using Context): SymbolLoader - - /** The compiler classpath. */ - def classPath(using Context): ClassPath - - /** Update classpath with a substitution that maps entries to entries */ - def updateClassPath(subst: Map[ClassPath, ClassPath]): Unit - - /** Any platform-specific phases. */ - //def platformPhases: List[SubComponent] - - /** Is the SAMType `cls` also a SAM under the rules of the platform? */ - def isSam(cls: ClassSymbol)(using Context): Boolean - - /** The various ways a boxed primitive might materialize at runtime. */ - def isMaybeBoxed(sym: ClassSymbol)(using Context): Boolean - - /** Is the given class symbol eligible for Java serialization-specific methods? */ - def shouldReceiveJavaSerializationMethods(sym: ClassSymbol)(using Context): Boolean - - /** Create a new class loader to load class file `bin` */ - def newClassLoader(bin: AbstractFile)(using Context): SymbolLoader - - /** The given symbol is a method with the right name and signature to be a runnable program. */ - def isMainMethod(sym: Symbol)(using Context): Boolean - - /** The given class has a main method. 
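A sketch of driving the resolver above programmatically; an initialized compiler Context is assumed to be in scope, and the classpath string is hypothetical.

  def resolve(cp: String)(using Context): ClassPath =
    PathResolver.fromPathString(cp)   // e.g. "out/classes:lib/foo.jar", assembled in the order listed in `basis`
  // When the YlogClasspath setting is on, the chosen entries are echoed, as `result` above shows.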
*/ - final def hasMainMethod(sym: Symbol)(using Context): Boolean = - sym.info.member(nme.main).hasAltWith(d => - isMainMethod(d.symbol) && (sym.is(Module) || d.symbol.isStatic)) -} diff --git a/tests/pos-with-compiler-cc/dotc/config/Printers.scala b/tests/pos-with-compiler-cc/dotc/config/Printers.scala deleted file mode 100644 index ecb189de9bb3..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/Printers.scala +++ /dev/null @@ -1,52 +0,0 @@ -package dotty.tools.dotc.config - -object Printers { - - class Printer { - def println(msg: => String): Unit = System.out.nn.println(msg) - } - - object noPrinter extends Printer { - inline override def println(msg: => String): Unit = () - } - - val default = new Printer - - val capt = noPrinter - val constr = noPrinter - val core = noPrinter - val checks = noPrinter - val config = noPrinter - val cyclicErrors = noPrinter - val debug = noPrinter - val derive = noPrinter - val desugar = noPrinter - val scaladoc = noPrinter - val exhaustivity = noPrinter - val gadts = noPrinter - val gadtsConstr = noPrinter - val hk = noPrinter - val implicits = noPrinter - val implicitsDetailed = noPrinter - val lexical = noPrinter - val init = noPrinter - val inlining = noPrinter - val interactiv = noPrinter - val matchTypes = noPrinter - val nullables = noPrinter - val overload = noPrinter - val patmatch = noPrinter - val pickling = noPrinter - val quotePickling = noPrinter - val plugins = noPrinter - val recheckr = noPrinter - val refcheck = noPrinter - val simplify = noPrinter - val staging = noPrinter - val subtyping = noPrinter - val tailrec = noPrinter - val transforms = noPrinter - val typr = noPrinter - val unapp = noPrinter - val variances = noPrinter -} diff --git a/tests/pos-with-compiler-cc/dotc/config/Properties.scala b/tests/pos-with-compiler-cc/dotc/config/Properties.scala deleted file mode 100644 index 1e9cc82112af..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/Properties.scala +++ /dev/null @@ -1,142 +0,0 @@ -package dotty.tools -package dotc -package config - -import scala.language.unsafeNulls - -import scala.annotation.internal.sharable - -import java.io.IOException -import java.util.jar.Attributes.{ Name => AttributeName } -import java.nio.charset.StandardCharsets - -/** Loads `library.properties` from the jar. */ -object Properties extends PropertiesTrait { - protected def propCategory: String = "compiler" - protected def pickJarBasedOn: Class[PropertiesTrait] = classOf[PropertiesTrait] - - /** Scala manifest attributes. - */ - @sharable val ScalaCompilerVersion: AttributeName = new AttributeName("Scala-Compiler-Version") -} - -trait PropertiesTrait { - protected def propCategory: String // specializes the remainder of the values - protected def pickJarBasedOn: Class[?] 
// props file comes from jar containing this - - /** The name of the properties file */ - protected val propFilename: String = "/" + propCategory + ".properties" - - /** The loaded properties */ - @sharable protected lazy val scalaProps: java.util.Properties = { - val props = new java.util.Properties - val stream = pickJarBasedOn getResourceAsStream propFilename - if (stream ne null) - quietlyDispose(props load stream, stream.close) - - props - } - - private def quietlyDispose(action: => Unit, disposal: => Unit) = - try { action } - finally - try { disposal } - catch { case _: IOException => } - - def propIsSet(name: String): Boolean = System.getProperty(name) != null - def propIsSetTo(name: String, value: String): Boolean = propOrNull(name) == value - def propOrElse(name: String, alt: String): String = System.getProperty(name, alt) - def propOrEmpty(name: String): String = propOrElse(name, "") - def propOrNull(name: String): String = propOrElse(name, null) - def propOrNone(name: String): Option[String] = Option(propOrNull(name)) - def propOrFalse(name: String): Boolean = propOrNone(name) exists (x => List("yes", "on", "true") contains x.toLowerCase) - def setProp(name: String, value: String): String = System.setProperty(name, value) - def clearProp(name: String): String = System.clearProperty(name) - - def envOrElse(name: String, alt: String): String = Option(System getenv name) getOrElse alt - def envOrNone(name: String): Option[String] = Option(System getenv name) - - // for values based on propFilename - def scalaPropOrElse(name: String, alt: String): String = scalaProps.getProperty(name, alt) - def scalaPropOrEmpty(name: String): String = scalaPropOrElse(name, "") - def scalaPropOrNone(name: String): Option[String] = Option(scalaProps.getProperty(name)) - - /** Either the development or release version if known, otherwise - * the empty string. - */ - def versionNumberString: String = scalaPropOrEmpty("version.number") - - /** The version number of the jar this was loaded from, - * or `"(unknown)"` if it cannot be determined. - */ - val simpleVersionString: String = { - val v = scalaPropOrElse("version.number", "(unknown)") - v + ( - if (v.contains("SNAPSHOT") || v.contains("NIGHTLY")) - "-git-" + scalaPropOrElse("git.hash", "(unknown)") - else - "" - ) - } - - /** The version number of the jar this was loaded from plus `"version "` prefix, - * or `"version (unknown)"` if it cannot be determined. - */ - val versionString: String = "version " + simpleVersionString - - /** Whether the current version of compiler is experimental - * - * 1. Snapshot, nightly releases and non-bootstrapped compiler are experimental. - * 2. Features supported by experimental versions of the compiler: - * - research plugins - */ - val experimental: Boolean = versionString.contains("SNAPSHOT") || versionString.contains("NIGHTLY") || versionString.contains("nonbootstrapped") - - val copyrightString: String = scalaPropOrElse("copyright.string", "(c) 2002-2017 LAMP/EPFL") - - /** This is the encoding to use reading in source files, overridden with -encoding - * Note that it uses "prop" i.e. looks in the scala jar, not the system properties. 
- */ - def sourceEncoding: String = scalaPropOrElse("file.encoding", StandardCharsets.UTF_8.name) - def sourceReader: String = scalaPropOrElse("source.reader", "scala.tools.nsc.io.SourceReader") - - /** This is the default text encoding, overridden (unreliably) with - * `JAVA_OPTS="-Dfile.encoding=Foo"` - */ - def encodingString: String = propOrElse("file.encoding", StandardCharsets.UTF_8.name) - - /** The default end of line character. - */ - def lineSeparator: String = propOrElse("line.separator", "\n") - - /** Various well-known properties. - */ - def javaClassPath: String = propOrEmpty("java.class.path") - def javaHome: String = propOrEmpty("java.home") - def javaVendor: String = propOrEmpty("java.vendor") - def javaVersion: String = propOrEmpty("java.version") - def javaVmInfo: String = propOrEmpty("java.vm.info") - def javaVmName: String = propOrEmpty("java.vm.name") - def javaVmVendor: String = propOrEmpty("java.vm.vendor") - def javaVmVersion: String = propOrEmpty("java.vm.version") - def osName: String = propOrEmpty("os.name") - def scalaHome: String = propOrEmpty("scala.home") - def tmpDir: String = propOrEmpty("java.io.tmpdir") - def userDir: String = propOrEmpty("user.dir") - def userHome: String = propOrEmpty("user.home") - def userName: String = propOrEmpty("user.name") - - /** Some derived values. - */ - def isWin: Boolean = osName startsWith "Windows" - def isMac: Boolean = javaVendor startsWith "Apple" - - // This is looking for javac, tools.jar, etc. - // Tries JDK_HOME first, then the more common but likely jre JAVA_HOME, - // and finally the system property based javaHome. - def jdkHome: String = envOrElse("JDK_HOME", envOrElse("JAVA_HOME", javaHome)) - - def versionMsg: String = "Scala %s %s -- %s".format(propCategory, versionString, copyrightString) - def scalaCmd: String = if (isWin) "scala.bat" else "scala" - def scalacCmd: String = if (isWin) "scalac.bat" else "scalac" -} diff --git a/tests/pos-with-compiler-cc/dotc/config/SJSPlatform.scala b/tests/pos-with-compiler-cc/dotc/config/SJSPlatform.scala deleted file mode 100644 index ae417b717ca3..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/SJSPlatform.scala +++ /dev/null @@ -1,35 +0,0 @@ -package dotty.tools.dotc.config - -import dotty.tools.dotc.core._ -import Contexts._ -import Symbols._ - -import dotty.tools.backend.sjs.JSDefinitions - -object SJSPlatform { - /** The `SJSPlatform` for the current context. */ - def sjsPlatform(using Context): SJSPlatform = - ctx.platform.asInstanceOf[SJSPlatform] -} - -class SJSPlatform()(using DetachedContext) extends JavaPlatform { - - /** Scala.js-specific definitions. */ - val jsDefinitions: JSDefinitions = new JSDefinitions() - - /** Is the SAMType `cls` also a SAM under the rules of the Scala.js back-end? */ - override def isSam(cls: ClassSymbol)(using Context): Boolean = - defn.isFunctionClass(cls) - || cls.superClass == jsDefinitions.JSFunctionClass - - /** Is the given class symbol eligible for Java serialization-specific methods? - * - * This is not simply false because we still want to add them to Scala classes - * and objects. They might be transitively used by macros and other compile-time - * code. It feels safer to have them be somewhat equivalent to the ones we would - * get in a JVM project. The JVM back-end will slap an extends `java.io.Serializable` - * to them, so we should be consistent and also emit the proper serialization methods. 
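A small sketch of the property helpers defined above; the system property used here is the one PathResolver consults, shown only as an example.

  sys.props("scala.usejavacp") = "on"
  Properties.propOrFalse("scala.usejavacp")  // true: "yes", "on" and "true" (any case) count as set
  Properties.versionMsg                      // e.g. "Scala compiler version 3.x.y -- (c) 2002-2017 LAMP/EPFL"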
- */ - override def shouldReceiveJavaSerializationMethods(sym: ClassSymbol)(using Context): Boolean = - !sym.isSubClass(jsDefinitions.JSAnyClass) -} diff --git a/tests/pos-with-compiler-cc/dotc/config/ScalaRelease.scala b/tests/pos-with-compiler-cc/dotc/config/ScalaRelease.scala deleted file mode 100644 index 407171f1a0dd..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/ScalaRelease.scala +++ /dev/null @@ -1,21 +0,0 @@ -package dotty.tools.dotc.config - -enum ScalaRelease(val majorVersion: Int, val minorVersion: Int) extends Ordered[ScalaRelease]: - case Release3_0 extends ScalaRelease(3, 0) - case Release3_1 extends ScalaRelease(3, 1) - case Release3_2 extends ScalaRelease(3, 2) - - def show = s"$majorVersion.$minorVersion" - - def compare(that: ScalaRelease) = - val ord = summon[Ordering[(Int, Int)]] - ord.compare((majorVersion, minorVersion), (that.majorVersion, that.minorVersion)) - -object ScalaRelease: - def latest = Release3_1 - - def parse(name: String) = name match - case "3.0" => Some(Release3_0) - case "3.1" => Some(Release3_1) - case "3.2" => Some(Release3_2) - case _ => None diff --git a/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala b/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala deleted file mode 100644 index 558eb3e0a12b..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/ScalaSettings.scala +++ /dev/null @@ -1,347 +0,0 @@ -package dotty.tools.dotc -package config - -import scala.language.unsafeNulls - -import dotty.tools.dotc.config.PathResolver.Defaults -import dotty.tools.dotc.config.Settings.{Setting, SettingGroup} -import dotty.tools.dotc.config.SourceVersion -import dotty.tools.dotc.core.Contexts._ -import dotty.tools.dotc.rewrites.Rewrites -import dotty.tools.io.{AbstractFile, Directory, JDK9Reflectors, PlainDirectory} - -import scala.util.chaining._ - -class ScalaSettings extends SettingGroup with AllScalaSettings - -object ScalaSettings: - // Keep synchronized with `classfileVersion` in `BCodeIdiomatic` - private val minTargetVersion = 8 - private val maxTargetVersion = 22 - - def supportedTargetVersions: List[String] = - (minTargetVersion to maxTargetVersion).toList.map(_.toString) - - def supportedReleaseVersions: List[String] = - if scala.util.Properties.isJavaAtLeast("9") then - val jdkVersion = JDK9Reflectors.runtimeVersionMajor(JDK9Reflectors.runtimeVersion()).intValue() - val maxVersion = Math.min(jdkVersion, maxTargetVersion) - (minTargetVersion to maxVersion).toList.map(_.toString) - else List(minTargetVersion).map(_.toString) - - def supportedScalaReleaseVersions: List[String] = - ScalaRelease.values.toList.map(_.show) - - def supportedSourceVersions: List[String] = - SourceVersion.values.toList.map(_.toString) - - def defaultClasspath: String = sys.env.getOrElse("CLASSPATH", ".") - - def defaultPageWidth: Int = { - val defaultWidth = 80 - val columnsVar = System.getenv("COLUMNS") - if columnsVar != null then columnsVar.toInt - else if Properties.isWin then - val ansiconVar = System.getenv("ANSICON") // eg. 
"142x32766 (142x26)" - if ansiconVar != null && ansiconVar.matches("[0-9]+x.*") then - ansiconVar.substring(0, ansiconVar.indexOf("x")).toInt - else defaultWidth - else defaultWidth - } - -trait AllScalaSettings extends CommonScalaSettings, PluginSettings, VerboseSettings, WarningSettings, XSettings, YSettings: - self: SettingGroup => - - /* Path related settings */ - val semanticdbTarget: Setting[String] = PathSetting("-semanticdb-target", "Specify an alternative output directory for SemanticDB files.", "") - - val source: Setting[String] = ChoiceSetting("-source", "source version", "source version", ScalaSettings.supportedSourceVersions, SourceVersion.defaultSourceVersion.toString, aliases = List("--source")) - val uniqid: Setting[Boolean] = BooleanSetting("-uniqid", "Uniquely tag all identifiers in debugging output.", aliases = List("--unique-id")) - val rewrite: Setting[Option[Rewrites]] = OptionSetting[Rewrites]("-rewrite", "When used in conjunction with a `...-migration` source version, rewrites sources to migrate to new version.", aliases = List("--rewrite")) - val fromTasty: Setting[Boolean] = BooleanSetting("-from-tasty", "Compile classes from tasty files. The arguments are .tasty or .jar files.", aliases = List("--from-tasty")) - - val newSyntax: Setting[Boolean] = BooleanSetting("-new-syntax", "Require `then` and `do` in control expressions.") - val oldSyntax: Setting[Boolean] = BooleanSetting("-old-syntax", "Require `(...)` around conditions.") - val indent: Setting[Boolean] = BooleanSetting("-indent", "Together with -rewrite, remove {...} syntax when possible due to significant indentation.") - val noindent: Setting[Boolean] = BooleanSetting("-no-indent", "Require classical {...} syntax, indentation is not significant.", aliases = List("-noindent")) - - /* Decompiler settings */ - val printTasty: Setting[Boolean] = BooleanSetting("-print-tasty", "Prints the raw tasty.", aliases = List("--print-tasty")) - val printLines: Setting[Boolean] = BooleanSetting("-print-lines", "Show source code line numbers.", aliases = List("--print-lines")) - - /* Scala.js-related settings */ - val scalajsGenStaticForwardersForNonTopLevelObjects: Setting[Boolean] = BooleanSetting("-scalajs-genStaticForwardersForNonTopLevelObjects", "Generate static forwarders even for non-top-level objects (Scala.js only)") - val scalajsMapSourceURI: Setting[List[String]] = MultiStringSetting("-scalajs-mapSourceURI", "uri1[->uri2]", "rebases source URIs from uri1 to uri2 (or to a relative URI) for source maps (Scala.js only)") - - val projectUrl: Setting[String] = StringSetting ( - "-project-url", - "project repository homepage", - "The source repository of your project.", - "" - ) - - val wikiSyntax: Setting[Boolean] = BooleanSetting("-Xwiki-syntax", "Retains the Scala2 behavior of using Wiki Syntax in Scaladoc.") - - val jvmargs = PrefixSetting("-J", "-J", "Pass directly to the runtime system.") - val defines = PrefixSetting("-Dproperty=value", "-D", "Pass -Dproperty=value directly to the runtime system.") -end AllScalaSettings - -/** Settings shared by compiler and scaladoc */ -trait CommonScalaSettings: - self: SettingGroup => - - /* Path related settings */ - val bootclasspath: Setting[String] = PathSetting("-bootclasspath", "Override location of bootstrap class files.", Defaults.scalaBootClassPath, aliases = List("--boot-class-path")) - val extdirs: Setting[String] = PathSetting("-extdirs", "Override location of installed extensions.", Defaults.scalaExtDirs, aliases = List("--extension-directories")) - val 
javabootclasspath: Setting[String] = PathSetting("-javabootclasspath", "Override java boot classpath.", Defaults.javaBootClassPath, aliases = List("--java-boot-class-path")) - val javaextdirs: Setting[String] = PathSetting("-javaextdirs", "Override java extdirs classpath.", Defaults.javaExtDirs, aliases = List("--java-extension-directories")) - val sourcepath: Setting[String] = PathSetting("-sourcepath", "Specify location(s) of source files.", Defaults.scalaSourcePath, aliases = List("--source-path")) - val sourceroot: Setting[String] = PathSetting("-sourceroot", "Specify workspace root directory.", ".") - - val classpath: Setting[String] = PathSetting("-classpath", "Specify where to find user class files.", ScalaSettings.defaultClasspath, aliases = List("-cp", "--class-path")) - val outputDir: Setting[AbstractFile] = OutputSetting("-d", "directory|jar", "Destination for generated classfiles.", - new PlainDirectory(Directory("."))) - val color: Setting[String] = ChoiceSetting("-color", "mode", "Colored output", List("always", "never"/*, "auto"*/), "always"/* "auto"*/, aliases = List("--color")) - val verbose: Setting[Boolean] = BooleanSetting("-verbose", "Output messages about what the compiler is doing.", aliases = List("--verbose")) - val version: Setting[Boolean] = BooleanSetting("-version", "Print product version and exit.", aliases = List("--version")) - val help: Setting[Boolean] = BooleanSetting("-help", "Print a synopsis of standard options.", aliases = List("--help", "-h")) - val pageWidth: Setting[Int] = IntSetting("-pagewidth", "Set page width", ScalaSettings.defaultPageWidth, aliases = List("--page-width")) - val silentWarnings: Setting[Boolean] = BooleanSetting("-nowarn", "Silence all warnings.", aliases = List("--no-warnings")) - - val javaOutputVersion: Setting[String] = ChoiceSetting("-java-output-version", "version", "Compile code with classes specific to the given version of the Java platform available on the classpath and emit bytecode for this version. Corresponds to -release flag in javac.", ScalaSettings.supportedReleaseVersions, "", aliases = List("-release", "--release")) - - val deprecation: Setting[Boolean] = BooleanSetting("-deprecation", "Emit warning and location for usages of deprecated APIs.", aliases = List("--deprecation")) - val feature: Setting[Boolean] = BooleanSetting("-feature", "Emit warning and location for usages of features that should be imported explicitly.", aliases = List("--feature")) - val explain: Setting[Boolean] = BooleanSetting("-explain", "Explain errors in more detail.", aliases = List("--explain")) - // -explain-types setting is necessary for cross compilation, since it is mentioned in sbt-tpolecat, for instance - // it is otherwise subsumed by -explain, and should be dropped as soon as we can. 
- val explainTypes: Setting[Boolean] = BooleanSetting("-explain-types", "Explain type errors in more detail (deprecated, use -explain instead).", aliases = List("--explain-types", "-explaintypes")) - val unchecked: Setting[Boolean] = BooleanSetting("-unchecked", "Enable additional warnings where generated code depends on assumptions.", initialValue = true, aliases = List("--unchecked")) - val language: Setting[List[String]] = MultiStringSetting("-language", "feature", "Enable one or more language features.", aliases = List("--language")) - - /* Coverage settings */ - val coverageOutputDir = PathSetting("-coverage-out", "Destination for coverage classfiles and instrumentation data.", "", aliases = List("--coverage-out")) - - /* Other settings */ - val encoding: Setting[String] = StringSetting("-encoding", "encoding", "Specify character encoding used by source files.", Properties.sourceEncoding, aliases = List("--encoding")) - val usejavacp: Setting[Boolean] = BooleanSetting("-usejavacp", "Utilize the java.class.path in classpath resolution.", aliases = List("--use-java-class-path")) - val scalajs: Setting[Boolean] = BooleanSetting("-scalajs", "Compile in Scala.js mode (requires scalajs-library.jar on the classpath).", aliases = List("--scalajs")) -end CommonScalaSettings - -/** -P "plugin" settings. Various tools might support plugins. */ -private sealed trait PluginSettings: - self: SettingGroup => - val plugin: Setting[List[String]] = MultiStringSetting ("-Xplugin", "paths", "Load a plugin from each classpath.") - val disable: Setting[List[String]] = MultiStringSetting ("-Xplugin-disable", "plugin", "Disable plugins by name.") - val require: Setting[List[String]] = MultiStringSetting ("-Xplugin-require", "plugin", "Abort if a named plugin is not loaded.") - val showPlugins: Setting[Boolean] = BooleanSetting ("-Xplugin-list", "Print a synopsis of loaded plugins.") - val pluginsDir: Setting[String] = StringSetting ("-Xpluginsdir", "path", "Path to search for plugin archives.", Defaults.scalaPluginPath) - val pluginOptions: Setting[List[String]] = MultiStringSetting ("-P", "plugin:opt", "Pass an option to a plugin, e.g. 
-P::") - -/** -V "Verbose" settings */ -private sealed trait VerboseSettings: - self: SettingGroup => - val Vhelp: Setting[Boolean] = BooleanSetting("-V", "Print a synopsis of verbose options.") - val Xprint: Setting[List[String]] = PhasesSetting("-Vprint", "Print out program after", aliases = List("-Xprint")) - val XshowPhases: Setting[Boolean] = BooleanSetting("-Vphases", "List compiler phases.", aliases = List("-Xshow-phases")) - - val Vprofile: Setting[Boolean] = BooleanSetting("-Vprofile", "Show metrics about sources and internal representations to estimate compile-time complexity.") - val VprofileSortedBy = ChoiceSetting("-Vprofile-sorted-by", "key", "Show metrics about sources and internal representations sorted by given column name", List("name", "path", "lines", "tokens", "tasty", "complexity"), "") - val VprofileDetails = IntSetting("-Vprofile-details", "Show metrics about sources and internal representations of the most complex methods", 0) - val VreplMaxPrintElements: Setting[Int] = IntSetting("-Vrepl-max-print-elements", "Number of elements to be printed before output is truncated.", 1000) - val VreplMaxPrintCharacters: Setting[Int] = IntSetting("-Vrepl-max-print-characters", "Number of characters to be printed before output is truncated.", 50000) - -/** -W "Warnings" settings - */ -private sealed trait WarningSettings: - self: SettingGroup => - val Whelp: Setting[Boolean] = BooleanSetting("-W", "Print a synopsis of warning options.") - val XfatalWarnings: Setting[Boolean] = BooleanSetting("-Werror", "Fail the compilation if there are any warnings.", aliases = List("-Xfatal-warnings")) - - val Wunused: Setting[List[String]] = MultiChoiceSetting( - name = "-Wunused", - helpArg = "warning", - descr = "Enable or disable specific `unused` warnings", - choices = List("nowarn", "all"), - default = Nil - ) - object WunusedHas: - def allOr(s: String)(using Context) = Wunused.value.pipe(us => us.contains("all") || us.contains(s)) - def nowarn(using Context) = allOr("nowarn") - - val Wconf: Setting[List[String]] = MultiStringSetting( - "-Wconf", - "patterns", - default = List(), - descr = - s"""Configure compiler warnings. - |Syntax: -Wconf::,:,... - |multiple are combined with &, i.e., &...& - | - | - | - Any message: any - | - | - Message categories: cat=deprecation, cat=feature, cat=unchecked - | - | - Message content: msg=regex - | The regex need only match some part of the message, not all of it. - | - | - Message id: id=E129 - | The message id is printed with the warning. - | - | - Message name: name=PureExpressionInStatementPosition - | The message name is printed with the warning in verbose warning mode. - | - |In verbose warning mode the compiler prints matching filters for warnings. - |Verbose mode can be enabled globally using `-Wconf:any:verbose`, or locally - |using the @nowarn annotation (example: `@nowarn("v") def test = try 1`). - | - | - | - error / e - | - warning / w - | - verbose / v (emit warning, show additional help for writing `-Wconf` filters) - | - info / i (infos are not counted as warnings and not affected by `-Werror`) - | - silent / s - | - |The default configuration is empty. - | - |User-defined configurations are added to the left. The leftmost rule matching - |a warning message defines the action. 
- | - |Examples: - | - change every warning into an error: -Wconf:any:error - | - silence deprecations: -Wconf:cat=deprecation:s - | - |Note: on the command-line you might need to quote configurations containing `*` or `&` - |to prevent the shell from expanding patterns.""".stripMargin, - ) - -/** -X "Extended" or "Advanced" settings */ -private sealed trait XSettings: - self: SettingGroup => - - val Xhelp: Setting[Boolean] = BooleanSetting("-X", "Print a synopsis of advanced options.") - val XnoForwarders: Setting[Boolean] = BooleanSetting("-Xno-forwarders", "Do not generate static forwarders in mirror classes.") - val XmaxInlines: Setting[Int] = IntSetting("-Xmax-inlines", "Maximal number of successive inlines.", 32) - val XmaxInlinedTrees: Setting[Int] = IntSetting("-Xmax-inlined-trees", "Maximal number of inlined trees.", 2_000_000) - val Xmigration: Setting[ScalaVersion] = VersionSetting("-Xmigration", "Warn about constructs whose behavior may have changed since version.") - val XprintTypes: Setting[Boolean] = BooleanSetting("-Xprint-types", "Print tree types (debugging option).") - val XprintDiff: Setting[Boolean] = BooleanSetting("-Xprint-diff", "Print changed parts of the tree since last print.") - val XprintDiffDel: Setting[Boolean] = BooleanSetting("-Xprint-diff-del", "Print changed parts of the tree since last print including deleted parts.") - val XprintInline: Setting[Boolean] = BooleanSetting("-Xprint-inline", "Show where inlined code comes from.") - val XprintSuspension: Setting[Boolean] = BooleanSetting("-Xprint-suspension", "Show when code is suspended until macros are compiled.") - val Xprompt: Setting[Boolean] = BooleanSetting("-Xprompt", "Display a prompt after each error (debugging option).") - val XreplDisableDisplay: Setting[Boolean] = BooleanSetting("-Xrepl-disable-display", "Do not display definitions in REPL.") - val XverifySignatures: Setting[Boolean] = BooleanSetting("-Xverify-signatures", "Verify generic signatures in generated bytecode.") - val XignoreScala2Macros: Setting[Boolean] = BooleanSetting("-Xignore-scala2-macros", "Ignore errors when compiling code that calls Scala2 macros, these will fail at runtime.") - val XimportSuggestionTimeout: Setting[Int] = IntSetting("-Ximport-suggestion-timeout", "Timeout (in ms) for searching for import suggestions when errors are reported.", 8000) - val Xsemanticdb: Setting[Boolean] = BooleanSetting("-Xsemanticdb", "Store information in SemanticDB.", aliases = List("-Ysemanticdb")) - val XuncheckedJavaOutputVersion: Setting[String] = ChoiceSetting("-Xunchecked-java-output-version", "target", "Emit bytecode for the specified version of the Java platform. This might produce bytecode that will break at runtime. Corresponds to -target flag in javac. 
When on JDK 9+, consider -java-output-version as a safer alternative.", ScalaSettings.supportedTargetVersions, "", aliases = List("-Xtarget", "--Xtarget")) - val XcheckMacros: Setting[Boolean] = BooleanSetting("-Xcheck-macros", "Check some invariants of macro generated code while expanding macros", aliases = List("--Xcheck-macros")) - val XmainClass: Setting[String] = StringSetting("-Xmain-class", "path", "Class for manifest's Main-Class entry (only useful with -d )", "") - val XimplicitSearchLimit: Setting[Int] = IntSetting("-Ximplicit-search-limit", "Maximal number of expressions to be generated in an implicit search", 50000) - - val XmixinForceForwarders = ChoiceSetting( - name = "-Xmixin-force-forwarders", - helpArg = "mode", - descr = "Generate forwarder methods in classes inhering concrete methods from traits.", - choices = List("true", "junit", "false"), - default = "true") - - object mixinForwarderChoices { - def isTruthy(using Context) = XmixinForceForwarders.value == "true" - def isAtLeastJunit(using Context) = isTruthy || XmixinForceForwarders.value == "junit" - } - - val XmacroSettings: Setting[List[String]] = MultiStringSetting("-Xmacro-settings", "setting1,setting2,..settingN", "List of settings which exposed to the macros") -end XSettings - -/** -Y "Forking" as in forked tongue or "Private" settings */ -private sealed trait YSettings: - self: SettingGroup => - - val Yhelp: Setting[Boolean] = BooleanSetting("-Y", "Print a synopsis of private options.") - val Ycheck: Setting[List[String]] = PhasesSetting("-Ycheck", "Check the tree at the end of") - val YcheckMods: Setting[Boolean] = BooleanSetting("-Ycheck-mods", "Check that symbols and their defining trees have modifiers in sync.") - val Ydebug: Setting[Boolean] = BooleanSetting("-Ydebug", "Increase the quantity of debugging output.") - val YdebugTrace: Setting[Boolean] = BooleanSetting("-Ydebug-trace", "Trace core operations.") - val YdebugFlags: Setting[Boolean] = BooleanSetting("-Ydebug-flags", "Print all flags of definitions.") - val YdebugMissingRefs: Setting[Boolean] = BooleanSetting("-Ydebug-missing-refs", "Print a stacktrace when a required symbol is missing.") - val YdebugNames: Setting[Boolean] = BooleanSetting("-Ydebug-names", "Show internal representation of names.") - val YdebugPos: Setting[Boolean] = BooleanSetting("-Ydebug-pos", "Show full source positions including spans.") - val YdebugTreeWithId: Setting[Int] = IntSetting("-Ydebug-tree-with-id", "Print the stack trace when the tree with the given id is created.", Int.MinValue) - val YdebugTypeError: Setting[Boolean] = BooleanSetting("-Ydebug-type-error", "Print the stack trace when a TypeError is caught", false) - val YdebugError: Setting[Boolean] = BooleanSetting("-Ydebug-error", "Print the stack trace when any error is caught.", false) - val YdebugUnpickling: Setting[Boolean] = BooleanSetting("-Ydebug-unpickling", "Print the stack trace when an error occurs when reading Tasty.", false) - val YtermConflict: Setting[String] = ChoiceSetting("-Yresolve-term-conflict", "strategy", "Resolve term conflicts", List("package", "object", "error"), "error") - val Ylog: Setting[List[String]] = PhasesSetting("-Ylog", "Log operations during") - val YlogClasspath: Setting[Boolean] = BooleanSetting("-Ylog-classpath", "Output information about what classpath is being applied.") - val YdisableFlatCpCaching: Setting[Boolean] = BooleanSetting("-YdisableFlatCpCaching", "Do not cache flat classpath representation of classpath elements from jars across compiler instances.") - - 
val Yscala2Unpickler: Setting[String] = StringSetting("-Yscala2-unpickler", "", "Control where we may get Scala 2 symbols from. This is either \"always\", \"never\", or a classpath.", "always") - - val YnoImports: Setting[Boolean] = BooleanSetting("-Yno-imports", "Compile without importing scala.*, java.lang.*, or Predef.") - val YnoGenericSig: Setting[Boolean] = BooleanSetting("-Yno-generic-signatures", "Suppress generation of generic signatures for Java.") - val YnoPredef: Setting[Boolean] = BooleanSetting("-Yno-predef", "Compile without importing Predef.") - val Yskip: Setting[List[String]] = PhasesSetting("-Yskip", "Skip") - val Ydumpclasses: Setting[String] = StringSetting("-Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "") - val YstopAfter: Setting[List[String]] = PhasesSetting("-Ystop-after", "Stop after", aliases = List("-stop")) // backward compat - val YstopBefore: Setting[List[String]] = PhasesSetting("-Ystop-before", "Stop before") // stop before erasure as long as we have not debugged it fully - val YshowSuppressedErrors: Setting[Boolean] = BooleanSetting("-Yshow-suppressed-errors", "Also show follow-on errors and warnings that are normally suppressed.") - val YdetailedStats: Setting[Boolean] = BooleanSetting("-Ydetailed-stats", "Show detailed internal compiler stats (needs Stats.enabled to be set to true).") - val YkindProjector: Setting[String] = ChoiceSetting("-Ykind-projector", "[underscores, enable, disable]", "Allow `*` as type lambda placeholder to be compatible with kind projector. When invoked as -Ykind-projector:underscores will repurpose `_` to be a type parameter placeholder, this will disable usage of underscore as a wildcard.", List("disable", "enable", "underscores"), "disable") - val YprintPos: Setting[Boolean] = BooleanSetting("-Yprint-pos", "Show tree positions.") - val YprintPosSyms: Setting[Boolean] = BooleanSetting("-Yprint-pos-syms", "Show symbol definitions positions.") - val YnoDeepSubtypes: Setting[Boolean] = BooleanSetting("-Yno-deep-subtypes", "Throw an exception on deep subtyping call stacks.") - val YnoPatmatOpt: Setting[Boolean] = BooleanSetting("-Yno-patmat-opt", "Disable all pattern matching optimizations.") - val YplainPrinter: Setting[Boolean] = BooleanSetting("-Yplain-printer", "Pretty-print using a plain printer.") - val YprintSyms: Setting[Boolean] = BooleanSetting("-Yprint-syms", "When printing trees print info in symbols instead of corresponding info in trees.") - val YprintDebug: Setting[Boolean] = BooleanSetting("-Yprint-debug", "When printing trees, print some extra information useful for debugging.") - val YprintDebugOwners: Setting[Boolean] = BooleanSetting("-Yprint-debug-owners", "When printing trees, print owners of definitions.") - val YprintLevel: Setting[Boolean] = BooleanSetting("-Yprint-level", "print nesting levels of symbols and type variables.") - val YshowPrintErrors: Setting[Boolean] = BooleanSetting("-Yshow-print-errors", "Don't suppress exceptions thrown during tree printing.") - val YtestPickler: Setting[Boolean] = BooleanSetting("-Ytest-pickler", "Self-test for pickling functionality; should be used with -Ystop-after:pickler.") - val YcheckReentrant: Setting[Boolean] = BooleanSetting("-Ycheck-reentrant", "Check that compiled program does not contain vars that can be accessed from a global root.") - val YdropComments: Setting[Boolean] = BooleanSetting("-Ydrop-docs", "Drop documentation when scanning source files.", aliases 
= List("-Ydrop-comments")) - val YcookComments: Setting[Boolean] = BooleanSetting("-Ycook-docs", "Cook the documentation (type check `@usecase`, etc.)", aliases = List("-Ycook-comments")) - val YreadComments: Setting[Boolean] = BooleanSetting("-Yread-docs", "Read documentation from tasty.") - val YforceSbtPhases: Setting[Boolean] = BooleanSetting("-Yforce-sbt-phases", "Run the phases used by sbt for incremental compilation (ExtractDependencies and ExtractAPI) even if the compiler is ran outside of sbt, for debugging.") - val YdumpSbtInc: Setting[Boolean] = BooleanSetting("-Ydump-sbt-inc", "For every compiled foo.scala, output the API representation and dependencies used for sbt incremental compilation in foo.inc, implies -Yforce-sbt-phases.") - val YcheckAllPatmat: Setting[Boolean] = BooleanSetting("-Ycheck-all-patmat", "Check exhaustivity and redundancy of all pattern matching (used for testing the algorithm).") - val YcheckConstraintDeps: Setting[Boolean] = BooleanSetting("-Ycheck-constraint-deps", "Check dependency tracking in constraints (used for testing the algorithm).") - val YretainTrees: Setting[Boolean] = BooleanSetting("-Yretain-trees", "Retain trees for top-level classes, accessible from ClassSymbol#tree") - val YshowTreeIds: Setting[Boolean] = BooleanSetting("-Yshow-tree-ids", "Uniquely tag all tree nodes in debugging output.") - val YfromTastyIgnoreList: Setting[List[String]] = MultiStringSetting("-Yfrom-tasty-ignore-list", "file", "List of `tasty` files in jar files that will not be loaded when using -from-tasty") - val YnoExperimental: Setting[Boolean] = BooleanSetting("-Yno-experimental", "Disable experimental language features") - - val YprofileEnabled: Setting[Boolean] = BooleanSetting("-Yprofile-enabled", "Enable profiling.") - val YprofileDestination: Setting[String] = StringSetting("-Yprofile-destination", "file", "Where to send profiling output - specify a file, default is to the console.", "") - //.withPostSetHook( _ => YprofileEnabled.value = true ) - val YprofileExternalTool: Setting[List[String]] = PhasesSetting("-Yprofile-external-tool", "Enable profiling for a phase using an external tool hook. Generally only useful for a single phase.", "typer") - //.withPostSetHook( _ => YprofileEnabled.value = true ) - val YprofileRunGcBetweenPhases: Setting[List[String]] = PhasesSetting("-Yprofile-run-gc", "Run a GC between phases - this allows heap size to be accurate at the expense of more time. Specify a list of phases, or *", "_") - //.withPostSetHook( _ => YprofileEnabled.value = true ) - - // Experimental language features - val YnoKindPolymorphism: Setting[Boolean] = BooleanSetting("-Yno-kind-polymorphism", "Disable kind polymorphism.") - val YexplicitNulls: Setting[Boolean] = BooleanSetting("-Yexplicit-nulls", "Make reference types non-nullable. Nullable types can be expressed with unions: e.g. 
String|Null.") - val YcheckInit: Setting[Boolean] = BooleanSetting("-Ysafe-init", "Ensure safe initialization of objects") - val YrequireTargetName: Setting[Boolean] = BooleanSetting("-Yrequire-targetName", "Warn if an operator is defined without a @targetName annotation") - val YrecheckTest: Setting[Boolean] = BooleanSetting("-Yrecheck-test", "Run basic rechecking (internal test only)") - val YccDebug: Setting[Boolean] = BooleanSetting("-Ycc-debug", "Used in conjunction with captureChecking language import, debug info for captured references") - val YccNoAbbrev: Setting[Boolean] = BooleanSetting("-Ycc-no-abbrev", "Used in conjunction with captureChecking language import, suppress type abbreviations") - val YlightweightLazyVals: Setting[Boolean] = BooleanSetting("-Ylightweight-lazy-vals", "Use experimental lightweight implementation of lazy vals") - - /** Area-specific debug output */ - val YexplainLowlevel: Setting[Boolean] = BooleanSetting("-Yexplain-lowlevel", "When explaining type errors, show types at a lower level.") - val YnoDoubleBindings: Setting[Boolean] = BooleanSetting("-Yno-double-bindings", "Assert no namedtype is bound twice (should be enabled only if program is error-free).") - val YshowVarBounds: Setting[Boolean] = BooleanSetting("-Yshow-var-bounds", "Print type variables with their bounds.") - - val YnoDecodeStacktraces: Setting[Boolean] = BooleanSetting("-Yno-decode-stacktraces", "Show raw StackOverflow stacktraces, instead of decoding them into triggering operations.") - - val Yinstrument: Setting[Boolean] = BooleanSetting("-Yinstrument", "Add instrumentation code that counts allocations and closure creations.") - val YinstrumentDefs: Setting[Boolean] = BooleanSetting("-Yinstrument-defs", "Add instrumentation code that counts method calls; needs -Yinstrument to be set, too.") - - val YforceInlineWhileTyping: Setting[Boolean] = BooleanSetting("-Yforce-inline-while-typing", "Make non-transparent inline methods inline when typing. Emulates the old inlining behavior of 3.0.0-M3.") -end YSettings - diff --git a/tests/pos-with-compiler-cc/dotc/config/ScalaVersion.scala b/tests/pos-with-compiler-cc/dotc/config/ScalaVersion.scala deleted file mode 100644 index 7fdf57478f1a..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/ScalaVersion.scala +++ /dev/null @@ -1,188 +0,0 @@ -/* @author James Iry - */ -package dotty.tools -package dotc.config - -import scala.language.unsafeNulls - -import scala.annotation.internal.sharable -import scala.util.{Try, Success, Failure} - -/** - * Represents a single Scala version in a manner that - * supports easy comparison and sorting. - */ -sealed abstract class ScalaVersion extends Ordered[ScalaVersion] { - def unparse: String -} - -/** - * A scala version that sorts higher than all actual versions - */ -@sharable case object NoScalaVersion extends ScalaVersion { - def unparse: String = "none" - - def compare(that: ScalaVersion): Int = that match { - case NoScalaVersion => 0 - case _ => 1 - } -} - -/** - * A specific Scala version, not one of the magic min/max versions. An SpecificScalaVersion - * may or may not be a released version - i.e. this same class is used to represent - * final, release candidate, milestone, and development builds. 
The build argument is used - * to segregate builds - */ -case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBuild) extends ScalaVersion { - def unparse: String = s"${major}.${minor}.${rev}.${build.unparse}" - - def compare(that: ScalaVersion): Int = that match { - case SpecificScalaVersion(thatMajor, thatMinor, thatRev, thatBuild) => - // this could be done more cleanly by importing scala.math.Ordering.Implicits, but we have to do these - // comparisons a lot so I'm using brute force direct style code - if (major < thatMajor) -1 - else if (major > thatMajor) 1 - else if (minor < thatMinor) -1 - else if (minor > thatMinor) 1 - else if (rev < thatRev) -1 - else if (rev > thatRev) 1 - else build compare thatBuild - case AnyScalaVersion => 1 - case NoScalaVersion => -1 - } -} - -/** - * A Scala version that sorts lower than all actual versions - */ -@sharable case object AnyScalaVersion extends ScalaVersion { - def unparse: String = "any" - - def compare(that: ScalaVersion): Int = that match { - case AnyScalaVersion => 0 - case _ => -1 - } -} - -/** - * Methods for parsing ScalaVersions - */ -@sharable object ScalaVersion { - private val dot = "\\." - private val dash = "\\-" - private def not(s:String) = s"[^${s}]" - private val R = s"((${not(dot)}*)(${dot}(${not(dot)}*)(${dot}(${not(dash)}*)(${dash}(.*))?)?)?)".r - - def parse(versionString : String): Try[ScalaVersion] = { - def failure = Failure(new NumberFormatException( - s"There was a problem parsing ${versionString}. " + - "Versions should be in the form major[.minor[.revision]] " + - "where each part is a positive number, as in 2.10.1. " + - "The minor and revision parts are optional." - )) - - def toInt(s: String) = s match { - case null | "" => 0 - case _ => s.toInt - } - - def isInt(s: String) = Try(toInt(s)).isSuccess - - import ScalaBuild._ - - def toBuild(s: String) = s match { - case null | "FINAL" => Final - case s if (s.toUpperCase.startsWith("RC") && isInt(s.substring(2))) => RC(toInt(s.substring(2))) - case s if (s.toUpperCase.startsWith("M") && isInt(s.substring(1))) => Milestone(toInt(s.substring(1))) - case _ => Development(s) - } - - try versionString match { - case "" | "any" => Success(AnyScalaVersion) - case "none" => Success(NoScalaVersion) - case R(_, majorS, _, minorS, _, revS, _, buildS) => - Success(SpecificScalaVersion(toInt(majorS), toInt(minorS), toInt(revS), toBuild(buildS))) - case _ => failure - } - catch { - case e: NumberFormatException => failure - } - } - - /** - * The version of the compiler running now - */ - val current: ScalaVersion = parse(util.Properties.versionNumberString).get -} - -/** - * Represents the data after the dash in major.minor.rev-build - */ -abstract class ScalaBuild extends Ordered[ScalaBuild] { - /** - * Return a version of this build information that can be parsed back into the - * same ScalaBuild - */ - def unparse: String -} - -object ScalaBuild { - - /** A development, test, nightly, snapshot or other "unofficial" build - */ - case class Development(id: String) extends ScalaBuild { - def unparse: String = s"-${id}" - - def compare(that: ScalaBuild): Int = that match { - // sorting two development builds based on id is reasonably valid for two versions created with the same schema - // otherwise it's not correct, but since it's impossible to put a total ordering on development build versions - // this is a pragmatic compromise - case Development(thatId) => id compare thatId - // assume a development build is newer than anything else, that's not 
really true, but good luck - // mapping development build versions to other build types - case _ => 1 - } - } - - /** A final build - */ - case object Final extends ScalaBuild { - def unparse: String = "" - - def compare(that: ScalaBuild): Int = that match { - case Final => 0 - // a final is newer than anything other than a development build or another final - case Development(_) => -1 - case _ => 1 - } - } - - /** A candidate for final release - */ - case class RC(n: Int) extends ScalaBuild { - def unparse: String = s"-RC${n}" - - def compare(that: ScalaBuild): Int = that match { - // compare two rcs based on their RC numbers - case RC(thatN) => n - thatN - // an rc is older than anything other than a milestone or another rc - case Milestone(_) => 1 - case _ => -1 - } - } - - /** An intermediate release - */ - case class Milestone(n: Int) extends ScalaBuild { - def unparse: String = s"-M${n}" - - def compare(that: ScalaBuild): Int = that match { - // compare two milestones based on their milestone numbers - case Milestone(thatN) => n - thatN - // a milestone is older than anything other than another milestone - case _ => -1 - } - } -} - diff --git a/tests/pos-with-compiler-cc/dotc/config/Settings.scala b/tests/pos-with-compiler-cc/dotc/config/Settings.scala deleted file mode 100644 index 277833afbd5d..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/Settings.scala +++ /dev/null @@ -1,295 +0,0 @@ -package dotty.tools.dotc -package config - -import scala.language.unsafeNulls - -import core.Contexts._ - -import dotty.tools.io.{AbstractFile, Directory, JarArchive, PlainDirectory} - -import annotation.tailrec -import collection.mutable.ArrayBuffer -import reflect.ClassTag -import scala.util.{Success, Failure} - -object Settings: - - val BooleanTag: ClassTag[Boolean] = ClassTag.Boolean - val IntTag: ClassTag[Int] = ClassTag.Int - val StringTag: ClassTag[String] = ClassTag(classOf[String]) - val ListTag: ClassTag[List[?]] = ClassTag(classOf[List[?]]) - val VersionTag: ClassTag[ScalaVersion] = ClassTag(classOf[ScalaVersion]) - val OptionTag: ClassTag[Option[?]] = ClassTag(classOf[Option[?]]) - val OutputTag: ClassTag[AbstractFile] = ClassTag(classOf[AbstractFile]) - - class SettingsState(initialValues: Seq[Any]): - private val values = ArrayBuffer(initialValues: _*) - private var _wasRead: Boolean = false - - override def toString: String = s"SettingsState(values: ${values.toList})" - - def value(idx: Int): Any = - _wasRead = true - values(idx) - - def update(idx: Int, x: Any): SettingsState = - if (_wasRead) then SettingsState(values.toSeq).update(idx, x) - else - values(idx) = x - this - end SettingsState - - case class ArgsSummary( - sstate: SettingsState, - arguments: List[String], - errors: List[String], - warnings: List[String]) { - - def fail(msg: String): Settings.ArgsSummary = - ArgsSummary(sstate, arguments.tail, errors :+ msg, warnings) - - def warn(msg: String): Settings.ArgsSummary = - ArgsSummary(sstate, arguments.tail, errors, warnings :+ msg) - } - - case class Setting[T: ClassTag] private[Settings] ( - name: String, - description: String, - default: T, - helpArg: String = "", - choices: Option[Seq[?]] = None, - prefix: String = "", - aliases: List[String] = Nil, - depends: List[(Setting[?], Any)] = Nil, - propertyClass: Option[Class[?]] = None)(private[Settings] val idx: Int) { - - private var changed: Boolean = false - - def valueIn(state: SettingsState): T = state.value(idx).asInstanceOf[T] - - def updateIn(state: SettingsState, x: Any): SettingsState = x match - case 
_: T => state.update(idx, x) - case _ => throw IllegalArgumentException(s"found: $x of type ${x.getClass.getName}, required: ${implicitly[ClassTag[T]]}") - - def isDefaultIn(state: SettingsState): Boolean = valueIn(state) == default - - def isMultivalue: Boolean = implicitly[ClassTag[T]] == ListTag - - def legalChoices: String = - choices match { - case Some(xs) if xs.isEmpty => "" - case Some(r: Range) => s"${r.head}..${r.last}" - case Some(xs) => xs.mkString(", ") - case None => "" - } - - def tryToSet(state: ArgsSummary): ArgsSummary = { - val ArgsSummary(sstate, arg :: args, errors, warnings) = state: @unchecked - def update(value: Any, args: List[String]): ArgsSummary = - var dangers = warnings - val value1 = - if changed && isMultivalue then - val value0 = value.asInstanceOf[List[String]] - val current = valueIn(sstate).asInstanceOf[List[String]] - value0.filter(current.contains).foreach(s => dangers :+= s"Setting $name set to $s redundantly") - current ++ value0 - else - if changed then dangers :+= s"Flag $name set repeatedly" - value - changed = true - ArgsSummary(updateIn(sstate, value1), args, errors, dangers) - end update - - def fail(msg: String, args: List[String]) = - ArgsSummary(sstate, args, errors :+ msg, warnings) - - def missingArg = - fail(s"missing argument for option $name", args) - - def setString(argValue: String, args: List[String]) = - choices match - case Some(xs) if !xs.contains(argValue) => - fail(s"$argValue is not a valid choice for $name", args) - case _ => - update(argValue, args) - - def setInt(argValue: String, args: List[String]) = - try - val x = argValue.toInt - choices match - case Some(r: Range) if x < r.head || r.last < x => - fail(s"$argValue is out of legal range ${r.head}..${r.last} for $name", args) - case Some(xs) if !xs.contains(x) => - fail(s"$argValue is not a valid choice for $name", args) - case _ => - update(x, args) - catch case _: NumberFormatException => - fail(s"$argValue is not an integer argument for $name", args) - - def doSet(argRest: String) = ((implicitly[ClassTag[T]], args): @unchecked) match { - case (BooleanTag, _) => - update(true, args) - case (OptionTag, _) => - update(Some(propertyClass.get.getConstructor().newInstance()), args) - case (ListTag, _) => - if (argRest.isEmpty) missingArg - else - val strings = argRest.split(",").toList - choices match - case Some(valid) => strings.filterNot(valid.contains) match - case Nil => update(strings, args) - case invalid => fail(s"invalid choice(s) for $name: ${invalid.mkString(",")}", args) - case _ => update(strings, args) - case (StringTag, _) if argRest.nonEmpty || choices.exists(_.contains("")) => - setString(argRest, args) - case (StringTag, arg2 :: args2) => - if (arg2 startsWith "-") missingArg - else setString(arg2, args2) - case (OutputTag, arg :: args) => - val path = Directory(arg) - val isJar = path.extension == "jar" - if (!isJar && !path.isDirectory) - fail(s"'$arg' does not exist or is not a directory or .jar file", args) - else { - val output = if (isJar) JarArchive.create(path) else new PlainDirectory(path) - update(output, args) - } - case (IntTag, args) if argRest.nonEmpty => - setInt(argRest, args) - case (IntTag, arg2 :: args2) => - setInt(arg2, args2) - case (VersionTag, _) => - ScalaVersion.parse(argRest) match { - case Success(v) => update(v, args) - case Failure(ex) => fail(ex.getMessage, args) - } - case (_, Nil) => - missingArg - } - - def matches(argName: String) = (name :: aliases).exists(_ == argName) - - if (prefix != "" && arg.startsWith(prefix)) - 
doSet(arg drop prefix.length) - else if (prefix == "" && matches(arg.takeWhile(_ != ':'))) - doSet(arg.dropWhile(_ != ':').drop(1)) - else - state - } - } - - object Setting: - extension [T](setting: Setting[T]) - def value(using Context): T = setting.valueIn(ctx.settingsState) - def update(x: T)(using Context): SettingsState = setting.updateIn(ctx.settingsState, x) - def isDefault(using Context): Boolean = setting.isDefaultIn(ctx.settingsState) - - class SettingGroup { - - private val _allSettings = new ArrayBuffer[Setting[?]] - def allSettings: Seq[Setting[?]] = _allSettings.toSeq - - def defaultState: SettingsState = new SettingsState(allSettings map (_.default)) - - def userSetSettings(state: SettingsState): Seq[Setting[?]] = - allSettings filterNot (_.isDefaultIn(state)) - - def toConciseString(state: SettingsState): String = - userSetSettings(state).mkString("(", " ", ")") - - private def checkDependencies(state: ArgsSummary): ArgsSummary = - userSetSettings(state.sstate).foldLeft(state)(checkDependenciesOfSetting) - - private def checkDependenciesOfSetting(state: ArgsSummary, setting: Setting[?]) = - setting.depends.foldLeft(state) { (s, dep) => - val (depSetting, reqValue) = dep - if (depSetting.valueIn(state.sstate) == reqValue) s - else s.fail(s"incomplete option ${setting.name} (requires ${depSetting.name})") - } - - /** Iterates over the arguments applying them to settings where applicable. - * Then verifies setting dependencies are met. - * - * This takes a boolean indicating whether to keep - * processing if an argument is seen which is not a command line option. - * This is an expedience for the moment so that you can say - * - * scalac -d /tmp foo.scala -optimise - * - * while also allowing - * - * scala Program opt opt - * - * to get their arguments. 
- */ - @tailrec - final def processArguments(state: ArgsSummary, processAll: Boolean, skipped: List[String]): ArgsSummary = - def stateWithArgs(args: List[String]) = ArgsSummary(state.sstate, args, state.errors, state.warnings) - state.arguments match - case Nil => - checkDependencies(stateWithArgs(skipped)) - case "--" :: args => - checkDependencies(stateWithArgs(skipped ++ args)) - case x :: _ if x startsWith "-" => - @tailrec def loop(settings: List[Setting[?]]): ArgsSummary = settings match - case setting :: settings1 => - val state1 = setting.tryToSet(state) - if state1 ne state then state1 - else loop(settings1) - case Nil => - state.warn(s"bad option '$x' was ignored") - processArguments(loop(allSettings.toList), processAll, skipped) - case arg :: args => - if processAll then processArguments(stateWithArgs(args), processAll, skipped :+ arg) - else state - end processArguments - - def processArguments(arguments: List[String], processAll: Boolean, settingsState: SettingsState = defaultState): ArgsSummary = - processArguments(ArgsSummary(settingsState, arguments, Nil, Nil), processAll, Nil) - - def publish[T](settingf: Int => Setting[T]): Setting[T] = { - val setting = settingf(_allSettings.length) - _allSettings += setting - setting - } - - def BooleanSetting(name: String, descr: String, initialValue: Boolean = false, aliases: List[String] = Nil): Setting[Boolean] = - publish(Setting(name, descr, initialValue, aliases = aliases)) - - def StringSetting(name: String, helpArg: String, descr: String, default: String, aliases: List[String] = Nil): Setting[String] = - publish(Setting(name, descr, default, helpArg, aliases = aliases)) - - def ChoiceSetting(name: String, helpArg: String, descr: String, choices: List[String], default: String, aliases: List[String] = Nil): Setting[String] = - publish(Setting(name, descr, default, helpArg, Some(choices), aliases = aliases)) - - def MultiChoiceSetting(name: String, helpArg: String, descr: String, choices: List[String], default: List[String], aliases: List[String] = Nil): Setting[List[String]] = - publish(Setting(name, descr, default, helpArg, Some(choices), aliases = aliases)) - - def IntSetting(name: String, descr: String, default: Int, aliases: List[String] = Nil): Setting[Int] = - publish(Setting(name, descr, default, aliases = aliases)) - - def IntChoiceSetting(name: String, descr: String, choices: Seq[Int], default: Int): Setting[Int] = - publish(Setting(name, descr, default, choices = Some(choices))) - - def MultiStringSetting(name: String, helpArg: String, descr: String, default: List[String] = Nil, aliases: List[String] = Nil): Setting[List[String]] = - publish(Setting(name, descr, default, helpArg, aliases = aliases)) - - def OutputSetting(name: String, helpArg: String, descr: String, default: AbstractFile): Setting[AbstractFile] = - publish(Setting(name, descr, default, helpArg)) - - def PathSetting(name: String, descr: String, default: String, aliases: List[String] = Nil): Setting[String] = - publish(Setting(name, descr, default, aliases = aliases)) - - def PhasesSetting(name: String, descr: String, default: String = "", aliases: List[String] = Nil): Setting[List[String]] = - publish(Setting(name, descr, if (default.isEmpty) Nil else List(default), aliases = aliases)) - - def PrefixSetting(name: String, pre: String, descr: String): Setting[List[String]] = - publish(Setting(name, descr, Nil, prefix = pre)) - - def VersionSetting(name: String, descr: String, default: ScalaVersion = NoScalaVersion): Setting[ScalaVersion] = - 
publish(Setting(name, descr, default)) - - def OptionSetting[T: ClassTag](name: String, descr: String, aliases: List[String] = Nil): Setting[Option[T]] = - publish(Setting(name, descr, None, propertyClass = Some(implicitly[ClassTag[T]].runtimeClass), aliases = aliases)) - } -end Settings diff --git a/tests/pos-with-compiler-cc/dotc/config/SourceVersion.scala b/tests/pos-with-compiler-cc/dotc/config/SourceVersion.scala deleted file mode 100644 index 4b9b1b247856..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/SourceVersion.scala +++ /dev/null @@ -1,32 +0,0 @@ -package dotty.tools -package dotc -package config - -import core.Decorators.* -import util.Property - -enum SourceVersion: - case `3.0-migration`, `3.0`, `3.1` // Note: do not add `3.1-migration` here, 3.1 is the same language as 3.0. - case `3.2-migration`, `3.2` - case `3.3-migration`, `3.3` - case `future-migration`, `future` - - val isMigrating: Boolean = toString.endsWith("-migration") - - def stable: SourceVersion = - if isMigrating then SourceVersion.values(ordinal + 1) else this - - def isAtLeast(v: SourceVersion) = stable.ordinal >= v.ordinal - -object SourceVersion extends Property.Key[SourceVersion]: - def defaultSourceVersion = `3.3` - - /** language versions that may appear in a language import, are deprecated, but not removed from the standard library. */ - val illegalSourceVersionNames = List("3.1-migration").map(_.toTermName) - - /** language versions that the compiler recognises. */ - val validSourceVersionNames = values.toList.map(_.toString.toTermName) - - /** All source versions that can be recognised from a language import. e.g. `import language.3.1` */ - val allSourceVersionNames = validSourceVersionNames ::: illegalSourceVersionNames -end SourceVersion diff --git a/tests/pos-with-compiler-cc/dotc/config/WrappedProperties.scala b/tests/pos-with-compiler-cc/dotc/config/WrappedProperties.scala deleted file mode 100644 index 5b79432a97e7..000000000000 --- a/tests/pos-with-compiler-cc/dotc/config/WrappedProperties.scala +++ /dev/null @@ -1,42 +0,0 @@ -package dotty.tools -package dotc -package config - -import scala.language.unsafeNulls - -/** For placing a wrapper function around property functions. - * Motivated by places like google app engine throwing exceptions - * on property lookups. - */ -trait WrappedProperties extends PropertiesTrait { - def wrap[T](body: => T): Option[T] - - protected def propCategory: String = "wrapped" - protected def pickJarBasedOn: Class[?] 
= this.getClass - - override def propIsSet(name: String): Boolean = wrap(super.propIsSet(name)) exists (x => x) - override def propOrElse(name: String, alt: String): String = wrap(super.propOrElse(name, alt)) getOrElse alt - override def setProp(name: String, value: String): String = wrap(super.setProp(name, value)).orNull - override def clearProp(name: String): String = wrap(super.clearProp(name)).orNull - override def envOrElse(name: String, alt: String): String = wrap(super.envOrElse(name, alt)) getOrElse alt - override def envOrNone(name: String): Option[String] = wrap(super.envOrNone(name)).flatten - - def systemProperties: Iterator[(String, String)] = { - import scala.jdk.CollectionConverters._ - wrap(System.getProperties.asScala.iterator) getOrElse Iterator.empty - } -} - -object WrappedProperties { - object AccessControl extends WrappedProperties { - def wrap[T](body: => T): Option[T] = - try Some(body) - catch { - // the actual exception we are concerned with is AccessControlException, - // but that's deprecated on JDK 17, so catching its superclass is a convenient - // way to avoid a deprecation warning - case _: SecurityException => - None - } - } -} diff --git a/tests/pos-with-compiler-cc/dotc/core/Annotations.scala b/tests/pos-with-compiler-cc/dotc/core/Annotations.scala deleted file mode 100644 index 2061bddb9e8a..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/Annotations.scala +++ /dev/null @@ -1,274 +0,0 @@ -package dotty.tools -package dotc -package core - -import Symbols._, Types._, Contexts._, Constants._ -import dotty.tools.dotc.ast.tpd, tpd.* -import util.Spans.Span -import printing.{Showable, Printer} -import printing.Texts.Text -import annotation.internal.sharable -import language.experimental.pureFunctions -import annotation.retains - -object Annotations { - - def annotClass(tree: Tree)(using Context) = - if (tree.symbol.isConstructor) tree.symbol.owner - else tree.tpe.typeSymbol - - abstract class Annotation extends Showable, Pure { - - def tree(using Context): Tree - - def symbol(using Context): Symbol = annotClass(tree) - - def hasSymbol(sym: Symbol)(using Context) = symbol == sym - - def matches(cls: Symbol)(using Context): Boolean = symbol.derivesFrom(cls) - - def appliesToModule: Boolean = true // for now; see remark in SymDenotations - - def derivedAnnotation(tree: Tree)(using Context): Annotation = - if (tree eq this.tree) this else Annotation(tree) - - /** All arguments to this annotation in a single flat list */ - def arguments(using Context): List[Tree] = tpd.allArguments(tree) - - def argument(i: Int)(using Context): Option[Tree] = { - val args = arguments - if (i < args.length) Some(args(i)) else None - } - def argumentConstant(i: Int)(using Context): Option[Constant] = - for (case ConstantType(c) <- argument(i) map (_.tpe.widenTermRefExpr.normalized)) yield c - - def argumentConstantString(i: Int)(using Context): Option[String] = - for (case Constant(s: String) <- argumentConstant(i)) yield s - - /** The tree evaluaton is in progress. */ - def isEvaluating: Boolean = false - - /** The tree evaluation has finished. */ - def isEvaluated: Boolean = true - - /** Normally, applies a type map to all tree nodes of this annotation, but can - * be overridden. Returns EmptyAnnotation if type type map produces a range - * type, since ranges cannot be types of trees. 
- */ - def mapWith(tm: TypeMap @retains(caps.cap))(using Context) = - val args = arguments - if args.isEmpty then this - else - val findDiff = new TreeAccumulator[Type]: - def apply(x: Type, tree: Tree)(using Context): Type = - if tm.isRange(x) then x - else - val tp1 = tm(tree.tpe) - foldOver(if tp1 frozen_=:= tree.tpe then x else tp1, tree) - val diff = findDiff(NoType, args) - if tm.isRange(diff) then EmptyAnnotation - else if diff.exists then derivedAnnotation(tm.mapOver(tree)) - else this - - /** Does this annotation refer to a parameter of `tl`? */ - def refersToParamOf(tl: TermLambda)(using Context): Boolean = - val args = arguments - if args.isEmpty then false - else tree.existsSubTree { - case id: Ident => id.tpe.stripped match - case TermParamRef(tl1, _) => tl eq tl1 - case _ => false - case _ => false - } - - /** A string representation of the annotation. Overridden in BodyAnnotation. - */ - def toText(printer: Printer): Text = printer.annotText(this) - - def ensureCompleted(using Context): Unit = tree - - def sameAnnotation(that: Annotation)(using Context): Boolean = - symbol == that.symbol && tree.sameTree(that.tree) - - /** Operations for hash-consing, can be overridden */ - def hash: Int = System.identityHashCode(this) - def eql(that: Annotation) = this eq that - } - - case class ConcreteAnnotation(t: Tree) extends Annotation: - def tree(using Context): Tree = t - - abstract class LazyAnnotation extends Annotation { - protected var mySym: Symbol | (Context ?-> Symbol) | Null - override def symbol(using parentCtx: Context): Symbol = - assert(mySym != null) - mySym match { - case symFn: (Context ?-> Symbol) @unchecked => - mySym = null - mySym = atPhaseBeforeTransforms(symFn) - // We should always produce the same annotation tree, no matter when the - // annotation is evaluated. Setting the phase to a pre-transformation phase - // seems to be enough to ensure this (note that after erasure, `ctx.typer` - // will be the Erasure typer, but that doesn't seem to affect the annotation - // trees we create, so we leave it as is) - case sym: Symbol if sym.defRunId != parentCtx.runId => - mySym = sym.denot.current.symbol - case _ => - } - mySym.asInstanceOf[Symbol] - - protected var myTree: Tree | (Context ?-> Tree) | Null - def tree(using Context): Tree = - assert(myTree != null) - myTree match { - case treeFn: (Context ?-> Tree) @unchecked => - myTree = null - myTree = atPhaseBeforeTransforms(treeFn) - case _ => - } - myTree.asInstanceOf[Tree] - - override def isEvaluating: Boolean = myTree == null - override def isEvaluated: Boolean = myTree.isInstanceOf[Tree @unchecked] - } - - class DeferredSymAndTree(symFn: Context ?-> Symbol, treeFn: Context ?-> Tree) - extends LazyAnnotation: - protected var mySym: Symbol | (Context ?-> Symbol) | Null = ctx ?=> symFn(using ctx) - protected var myTree: Tree | (Context ?-> Tree) | Null = ctx ?=> treeFn(using ctx) - - /** An annotation indicating the body of a right-hand side, - * typically of an inline method. 
Treated specially in - * pickling/unpickling and TypeTreeMaps - */ - abstract class BodyAnnotation extends Annotation { - override def symbol(using Context): ClassSymbol = defn.BodyAnnot - override def derivedAnnotation(tree: Tree)(using Context): Annotation = - if (tree eq this.tree) this else ConcreteBodyAnnotation(tree) - override def arguments(using Context): List[Tree] = Nil - override def ensureCompleted(using Context): Unit = () - override def toText(printer: Printer): Text = "@Body" - } - - class ConcreteBodyAnnotation(body: Tree) extends BodyAnnotation { - def tree(using Context): Tree = body - } - - abstract class LazyBodyAnnotation extends BodyAnnotation { - // Copy-pasted from LazyAnnotation to avoid having to turn it into a trait - protected var myTree: Tree | (Context ?-> Tree) | Null - def tree(using Context): Tree = - assert(myTree != null) - myTree match { - case treeFn: (Context ?-> Tree) @unchecked => - myTree = null - myTree = atPhaseBeforeTransforms(treeFn) - case _ => - } - myTree.asInstanceOf[Tree] - - override def isEvaluating: Boolean = myTree == null - override def isEvaluated: Boolean = myTree.isInstanceOf[Tree @unchecked] - } - - object LazyBodyAnnotation { - def apply(bodyFn: Context ?-> Tree): LazyBodyAnnotation = - new LazyBodyAnnotation: - protected var myTree: Tree | (Context ?-> Tree) | Null = ctx ?=> bodyFn(using ctx) - } - - object Annotation { - - def apply(tree: Tree): ConcreteAnnotation = ConcreteAnnotation(tree) - - def apply(cls: ClassSymbol)(using Context): Annotation = - apply(cls, Nil) - - def apply(cls: ClassSymbol, arg: Tree)(using Context): Annotation = - apply(cls, arg :: Nil) - - def apply(cls: ClassSymbol, arg1: Tree, arg2: Tree)(using Context): Annotation = - apply(cls, arg1 :: arg2 :: Nil) - - def apply(cls: ClassSymbol, args: List[Tree])(using Context): Annotation = - apply(cls.typeRef, args) - - def apply(atp: Type, arg: Tree)(using Context): Annotation = - apply(atp, arg :: Nil) - - def apply(atp: Type, arg1: Tree, arg2: Tree)(using Context): Annotation = - apply(atp, arg1 :: arg2 :: Nil) - - def apply(atp: Type, args: List[Tree])(using Context): Annotation = - apply(New(atp, args)) - - /** Create an annotation where the tree is computed lazily. */ - def deferred(sym: Symbol)(treeFn: Context ?-> Tree): Annotation = - new LazyAnnotation { - protected var myTree: Tree | (Context ?-> Tree) | Null = ctx ?=> treeFn(using ctx) - protected var mySym: Symbol | (Context ?-> Symbol) | Null = sym - } - - /** Create an annotation where the symbol and the tree are computed lazily. 
*/ - def deferredSymAndTree(symFn: Context ?-> Symbol)(treeFn: Context ?-> Tree): Annotation = - DeferredSymAndTree(symFn, treeFn) - - /** Extractor for child annotations */ - object Child { - - /** A deferred annotation to the result of a given child computation */ - def later(delayedSym: Context ?-> Symbol, span: Span)(using Context): Annotation = { - def makeChildLater(using Context) = { - val sym = delayedSym - New(defn.ChildAnnot.typeRef.appliedTo(sym.owner.thisType.select(sym.name, sym)), Nil) - .withSpan(span) - } - deferred(defn.ChildAnnot)(makeChildLater) - } - - /** A regular, non-deferred Child annotation */ - def apply(sym: Symbol, span: Span)(using Context): Annotation = later(sym, span) - - def unapply(ann: Annotation)(using Context): Option[Symbol] = - if (ann.symbol == defn.ChildAnnot) { - val AppliedType(_, (arg: NamedType) :: Nil) = ann.tree.tpe: @unchecked - Some(arg.symbol) - } - else None - } - - def makeSourceFile(path: String)(using Context): Annotation = - apply(defn.SourceFileAnnot, Literal(Constant(path))) - } - - @sharable val EmptyAnnotation = Annotation(EmptyTree) - - def ThrowsAnnotation(cls: ClassSymbol)(using Context): Annotation = { - val tref = cls.typeRef - Annotation(defn.ThrowsAnnot.typeRef.appliedTo(tref), Ident(tref)) - } - - /** Extracts the type of the thrown exception from an annotation. - * - * Supports both "old-style" `@throws(classOf[Exception])` - * as well as "new-style" `@throws[Exception]("cause")` annotations. - */ - object ThrownException { - def unapply(a: Annotation)(using Context): Option[Type] = - if (a.symbol ne defn.ThrowsAnnot) - None - else a.argumentConstant(0) match { - // old-style: @throws(classOf[Exception]) (which is throws[T](classOf[Exception])) - case Some(Constant(tpe: Type)) => - Some(tpe) - // new-style: @throws[Exception], @throws[Exception]("cause") - case _ => - stripApply(a.tree) match { - case TypeApply(_, List(tpt)) => - Some(tpt.tpe) - case _ => - None - } - } - } -} diff --git a/tests/pos-with-compiler-cc/dotc/core/Atoms.scala b/tests/pos-with-compiler-cc/dotc/core/Atoms.scala deleted file mode 100644 index bcaaf6794107..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/Atoms.scala +++ /dev/null @@ -1,36 +0,0 @@ -package dotty.tools -package dotc -package core - -import Types._ - -/** Indicates the singleton types that a type must or may consist of. - * @param lo The lower bound: singleton types in this set are guaranteed - * to be in the carrier type. - * @param hi The upper bound: all singleton types in the carrier type are - * guaranteed to be in this set - * If the underlying type of a singleton type is another singleton type, - * only the latter type ends up in the sets. 
- */ -enum Atoms: - case Range(lo: Set[Type], hi: Set[Type]) - case Unknown - - def & (that: Atoms): Atoms = this match - case Range(lo1, hi1) => - that match - case Range(lo2, hi2) => Range(lo1 & lo2, hi1 & hi2) - case Unknown => Range(Set.empty, hi1) - case Unknown => - that match - case Range(lo2, hi2) => Range(Set.empty, hi2) - case Unknown => Unknown - - def | (that: Atoms): Atoms = this match - case Range(lo1, hi1) => - that match - case Range(lo2, hi2) => Range(lo1 | lo2, hi1 | hi2) - case Unknown => Unknown - case Unknown => Unknown - -end Atoms diff --git a/tests/pos-with-compiler-cc/dotc/core/CheckRealizable.scala b/tests/pos-with-compiler-cc/dotc/core/CheckRealizable.scala deleted file mode 100644 index d166cec11573..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/CheckRealizable.scala +++ /dev/null @@ -1,216 +0,0 @@ -package dotty.tools -package dotc -package core - -import Contexts._, Types._, Symbols._, Names._, Flags._ -import Denotations.SingleDenotation -import Decorators._ -import collection.mutable -import config.SourceVersion.future -import config.Feature.sourceVersion -import annotation.constructorOnly - -/** Realizability status */ -object CheckRealizable { - - sealed abstract class Realizability(val msg: String) extends Pure { - def andAlso(other: => Realizability): Realizability = - if (this == Realizable) other else this - def mapError(f: Realizability -> Context ?-> Realizability)(using Context): Realizability = - if (this == Realizable) this else f(this) - } - - object Realizable extends Realizability("") - - object NotConcrete extends Realizability(" is not a concrete type") - - class NotFinal(sym: Symbol)(using @constructorOnly ctx: Context) - extends Realizability(i" refers to nonfinal $sym") - - class HasProblemBounds(name: Name, info: Type)(using @constructorOnly ctx: Context) - extends Realizability(i" has a member $name with possibly conflicting bounds ${info.bounds.lo} <: ... <: ${info.bounds.hi}") - - class HasProblemBaseArg(typ: Type, argBounds: TypeBounds)(using @constructorOnly ctx: Context) - extends Realizability(i" has a base type $typ with possibly conflicting parameter bounds ${argBounds.lo} <: ... <: ${argBounds.hi}") - - class HasProblemBase(base1: Type, base2: Type)(using @constructorOnly ctx: Context) - extends Realizability(i" has conflicting base types $base1 and $base2") - - class HasProblemField(fld: SingleDenotation, problem: Realizability)(using @constructorOnly ctx: Context) - extends Realizability(i" has a member $fld which is not a legal path\nsince ${fld.symbol.name}: ${fld.info}${problem.msg}") - - class ProblemInUnderlying(tp: Type, problem: Realizability)(using @constructorOnly ctx: Context) - extends Realizability(i"s underlying type ${tp}${problem.msg}") { - assert(problem != Realizable) - } - - def realizability(tp: Type)(using Context): Realizability = - new CheckRealizable().realizability(tp) - - def boundsRealizability(tp: Type)(using Context): Realizability = - new CheckRealizable().boundsRealizability(tp) - - private val LateInitializedFlags = Lazy | Erased -} - -/** Compute realizability status. - * - * A type T is realizable iff it is inhabited by non-null values. This ensures that its type members have good bounds - * (in the sense from DOT papers). A type projection T#L is legal if T is realizable, and can be understood as - * Scala 2's `v.L forSome { val v: T }`. - * - * In general, a realizable type can have multiple inhabitants, hence it need not be stable (in the sense of - * Type.isStable). 
- */ -class CheckRealizable(using Context) { - import CheckRealizable._ - - /** A set of all fields that have already been checked. Used - * to avoid infinite recursions when analyzing recursive types. - */ - private val checkedFields: mutable.Set[Symbol] = mutable.LinkedHashSet[Symbol]() - - /** Is symbol's definitition a lazy or erased val? - * (note we exclude modules here, because their realizability is ensured separately) - */ - private def isLateInitialized(sym: Symbol) = sym.isOneOf(LateInitializedFlags, butNot = Module) - - /** The realizability status of given type `tp`*/ - def realizability(tp: Type): Realizability = tp.dealias match { - /* - * A `TermRef` for a path `p` is realizable if - * - `p`'s type is stable and realizable, or - * - its underlying path is idempotent (that is, *stable*), total, and not null. - * We don't check yet the "not null" clause: that will require null-safety checking. - * - * We assume that stability of tp.prefix is checked elsewhere, since that's necessary for the path to be legal in - * the first place. - */ - case tp: TermRef => - val sym = tp.symbol - lazy val tpInfoRealizable = realizability(tp.info) - if (sym.is(StableRealizable)) realizability(tp.prefix) - else { - val r = - if (sym.isStableMember && !isLateInitialized(sym)) - // it's realizable because we know that a value of type `tp` has been created at run-time - Realizable - else if (!sym.isEffectivelyFinal) - // it's potentially not realizable since it might be overridden with a member of nonrealizable type - new NotFinal(sym) - else - // otherwise we need to look at the info to determine realizability - // roughly: it's realizable if the info does not have bad bounds - tpInfoRealizable.mapError(r => new ProblemInUnderlying(tp, r)) - r andAlso { - if (sym.isStableMember) sym.setFlag(StableRealizable) // it's known to be stable and realizable - realizability(tp.prefix) - } mapError { r => - // A mutable path is in fact stable and realizable if it has a realizable singleton type. - if (tp.info.isStable && tpInfoRealizable == Realizable) { - sym.setFlag(StableRealizable) - Realizable - } - else r - } - } - case _: SingletonType | NoPrefix => - Realizable - case tp => - def isConcrete(tp: Type): Boolean = tp.dealias match { - case tp: TypeRef => tp.symbol.isClass - case tp: TypeParamRef => false - case tp: TypeProxy => isConcrete(tp.underlying) - case tp: AndType => isConcrete(tp.tp1) && isConcrete(tp.tp2) - case tp: OrType => isConcrete(tp.tp1) && isConcrete(tp.tp2) - case _ => false - } - if (!isConcrete(tp)) NotConcrete - else boundsRealizability(tp).andAlso(memberRealizability(tp)) - } - - private def refinedNames(tp: Type): Set[Name] = tp.dealias match { - case tp: RefinedType => refinedNames(tp.parent) + tp.refinedName - case tp: AndType => refinedNames(tp.tp1) ++ refinedNames(tp.tp2) - case tp: OrType => refinedNames(tp.tp1) ++ refinedNames(tp.tp2) - case tp: TypeProxy => refinedNames(tp.superType) - case _ => Set.empty - } - - /** `Realizable` if `tp` has good bounds, a `HasProblem...` instance - * pointing to a bad bounds member otherwise. "Has good bounds" means: - * - * - all type members have good bounds (except for opaque helpers) - * - all refinements of the underlying type have good bounds (except for opaque companions) - * - all base types are class types, and if their arguments are wildcards - * they have good bounds. - * - base types do not appear in multiple instances with different arguments. 
- * (depending on the simplification scheme for AndTypes employed, this could - * also lead to base types with bad bounds). - */ - private def boundsRealizability(tp: Type) = { - - val memberProblems = withMode(Mode.CheckBoundsOrSelfType) { - for { - mbr <- tp.nonClassTypeMembers - if !(mbr.info.loBound <:< mbr.info.hiBound) - } - yield new HasProblemBounds(mbr.name, mbr.info) - } - - val refinementProblems = withMode(Mode.CheckBoundsOrSelfType) { - for { - name <- refinedNames(tp) - if (name.isTypeName) - mbr <- tp.member(name).alternatives - if !(mbr.info.loBound <:< mbr.info.hiBound) - } - yield - new HasProblemBounds(name, mbr.info) - } - - def baseTypeProblems(base: Type) = base match { - case AndType(base1, base2) => - new HasProblemBase(base1, base2) :: Nil - case base => - base.argInfos.collect { - case bounds @ TypeBounds(lo, hi) if !(lo <:< hi) => - new HasProblemBaseArg(base, bounds) - } - } - val baseProblems = - tp.baseClasses.map(_.baseTypeOf(tp)).flatMap(baseTypeProblems) - - baseProblems.foldLeft( - refinementProblems.foldLeft( - memberProblems.foldLeft( - Realizable: Realizability)(_ andAlso _))(_ andAlso _))(_ andAlso _) - } - - /** `Realizable` if all of `tp`'s non-strict fields have realizable types, - * a `HasProblemField` instance pointing to a bad field otherwise. - */ - private def memberRealizability(tp: Type) = { - def checkField(sofar: Realizability, fld: SingleDenotation): Realizability = - sofar andAlso { - if (checkedFields.contains(fld.symbol) || fld.symbol.isOneOf(Private | Mutable | LateInitializedFlags)) - // if field is private it cannot be part of a visible path - // if field is mutable it cannot be part of a path - // if field is lazy or erased it does not need to be initialized when the owning object is - // so in all cases the field does not influence realizability of the enclosing object. - Realizable - else { - checkedFields += fld.symbol - realizability(fld.info).mapError(r => new HasProblemField(fld, r)) - } - } - if sourceVersion.isAtLeast(future) then - // check fields only from version 3.x. - // Reason: An embedded field could well be nullable, which means it - // should not be part of a path and need not be checked; but we cannot recognize - // this situation until we have a typesystem that tracks nullability. 
- tp.fields.foldLeft(Realizable: Realizability)(checkField) - else - Realizable - } -} diff --git a/tests/pos-with-compiler-cc/dotc/core/Comments.scala b/tests/pos-with-compiler-cc/dotc/core/Comments.scala deleted file mode 100644 index 1b20b75ad8ac..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/Comments.scala +++ /dev/null @@ -1,462 +0,0 @@ -package dotty.tools -package dotc -package core - -import scala.language.unsafeNulls - -import ast.{ untpd, tpd } -import Symbols._, Contexts._ -import util.{SourceFile, ReadOnlyMap} -import util.Spans._ -import util.CommentParsing._ -import util.Property.Key -import parsing.Parsers.Parser -import reporting.ProperDefinitionNotFound - -object Comments { - val ContextDoc: Key[ContextDocstrings] = new Key[ContextDocstrings] - - /** Decorator for getting docbase out of context */ - given CommentsContext: AnyRef with - extension (c: Context) def docCtx: Option[ContextDocstrings] = c.property(ContextDoc) - - /** Context for Docstrings, contains basic functionality for getting - * docstrings via `Symbol` and expanding templates - */ - class ContextDocstrings { - - private val _docstrings: MutableSymbolMap[Comment] = MutableSymbolMap[Comment](512) // FIXME: 2nd [Comment] needed or "not a class type" - - val templateExpander: CommentExpander = new CommentExpander - - def docstrings: ReadOnlyMap[Symbol, Comment] = _docstrings - - def docstring(sym: Symbol): Option[Comment] = _docstrings.get(sym) - - def addDocstring(sym: Symbol, doc: Option[Comment]): Unit = - doc.foreach(d => _docstrings.update(sym, d)) - } - - /** - * A `Comment` contains the unformatted docstring, it's position and potentially more - * information that is populated when the comment is "cooked". - * - * @param span The position span of this `Comment`. - * @param raw The raw comment, as seen in the source code, without any expansion. - * @param expanded If this comment has been expanded, it's expansion, otherwise `None`. - * @param usecases The usecases for this comment. - */ - final case class Comment( - span: Span, - raw: String, - expanded: Option[String], - usecases: List[UseCase], - variables: Map[String, String], - ) { - - /** Has this comment been cooked or expanded? */ - def isExpanded: Boolean = expanded.isDefined - - /** The body of this comment, without the `@usecase` and `@define` sections, after expansion. */ - lazy val expandedBody: Option[String] = - expanded.map(removeSections(_, "@usecase", "@define")) - - val isDocComment: Boolean = Comment.isDocComment(raw) - - /** - * Expands this comment by giving its content to `f`, and then parsing the `@usecase` sections. - * Typically, `f` will take care of expanding the variables. - * - * @param f The expansion function. - * @return The expanded comment, with the `usecases` populated. 
- */ - def expand(f: String => String)(using Context): Comment = { - val expandedComment = f(raw) - val useCases = Comment.parseUsecases(expandedComment, span) - Comment(span, raw, Some(expandedComment), useCases, Map.empty) - } - } - - object Comment { - - def isDocComment(comment: String): Boolean = comment.startsWith("/**") - - def apply(span: Span, raw: String): Comment = - Comment(span, raw, None, Nil, Map.empty) - - private def parseUsecases(expandedComment: String, span: Span)(using Context): List[UseCase] = - if (!isDocComment(expandedComment)) - Nil - else - tagIndex(expandedComment) - .filter { startsWithTag(expandedComment, _, "@usecase") } - .map { case (start, end) => decomposeUseCase(expandedComment, span, start, end) } - - /** Turns a usecase section into a UseCase, with code changed to: - * {{{ - * // From: - * def foo: A - * // To: - * def foo: A = ??? - * }}} - */ - private def decomposeUseCase(body: String, span: Span, start: Int, end: Int)(using Context): UseCase = { - def subPos(start: Int, end: Int) = - if (span == NoSpan) NoSpan - else { - val start1 = span.start + start - val end1 = span.end + end - span withStart start1 withPoint start1 withEnd end1 - } - - val codeStart = skipWhitespace(body, start + "@usecase".length) - val codeEnd = skipToEol(body, codeStart) - val code = body.substring(codeStart, codeEnd) + " = ???" - val codePos = subPos(codeStart, codeEnd) - - UseCase(code, codePos) - } - } - - final case class UseCase(code: String, codePos: Span, untpdCode: untpd.Tree, tpdCode: Option[tpd.DefDef]) { - def typed(tpdCode: tpd.DefDef): UseCase = copy(tpdCode = Some(tpdCode)) - } - - object UseCase { - def apply(code: String, codePos: Span)(using Context): UseCase = { - val tree = { - val tree = new Parser(SourceFile.virtual("", code)).localDef(codePos.start) - tree match { - case tree: untpd.DefDef => - val newName = ctx.compilationUnit.freshNames.newName(tree.name, NameKinds.DocArtifactName) - untpd.cpy.DefDef(tree)(name = newName) - case _ => - report.error(ProperDefinitionNotFound(), ctx.source.atSpan(codePos)) - tree - } - } - UseCase(code, codePos, tree, None) - } - } - - /** - * Port of DocComment.scala from nsc - * @author Martin Odersky - * @author Felix Mulder - */ - class CommentExpander { - import dotc.config.Printers.scaladoc - import scala.collection.mutable - - def expand(sym: Symbol, site: Symbol)(using Context): String = { - val parent = if (site != NoSymbol) site else sym - defineVariables(parent) - expandedDocComment(sym, parent) - } - - /** The cooked doc comment of symbol `sym` after variable expansion, or "" if missing. - * - * @param sym The symbol for which doc comment is returned - * @param site The class for which doc comments are generated - * @throws ExpansionLimitExceeded when more than 10 successive expansions - * of the same string are done, which is - * interpreted as a recursive variable definition. 
- */ - def expandedDocComment(sym: Symbol, site: Symbol, docStr: String = "")(using Context): String = { - // when parsing a top level class or module, use the (module-)class itself to look up variable definitions - val parent = if ((sym.is(Flags.Module) || sym.isClass) && site.is(Flags.Package)) sym - else site - expandVariables(cookedDocComment(sym, docStr), sym, parent) - } - - private def template(raw: String): String = - removeSections(raw, "@define") - - private def defines(raw: String): List[String] = { - val sections = tagIndex(raw) - val defines = sections filter { startsWithTag(raw, _, "@define") } - val usecases = sections filter { startsWithTag(raw, _, "@usecase") } - val end = startTag(raw, (defines ::: usecases).sortBy(_._1)) - - defines map { case (start, end) => raw.substring(start, end) } - } - - private def replaceInheritDocToInheritdoc(docStr: String): String = - docStr.replaceAll("""\{@inheritDoc\p{Zs}*\}""", "@inheritdoc") - - /** The cooked doc comment of an overridden symbol */ - protected def superComment(sym: Symbol)(using Context): Option[String] = - allInheritedOverriddenSymbols(sym).iterator map (x => cookedDocComment(x)) find (_ != "") - - private val cookedDocComments = MutableSymbolMap[String]() - - /** The raw doc comment of symbol `sym`, minus usecase and define sections, augmented by - * missing sections of an inherited doc comment. - * If a symbol does not have a doc comment but some overridden version of it does, - * the doc comment of the overridden version is copied instead. - */ - def cookedDocComment(sym: Symbol, docStr: String = "")(using Context): String = cookedDocComments.getOrElseUpdate(sym, { - var ownComment = - if (docStr.length == 0) ctx.docCtx.flatMap(_.docstring(sym).map(c => template(c.raw))).getOrElse("") - else template(docStr) - ownComment = replaceInheritDocToInheritdoc(ownComment) - - superComment(sym) match { - case None => - // SI-8210 - The warning would be false negative when this symbol is a setter - if (ownComment.indexOf("@inheritdoc") != -1 && ! 
sym.isSetter) - scaladoc.println(s"${sym.span}: the comment for ${sym} contains @inheritdoc, but no parent comment is available to inherit from.") - ownComment.replace("@inheritdoc", "") - case Some(sc) => - if (ownComment == "") sc - else expandInheritdoc(sc, merge(sc, ownComment, sym), sym) - } - }) - - private def isMovable(str: String, sec: (Int, Int)): Boolean = - startsWithTag(str, sec, "@param") || - startsWithTag(str, sec, "@tparam") || - startsWithTag(str, sec, "@return") - - def merge(src: String, dst: String, sym: Symbol, copyFirstPara: Boolean = false): String = { - val srcSections = tagIndex(src) - val dstSections = tagIndex(dst) - val srcParams = paramDocs(src, "@param", srcSections) - val dstParams = paramDocs(dst, "@param", dstSections) - val srcTParams = paramDocs(src, "@tparam", srcSections) - val dstTParams = paramDocs(dst, "@tparam", dstSections) - val out = new StringBuilder - var copied = 0 - var tocopy = startTag(dst, dstSections dropWhile (!isMovable(dst, _))) - - if (copyFirstPara) { - val eop = // end of comment body (first para), which is delimited by blank line, or tag, or end of comment - (findNext(src, 0)(src.charAt(_) == '\n')) min startTag(src, srcSections) - out append src.substring(0, eop).trim - copied = 3 - tocopy = 3 - } - - def mergeSection(srcSec: Option[(Int, Int)], dstSec: Option[(Int, Int)]) = dstSec match { - case Some((start, end)) => - if (end > tocopy) tocopy = end - case None => - srcSec match { - case Some((start1, end1)) => - out append dst.substring(copied, tocopy).trim - out append "\n" - copied = tocopy - out append src.substring(start1, end1).trim - case None => - } - } - - //TODO: enable this once you know how to get `sym.paramss` - /* - for (params <- sym.paramss; param <- params) - mergeSection(srcParams get param.name.toString, dstParams get param.name.toString) - for (tparam <- sym.typeParams) - mergeSection(srcTParams get tparam.name.toString, dstTParams get tparam.name.toString) - - mergeSection(returnDoc(src, srcSections), returnDoc(dst, dstSections)) - mergeSection(groupDoc(src, srcSections), groupDoc(dst, dstSections)) - */ - - if (out.length == 0) dst - else { - out append dst.substring(copied) - out.toString - } - } - - /** - * Expand inheritdoc tags - * - for the main comment we transform the inheritdoc into the super variable, - * and the variable expansion can expand it further - * - for the param, tparam and throws sections we must replace comments on the spot - * - * This is done separately, for two reasons: - * 1. It takes longer to run compared to merge - * 2. 
The inheritdoc annotation should not be used very often, as building the comment from pieces severely - * impacts performance - * - * @param parent The source (or parent) comment - * @param child The child (overriding member or usecase) comment - * @param sym The child symbol - * @return The child comment with the inheritdoc sections expanded - */ - def expandInheritdoc(parent: String, child: String, sym: Symbol): String = - if (child.indexOf("@inheritdoc") == -1) - child - else { - val parentSections = tagIndex(parent) - val childSections = tagIndex(child) - val parentTagMap = sectionTagMap(parent, parentSections) - val parentNamedParams = Map() + - ("@param" -> paramDocs(parent, "@param", parentSections)) + - ("@tparam" -> paramDocs(parent, "@tparam", parentSections)) + - ("@throws" -> paramDocs(parent, "@throws", parentSections)) - - val out = new StringBuilder - - def replaceInheritdoc(childSection: String, parentSection: => String) = - if (childSection.indexOf("@inheritdoc") == -1) - childSection - else - childSection.replace("@inheritdoc", parentSection) - - def getParentSection(section: (Int, Int)): String = { - - def getSectionHeader = extractSectionTag(child, section) match { - case param@("@param"|"@tparam"|"@throws") => param + " " + extractSectionParam(child, section) - case other => other - } - - def sectionString(param: String, paramMap: Map[String, (Int, Int)]): String = - paramMap.get(param) match { - case Some(section) => - // Cleanup the section tag and parameter - val sectionTextBounds = extractSectionText(parent, section) - cleanupSectionText(parent.substring(sectionTextBounds._1, sectionTextBounds._2)) - case None => - scaladoc.println(s"""${sym.span}: the """" + getSectionHeader + "\" annotation of the " + sym + - " comment contains @inheritdoc, but the corresponding section in the parent is not defined.") - "" - } - - child.substring(section._1, section._1 + 7) match { - case param@("@param "|"@tparam"|"@throws") => - sectionString(extractSectionParam(child, section), parentNamedParams(param.trim)) - case _ => - sectionString(extractSectionTag(child, section), parentTagMap) - } - } - - def mainComment(str: String, sections: List[(Int, Int)]): String = - if (str.trim.length > 3) - str.trim.substring(3, startTag(str, sections)) - else - "" - - // Append main comment - out.append("/**") - out.append(replaceInheritdoc(mainComment(child, childSections), mainComment(parent, parentSections))) - - // Append sections - for (section <- childSections) - out.append(replaceInheritdoc(child.substring(section._1, section._2), getParentSection(section))) - - out.append("*/") - out.toString - } - - protected def expandVariables(initialStr: String, sym: Symbol, site: Symbol)(using Context): String = { - val expandLimit = 10 - - def expandInternal(str: String, depth: Int): String = { - if (depth >= expandLimit) - throw new ExpansionLimitExceeded(str) - - val out = new StringBuilder - var copied, idx = 0 - // excluding variables written as \$foo so we can use them when - // necessary to document things like Symbol#decode - def isEscaped = idx > 0 && str.charAt(idx - 1) == '\\' - while (idx < str.length) - if ((str charAt idx) != '$' || isEscaped) - idx += 1 - else { - val vstart = idx - idx = skipVariable(str, idx + 1) - def replaceWith(repl: String) = { - out append str.substring(copied, vstart) - out append repl - copied = idx - } - variableName(str.substring(vstart + 1, idx)) match { - case "super" => - superComment(sym) foreach { sc => - val superSections = tagIndex(sc) - 
replaceWith(sc.substring(3, startTag(sc, superSections))) - for (sec @ (start, end) <- superSections) - if (!isMovable(sc, sec)) out append sc.substring(start, end) - } - case "" => idx += 1 - case vname => - lookupVariable(vname, site) match { - case Some(replacement) => replaceWith(replacement) - case None => - scaladoc.println(s"Variable $vname undefined in comment for $sym in $site") - } - } - } - if (out.length == 0) str - else { - out append str.substring(copied) - expandInternal(out.toString, depth + 1) - } - } - - // We suppressed expanding \$ throughout the recursion, and now we - // need to replace \$ with $ so it looks as intended. - expandInternal(initialStr, 0).replace("""\$""", "$") - } - - def defineVariables(sym: Symbol)(using Context): Unit = { - val Trim = "(?s)^[\\s&&[^\n\r]]*(.*?)\\s*$".r - - val raw = ctx.docCtx.flatMap(_.docstring(sym).map(_.raw)).getOrElse("") - defs(sym) ++= defines(raw).map { - str => { - val start = skipWhitespace(str, "@define".length) - val (key, value) = str.splitAt(skipVariable(str, start)) - key.drop(start) -> value - } - } map { - case (key, Trim(value)) => - variableName(key) -> value.replaceAll("\\s+\\*+$", "") - } - } - - /** Maps symbols to the variable -> replacement maps that are defined - * in their doc comments - */ - private val defs = mutable.HashMap[Symbol, Map[String, String]]() withDefaultValue Map() - - /** Lookup definition of variable. - * - * @param vble The variable for which a definition is searched - * @param site The class for which doc comments are generated - */ - def lookupVariable(vble: String, site: Symbol)(using Context): Option[String] = site match { - case NoSymbol => None - case _ => - val searchList = - if (site.flags.is(Flags.Module)) site :: site.info.baseClasses - else site.info.baseClasses - - searchList collectFirst { case x if defs(x) contains vble => defs(x)(vble) } match { - case Some(str) if str startsWith "$" => lookupVariable(str.tail, site) - case res => res orElse lookupVariable(vble, site.owner) - } - } - - /** The position of the raw doc comment of symbol `sym`, or NoPosition if missing - * If a symbol does not have a doc comment but some overridden version of it does, - * the position of the doc comment of the overridden version is returned instead. - */ - def docCommentPos(sym: Symbol)(using Context): Span = - ctx.docCtx.flatMap(_.docstring(sym).map(_.span)).getOrElse(NoSpan) - - /** A version which doesn't consider self types, as a temporary measure: - * an infinite loop has broken out between superComment and cookedDocComment - * since r23926. 
- */ - private def allInheritedOverriddenSymbols(sym: Symbol)(using Context): List[Symbol] = - if (!sym.owner.isClass) Nil - else sym.allOverriddenSymbols.toList.filter(_ != NoSymbol) //TODO: could also be `sym.owner.allOverrid..` - //else sym.owner.ancestors map (sym overriddenSymbol _) filter (_ != NoSymbol) - - class ExpansionLimitExceeded(str: String) extends Exception - } -} diff --git a/tests/pos-with-compiler-cc/dotc/core/Constants.scala b/tests/pos-with-compiler-cc/dotc/core/Constants.scala deleted file mode 100644 index f45e9e5217de..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/Constants.scala +++ /dev/null @@ -1,261 +0,0 @@ -package dotty.tools -package dotc -package core - -import Types._, Symbols._, Contexts._ -import printing.Printer -import printing.Texts.Text - -object Constants { - - inline val NoTag = 0 - inline val UnitTag = 1 - inline val BooleanTag = 2 - inline val ByteTag = 3 - inline val ShortTag = 4 - inline val CharTag = 5 - inline val IntTag = 6 - inline val LongTag = 7 - inline val FloatTag = 8 - inline val DoubleTag = 9 - inline val StringTag = 10 - inline val NullTag = 11 - inline val ClazzTag = 12 - - class Constant(val value: Any, val tag: Int) extends printing.Showable with Product1[Any] { - import java.lang.Double.doubleToRawLongBits - import java.lang.Float.floatToRawIntBits - - def isByteRange: Boolean = isIntRange && Byte.MinValue <= intValue && intValue <= Byte.MaxValue - def isShortRange: Boolean = isIntRange && Short.MinValue <= intValue && intValue <= Short.MaxValue - def isCharRange: Boolean = isIntRange && Char.MinValue <= intValue && intValue <= Char.MaxValue - def isIntRange: Boolean = ByteTag <= tag && tag <= IntTag - def isLongRange: Boolean = ByteTag <= tag && tag <= LongTag - def isFloatRange: Boolean = ByteTag <= tag && tag <= FloatTag - def isNumeric: Boolean = ByteTag <= tag && tag <= DoubleTag - def isNonUnitAnyVal: Boolean = BooleanTag <= tag && tag <= DoubleTag - def isAnyVal: Boolean = UnitTag <= tag && tag <= DoubleTag - - def tpe(using Context): Type = tag match { - case UnitTag => defn.UnitType - case BooleanTag => defn.BooleanType - case ByteTag => defn.ByteType - case ShortTag => defn.ShortType - case CharTag => defn.CharType - case IntTag => defn.IntType - case LongTag => defn.LongType - case FloatTag => defn.FloatType - case DoubleTag => defn.DoubleType - case StringTag => defn.StringType - case NullTag => defn.NullType - case ClazzTag => defn.ClassType(typeValue) - } - - /** We need the equals method to take account of tags as well as values. 
- */ - override def equals(other: Any): Boolean = other match { - case that: Constant => - this.tag == that.tag && equalHashValue == that.equalHashValue - case _ => false - } - - def isNaN: Boolean = value match { - case f: Float => f.isNaN - case d: Double => d.isNaN - case _ => false - } - - def booleanValue: Boolean = - if (tag == BooleanTag) value.asInstanceOf[Boolean] - else throw new Error("value " + value + " is not a boolean") - - def byteValue: Byte = tag match { - case ByteTag => value.asInstanceOf[Byte] - case ShortTag => value.asInstanceOf[Short].toByte - case CharTag => value.asInstanceOf[Char].toByte - case IntTag => value.asInstanceOf[Int].toByte - case LongTag => value.asInstanceOf[Long].toByte - case FloatTag => value.asInstanceOf[Float].toByte - case DoubleTag => value.asInstanceOf[Double].toByte - case _ => throw new Error("value " + value + " is not a Byte") - } - - def shortValue: Short = tag match { - case ByteTag => value.asInstanceOf[Byte].toShort - case ShortTag => value.asInstanceOf[Short] - case CharTag => value.asInstanceOf[Char].toShort - case IntTag => value.asInstanceOf[Int].toShort - case LongTag => value.asInstanceOf[Long].toShort - case FloatTag => value.asInstanceOf[Float].toShort - case DoubleTag => value.asInstanceOf[Double].toShort - case _ => throw new Error("value " + value + " is not a Short") - } - - def charValue: Char = tag match { - case ByteTag => value.asInstanceOf[Byte].toChar - case ShortTag => value.asInstanceOf[Short].toChar - case CharTag => value.asInstanceOf[Char] - case IntTag => value.asInstanceOf[Int].toChar - case LongTag => value.asInstanceOf[Long].toChar - case FloatTag => value.asInstanceOf[Float].toChar - case DoubleTag => value.asInstanceOf[Double].toChar - case _ => throw new Error("value " + value + " is not a Char") - } - - def intValue: Int = tag match { - case ByteTag => value.asInstanceOf[Byte].toInt - case ShortTag => value.asInstanceOf[Short].toInt - case CharTag => value.asInstanceOf[Char].toInt - case IntTag => value.asInstanceOf[Int] - case LongTag => value.asInstanceOf[Long].toInt - case FloatTag => value.asInstanceOf[Float].toInt - case DoubleTag => value.asInstanceOf[Double].toInt - case _ => throw new Error("value " + value + " is not an Int") - } - - def longValue: Long = tag match { - case ByteTag => value.asInstanceOf[Byte].toLong - case ShortTag => value.asInstanceOf[Short].toLong - case CharTag => value.asInstanceOf[Char].toLong - case IntTag => value.asInstanceOf[Int].toLong - case LongTag => value.asInstanceOf[Long] - case FloatTag => value.asInstanceOf[Float].toLong - case DoubleTag => value.asInstanceOf[Double].toLong - case _ => throw new Error("value " + value + " is not a Long") - } - - def floatValue: Float = tag match { - case ByteTag => value.asInstanceOf[Byte].toFloat - case ShortTag => value.asInstanceOf[Short].toFloat - case CharTag => value.asInstanceOf[Char].toFloat - case IntTag => value.asInstanceOf[Int].toFloat - case LongTag => value.asInstanceOf[Long].toFloat - case FloatTag => value.asInstanceOf[Float] - case DoubleTag => value.asInstanceOf[Double].toFloat - case _ => throw new Error("value " + value + " is not a Float") - } - - def doubleValue: Double = tag match { - case ByteTag => value.asInstanceOf[Byte].toDouble - case ShortTag => value.asInstanceOf[Short].toDouble - case CharTag => value.asInstanceOf[Char].toDouble - case IntTag => value.asInstanceOf[Int].toDouble - case LongTag => value.asInstanceOf[Long].toDouble - case FloatTag => value.asInstanceOf[Float].toDouble - case 
DoubleTag => value.asInstanceOf[Double] - case _ => throw new Error("value " + value + " is not a Double") - } - - /** Convert constant value to conform to given type. - */ - def convertTo(pt: Type)(using Context): Constant | Null = { - def classBound(pt: Type): Type = pt.dealias.stripTypeVar match { - case tref: TypeRef if !tref.symbol.isClass && tref.info.exists => - classBound(tref.info.bounds.lo) - case param: TypeParamRef => - ctx.typerState.constraint.entry(param) match { - case TypeBounds(lo, hi) => - if (hi.classSymbol.isPrimitiveValueClass) hi //constrain further with high bound - else classBound(lo) - case NoType => classBound(param.binder.paramInfos(param.paramNum).lo) - case inst => classBound(inst) - } - case pt => pt - } - pt match - case ConstantType(value) if value == this => this - case _: SingletonType => null - case _ => - val target = classBound(pt).typeSymbol - if (target == tpe.typeSymbol) - this - else if ((target == defn.ByteClass) && isByteRange) - Constant(byteValue) - else if (target == defn.ShortClass && isShortRange) - Constant(shortValue) - else if (target == defn.CharClass && isCharRange) - Constant(charValue) - else if (target == defn.IntClass && isIntRange) - Constant(intValue) - else if (target == defn.LongClass && isLongRange) - Constant(longValue) - else if (target == defn.FloatClass && isFloatRange) - Constant(floatValue) - else if (target == defn.DoubleClass && isNumeric) - Constant(doubleValue) - else - null - } - - def stringValue: String = value.toString - - def toText(printer: Printer): Text = printer.toText(this) - - def typeValue: Type = value.asInstanceOf[Type] - - /** - * Consider two `NaN`s to be identical, despite non-equality - * Consider -0d to be distinct from 0d, despite equality - * - * We use the raw versions (i.e. `floatToRawIntBits` rather than `floatToIntBits`) - * to avoid treating different encodings of `NaN` as the same constant. - * You probably can't express different `NaN` varieties as compile time - * constants in regular Scala code, but it is conceivable that you could - * conjure them with a macro. - */ - private def equalHashValue: Any = value match { - case f: Float => floatToRawIntBits(f) - case d: Double => doubleToRawLongBits(d) - case v => v - } - - override def hashCode: Int = { - import scala.util.hashing.MurmurHash3._ - val seed = 17 - var h = seed - h = mix(h, tag.##) // include tag in the hash, otherwise 0, 0d, 0L, 0f collide. 
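Aside: the `equalHashValue` comment above relies on two properties of the raw floating-point encodings: `NaN` compares unequal to itself but has a stable bit pattern, while `0.0` and `-0.0` compare equal but differ in their sign bit. The self-contained check below demonstrates both facts; it is illustrative only and not part of the deleted file.

```scala
// Why Constant hashing/equality goes through doubleToRawLongBits.
import java.lang.Double.doubleToRawLongBits

@main def rawBitsDemo(): Unit =
  val nan = Double.NaN
  println(nan == nan)                                            // false: NaN is never == to itself
  println(doubleToRawLongBits(nan) == doubleToRawLongBits(nan))  // true: same bit pattern, so NaNs hash alike
  println(0.0 == -0.0)                                           // true: numeric equality
  println(doubleToRawLongBits(0.0) == doubleToRawLongBits(-0.0)) // false: sign bit differs, so distinct constants
```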
- h = mix(h, equalHashValue.##) - finalizeHash(h, length = 2) - } - - override def toString: String = s"Constant($value)" - def canEqual(x: Any): Boolean = true - def get: Any = value - def isEmpty: Boolean = false - def _1: Any = value - } - - object Constant { - def apply(x: Null): Constant = new Constant(x, NullTag) - def apply(x: Unit): Constant = new Constant(x, UnitTag) - def apply(x: Boolean): Constant = new Constant(x, BooleanTag) - def apply(x: Byte): Constant = new Constant(x, ByteTag) - def apply(x: Short): Constant = new Constant(x, ShortTag) - def apply(x: Int): Constant = new Constant(x, IntTag) - def apply(x: Long): Constant = new Constant(x, LongTag) - def apply(x: Float): Constant = new Constant(x, FloatTag) - def apply(x: Double): Constant = new Constant(x, DoubleTag) - def apply(x: String): Constant = new Constant(x, StringTag) - def apply(x: Char): Constant = new Constant(x, CharTag) - def apply(x: Type): Constant = new Constant(x, ClazzTag) - def apply(value: Any): Constant = - new Constant(value, - value match { - case null => NullTag - case x: Unit => UnitTag - case x: Boolean => BooleanTag - case x: Byte => ByteTag - case x: Short => ShortTag - case x: Int => IntTag - case x: Long => LongTag - case x: Float => FloatTag - case x: Double => DoubleTag - case x: String => StringTag - case x: Char => CharTag - case x: Type => ClazzTag - } - ) - - def unapply(c: Constant): Constant = c - } -} diff --git a/tests/pos-with-compiler-cc/dotc/core/Constraint.scala b/tests/pos-with-compiler-cc/dotc/core/Constraint.scala deleted file mode 100644 index fb87aed77c41..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/Constraint.scala +++ /dev/null @@ -1,214 +0,0 @@ -package dotty.tools -package dotc -package core - -import Types._, Contexts._ -import printing.Showable -import util.{SimpleIdentitySet, SimpleIdentityMap} - -/** Constraint over undetermined type parameters. Constraints are built - * over values of the following types: - * - * - TypeLambda A constraint constrains the type parameters of a set of TypeLambdas - * - TypeParamRef The parameters of the constrained type lambdas - * - TypeVar Every constrained parameter might be associated with a TypeVar - * that has the TypeParamRef as origin. - */ -abstract class Constraint extends Showable { - - type This <: Constraint - - /** Does the constraint's domain contain the type parameters of `tl`? */ - def contains(tl: TypeLambda): Boolean - - /** Does the constraint's domain contain the type parameter `param`? */ - def contains(param: TypeParamRef): Boolean - - /** Does this constraint contain the type variable `tvar` and is it uninstantiated? */ - def contains(tvar: TypeVar): Boolean - - /** The constraint entry for given type parameter `param`, or NoType if `param` is not part of - * the constraint domain. Note: Low level, implementation dependent. - */ - def entry(param: TypeParamRef): Type - - /** The type variable corresponding to parameter `param`, or - * NoType, if `param` is not in constrained or is not paired with a type variable. - */ - def typeVarOfParam(param: TypeParamRef): Type - - /** Is it known that `param1 <:< param2`? */ - def isLess(param1: TypeParamRef, param2: TypeParamRef): Boolean - - /** The parameters that are known to be smaller wrt <: than `param` */ - def lower(param: TypeParamRef): List[TypeParamRef] - - /** The parameters that are known to be greater wrt <: than `param` */ - def upper(param: TypeParamRef): List[TypeParamRef] - - /** The lower dominator set. 
- * - * This is like `lower`, except that each parameter returned is no smaller than every other returned parameter. - */ - def minLower(param: TypeParamRef): List[TypeParamRef] - - /** The upper dominator set. - * - * This is like `upper`, except that each parameter returned is no greater than every other returned parameter. - */ - def minUpper(param: TypeParamRef): List[TypeParamRef] - - /** lower(param) \ lower(butNot) */ - def exclusiveLower(param: TypeParamRef, butNot: TypeParamRef): List[TypeParamRef] - - /** upper(param) \ upper(butNot) */ - def exclusiveUpper(param: TypeParamRef, butNot: TypeParamRef): List[TypeParamRef] - - /** The constraint bounds for given type parameter `param`. - * Poly params that are known to be smaller or greater than `param` - * are not contained in the return bounds. - * @pre `param` is not part of the constraint domain. - */ - def nonParamBounds(param: TypeParamRef)(using Context): TypeBounds - - /** A new constraint which is derived from this constraint by adding - * entries for all type parameters of `poly`. - * @param tvars A list of type variables associated with the params, - * or Nil if the constraint will just be checked for - * satisfiability but will solved to give instances of - * type variables. - */ - def add(poly: TypeLambda, tvars: List[TypeVar])(using Context): This - - /** A new constraint which is derived from this constraint by updating - * the entry for parameter `param` to `tp`. - * `tp` can be one of the following: - * - * - A TypeBounds value, indicating new constraint bounds - * - Another type, indicating a solution for the parameter - * - * @pre `this contains param`. - */ - def updateEntry(param: TypeParamRef, tp: Type)(using Context): This - - /** A constraint that includes the relationship `p1 <: p2`. - * `<:` relationships between parameters ("edges") are propagated, but - * non-parameter bounds are left alone. - * - * @param direction Must be set to `KeepParam1` or `KeepParam2` when - * `p2 <: p1` is already true depending on which parameter - * the caller intends to keep. This will avoid propagating - * bounds that will be redundant after `p1` and `p2` are - * unified. - */ - def addLess(p1: TypeParamRef, p2: TypeParamRef, - direction: UnificationDirection = UnificationDirection.NoUnification)(using Context): This - - /** A new constraint which is derived from this constraint by removing - * the type parameter `param` from the domain and replacing all top-level occurrences - * of the parameter elsewhere in the constraint by type `tp`, or a conservative - * approximation of it if that is needed to avoid cycles. - * Occurrences nested inside a refinement or prefix are not affected. - */ - def replace(param: TypeParamRef, tp: Type)(using Context): This - - /** Is entry associated with `tl` removable? This is the case if - * all type parameters of the entry are associated with type variables - * which have their `inst` fields set. - */ - def isRemovable(tl: TypeLambda): Boolean - - /** A new constraint with all entries coming from `tl` removed. */ - def remove(tl: TypeLambda)(using Context): This - - /** A new constraint with entry `from` replaced with `to` - * Rerences to `from` from within other constraint bounds are updated to `to`. - * Type variables are left alone. - */ - def subst(from: TypeLambda, to: TypeLambda)(using Context): This - - /** Is `tv` marked as hard in the constraint? */ - def isHard(tv: TypeVar): Boolean - - /** The same as this constraint, but with `tv` marked as hard. 
*/ - def withHard(tv: TypeVar)(using Context): This - - /** Gives for each instantiated type var that does not yet have its `inst` field - * set, the instance value stored in the constraint. Storing instances in constraints - * is done only in a temporary way for contexts that may be retracted - * without also retracting the type var as a whole. - */ - def instType(tvar: TypeVar): Type - - /** The given `tl` in case it is not contained in this constraint, - * a fresh copy of `tl` otherwise. - */ - def ensureFresh(tl: TypeLambda)(using Context): TypeLambda - - /** The type lambdas constrained by this constraint */ - def domainLambdas: List[TypeLambda] - - /** The type lambda parameters constrained by this constraint */ - def domainParams: List[TypeParamRef] - - /** Check whether predicate holds for all parameters in constraint */ - def forallParams(p: TypeParamRef => Boolean): Boolean - - /** Perform operation `op` on all typevars that do not have their `inst` field set. */ - def foreachTypeVar(op: TypeVar => Unit): Unit - - /** The uninstantiated typevars of this constraint, which still have a bounds constraint - */ - def uninstVars: collection.Seq[TypeVar] - - /** Whether `tl` is present in both `this` and `that` but is associated with - * different TypeVars there, meaning that the constraints cannot be merged. - */ - def hasConflictingTypeVarsFor(tl: TypeLambda, that: Constraint): Boolean - - /** Does `param` occur at the toplevel in `tp` ? - * Toplevel means: the type itself or a factor in some - * combination of `&` or `|` types. - */ - def occursAtToplevel(param: TypeParamRef, tp: Type)(using Context): Boolean - - /** A string that shows the reverse dependencies maintained by this constraint - * (coDeps and contraDeps for OrderingConstraints). - */ - def depsToString(using Context): String - - /** Does the constraint restricted to variables outside `except` depend on `tv` - * in the given direction `co`? - * @param `co` If true, test whether the constraint would change if the variable is made larger - * otherwise, test whether the constraint would change if the variable is made smaller. - */ - def dependsOn(tv: TypeVar, except: TypeVars, co: Boolean)(using Context): Boolean - - /** Depending on Config settngs: - * - Under `checkConstraintsNonCyclic`, check that no constrained - * parameter contains itself as a bound. - * - Under `checkConstraintDeps`, check hat reverse dependencies in - * constraints are correct and complete. - */ - def checkWellFormed()(using Context): this.type - - /** Check that constraint only refers to TypeParamRefs bound by itself */ - def checkClosed()(using Context): Unit - - /** Check that every typevar om this constraint has as origin a type parameter - * of athe type lambda that is associated with the typevar itself. - */ - def checkConsistentVars()(using Context): Unit -} - -/** When calling `Constraint#addLess(p1, p2, ...)`, the caller might end up - * unifying one parameter with the other, this enum lets `addLess` know which - * direction the unification will take. - */ -enum UnificationDirection: - /** Neither p1 nor p2 will be instantiated. */ - case NoUnification - /** `p2 := p1`, p1 left uninstantiated. */ - case KeepParam1 - /** `p1 := p2`, p2 left uninstantiated. 
*/ - case KeepParam2 diff --git a/tests/pos-with-compiler-cc/dotc/core/ConstraintHandling.scala b/tests/pos-with-compiler-cc/dotc/core/ConstraintHandling.scala deleted file mode 100644 index 96e965903010..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/ConstraintHandling.scala +++ /dev/null @@ -1,891 +0,0 @@ -package dotty.tools -package dotc -package core - -import Types._ -import Contexts._ -import Symbols._ -import Decorators._ -import Flags._ -import config.Config -import config.Printers.typr -import typer.ProtoTypes.{newTypeVar, representedParamRef} -import UnificationDirection.* -import NameKinds.AvoidNameKind -import util.SimpleIdentitySet -import NullOpsDecorator.stripNull - -/** Methods for adding constraints and solving them. - * - * What goes into a Constraint as opposed to a ConstrainHandler? - * - * Constraint code is purely functional: Operations get constraints and produce new ones. - * Constraint code does not have access to a type-comparer. Anything regarding lubs and glbs has to be done - * elsewhere. - * - * By comparison: Constraint handlers are parts of type comparers and can use their functionality. - * Constraint handlers update the current constraint as a side effect. - */ -trait ConstraintHandling { - - def constr: config.Printers.Printer = config.Printers.constr - - protected def isSub(tp1: Type, tp2: Type)(using Context): Boolean - protected def isSame(tp1: Type, tp2: Type)(using Context): Boolean - - protected def constraint: Constraint - protected def constraint_=(c: Constraint): Unit - - private var addConstraintInvocations = 0 - - /** If the constraint is frozen we cannot add new bounds to the constraint. */ - protected var frozenConstraint: Boolean = false - - /** Potentially a type lambda that is still instantiatable, even though the constraint - * is generally frozen. - */ - protected var caseLambda: Type = NoType - - /** If set, align arguments `S1`, `S2`when taking the glb - * `T1 { X = S1 } & T2 { X = S2 }` of a constraint upper bound for some type parameter. - * Aligning means computing `S1 =:= S2` which may change the current constraint. - * See note in TypeComparer#distributeAnd. - */ - protected var homogenizeArgs: Boolean = false - - /** We are currently comparing type lambdas. Used as a flag for - * optimization: when `false`, no need to do an expensive `pruneLambdaParams` - */ - protected var comparedTypeLambdas: Set[TypeLambda] = Set.empty - - /** Used for match type reduction: If false, we don't recognize an abstract type - * to be a subtype type of any of its base classes. This is in place only at the - * toplevel; it is turned on again when we add parts of the scrutinee to the constraint. - */ - protected var canWidenAbstract: Boolean = true - - protected var myNecessaryConstraintsOnly = false - /** When collecting the constraints needed for a particular subtyping - * judgment to be true, we sometimes need to approximate the constraint - * set (see `TypeComparer#either` for example). - * - * Normally, this means adding extra constraints which may not be necessary - * for the subtyping judgment to be true, but if this variable is set to true - * we will instead under-approximate and keep only the constraints that must - * always be present for the subtyping judgment to hold. 
- * - * This is needed for GADT bounds inference to be sound, but it is also used - * when constraining a method call based on its expected type to avoid adding - * constraints that would later prevent us from typechecking method - * arguments, see or-inf.scala and and-inf.scala for examples. - */ - protected def necessaryConstraintsOnly(using Context): Boolean = - ctx.mode.is(Mode.GadtConstraintInference) || myNecessaryConstraintsOnly - - /** If `trustBounds = false` we perform comparisons in a pessimistic way as follows: - * Given an abstract type `A >: L <: H`, a subtype comparison of any type - * with `A` will compare against both `L` and `H`. E.g. - * - * T <:< A if T <:< L and T <:< H - * A <:< T if L <:< T and H <:< T - * - * This restricted form makes sure we don't "forget" types when forming - * unions and intersections with abstract types that have bad bounds. E.g. - * the following example from neg/i8900.scala that @smarter came up with: - * We have a type variable X with constraints - * - * X >: 1, X >: x.M - * - * where `x` is a locally nested variable and `x.M` has bad bounds - * - * x.M >: Int | String <: Int & String - * - * If we trust bounds, then the lower bound of `X` is `x.M` since `x.M >: 1`. - * Then even if we correct levels on instantiation to eliminate the local `x`, - * it is alreay too late, we'd get `Int & String` as instance, which does not - * satisfy the original constraint `X >: 1`. - * - * But if `trustBounds` is false, we do not conclude the `x.M >: 1` since - * we compare both bounds and the upper bound `Int & String` is not a supertype - * of `1`. So the lower bound is `1 | x.M` and when we level-avoid that we - * get `1 | Int & String`, which simplifies to `Int`. - */ - private var myTrustBounds = true - - inline def withUntrustedBounds(op: => Type): Type = - val saved = myTrustBounds - myTrustBounds = false - try op finally myTrustBounds = saved - - def trustBounds: Boolean = - !Config.checkLevelsOnInstantiation || myTrustBounds - - def checkReset() = - assert(addConstraintInvocations == 0) - assert(frozenConstraint == false) - assert(caseLambda == NoType) - assert(homogenizeArgs == false) - assert(comparedTypeLambdas == Set.empty) - - def nestingLevel(param: TypeParamRef)(using Context) = constraint.typeVarOfParam(param) match - case tv: TypeVar => tv.nestingLevel - case _ => - // This should only happen when reducing match types (in - // TrackingTypeComparer#matchCases) or in uncommitable TyperStates (as - // asserted in ProtoTypes.constrained) and is special-cased in `levelOK` - // below. - Int.MaxValue - - /** Is `level` <= `maxLevel` or legal in the current context? */ - def levelOK(level: Int, maxLevel: Int)(using Context): Boolean = - level <= maxLevel - || ctx.isAfterTyper || !ctx.typerState.isCommittable // Leaks in these cases shouldn't break soundness - || level == Int.MaxValue // See `nestingLevel` above. - || !Config.checkLevelsOnConstraints - - /** If `param` is nested deeper than `maxLevel`, try to instantiate it to a - * fresh type variable of level `maxLevel` and return the new variable. - * If this isn't possible, throw a TypeError. 
- */ - def atLevel(maxLevel: Int, param: TypeParamRef)(using Context): TypeParamRef = - if levelOK(nestingLevel(param), maxLevel) then - return param - LevelAvoidMap(0, maxLevel)(param) match - case freshVar: TypeVar => freshVar.origin - case _ => throw TypeError( - em"Could not decrease the nesting level of ${param} from ${nestingLevel(param)} to $maxLevel in $constraint") - - def nonParamBounds(param: TypeParamRef)(using Context): TypeBounds = constraint.nonParamBounds(param) - - /** The full lower bound of `param` includes both the `nonParamBounds` and the - * params in the constraint known to be `<: param`, except that - * params with a `nestingLevel` higher than `param` will be instantiated - * to a fresh param at a legal level. See the documentation of `TypeVar` - * for details. - */ - def fullLowerBound(param: TypeParamRef)(using Context): Type = - val maxLevel = nestingLevel(param) - var loParams = constraint.minLower(param) - if maxLevel != Int.MaxValue then - loParams = loParams.mapConserve(atLevel(maxLevel, _)) - loParams.foldLeft(nonParamBounds(param).lo)(_ | _) - - /** The full upper bound of `param`, see the documentation of `fullLowerBounds` above. */ - def fullUpperBound(param: TypeParamRef)(using Context): Type = - val maxLevel = nestingLevel(param) - var hiParams = constraint.minUpper(param) - if maxLevel != Int.MaxValue then - hiParams = hiParams.mapConserve(atLevel(maxLevel, _)) - hiParams.foldLeft(nonParamBounds(param).hi)(_ & _) - - /** Full bounds of `param`, including other lower/upper params. - * - * Note that underlying operations perform subtype checks - for this reason, recursing on `fullBounds` - * of some param when comparing types might lead to infinite recursion. Consider `bounds` instead. - */ - def fullBounds(param: TypeParamRef)(using Context): TypeBounds = - nonParamBounds(param).derivedTypeBounds(fullLowerBound(param), fullUpperBound(param)) - - /** An approximating map that prevents types nested deeper than maxLevel as - * well as WildcardTypes from leaking into the constraint. - */ - class LevelAvoidMap(topLevelVariance: Int, maxLevel: Int)(using Context) extends TypeOps.AvoidMap: - variance = topLevelVariance - - def toAvoid(tp: NamedType): Boolean = - tp.prefix == NoPrefix && !tp.symbol.isStatic && !levelOK(tp.symbol.nestingLevel, maxLevel) - - /** Return a (possibly fresh) type variable of a level no greater than `maxLevel` which is: - * - lower-bounded by `tp` if variance >= 0 - * - upper-bounded by `tp` if variance <= 0 - * If this isn't possible, return the empty range. - */ - def legalVar(tp: TypeVar): Type = - val oldParam = tp.origin - val nameKind = - if variance > 0 then AvoidNameKind.UpperBound - else if variance < 0 then AvoidNameKind.LowerBound - else AvoidNameKind.BothBounds - - /** If it exists, return the first param in the list created in a previous call to `legalVar(tp)` - * with the appropriate level and variance. 
- */ - def findParam(params: List[TypeParamRef]): Option[TypeParamRef] = - params.find(p => - nestingLevel(p) <= maxLevel && representedParamRef(p) == oldParam && - (p.paramName.is(AvoidNameKind.BothBounds) || - variance != 0 && p.paramName.is(nameKind))) - - // First, check if we can reuse an existing parameter, this is more than an optimization - // since it avoids an infinite loop in tests/pos/i8900-cycle.scala - findParam(constraint.lower(oldParam)).orElse(findParam(constraint.upper(oldParam))) match - case Some(param) => - constraint.typeVarOfParam(param) - case _ => - // Otherwise, try to return a fresh type variable at `maxLevel` with - // the appropriate constraints. - val name = nameKind(oldParam.paramName.toTermName).toTypeName - val freshVar = newTypeVar(TypeBounds.upper(tp.topType), name, - nestingLevel = maxLevel, represents = oldParam) - val ok = - if variance < 0 then - addLess(freshVar.origin, oldParam) - else if variance > 0 then - addLess(oldParam, freshVar.origin) - else - unify(freshVar.origin, oldParam) - if ok then freshVar else emptyRange - end legalVar - - override def apply(tp: Type): Type = tp match - case tp: TypeVar if !tp.isInstantiated && !levelOK(tp.nestingLevel, maxLevel) => - legalVar(tp) - // TypeParamRef can occur in tl bounds - case tp: TypeParamRef => - constraint.typeVarOfParam(tp) match - case tvar: TypeVar => - apply(tvar) - case _ => super.apply(tp) - case _ => - super.apply(tp) - - override def mapWild(t: WildcardType) = - if ctx.mode.is(Mode.TypevarsMissContext) then super.mapWild(t) - else - val tvar = newTypeVar(apply(t.effectiveBounds).toBounds, nestingLevel = maxLevel) - tvar - end LevelAvoidMap - - /** Approximate `rawBound` if needed to make it a legal bound of `param` by - * avoiding wildcards and types with a level strictly greater than its - * `nestingLevel`. - * - * Note that level-checking must be performed here and cannot be delayed - * until instantiation because if we allow level-incorrect bounds, then we - * might end up reasoning with bad bounds outside of the scope where they are - * defined. This can lead to level-correct but unsound instantiations as - * demonstrated by tests/neg/i8900.scala. - */ - protected def legalBound(param: TypeParamRef, rawBound: Type, isUpper: Boolean)(using Context): Type = - // Over-approximate for soundness. - var variance = if isUpper then -1 else 1 - // ...unless we can only infer necessary constraints, in which case we - // flip the variance to under-approximate. - if necessaryConstraintsOnly then variance = -variance - - val approx = new LevelAvoidMap(variance, nestingLevel(param)): - override def legalVar(tp: TypeVar): Type = - // `legalVar` will create a type variable whose bounds depend on - // `variance`, but whether the variance is positive or negative, - // we can still infer necessary constraints since just creating a - // type variable doesn't reduce the set of possible solutions. - // Therefore, we can safely "unflip" the variance flipped above. - // This is necessary for i8900-unflip.scala to typecheck. - val v = if necessaryConstraintsOnly then -this.variance else this.variance - atVariance(v)(super.legalVar(tp)) - approx(rawBound) - end legalBound - - protected def addOneBound(param: TypeParamRef, rawBound: Type, isUpper: Boolean)(using Context): Boolean = - if !constraint.contains(param) then true - else if !isUpper && param.occursIn(rawBound) then - // We don't allow recursive lower bounds when defining a type, - // so we shouldn't allow them as constraints either. 
- false - else - val bound = legalBound(param, rawBound, isUpper) - val oldBounds @ TypeBounds(lo, hi) = constraint.nonParamBounds(param) - val equalBounds = (if isUpper then lo else hi) eq bound - if equalBounds && !bound.existsPart(_ eq param, StopAt.Static) then - // The narrowed bounds are equal and not recursive, - // so we can remove `param` from the constraint. - constraint = constraint.replace(param, bound) - true - else - // Narrow one of the bounds of type parameter `param` - // If `isUpper` is true, ensure that `param <: `bound`, otherwise ensure - // that `param >: bound`. - val narrowedBounds = - val saved = homogenizeArgs - homogenizeArgs = Config.alignArgsInAnd - try - withUntrustedBounds( - if isUpper then oldBounds.derivedTypeBounds(lo, hi & bound) - else oldBounds.derivedTypeBounds(lo | bound, hi)) - finally - homogenizeArgs = saved - //println(i"narrow bounds for $param from $oldBounds to $narrowedBounds") - val c1 = constraint.updateEntry(param, narrowedBounds) - (c1 eq constraint) - || { - constraint = c1 - val TypeBounds(lo, hi) = constraint.entry(param): @unchecked - isSub(lo, hi) - } - end addOneBound - - protected def addBoundTransitively(param: TypeParamRef, rawBound: Type, isUpper: Boolean)(using Context): Boolean = - - /** Adjust the bound `tp` in the following ways: - * - * 1. Toplevel occurrences of TypeRefs that are instantiated in the current - * constraint are also dereferenced. - * 2. Toplevel occurrences of ExprTypes lead to a `NoType` return, which - * causes the addOneBound operation to fail. - * - * An occurrence is toplevel if it is the bound itself, or a term in some - * combination of `&` or `|` types. - */ - def adjust(tp: Type): Type = tp match - case tp: AndOrType => - val p1 = adjust(tp.tp1) - val p2 = adjust(tp.tp2) - if p1.exists && p2.exists then tp.derivedAndOrType(p1, p2) else NoType - case tp: TypeVar if constraint.contains(tp.origin) => - adjust(tp.underlying) - case tp: ExprType => - // ExprTypes are not value types, so type parameters should not - // be instantiated to ExprTypes. A scenario where such an attempted - // instantiation can happen is if we unify (=> T) => () with A => () - // where A is a TypeParamRef. See the comment on EtaExpansion.etaExpand - // why types such as (=> T) => () can be constructed and i7969.scala - // as a test where this happens. - // Note that scalac by contrast allows such instantiations. But letting - // type variables be ExprTypes has its own problems (e.g. you can't write - // the resulting types down) and is largely unknown terrain. - NoType - case _ => - tp - - def description = i"constraint $param ${if isUpper then "<:" else ":>"} $rawBound to\n$constraint" - constr.println(i"adding $description$location") - if isUpper && rawBound.isRef(defn.NothingClass) && ctx.typerState.isGlobalCommittable then - def msg = i"!!! 
instantiated to Nothing: $param, constraint = $constraint" - if Config.failOnInstantiationToNothing - then assert(false, msg) - else report.log(msg) - def others = if isUpper then constraint.lower(param) else constraint.upper(param) - val bound = adjust(rawBound) - bound.exists - && addOneBound(param, bound, isUpper) && others.forall(addOneBound(_, bound, isUpper)) - .showing(i"added $description = $result$location", constr) - end addBoundTransitively - - protected def addLess(p1: TypeParamRef, p2: TypeParamRef)(using Context): Boolean = { - def description = i"ordering $p1 <: $p2 to\n$constraint" - val res = - if (constraint.isLess(p2, p1)) unify(p2, p1) - else { - val down1 = p1 :: constraint.exclusiveLower(p1, p2) - val up2 = p2 :: constraint.exclusiveUpper(p2, p1) - val lo1 = constraint.nonParamBounds(p1).lo - val hi2 = constraint.nonParamBounds(p2).hi - constr.println(i"adding $description down1 = $down1, up2 = $up2$location") - constraint = constraint.addLess(p1, p2) - down1.forall(addOneBound(_, hi2, isUpper = true)) && - up2.forall(addOneBound(_, lo1, isUpper = false)) - } - constr.println(i"added $description = $res$location") - res - } - - def location(using Context) = "" // i"in ${ctx.typerState.stateChainStr}" // use for debugging - - /** Unify p1 with p2: one parameter will be kept in the constraint, the - * other will be removed and its bounds transferred to the remaining one. - * - * If p1 and p2 have different `nestingLevel`, the parameter with the lowest - * level will be kept and the transferred bounds from the other parameter - * will be adjusted for level-correctness. - */ - private def unify(p1: TypeParamRef, p2: TypeParamRef)(using Context): Boolean = { - constr.println(s"unifying $p1 $p2") - if !constraint.isLess(p1, p2) then - constraint = constraint.addLess(p1, p2) - - val level1 = nestingLevel(p1) - val level2 = nestingLevel(p2) - val pKept = if level1 <= level2 then p1 else p2 - val pRemoved = if level1 <= level2 then p2 else p1 - - val down = constraint.exclusiveLower(p2, p1) - val up = constraint.exclusiveUpper(p1, p2) - - constraint = constraint.addLess(p2, p1, direction = if pKept eq p1 then KeepParam2 else KeepParam1) - - val boundKept = constraint.nonParamBounds(pKept).substParam(pRemoved, pKept) - var boundRemoved = constraint.nonParamBounds(pRemoved).substParam(pRemoved, pKept) - - if level1 != level2 then - boundRemoved = LevelAvoidMap(-1, math.min(level1, level2))(boundRemoved) - val TypeBounds(lo, hi) = boundRemoved: @unchecked - // After avoidance, the interval might be empty, e.g. 
in - // tests/pos/i8900-promote.scala: - // >: x.type <: Singleton - // becomes: - // >: Int <: Singleton - // In that case, we can still get a legal constraint - // by replacing the lower-bound to get: - // >: Int & Singleton <: Singleton - if !isSub(lo, hi) then - boundRemoved = TypeBounds(lo & hi, hi) - - val newBounds = (boundKept & boundRemoved).bounds - constraint = constraint.updateEntry(pKept, newBounds).replace(pRemoved, pKept) - - val lo = newBounds.lo - val hi = newBounds.hi - isSub(lo, hi) && - down.forall(addOneBound(_, hi, isUpper = true)) && - up.forall(addOneBound(_, lo, isUpper = false)) - } - - protected def isSubType(tp1: Type, tp2: Type, whenFrozen: Boolean)(using Context): Boolean = - if (whenFrozen) - isSubTypeWhenFrozen(tp1, tp2) - else - isSub(tp1, tp2) - - inline final def inFrozenConstraint[T](op: => T): T = { - val savedFrozen = frozenConstraint - val savedLambda = caseLambda - frozenConstraint = true - caseLambda = NoType - try op - finally { - frozenConstraint = savedFrozen - caseLambda = savedLambda - } - } - - final def isSubTypeWhenFrozen(tp1: Type, tp2: Type)(using Context): Boolean = inFrozenConstraint(isSub(tp1, tp2)) - final def isSameTypeWhenFrozen(tp1: Type, tp2: Type)(using Context): Boolean = inFrozenConstraint(isSame(tp1, tp2)) - - /** Test whether the lower bounds of all parameters in this - * constraint are a solution to the constraint. - */ - protected final def isSatisfiable(using Context): Boolean = - constraint.forallParams { param => - val TypeBounds(lo, hi) = constraint.entry(param): @unchecked - isSub(lo, hi) || { - report.log(i"sub fail $lo <:< $hi") - false - } - } - - /** Fix instance type `tp` by avoidance so that it does not contain references - * to types at level > `maxLevel`. - * @param tp the type to be fixed - * @param fromBelow whether type was obtained from lower bound - * @param maxLevel the maximum level of references allowed - * @param param the parameter that was instantiated - */ - private def fixLevels(tp: Type, fromBelow: Boolean, maxLevel: Int, param: TypeParamRef)(using Context) = - - def needsFix(tp: NamedType)(using Context) = - (tp.prefix eq NoPrefix) && tp.symbol.nestingLevel > maxLevel - - /** An accumulator that determines whether levels need to be fixed - * and computes on the side sets of nested type variables that need - * to be instantiated. - */ - def needsLeveling = new TypeAccumulator[Boolean]: - if !fromBelow then variance = -1 - - def apply(need: Boolean, tp: Type) = - need || tp.match - case tp: NamedType => - needsFix(tp) - || !stopBecauseStaticOrLocal(tp) && apply(need, tp.prefix) - case tp: TypeVar => - val inst = tp.instanceOpt - if inst.exists then apply(need, inst) - else if tp.nestingLevel > maxLevel then - // Change the nesting level of inner type variable to `maxLevel`. - // This means that the type variable will be instantiated later to a - // less nested type. If there are other references to the same type variable - // that do not come from the type undergoing `fixLevels`, this could lead - // to coarser types than intended. An alternative is to instantiate the - // type variable right away, but this also loses information. See - // i15934.scala for a test where the current strategey works but an early instantiation - // of `tp` would fail. 
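// ---- Editorial example (not part of the deleted file) ----------------------
// A minimal, user-level sketch of the avoidance that fixLevels/LevelAvoidMap
// implement: an inferred type may not mention a symbol nested more deeply than
// the point where the type is needed. The names Box, Local and leak are
// illustrative only, and the exact widened form may differ between releases.
class Box[+T](val value: T)

def leak() =
  class Local { def greet = "hi" }
  Box(new Local)   // Local is not visible outside leak(), so the inferred result
                   // type is widened by avoidance, roughly to Box[Object]
// -----------------------------------------------------------------------------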
- constr.println(i"widening nesting level of type variable $tp from ${tp.nestingLevel} to $maxLevel") - ctx.typerState.setNestingLevel(tp, maxLevel) - true - else false - case _ => - foldOver(need, tp) - end needsLeveling - - def levelAvoid = new TypeOps.AvoidMap: - if !fromBelow then variance = -1 - def toAvoid(tp: NamedType) = needsFix(tp) - - if Config.checkLevelsOnInstantiation && !ctx.isAfterTyper && needsLeveling(false, tp) then - typr.println(i"instance $tp for $param needs leveling to $maxLevel") - levelAvoid(tp) - else tp - end fixLevels - - /** Solve constraint set for given type parameter `param`. - * If `fromBelow` is true the parameter is approximated by its lower bound, - * otherwise it is approximated by its upper bound, unless the upper bound - * contains a reference to the parameter itself (such occurrences can arise - * for F-bounded types, `addOneBound` ensures that they never occur in the - * lower bound). - * The solved type is not allowed to contain references to types nested deeper - * than `maxLevel`. - * Wildcard types in bounds are approximated by their upper or lower bounds. - * The constraint is left unchanged. - * @return the instantiating type - * @pre `param` is in the constraint's domain. - */ - final def approximation(param: TypeParamRef, fromBelow: Boolean, maxLevel: Int)(using Context): Type = - constraint.entry(param) match - case entry: TypeBounds => - val useLowerBound = fromBelow || param.occursIn(entry.hi) - val rawInst = withUntrustedBounds( - if useLowerBound then fullLowerBound(param) else fullUpperBound(param)) - val levelInst = fixLevels(rawInst, fromBelow, maxLevel, param) - if levelInst ne rawInst then - typr.println(i"level avoid for $maxLevel: $rawInst --> $levelInst") - typr.println(i"approx $param, from below = $fromBelow, inst = $levelInst") - levelInst - case inst => - assert(inst.exists, i"param = $param\nconstraint = $constraint") - inst - end approximation - - private def isTransparent(tp: Type, traitOnly: Boolean)(using Context): Boolean = tp match - case AndType(tp1, tp2) => - isTransparent(tp1, traitOnly) && isTransparent(tp2, traitOnly) - case _ => - val cls = tp.underlyingClassRef(refinementOK = false).typeSymbol - cls.isTransparentClass && (!traitOnly || cls.is(Trait)) - - /** If `tp` is an intersection such that some operands are transparent trait instances - * and others are not, replace as many transparent trait instances as possible with Any - * as long as the result is still a subtype of `bound`. But fall back to the - * original type if the resulting widened type is a supertype of all dropped - * types (since in this case the type was not a true intersection of transparent traits - * and other types to start with). 
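// ---- Editorial example (not part of the deleted file) ----------------------
// The user-visible effect of dropTransparentTraits described above. With an
// ordinary trait the inferred element type would be Animal & Tagged; marking
// the trait `transparent` lets inference drop it, provided the result still
// conforms to the expected bound. Animal, Tagged, Dog and Cat are illustrative.
class Animal
transparent trait Tagged
class Dog extends Animal, Tagged
class Cat extends Animal, Tagged

val pets = List(new Dog, new Cat)   // inferred as List[Animal]; Tagged is dropped
// -----------------------------------------------------------------------------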
- */ - def dropTransparentTraits(tp: Type, bound: Type)(using Context): Type = - var kept: Set[Type] = Set() // types to keep since otherwise bound would not fit - var dropped: List[Type] = List() // the types dropped so far, last one on top - - def dropOneTransparentTrait(tp: Type): Type = - if isTransparent(tp, traitOnly = true) && !kept.contains(tp) then - dropped = tp :: dropped - defn.AnyType - else tp match - case AndType(tp1, tp2) => - val tp1w = dropOneTransparentTrait(tp1) - if tp1w ne tp1 then tp1w & tp2 - else - val tp2w = dropOneTransparentTrait(tp2) - if tp2w ne tp2 then tp1 & tp2w - else tp - case _ => - tp - - def recur(tp: Type): Type = - val tpw = dropOneTransparentTrait(tp) - if tpw eq tp then tp - else if tpw <:< bound then recur(tpw) - else - kept += dropped.head - dropped = dropped.tail - recur(tp) - - val saved = ctx.typerState.snapshot() - val tpw = recur(tp) - if (tpw eq tp) || dropped.forall(_ frozen_<:< tpw) then - // Rollback any constraint change that would lead to `tp` no longer - // being a valid solution. - ctx.typerState.resetTo(saved) - tp - else - tpw - end dropTransparentTraits - - /** If `tp` is an applied match type alias which is also an unreducible application - * of a higher-kinded type to a wildcard argument, widen to the match type's bound, - * in order to avoid an unreducible application of higher-kinded type ... in inferred type" - * error in PostTyper. Fixes #11246. - */ - def widenIrreducible(tp: Type)(using Context): Type = tp match - case tp @ AppliedType(tycon, _) if tycon.isLambdaSub && tp.hasWildcardArg => - tp.superType match - case MatchType(bound, _, _) => bound - case _ => tp - case _ => - tp - - /** Widen inferred type `inst` with upper `bound`, according to the following rules: - * 1. If `inst` is a singleton type, or a union containing some singleton types, - * widen (all) the singleton type(s), provided the result is a subtype of `bound` - * (i.e. `inst.widenSingletons <:< bound` succeeds with satisfiable constraint) and - * is not transparent according to `isTransparent`. - * 2a. If `inst` is a union type and `widenUnions` is true, approximate the union type - * from above by an intersection of all common base types, provided the result - * is a subtype of `bound`. - * 2b. If `inst` is a union type and `widenUnions` is false, turn it into a hard - * union type (except for unions | Null, which are kept in the state they were). - * 3. Widen some irreducible applications of higher-kinded types to wildcard arguments - * (see @widenIrreducible). - * 4. Drop transparent traits from intersections (see @dropTransparentTraits). - * - * Don't do these widenings if `bound` is a subtype of `scala.Singleton`. - * Also, if the result of these widenings is a TypeRef to a module class, - * and this type ref is different from `inst`, replace by a TermRef to - * its source module instead. - * - * At this point we also drop the @Repeated annotation to avoid inferring type arguments with it, - * as those could leak the annotation to users (see run/inferred-repeated-result). 
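// ---- Editorial example (not part of the deleted file) ----------------------
// The singleton and union widenings performed by widenInferred, as seen from
// user code. Singleton types are widened unless the expected type demands a
// singleton; soft unions are widened to a common base type unless a union is
// requested explicitly.
val a = 42                       // inferred Int, not the singleton type 42
val b: 42 = 42                   // the singleton is kept when the bound requires it

def pick(cond: Boolean) = if cond then 1 else "one"
// inferred result: a common base type (Matchable) rather than Int | String

def pickUnion(cond: Boolean): Int | String = if cond then 1 else "one"
// -----------------------------------------------------------------------------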
- */ - def widenInferred(inst: Type, bound: Type, widenUnions: Boolean)(using Context): Type = - def widenOr(tp: Type) = - if widenUnions then - val tpw = tp.widenUnion - if (tpw ne tp) && !isTransparent(tpw, traitOnly = false) && (tpw <:< bound) then tpw else tp - else tp.hardenUnions - - def widenSingle(tp: Type) = - val tpw = tp.widenSingletons - if (tpw ne tp) && (tpw <:< bound) then tpw else tp - - def isSingleton(tp: Type): Boolean = tp match - case WildcardType(optBounds) => optBounds.exists && isSingleton(optBounds.bounds.hi) - case _ => isSubTypeWhenFrozen(tp, defn.SingletonType) - - val wideInst = - if isSingleton(bound) then inst - else - val widenedFromSingle = widenSingle(inst) - val widenedFromUnion = widenOr(widenedFromSingle) - val widened = dropTransparentTraits(widenedFromUnion, bound) - widenIrreducible(widened) - - wideInst match - case wideInst: TypeRef if wideInst.symbol.is(Module) => - TermRef(wideInst.prefix, wideInst.symbol.sourceModule) - case _ => - wideInst.dropRepeatedAnnot - end widenInferred - - /** Convert all toplevel union types in `tp` to hard unions */ - extension (tp: Type) private def hardenUnions(using Context): Type = tp.widen match - case tp: AndType => - tp.derivedAndType(tp.tp1.hardenUnions, tp.tp2.hardenUnions) - case tp: RefinedType => - tp.derivedRefinedType(tp.parent.hardenUnions, tp.refinedName, tp.refinedInfo) - case tp: RecType => - tp.rebind(tp.parent.hardenUnions) - case tp: HKTypeLambda => - tp.derivedLambdaType(resType = tp.resType.hardenUnions) - case tp: OrType => - val tp1 = tp.stripNull - if tp1 ne tp then tp.derivedOrType(tp1.hardenUnions, defn.NullType) - else tp.derivedOrType(tp.tp1.hardenUnions, tp.tp2.hardenUnions, soft = false) - case _ => - tp - - /** The instance type of `param` in the current constraint (which contains `param`). - * If `fromBelow` is true, the instance type is the lub of the parameter's - * lower bounds; otherwise it is the glb of its upper bounds. However, - * a lower bound instantiation can be a singleton type only if the upper bound - * is also a singleton type. - * The instance type is not allowed to contain references to types nested deeper - * than `maxLevel`. - */ - def instanceType(param: TypeParamRef, fromBelow: Boolean, widenUnions: Boolean, maxLevel: Int)(using Context): Type = { - val approx = approximation(param, fromBelow, maxLevel).simplified - if fromBelow then - val widened = widenInferred(approx, param, widenUnions) - // Widening can add extra constraints, in particular the widened type might - // be a type variable which is now instantiated to `param`, and therefore - // cannot be used as an instantiation of `param` without creating a loop. - // If that happens, we run `instanceType` again to find a new instantation. - // (we do not check for non-toplevel occurrences: those should never occur - // since `addOneBound` disallows recursive lower bounds). - if constraint.occursAtToplevel(param, widened) then - instanceType(param, fromBelow, widenUnions, maxLevel) - else - widened - else - approx - } - - /** Constraint `c1` subsumes constraint `c2`, if under `c2` as constraint we have - * for all poly params `p` defined in `c2` as `p >: L2 <: U2`: - * - * c1 defines p with bounds p >: L1 <: U1, and - * L2 <: L1, and - * U1 <: U2 - * - * Both `c1` and `c2` are required to derive from constraint `pre`, without adding - * any new type variables but possibly narrowing already registered ones with further bounds. 
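// ---- Editorial example (not part of the deleted file) ----------------------
// End-to-end view of instanceType/approximation: the element type variable
// below is constrained only from below (by B and C), so it is approximated by
// the lub of its lower bounds and then widened, giving the common superclass A.
// The names A, B and C are illustrative only.
class A
class B extends A
class C extends A

val xs = List(new B, new C)   // inferred as List[A]
// -----------------------------------------------------------------------------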
- */ - protected final def subsumes(c1: Constraint, c2: Constraint, pre: Constraint)(using Context): Boolean = - if (c2 eq pre) true - else if (c1 eq pre) false - else { - val saved = constraint - try - // We iterate over params of `pre`, instead of `c2` as the documentation may suggest. - // As neither `c1` nor `c2` can have more params than `pre`, this only matters in one edge case. - // Constraint#forallParams only iterates over params that can be directly constrained. - // If `c2` has, compared to `pre`, instantiated a param and we iterated over params of `c2`, - // we could miss that param being instantiated to an incompatible type in `c1`. - pre.forallParams(p => - c1.entry(p).exists - && c2.upper(p).forall(c1.isLess(p, _)) - && isSubTypeWhenFrozen(c1.nonParamBounds(p), c2.nonParamBounds(p)) - ) - finally constraint = saved - } - - /** The current bounds of type parameter `param` */ - def bounds(param: TypeParamRef)(using Context): TypeBounds = { - val e = constraint.entry(param) - if (e.exists) e.bounds - else { - // TODO: should we change the type of paramInfos to nullable? - val pinfos: List[param.binder.PInfo] | Null = param.binder.paramInfos - if (pinfos != null) pinfos(param.paramNum) // pinfos == null happens in pos/i536.scala - else TypeBounds.empty - } - } - - /** Add type lambda `tl`, possibly with type variables `tvars`, to current constraint - * and propagate all bounds. - * @param tvars See Constraint#add - */ - def addToConstraint(tl: TypeLambda, tvars: List[TypeVar])(using Context): Boolean = - checkPropagated(i"initialized $tl") { - constraint = constraint.add(tl, tvars) - tl.paramRefs.forall { param => - val lower = constraint.lower(param) - val upper = constraint.upper(param) - constraint.entry(param) match { - case bounds: TypeBounds => - if lower.nonEmpty && !bounds.lo.isRef(defn.NothingClass) - || upper.nonEmpty && !bounds.hi.isAny - then constr.println(i"INIT*** $tl") - lower.forall(addOneBound(_, bounds.hi, isUpper = true)) && - upper.forall(addOneBound(_, bounds.lo, isUpper = false)) - case x => - // Happens if param was already solved while processing earlier params of the same TypeLambda. - // See #4720. - true - } - } - } - - /** Can `param` be constrained with new bounds? */ - final def canConstrain(param: TypeParamRef): Boolean = - (!frozenConstraint || (caseLambda `eq` param.binder)) && constraint.contains(param) - - /** Is `param` assumed to be a sub- and super-type of any other type? - * This holds if `TypeVarsMissContext` is set unless `param` is a part - * of a MatchType that is currently normalized. - */ - final def assumedTrue(param: TypeParamRef)(using Context): Boolean = - ctx.mode.is(Mode.TypevarsMissContext) && (caseLambda `ne` param.binder) - - /** Add constraint `param <: bound` if `fromBelow` is false, `param >: bound` otherwise. - * `bound` is assumed to be in normalized form, as specified in `firstTry` and - * `secondTry` of `TypeComparer`. In particular, it should not be an alias type, - * lazy ref, typevar, wildcard type, error type. In addition, upper bounds may - * not be AndTypes and lower bounds may not be OrTypes. This is assured by the - * way isSubType is organized. - */ - protected def addConstraint(param: TypeParamRef, bound: Type, fromBelow: Boolean)(using Context): Boolean = - if !bound.isValueTypeOrLambda then return false - - /** When comparing lambdas we might get constraints such as - * `A <: X0` or `A = List[X0]` where `A` is a constrained parameter - * and `X0` is a lambda parameter. 
The constraint for `A` is not allowed - * to refer to such a lambda parameter because the lambda parameter is - * not visible where `A` is defined. Consequently, we need to - * approximate the bound so that the lambda parameter does not appear in it. - * If `tp` is an upper bound, we need to approximate with something smaller, - * otherwise something larger. - * Test case in pos/i94-nada.scala. This test crashes with an illegal instance - * error in Test2 when the rest of the SI-2712 fix is applied but `pruneLambdaParams` is - * missing. - */ - def avoidLambdaParams(tp: Type) = - if comparedTypeLambdas.nonEmpty then - val approx = new ApproximatingTypeMap { - if (!fromBelow) variance = -1 - def apply(t: Type): Type = t match { - case t @ TypeParamRef(tl: TypeLambda, n) if comparedTypeLambdas contains tl => - val bounds = tl.paramInfos(n) - range(bounds.lo, bounds.hi) - case tl: TypeLambda => - val saved = comparedTypeLambdas - comparedTypeLambdas -= tl - try mapOver(tl) - finally comparedTypeLambdas = saved - case _ => - mapOver(t) - } - } - approx(tp) - else tp - - def addParamBound(bound: TypeParamRef) = - constraint.entry(param) match { - case _: TypeBounds => - if (fromBelow) addLess(bound, param) else addLess(param, bound) - case tp => - if (fromBelow) isSub(bound, tp) else isSub(tp, bound) - } - - def kindCompatible(tp1: Type, tp2: Type): Boolean = - val tparams1 = tp1.typeParams - val tparams2 = tp2.typeParams - tparams1.corresponds(tparams2)((p1, p2) => kindCompatible(p1.paramInfo, p2.paramInfo)) - && (tparams1.isEmpty || kindCompatible(tp1.hkResult, tp2.hkResult)) - || tp1.hasAnyKind - || tp2.hasAnyKind - - def description = i"constr $param ${if (fromBelow) ">:" else "<:"} $bound:\n$constraint" - - //checkPropagated(s"adding $description")(true) // DEBUG in case following fails - checkPropagated(s"added $description") { - addConstraintInvocations += 1 - val saved = canWidenAbstract - canWidenAbstract = true - try bound match - case bound: TypeParamRef if constraint contains bound => - addParamBound(bound) - case _ => - val pbound = avoidLambdaParams(bound) - kindCompatible(param, pbound) && addBoundTransitively(param, pbound, !fromBelow) - finally - canWidenAbstract = saved - addConstraintInvocations -= 1 - } - end addConstraint - - /** Check that constraint is fully propagated. 
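// ---- Editorial example (not part of the deleted file) ----------------------
// Constraints over lambda parameters like the ones discussed above arise when
// higher-kinded type parameters are inferred (the use case behind the SI-2712
// fix mentioned for avoidLambdaParams). Here F has to be solved as a type
// lambda over Either's second parameter. mapF is an illustrative name only.
def mapF[F[_], A, B](fa: F[A])(f: A => B): F[B] = ???

val e: Either[String, Int] = Right(1)
val r = mapF(e)(_ + 1)   // F is inferred as [X] =>> Either[String, X]; r: Either[String, Int]
// -----------------------------------------------------------------------------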
See comment in Config.checkConstraintsPropagated */ - def checkPropagated(msg: => String)(result: Boolean)(using Context): Boolean = { - if (Config.checkConstraintsPropagated && result && addConstraintInvocations == 0) - inFrozenConstraint { - for (p <- constraint.domainParams) { - def check(cond: => Boolean, q: TypeParamRef, ordering: String, explanation: String): Unit = - assert(cond, i"propagation failure for $p $ordering $q: $explanation\n$msg") - for (u <- constraint.upper(p)) - check(bounds(p).hi <:< bounds(u).hi, u, "<:", "upper bound not propagated") - for (l <- constraint.lower(p)) { - check(bounds(l).lo <:< bounds(p).hi, l, ">:", "lower bound not propagated") - check(constraint.isLess(l, p), l, ">:", "reverse ordering (<:) missing") - } - } - } - result - } -} diff --git a/tests/pos-with-compiler-cc/dotc/core/ConstraintRunInfo.scala b/tests/pos-with-compiler-cc/dotc/core/ConstraintRunInfo.scala deleted file mode 100644 index d2b1246a8149..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/ConstraintRunInfo.scala +++ /dev/null @@ -1,23 +0,0 @@ -package dotty.tools.dotc -package core - -import Contexts._ -import config.Printers.{default, typr} - -trait ConstraintRunInfo { self: Run => - private var maxSize = 0 - private var maxConstraint: Constraint | Null = _ - def recordConstraintSize(c: Constraint, size: Int): Unit = - if (size > maxSize) { - maxSize = size - maxConstraint = c - } - def printMaxConstraint()(using Context): Unit = - if maxSize > 0 then - val printer = if ctx.settings.YdetailedStats.value then default else typr - printer.println(s"max constraint size: $maxSize") - try printer.println(s"max constraint = ${maxConstraint.nn.show}") - catch case ex: StackOverflowError => printer.println("max constraint cannot be printed due to stack overflow") - - protected def reset(): Unit = maxConstraint = null -} diff --git a/tests/pos-with-compiler-cc/dotc/core/ContextOps.scala b/tests/pos-with-compiler-cc/dotc/core/ContextOps.scala deleted file mode 100644 index 20687dc1663a..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/ContextOps.scala +++ /dev/null @@ -1,115 +0,0 @@ -package dotty.tools.dotc -package core - -import Contexts._, Symbols._, Types._, Flags._ -import Denotations._, SymDenotations._ -import Names.Name, StdNames.nme -import ast.untpd -import caps.unsafe.unsafeBoxFunArg - -/** Extension methods for contexts where we want to keep the ctx. syntax */ -object ContextOps: - - extension (ctx: Context) - - /** Enter symbol into current class, if current class is owner of current context, - * or into current scope, if not. Should always be called instead of scope.enter - * in order to make sure that updates to class members are reflected in - * finger prints. 
- */ - def enter(sym: Symbol): Symbol = inContext(ctx) { - ctx.owner match - case cls: ClassSymbol => cls.classDenot.enter(sym) - case _ => ctx.scope.openForMutations.enter(sym) - sym - } - - /** The denotation with the given `name` and all `required` flags in current context - */ - def denotNamed(name: Name, required: FlagSet = EmptyFlags, excluded: FlagSet = EmptyFlags): Denotation = - inContext(ctx) { - if (ctx.owner.isClass) - if (ctx.outer.owner == ctx.owner) { // inner class scope; check whether we are referring to self - if (ctx.scope.size == 1) { - val elem = ctx.scope.lastEntry.nn - if (elem.name == name) return elem.sym.denot // return self - } - val pre = ctx.owner.thisType - if ctx.isJava then javaFindMember(name, pre, required, excluded) - else pre.findMember(name, pre, required, excluded) - } - else // we are in the outermost context belonging to a class; self is invisible here. See inClassContext. - ctx.owner.findMember(name, ctx.owner.thisType, required, excluded) - else - ctx.scope.denotsNamed(name).filterWithFlags(required, excluded).toDenot(NoPrefix) - } - - final def javaFindMember(name: Name, pre: Type, required: FlagSet = EmptyFlags, excluded: FlagSet = EmptyFlags): Denotation = - assert(ctx.isJava) - inContext(ctx) { - - val preSym = pre.typeSymbol - - // 1. Try to search in current type and parents. - val directSearch = pre.findMember(name, pre, required, excluded) - - // 2. Try to search in companion class if current is an object. - def searchCompanionClass = if preSym.is(Flags.Module) then - preSym.companionClass.thisType.findMember(name, pre, required, excluded) - else NoDenotation - - // 3. Try to search in companion objects of super classes. - // In Java code, static inner classes, which we model as members of the companion object, - // can be referenced from an ident in a subclass or by a selection prefixed by the subclass. - def searchSuperCompanionObjects = - val toSearch = if preSym.is(Flags.Module) then - if preSym.companionClass.exists then - preSym.companionClass.asClass.baseClasses - else Nil - else - preSym.asClass.baseClasses - - toSearch.iterator.map { bc => - val pre1 = bc.companionModule.namedType - pre1.findMember(name, pre1, required, excluded) - }.find(_.exists).getOrElse(NoDenotation) - - if preSym.isClass then - directSearch orElse searchCompanionClass orElse searchSuperCompanionObjects - else - directSearch - } - - /** A fresh local context with given tree and owner. - * Owner might not exist (can happen for self valdefs), in which case - * no owner is set in result context - */ - def localContext(tree: untpd.Tree, owner: Symbol): FreshContext = inContext(ctx) { - val freshCtx = ctx.fresh.setTree(tree) - if owner.exists then freshCtx.setOwner(owner) else freshCtx - } - - /** Context where `sym` is defined, assuming we are in a nested context. 
*/ - def defContext(sym: Symbol): Context = inContext(ctx) { - ctx.outersIterator - .dropWhile(((ctx: Context) => ctx.owner != sym).unsafeBoxFunArg) - .dropWhile(((ctx: Context) => ctx.owner == sym).unsafeBoxFunArg) - .next() - } - - /** A new context for the interior of a class */ - def inClassContext(selfInfo: TypeOrSymbol): Context = - inline def op(using Context): Context = - val localCtx: Context = ctx.fresh.setNewScope - selfInfo match { - case sym: Symbol if sym.exists && sym.name != nme.WILDCARD => localCtx.scope.openForMutations.enter(sym) - case _ => - } - localCtx - op(using ctx) - - def packageContext(tree: untpd.PackageDef, pkg: Symbol): Context = inContext(ctx) { - if (pkg.is(Package)) ctx.fresh.setOwner(pkg.moduleClass).setTree(tree) - else ctx - } -end ContextOps diff --git a/tests/pos-with-compiler-cc/dotc/core/Contexts.scala b/tests/pos-with-compiler-cc/dotc/core/Contexts.scala deleted file mode 100644 index 37fde2d7b604..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/Contexts.scala +++ /dev/null @@ -1,1041 +0,0 @@ -package dotty.tools -package dotc -package core - -import interfaces.CompilerCallback -import Decorators._ -import Periods._ -import Names._ -import Phases._ -import Types._ -import Symbols._ -import Scopes._ -import Uniques._ -import ast.Trees._ -import ast.untpd -import util.{NoSource, SimpleIdentityMap, SourceFile, HashSet, ReusableInstance} -import typer.{Implicits, ImportInfo, SearchHistory, SearchRoot, TypeAssigner, Typer, Nullables} -import inlines.Inliner -import Nullables._ -import Implicits.ContextualImplicits -import config.Settings._ -import config.Config -import reporting._ -import io.{AbstractFile, NoAbstractFile, PlainFile, Path} -import scala.io.Codec -import collection.mutable -import printing._ -import config.{JavaPlatform, SJSPlatform, Platform, ScalaSettings} -import classfile.ReusableDataReader -import StdNames.nme -import compiletime.uninitialized - -import annotation.internal.sharable -import annotation.retains - -import DenotTransformers.DenotTransformer -import dotty.tools.dotc.profile.Profiler -import util.Property.Key -import util.Store -import xsbti.AnalysisCallback -import plugins._ -import java.util.concurrent.atomic.AtomicInteger -import java.nio.file.InvalidPathException -import language.experimental.pureFunctions - -object Contexts { - - //@sharable var nextId = 0 - - private val (compilerCallbackLoc, store1) = Store.empty.newLocation[CompilerCallback]() - private val (sbtCallbackLoc, store2) = store1.newLocation[AnalysisCallback]() - private val (printerFnLoc, store3) = store2.newLocation[DetachedContext -> Printer](new RefinedPrinter(_)) - private val (settingsStateLoc, store4) = store3.newLocation[SettingsState]() - private val (compilationUnitLoc, store5) = store4.newLocation[CompilationUnit]() - private val (runLoc, store6) = store5.newLocation[Run | Null]() - private val (profilerLoc, store7) = store6.newLocation[Profiler]() - private val (notNullInfosLoc, store8) = store7.newLocation[List[NotNullInfo]]() - private val (importInfoLoc, store9) = store8.newLocation[ImportInfo | Null]() - private val (typeAssignerLoc, store10) = store9.newLocation[TypeAssigner](TypeAssigner) - - private val initialStore = store10 - - /** The current context */ - inline def ctx(using ctx: Context): Context = ctx - - /** Run `op` with given context */ - inline def inContext[T](c: Context)(inline op: Context ?-> T): T = - op(using c) - - /** Execute `op` at given period */ - inline def atPeriod[T](pd: Period)(inline op: Context ?-> 
T)(using Context): T = - op(using ctx.fresh.setPeriod(pd)) - - /** Execute `op` at given phase id */ - inline def atPhase[T](pid: PhaseId)(inline op: Context ?-> T)(using Context): T = - op(using ctx.withPhase(pid)) - - /** Execute `op` at given phase */ - inline def atPhase[T](phase: Phase)(inline op: Context ?-> T)(using Context): T = - op(using ctx.withPhase(phase)) - - inline def atNextPhase[T](inline op: Context ?-> T)(using Context): T = - atPhase(ctx.phase.next)(op) - - /** Execute `op` at the current phase if it's before the first transform phase, - * otherwise at the last phase before the first transform phase. - * - * Note: this should be used instead of `atPhaseNoLater(ctx.picklerPhase)` - * because the later won't work if the `Pickler` phase is not present (for example, - * when using `QuoteCompiler`). - */ - inline def atPhaseBeforeTransforms[T](inline op: Context ?-> T)(using Context): T = - atPhaseNoLater(firstTransformPhase.prev)(op) - - inline def atPhaseNoLater[T](limit: Phase)(inline op: Context ?-> T)(using Context): T = - op(using if !limit.exists || ctx.phase <= limit then ctx else ctx.withPhase(limit)) - - inline def atPhaseNoEarlier[T](limit: Phase)(inline op: Context ?-> T)(using Context): T = - op(using if !limit.exists || limit <= ctx.phase then ctx else ctx.withPhase(limit)) - - inline def inMode[T](mode: Mode)(inline op: Context ?-> T)(using ctx: Context): T = - op(using if mode != ctx.mode then ctx.fresh.setMode(mode) else ctx) - - inline def withMode[T](mode: Mode)(inline op: Context ?-> T)(using ctx: Context): T = - inMode(ctx.mode | mode)(op) - - inline def withoutMode[T](mode: Mode)(inline op: Context ?-> T)(using ctx: Context): T = - inMode(ctx.mode &~ mode)(op) - - inline def inDetachedContext[T](inline op: DetachedContext ?-> T)(using ctx: Context): T = - op(using ctx.detach) - - type Context = ContextCls @retains(caps.cap) - - /** A context is passed basically everywhere in dotc. - * This is convenient but carries the risk of captured contexts in - * objects that turn into space leaks. To combat this risk, here are some - * conventions to follow: - * - * - Never let an implicit context be an argument of a class whose instances - * live longer than the context. - * - Classes that need contexts for their initialization take an explicit parameter - * named `initctx`. They pass initctx to all positions where it is needed - * (and these positions should all be part of the intialization sequence of the class). - * - Classes that need contexts that survive initialization are instead passed - * a "condensed context", typically named `cctx` (or they create one). Condensed contexts - * just add some basic information to the context base without the - * risk of capturing complete trees. - * - To make sure these rules are kept, it would be good to do a sanity - * check using bytecode inspection with javap or scalap: Keep track - * of all class fields of type context; allow them only in whitelisted - * classes (which should be short-lived). - */ - abstract class ContextCls(val base: ContextBase) { - - //val id = nextId - //nextId += 1 - //assert(id != 35599) - - protected given Context = this - - def outer: ContextCls @retains(this) - def period: Period - def mode: Mode - def owner: Symbol - def tree: Tree[?] 
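// ---- Editorial example (not part of the deleted file) ----------------------
// The inline helpers above (inContext, atPhase, inMode, ...) are all built on
// context functions. A minimal standalone version of the pattern, with an
// illustrative Ctx type:
case class Ctx(phase: String)

inline def inCtx[T](c: Ctx)(inline op: Ctx ?=> T): T = op(using c)
def currentPhase(using c: Ctx): String = c.phase

val report = inCtx(Ctx("typer")) { currentPhase }   // "typer"
// -----------------------------------------------------------------------------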
- def scope: Scope - def typerState: TyperState - def gadt: GadtConstraint - def searchHistory: SearchHistory - def source: SourceFile - - /** All outer contexts, ending in `base.initialCtx` and then `NoContext` */ - def outersIterator: Iterator[ContextCls @retains(this)] - - /** A map in which more contextual properties can be stored - * Typically used for attributes that are read and written only in special situations. - */ - def moreProperties: Map[Key[Any], Any] - - def property[T](key: Key[T]): Option[T] = - moreProperties.get(key).asInstanceOf[Option[T]] - - /** A store that can be used by sub-components. - * Typically used for attributes that are defined only once per compilation unit. - * Access to store entries is much faster than access to properties, and only - * slightly slower than a normal field access would be. - */ - def store: Store - - /** The compiler callback implementation, or null if no callback will be called. */ - def compilerCallback: CompilerCallback = store(compilerCallbackLoc) - - /** The sbt callback implementation if we are run from sbt, null otherwise */ - def sbtCallback: AnalysisCallback = store(sbtCallbackLoc) - - /** The current plain printer */ - def printerFn: DetachedContext -> Printer = store(printerFnLoc) - - /** A function creating a printer */ - def printer: Printer = - val pr = printerFn(detach) - if this.settings.YplainPrinter.value then pr.plain else pr - - /** The current settings values */ - def settingsState: SettingsState = store(settingsStateLoc) - - /** The current compilation unit */ - def compilationUnit: CompilationUnit = store(compilationUnitLoc) - - /** The current compiler-run */ - def run: Run | Null = store(runLoc) - - /** The current compiler-run profiler */ - def profiler: Profiler = store(profilerLoc) - - /** The paths currently known to be not null */ - def notNullInfos: List[NotNullInfo] = store(notNullInfosLoc) - - /** The currently active import info */ - def importInfo: ImportInfo | Null = store(importInfoLoc) - - /** The current type assigner or typer */ - def typeAssigner: TypeAssigner = store(typeAssignerLoc) - - /** The new implicit references that are introduced by this scope */ - private var implicitsCache: ContextualImplicits | Null = null - def implicits: ContextualImplicits = { - if (implicitsCache == null) - implicitsCache = { - val implicitRefs: List[ImplicitRef] = - if (isClassDefContext) - try owner.thisType.implicitMembers - catch { - case ex: CyclicReference => Nil - } - else if (isImportContext) importInfo.nn.importedImplicits - else if (isNonEmptyScopeContext) scope.implicitDecls - else Nil - val outerImplicits = - if (isImportContext && importInfo.nn.unimported.exists) - outer.implicits exclude importInfo.nn.unimported - else - outer.implicits - if (implicitRefs.isEmpty) outerImplicits - else new ContextualImplicits(implicitRefs, outerImplicits, isImportContext)(detach) - } - implicitsCache.nn - } - - /** Either the current scope, or, if the current context owner is a class, - * the declarations of the current class. 
- */ - def effectiveScope(using Context): Scope = - val myOwner: Symbol | Null = owner - if myOwner != null && myOwner.isClass then myOwner.asClass.unforcedDecls - else scope - - def nestingLevel: Int = effectiveScope.nestingLevel - - /** Sourcefile corresponding to given abstract file, memoized */ - def getSource(file: AbstractFile, codec: -> Codec = Codec(settings.encoding.value)) = { - util.Stats.record("Context.getSource") - base.sources.getOrElseUpdate(file, SourceFile(file, codec)) - } - - /** SourceFile with given path name, memoized */ - def getSource(path: TermName): SourceFile = getFile(path) match - case NoAbstractFile => NoSource - case file => getSource(file) - - /** SourceFile with given path, memoized */ - def getSource(path: String): SourceFile = getSource(path.toTermName) - - /** AbstraFile with given path name, memoized */ - def getFile(name: TermName): AbstractFile = base.files.get(name) match - case Some(file) => - file - case None => - try - val file = new PlainFile(Path(name.toString)) - base.files(name) = file - file - catch - case ex: InvalidPathException => - report.error(em"invalid file path: ${ex.getMessage}") - NoAbstractFile - - /** AbstractFile with given path, memoized */ - def getFile(name: String): AbstractFile = getFile(name.toTermName) - - final def withPhase(phase: Phase): Context = ctx.fresh.setPhase(phase.id) - final def withPhase(pid: PhaseId): Context = ctx.fresh.setPhase(pid) - - private var related: SimpleIdentityMap[SourceFile, DetachedContext] | Null = null - - private def lookup(key: SourceFile): DetachedContext | Null = - util.Stats.record("Context.related.lookup") - if related == null then - related = SimpleIdentityMap.empty - null - else - related.nn(key) - - final def withSource(source: SourceFile): Context = - util.Stats.record("Context.withSource") - if this.source eq source then - this - else - var ctx1 = lookup(source) - if ctx1 == null then - util.Stats.record("Context.withSource.new") - val ctx2 = fresh.setSource(source) - if ctx2.compilationUnit eq NoCompilationUnit then - // `source` might correspond to a file not necessarily - // in the current project (e.g. when inlining library code), - // so set `mustExist` to false. - ctx2.setCompilationUnit(CompilationUnit(source, mustExist = false)) - val dctx = ctx2.detach - ctx1 = dctx - related = related.nn.updated(source, dctx) - ctx1 - - // `creationTrace`-related code. To enable, uncomment the code below and the - // call to `setCreationTrace()` in this file. - /* - /** If -Ydebug is on, the top of the stack trace where this context - * was created, otherwise `null`. - */ - private var creationTrace: Array[StackTraceElement] = uninitialized - - private def setCreationTrace() = - creationTrace = (new Throwable).getStackTrace().take(20) - - /** Print all enclosing context's creation stacktraces */ - def printCreationTraces() = { - println("=== context creation trace =======") - for (ctx <- outersIterator) { - println(s">>>>>>>>> $ctx") - if (ctx.creationTrace != null) println(ctx.creationTrace.mkString("\n")) - } - println("=== end context creation trace ===") - } - */ - - /** The current reporter */ - def reporter: Reporter = typerState.reporter - - final def phase: Phase = base.phases(period.firstPhaseId) - final def runId = period.runId - final def phaseId = period.phaseId - - final def lastPhaseId = base.phases.length - 1 - - /** Does current phase use an erased types interpretation? */ - final def erasedTypes = phase.erasedTypes - - /** Are we in a Java compilation unit? 
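// ---- Editorial example (not part of the deleted file) ----------------------
// getSource/getFile above memoise their results in per-run hash maps keyed by
// file or name. The same pattern, standalone, with an illustrative Source type:
import scala.collection.mutable

final case class Source(path: String, text: String)

object Sources:
  private val cache = mutable.HashMap.empty[String, Source]
  def getSource(path: String): Source =
    cache.getOrElseUpdate(path, Source(path, s"<contents of $path>"))
// -----------------------------------------------------------------------------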
*/ - final def isJava: Boolean = compilationUnit.isJava - - /** Is current phase after TyperPhase? */ - final def isAfterTyper = base.isAfterTyper(phase) - final def isTyper = base.isTyper(phase) - - /** Is this a context for the members of a class definition? */ - def isClassDefContext: Boolean = - owner.isClass && (owner ne outer.owner) - - /** Is this a context that introduces an import clause? */ - def isImportContext: Boolean = - (this ne NoContext) - && (outer ne NoContext) - && (this.importInfo nen outer.importInfo) - - /** Is this a context that introduces a non-empty scope? */ - def isNonEmptyScopeContext: Boolean = - (this.scope ne outer.scope) && !this.scope.isEmpty - - /** Is this a context for typechecking an inlined body? */ - def isInlineContext: Boolean = - typer.isInstanceOf[Inliner#InlineTyper] - - /** The next outer context whose tree is a template or package definition - * Note: Currently unused - def enclTemplate: Context = { - var c = this - while (c != NoContext && !c.tree.isInstanceOf[Template[?]] && !c.tree.isInstanceOf[PackageDef[?]]) - c = c.outer - c - }*/ - - /** The context for a supercall. This context is used for elaborating - * the parents of a class and their arguments. - * The context is computed from the current class context. It has - * - * - as owner: The primary constructor of the class - * - as outer context: The context enclosing the class context - * - as scope: The parameter accessors in the class context - * - * The reasons for this peculiar choice of attributes are as follows: - * - * - The constructor must be the owner, because that's where any local methods or closures - * should go. - * - The context may not see any class members (inherited or defined), and should - * instead see definitions defined in the outer context which might be shadowed by - * such class members. That's why the outer context must be the outer context of the class. - * - At the same time the context should see the parameter accessors of the current class, - * that's why they get added to the local scope. An alternative would have been to have the - * context see the constructor parameters instead, but then we'd need a final substitution step - * from constructor parameters to class parameter accessors. - */ - def superCallContext: Context = { - val locals = newScopeWith(owner.typeParams ++ owner.asClass.paramAccessors: _*) - superOrThisCallContext(owner.primaryConstructor, locals) - } - - /** The context for the arguments of a this(...) constructor call. - * The context is computed from the local auxiliary constructor context. - * It has - * - * - as owner: The auxiliary constructor - * - as outer context: The context enclosing the enclosing class context - * - as scope: The parameters of the auxiliary constructor. - */ - def thisCallArgContext: Context = { - val constrCtx = detach.outersIterator.dropWhile(_.outer.owner == owner).next() - superOrThisCallContext(owner, constrCtx.scope) - .setTyperState(typerState) - .setGadt(gadt) - .fresh - .setScope(this.scope) - } - - /** The super- or this-call context with given owner and locals. 
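// ---- Editorial example (not part of the deleted file) ----------------------
// The visibility rules encoded by superCallContext, seen from user code: parent
// constructor arguments are typed outside the class body, so they can use
// constructor parameters but not other members of the class being defined.
// Base, Derived and their members are illustrative names only.
class Base(val label: String)

class Derived(tag: String) extends Base(tag.toUpperCase):   // ok: tag is a constructor parameter
  val suffix: String = "!"

// class Broken(tag: String) extends Base(suffix):          // does not compile: `suffix` is a class
//   val suffix: String = "!"                                // member, invisible in the super call
// -----------------------------------------------------------------------------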
*/ - private def superOrThisCallContext(owner: Symbol, locals: Scope): FreshContext = { - var classCtx = detach.outersIterator.dropWhile(!_.isClassDefContext).next() - classCtx.outer.fresh.setOwner(owner) - .setScope(locals) - .setMode(classCtx.mode) - } - - /** The context of expression `expr` seen as a member of a statement sequence */ - def exprContext(stat: Tree[?], exprOwner: Symbol): Context = - if (exprOwner == this.owner) this - else if (untpd.isSuperConstrCall(stat) && this.owner.isClass) superCallContext - else fresh.setOwner(exprOwner) - - /** A new context that summarizes an import statement */ - def importContext(imp: Import[?], sym: Symbol): FreshContext = - fresh.setImportInfo(ImportInfo(sym, imp.selectors, imp.expr)) - - /** Is the debug option set? */ - def debug: Boolean = base.settings.Ydebug.value - - /** Is the verbose option set? */ - def verbose: Boolean = base.settings.verbose.value - - /** Should use colors when printing? */ - def useColors: Boolean = - base.settings.color.value == "always" - - /** Is the explicit nulls option set? */ - def explicitNulls: Boolean = base.settings.YexplicitNulls.value - - /** A fresh clone of this context embedded in this context. */ - def fresh: FreshContext = freshOver(this) - - /** A fresh clone of this context embedded in the specified `outer` context. */ - def freshOver(outer: Context): FreshContext = - util.Stats.record("Context.fresh") - FreshContext(base).init(outer, this).setTyperState(this.typerState) - - final def withOwner(owner: Symbol): Context = - if (owner ne this.owner) fresh.setOwner(owner) else this - - final def withTyperState(typerState: TyperState): Context = - if typerState ne this.typerState then fresh.setTyperState(typerState) else this - - final def withUncommittedTyperState: Context = - withTyperState(typerState.uncommittedAncestor) - - final def withProperty[T](key: Key[T], value: Option[T]): Context = - if (property(key) == value) this - else value match { - case Some(v) => fresh.setProperty(key, v) - case None => fresh.dropProperty(key) - } - - def typer: Typer = this.typeAssigner match { - case typer: Typer => typer - case _ => new Typer - } - - override def toString: String = - //if true then - // outersIterator.map { ctx => - // i"${ctx.id} / ${ctx.owner} / ${ctx.moreProperties.valuesIterator.map(_.getClass).toList.mkString(", ")}" - // }.mkString("\n") - //else - def iinfo(using Context) = - val info = ctx.importInfo - if (info == null) "" else i"${info.selectors}%, %" - def cinfo(using Context) = - val core = s" owner = ${ctx.owner}, scope = ${ctx.scope}, import = $iinfo" - if (ctx ne NoContext) && (ctx.implicits ne ctx.outer.implicits) then - s"$core, implicits = ${ctx.implicits}" - else - core - s"""Context( - |${outersIterator.map(ctx => cinfo(using ctx)).mkString("\n\n")})""".stripMargin - - def settings: ScalaSettings = base.settings - def definitions: Definitions = base.definitions - def platform: Platform = base.platform - def pendingUnderlying: util.HashSet[Type] = base.pendingUnderlying - def uniqueNamedTypes: Uniques.NamedTypeUniques = base.uniqueNamedTypes - def uniques: util.WeakHashSet[Type] = base.uniques - - def initialize()(using Context): Unit = base.initialize() - - protected def resetCaches(): Unit = - implicitsCache = null - related = null - - /** Reuse this context as a fresh context nested inside `outer` */ - def reuseIn(outer: Context): this.type - - def detach: DetachedContext - } - - object detached: - opaque type DetachedContext <: ContextCls = ContextCls - inline def 
apply(c: ContextCls): DetachedContext = c - - type DetachedContext = detached.DetachedContext - - /** A condensed context provides only a small memory footprint over - * a Context base, and therefore can be stored without problems in - * long-lived objects. - abstract class CondensedContext extends Context { - override def condensed = this - } - */ - - /** A fresh context allows selective modification - * of its attributes using the with... methods. - */ - class FreshContext(base: ContextBase) extends ContextCls(base) { thiscontext => - - private var _outer: DetachedContext = uninitialized - def outer: DetachedContext = _outer - - def outersIterator: Iterator[ContextCls] = new Iterator[ContextCls] { - var current: ContextCls = thiscontext - def hasNext = current != NoContext - def next = { val c = current; current = current.outer; c } - } - - private var _period: Period = uninitialized - final def period: Period = _period - - private var _mode: Mode = uninitialized - final def mode: Mode = _mode - - private var _owner: Symbol = uninitialized - final def owner: Symbol = _owner - - private var _tree: Tree[?]= _ - final def tree: Tree[?] = _tree - - private var _scope: Scope = uninitialized - final def scope: Scope = _scope - - private var _typerState: TyperState = uninitialized - final def typerState: TyperState = _typerState - - private var _gadt: GadtConstraint = uninitialized - final def gadt: GadtConstraint = _gadt - - private var _searchHistory: SearchHistory = uninitialized - final def searchHistory: SearchHistory = _searchHistory - - private var _source: SourceFile = uninitialized - final def source: SourceFile = _source - - private var _moreProperties: Map[Key[Any], Any] = uninitialized - final def moreProperties: Map[Key[Any], Any] = _moreProperties - - private var _store: Store = uninitialized - final def store: Store = _store - - /** Initialize all context fields, except typerState, which has to be set separately - * @param outer The outer context - * @param origin The context from which fields are copied - */ - private[Contexts] def init(outer: Context, origin: Context): this.type = { - _outer = outer.asInstanceOf[DetachedContext] - _period = origin.period - _mode = origin.mode - _owner = origin.owner - _tree = origin.tree - _scope = origin.scope - _gadt = origin.gadt - _searchHistory = origin.searchHistory - _source = origin.source - _moreProperties = origin.moreProperties - _store = origin.store - this - } - - def reuseIn(outer: Context): this.type = - resetCaches() - init(outer, outer) - - def detach: DetachedContext = detached(this) - - def setPeriod(period: Period): this.type = - util.Stats.record("Context.setPeriod") - assert(period.firstPhaseId == period.lastPhaseId, period) - this._period = period - this - - def setMode(mode: Mode): this.type = - util.Stats.record("Context.setMode") - this._mode = mode - this - - def setOwner(owner: Symbol): this.type = - util.Stats.record("Context.setOwner") - assert(owner != NoSymbol) - this._owner = owner - this - - def setTree(tree: Tree[?]): this.type = - util.Stats.record("Context.setTree") - this._tree = tree - this - - def setScope(scope: Scope): this.type = - this._scope = scope - this - - def setNewScope: this.type = - util.Stats.record("Context.setScope") - this._scope = newScope - this - - def setTyperState(typerState: TyperState): this.type = - this._typerState = typerState - this - def setNewTyperState(): this.type = - setTyperState(typerState.fresh(committable = true)) - def setExploreTyperState(): this.type = - 
setTyperState(typerState.fresh(committable = false)) - def setReporter(reporter: Reporter): this.type = - setTyperState(typerState.fresh().setReporter(reporter)) - - def setTyper(typer: Typer): this.type = - this._scope = typer.scope - setTypeAssigner(typer) - - def setGadt(gadt: GadtConstraint): this.type = - util.Stats.record("Context.setGadt") - this._gadt = gadt - this - def setFreshGADTBounds: this.type = - setGadt(gadt.fresh) - - def setSearchHistory(searchHistory: SearchHistory): this.type = - util.Stats.record("Context.setSearchHistory") - this._searchHistory = searchHistory - this - - def setSource(source: SourceFile): this.type = - util.Stats.record("Context.setSource") - this._source = source - this - - private def setMoreProperties(moreProperties: Map[Key[Any], Any]): this.type = - util.Stats.record("Context.setMoreProperties") - this._moreProperties = moreProperties - this - - private def setStore(store: Store): this.type = - util.Stats.record("Context.setStore") - this._store = store - this - - def setCompilationUnit(compilationUnit: CompilationUnit): this.type = { - setSource(compilationUnit.source) - updateStore(compilationUnitLoc, compilationUnit) - } - - def setCompilerCallback(callback: CompilerCallback): this.type = updateStore(compilerCallbackLoc, callback) - def setSbtCallback(callback: AnalysisCallback): this.type = updateStore(sbtCallbackLoc, callback) - def setPrinterFn(printer: DetachedContext -> Printer): this.type = updateStore(printerFnLoc, printer) - def setSettings(settingsState: SettingsState): this.type = updateStore(settingsStateLoc, settingsState) - def setRun(run: Run | Null): this.type = updateStore(runLoc, run) - def setProfiler(profiler: Profiler): this.type = updateStore(profilerLoc, profiler) - def setNotNullInfos(notNullInfos: List[NotNullInfo]): this.type = updateStore(notNullInfosLoc, notNullInfos) - def setImportInfo(importInfo: ImportInfo): this.type = - importInfo.mentionsFeature(nme.unsafeNulls) match - case Some(true) => - setMode(this.mode &~ Mode.SafeNulls) - case Some(false) if ctx.settings.YexplicitNulls.value => - setMode(this.mode | Mode.SafeNulls) - case _ => - updateStore(importInfoLoc, importInfo) - def setTypeAssigner(typeAssigner: TypeAssigner): this.type = updateStore(typeAssignerLoc, typeAssigner) - - def setProperty[T](key: Key[T], value: T): this.type = - setMoreProperties(moreProperties.updated(key, value)) - - def dropProperty(key: Key[?]): this.type = - setMoreProperties(moreProperties - key) - - def addLocation[T](initial: T): Store.Location[T] = { - val (loc, store1) = store.newLocation(initial) - setStore(store1) - loc - } - - def addLocation[T](): Store.Location[T] = { - val (loc, store1) = store.newLocation[T]() - setStore(store1) - loc - } - - def updateStore[T](loc: Store.Location[T], value: T): this.type = - setStore(store.updated(loc, value)) - - def setPhase(pid: PhaseId): this.type = setPeriod(Period(runId, pid)) - def setPhase(phase: Phase): this.type = setPeriod(Period(runId, phase.start, phase.end)) - - def setSetting[T](setting: Setting[T], value: T): this.type = - setSettings(setting.updateIn(settingsState, value)) - - def setDebug: this.type = setSetting(base.settings.Ydebug, true) - } - - object FreshContext: - /** Defines an initial context with given context base and possible settings. 
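// ---- Editorial example (not part of the deleted file) ----------------------
// FreshContext's setters return `this.type` so that calls can be chained while
// keeping the receiver's precise type. The same idiom, standalone, with an
// illustrative Config class:
class Config:
  private var name: String = ""
  private var debug: Boolean = false
  def setName(n: String): this.type = { name = n; this }
  def setDebug(flag: Boolean): this.type = { debug = flag; this }
  override def toString = s"Config($name, debug=$debug)"

val cfg = Config().setName("demo").setDebug(true)
// -----------------------------------------------------------------------------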
*/ - def initial(base: ContextBase, settingsGroup: SettingGroup): Context = - val c = new FreshContext(base) - c._outer = NoContext - c._period = InitialPeriod - c._mode = Mode.None - c._typerState = TyperState.initialState() - c._owner = NoSymbol - c._tree = untpd.EmptyTree - c._moreProperties = Map(MessageLimiter -> DefaultMessageLimiter()) - c._scope = EmptyScope - c._source = NoSource - c._store = initialStore - .updated(settingsStateLoc, settingsGroup.defaultState) - .updated(notNullInfosLoc, Nil) - .updated(compilationUnitLoc, NoCompilationUnit) - c._searchHistory = new SearchRoot - c._gadt = GadtConstraint.empty - c - end FreshContext - - given detachedCtx(using c: Context): DetachedContext = c.detach - - given ops: AnyRef with - extension (c: Context) - def addNotNullInfo(info: NotNullInfo): Context = - c.withNotNullInfos(c.notNullInfos.extendWith(info)) - - def addNotNullRefs(refs: Set[TermRef]): Context = - c.addNotNullInfo(NotNullInfo(refs, Set())) - - def withNotNullInfos(infos: List[NotNullInfo]): Context = - if c.notNullInfos eq infos then c else c.fresh.setNotNullInfos(infos) - - def relaxedOverrideContext: Context = - c.withModeBits(c.mode &~ Mode.SafeNulls | Mode.RelaxedOverriding) - end ops - - // TODO: Fix issue when converting ModeChanges and FreshModeChanges to extension givens - extension (c: Context) { - final def withModeBits(mode: Mode): Context = - if (mode != c.mode) c.fresh.setMode(mode) else c - - final def addMode(mode: Mode): Context = withModeBits(c.mode | mode) - final def retractMode(mode: Mode): Context = withModeBits(c.mode &~ mode) - } - - extension (c: FreshContext) { - final def addMode(mode: Mode): c.type = c.setMode(c.mode | mode) - final def retractMode(mode: Mode): c.type = c.setMode(c.mode &~ mode) - } - - private def exploreCtx(using Context): FreshContext = - util.Stats.record("explore") - val base = ctx.base - import base._ - val nestedCtx = - if exploresInUse < exploreContexts.size then - exploreContexts(exploresInUse).reuseIn(ctx) - else - val ts = TyperState() - .setReporter(ExploringReporter()) - .setCommittable(false) - val c = FreshContext(ctx.base).init(ctx, ctx).setTyperState(ts) - exploreContexts += c - c - exploresInUse += 1 - val nestedTS = nestedCtx.typerState - nestedTS.init(ctx.typerState, ctx.typerState.constraint) - nestedCtx - - private def wrapUpExplore(ectx: Context) = - ectx.reporter.asInstanceOf[ExploringReporter].reset() - ectx.base.exploresInUse -= 1 - - inline def explore[T](inline op: Context ?=> T)(using Context): T = - val ectx = exploreCtx - try op(using ectx) finally wrapUpExplore(ectx) - - inline def exploreInFreshCtx[T](inline op: FreshContext ?=> T)(using Context): T = - val ectx = exploreCtx - try op(using ectx) finally wrapUpExplore(ectx) - - private def changeOwnerCtx(owner: Symbol)(using Context): Context = - val base = ctx.base - import base._ - val nestedCtx = - if changeOwnersInUse < changeOwnerContexts.size then - changeOwnerContexts(changeOwnersInUse).reuseIn(ctx) - else - val c = FreshContext(ctx.base).init(ctx, ctx) - changeOwnerContexts += c - c - changeOwnersInUse += 1 - nestedCtx.setOwner(owner).setTyperState(ctx.typerState) - - /** Run `op` in current context, with a mode is temporarily set as specified. 
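// ---- Editorial example (not part of the deleted file) ----------------------
// addMode / retractMode above are plain bit-set operations (the compiler's Mode
// is essentially a wrapper around an Int of flag bits). A minimal standalone
// version using raw Int flags, with illustrative flag names:
object Flags:
  val Pattern: Int = 1 << 0
  val Type: Int    = 1 << 1

def addMode(mode: Int, extra: Int): Int       = mode | extra
def retractMode(mode: Int, dropped: Int): Int = mode & ~dropped

val m = retractMode(addMode(addMode(0, Flags.Pattern), Flags.Type), Flags.Pattern)
// m now has only Flags.Type set
// -----------------------------------------------------------------------------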
- */ - inline def runWithOwner[T](owner: Symbol)(inline op: Context ?=> T)(using Context): T = - if Config.reuseOwnerContexts then - try op(using changeOwnerCtx(owner)) - finally ctx.base.changeOwnersInUse -= 1 - else - op(using ctx.fresh.setOwner(owner)) - - /** The type comparer of the kind created by `maker` to be used. - * This is the currently active type comparer CMP if - * - CMP is associated with the current context, and - * - CMP is of the kind created by maker or maker creates a plain type comparer. - * Note: plain TypeComparers always take on the kind of the outer comparer if they are in the same context. - * In other words: tracking or explaining is a sticky property in the same context. - */ - private def comparer(using Context): TypeComparer = - util.Stats.record("comparing") - val base = ctx.base - if base.comparersInUse > 0 - && (base.comparers(base.comparersInUse - 1).comparerContext eq ctx) - then - base.comparers(base.comparersInUse - 1).currentInstance - else - val result = - if base.comparersInUse < base.comparers.size then - base.comparers(base.comparersInUse) - else - val result = TypeComparer(ctx) - base.comparers += result - result - base.comparersInUse += 1 - result.init(ctx) - result - - inline def comparing[T](inline op: TypeComparer => T)(using Context): T = - util.Stats.record("comparing") - val saved = ctx.base.comparersInUse - try op(comparer) - finally ctx.base.comparersInUse = saved - end comparing - - @sharable val NoContext: DetachedContext = detached( - new FreshContext((null: ContextBase | Null).uncheckedNN) { - override val implicits: ContextualImplicits = new ContextualImplicits(Nil, null, false)(detached(this: @unchecked)) - setSource(NoSource) - } - ) - - /** A context base defines state and associated methods that exist once per - * compiler run. - */ - class ContextBase extends ContextState - with Phases.PhasesBase - with Plugins { - - /** The applicable settings */ - val settings: ScalaSettings = new ScalaSettings - - /** The initial context */ - val initialCtx: Context = FreshContext.initial(this: @unchecked, settings) - - /** The platform, initialized by `initPlatform()`. */ - private var _platform: Platform | Null = uninitialized - - /** The platform */ - def platform: Platform = { - val p = _platform - if p == null then - throw new IllegalStateException( - "initialize() must be called before accessing platform") - p - } - - protected def newPlatform(using Context): Platform = - if (settings.scalajs.value) new SJSPlatform - else new JavaPlatform - - /** The loader that loads the members of _root_ */ - def rootLoader(root: TermSymbol)(using Context): SymbolLoader = platform.rootLoader(root) - - /** The standard definitions */ - val definitions: Definitions = new Definitions - - // Set up some phases to get started */ - usePhases(List(SomePhase)) - - /** Initializes the `ContextBase` with a starting context. - * This initializes the `platform` and the `definitions`. 
- */ - def initialize()(using Context): Unit = { - _platform = newPlatform - definitions.init() - } - - def fusedContaining(p: Phase): Phase = - allPhases.find(_.period.containsPhaseId(p.id)).getOrElse(NoPhase) - } - - /** The essential mutable state of a context base, collected into a common class */ - class ContextState { - // Symbols state - - /** Counter for unique symbol ids */ - private var _nextSymId: Int = 0 - def nextSymId: Int = { _nextSymId += 1; _nextSymId } - - /** Sources and Files that were loaded */ - val sources: util.HashMap[AbstractFile, SourceFile] = util.HashMap[AbstractFile, SourceFile]() - val files: util.HashMap[TermName, AbstractFile] = util.HashMap() - - // Types state - /** A table for hash consing unique types */ - private[core] val uniques: Uniques = Uniques() - - /** A table for hash consing unique applied types */ - private[dotc] val uniqueAppliedTypes: AppliedUniques = AppliedUniques() - - /** A table for hash consing unique named types */ - private[core] val uniqueNamedTypes: NamedTypeUniques = NamedTypeUniques() - - var emptyTypeBounds: TypeBounds | Null = null - var emptyWildcardBounds: WildcardType | Null = null - - /** Number of findMember calls on stack */ - private[core] var findMemberCount: Int = 0 - - /** List of names which have a findMemberCall on stack, - * after Config.LogPendingFindMemberThreshold is reached. - */ - private[core] var pendingMemberSearches: List[Name] = Nil - - /** The number of recursive invocation of underlying on a NamedType - * during a controlled operation. - */ - private[core] var underlyingRecursions: Int = 0 - - /** The set of named types on which a currently active invocation - * of underlying during a controlled operation exists. */ - private[core] val pendingUnderlying: util.HashSet[Type] = util.HashSet[Type]() - - /** A map from ErrorType to associated message. We use this map - * instead of storing messages directly in ErrorTypes in order - * to avoid space leaks - the message usually captures a context. - */ - private[core] val errorTypeMsg: mutable.Map[Types.ErrorType, Message] = mutable.Map() - - // Phases state - - private[core] var phasesPlan: List[List[Phase]] = uninitialized - - /** Phases by id */ - private[dotc] var phases: Array[Phase] = uninitialized - - /** Phases with consecutive Transforms grouped into a single phase, Empty array if fusion is disabled */ - private[core] var fusedPhases: Array[Phase] = Array.empty[Phase] - - /** Next denotation transformer id */ - private[core] var nextDenotTransformerId: Array[Int] = uninitialized - - private[core] var denotTransformers: Array[DenotTransformer] = uninitialized - - /** Flag to suppress inlining, set after overflow */ - private[dotc] var stopInlining: Boolean = false - - /** A variable that records that some error was reported in a globally committable context. - * The error will not necessarlily be emitted, since it could still be that - * the enclosing context will be aborted. The variable is used as a smoke test - * to turn off assertions that might be wrong if the program is erroneous. To - * just test for `ctx.reporter.errorsReported` is not always enough, since it - * could be that the context in which the assertion is tested is a completer context - * that's different from the context where the error was reported. See i13218.scala - * for a test. 
- */ - private[dotc] var errorsToBeReported = false - - // Reporters state - private[dotc] var indent: Int = 0 - - protected[dotc] val indentTab: String = " " - - private[Contexts] val exploreContexts = new mutable.ArrayBuffer[FreshContext] - private[Contexts] var exploresInUse: Int = 0 - - private[Contexts] val changeOwnerContexts = new mutable.ArrayBuffer[FreshContext] - private[Contexts] var changeOwnersInUse: Int = 0 - - private[Contexts] val comparers = new mutable.ArrayBuffer[TypeComparer] - private[Contexts] var comparersInUse: Int = 0 - - private var charArray = new Array[Char](256) - - private[core] val reusableDataReader = ReusableInstance(new ReusableDataReader()) - - private[dotc] var wConfCache: (List[String], WConf) = uninitialized - - def sharedCharArray(len: Int): Array[Char] = - while len > charArray.length do - charArray = new Array[Char](charArray.length * 2) - charArray - - def reset(): Unit = - uniques.clear() - uniqueAppliedTypes.clear() - uniqueNamedTypes.clear() - emptyTypeBounds = null - emptyWildcardBounds = null - errorsToBeReported = false - errorTypeMsg.clear() - sources.clear() - files.clear() - comparers.clear() // forces re-evaluation of top and bottom classes in TypeComparer - - // Test that access is single threaded - - /** The thread on which `checkSingleThreaded was invoked last */ - @sharable private var thread: Thread | Null = null - - /** Check that we are on the same thread as before */ - def checkSingleThreaded(): Unit = - if (thread == null) thread = Thread.currentThread() - else assert(thread == Thread.currentThread(), "illegal multithreaded access to ContextBase") - } -} diff --git a/tests/pos-with-compiler-cc/dotc/core/Decorators.scala b/tests/pos-with-compiler-cc/dotc/core/Decorators.scala deleted file mode 100644 index f9844c6eaab6..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/Decorators.scala +++ /dev/null @@ -1,322 +0,0 @@ -package dotty.tools -package dotc -package core - -import scala.annotation.tailrec -import scala.collection.mutable.ListBuffer -import scala.util.control.NonFatal - -import Contexts._, Names._, Phases._, Symbols._ -import printing.{ Printer, Showable }, printing.Formatting._, printing.Texts._ -import transform.MegaPhase -import reporting.{Message, NoExplanation} -import language.experimental.pureFunctions -import annotation.retains - -/** This object provides useful extension methods for types defined elsewhere */ -object Decorators { - - /** Extension methods for toType/TermName methods on PreNames. - */ - extension (pn: PreName) - def toTermName: TermName = pn match - case s: String => termName(s) - case n: Name => n.toTermName - def toTypeName: TypeName = pn match - case s: String => typeName(s) - case n: Name => n.toTypeName - - extension (s: String) - def splitWhere(f: Char => Boolean, doDropIndex: Boolean): Option[(String, String)] = - def splitAt(idx: Int, doDropIndex: Boolean): Option[(String, String)] = - if (idx == -1) None - else Some((s.take(idx), s.drop(if (doDropIndex) idx + 1 else idx))) - splitAt(s.indexWhere(f), doDropIndex) - - /** Create a term name from a string slice, using a common buffer. 
- * This avoids some allocation relative to `termName(s)` - */ - def sliceToTermName(start: Int, end: Int)(using Context): SimpleName = - val len = end - start - val chars = ctx.base.sharedCharArray(len) - s.getChars(start, end, chars, 0) - termName(chars, 0, len) - - def sliceToTypeName(start: Int, end: Int)(using Context): TypeName = - sliceToTermName(start, end).toTypeName - - def concat(name: Name)(using Context): SimpleName = name match - case name: SimpleName => - val len = s.length + name.length - var chars = ctx.base.sharedCharArray(len) - s.getChars(0, s.length, chars, 0) - if name.length != 0 then name.getChars(0, name.length, chars, s.length) - termName(chars, 0, len) - case name: TypeName => s.concat(name.toTermName) - case _ => termName(s.concat(name.toString).nn) - - def indented(width: Int): String = - val padding = " " * width - padding + s.replace("\n", "\n" + padding) - end extension - - /** Convert lazy string to message. To be with caution, since no message-defined - * formatting will be done on the string. - */ - extension (str: -> String) - def toMessage: Message = NoExplanation(str)(using NoContext) - - /** Implements a findSymbol method on iterators of Symbols that - * works like find but avoids Option, replacing None with NoSymbol. - */ - extension (it: Iterator[Symbol]) - final def findSymbol(p: Symbol => Boolean): Symbol = { - while (it.hasNext) { - val sym = it.next() - if (p(sym)) return sym - } - NoSymbol - } - - inline val MaxFilterRecursions = 10 - - /** Implements filterConserve, zipWithConserve methods - * on lists that avoid duplication of list nodes where feasible. - */ - extension [T](xs: List[T]) - final def collectCC[U](pf: PartialFunction[T, U] @retains(caps.cap)): List[U] = - xs.collect(pf.asInstanceOf) - - final def mapconserve[U](f: T => U): List[U] = { - @tailrec - def loop(mapped: ListBuffer[U] | Null, unchanged: List[U], pending: List[T]): List[U] = - if (pending.isEmpty) - if (mapped == null) unchanged - else mapped.prependToList(unchanged) - else { - val head0 = pending.head - val head1 = f(head0) - - if (head1.asInstanceOf[AnyRef] eq head0.asInstanceOf[AnyRef]) - loop(mapped, unchanged, pending.tail) - else { - val b = if (mapped == null) new ListBuffer[U] else mapped - var xc = unchanged - while (xc ne pending) { - b += xc.head - xc = xc.tail - } - b += head1 - val tail0 = pending.tail - loop(b, tail0.asInstanceOf[List[U]], tail0) - } - } - loop(null, xs.asInstanceOf[List[U]], xs) - } - - /** Like `xs filter p` but returns list `xs` itself - instead of a copy - - * if `p` is true for all elements. 
- */ - def filterConserve(p: T => Boolean): List[T] = - - def addAll(buf: ListBuffer[T], from: List[T], until: List[T]): ListBuffer[T] = - if from eq until then buf else addAll(buf += from.head, from.tail, until) - - def loopWithBuffer(buf: ListBuffer[T], xs: List[T]): List[T] = xs match - case x :: xs1 => - if p(x) then buf += x - loopWithBuffer(buf, xs1) - case nil => buf.toList - - def loop(keep: List[T], explore: List[T], keepCount: Int, recCount: Int): List[T] = - explore match - case x :: rest => - if p(x) then - loop(keep, rest, keepCount + 1, recCount) - else if keepCount <= 3 && recCount <= MaxFilterRecursions then - val rest1 = loop(rest, rest, 0, recCount + 1) - keepCount match - case 0 => rest1 - case 1 => keep.head :: rest1 - case 2 => keep.head :: keep.tail.head :: rest1 - case 3 => val tl = keep.tail; keep.head :: tl.head :: tl.tail.head :: rest1 - else - loopWithBuffer(addAll(new ListBuffer[T], keep, explore), rest) - case nil => - keep - - loop(xs, xs, 0, 0) - end filterConserve - - /** Like `xs.lazyZip(ys).map(f)`, but returns list `xs` itself - * - instead of a copy - if function `f` maps all elements of - * `xs` to themselves. Also, it is required that `ys` is at least - * as long as `xs`. - */ - def zipWithConserve[U, V <: T](ys: List[U])(f: (T, U) => V): List[V] = - if (xs.isEmpty || ys.isEmpty) Nil - else { - val x1 = f(xs.head, ys.head) - val xs1 = xs.tail.zipWithConserve(ys.tail)(f) - if (x1.asInstanceOf[AnyRef] eq xs.head.asInstanceOf[AnyRef]) && (xs1 eq xs.tail) - then xs.asInstanceOf[List[V]] - else x1 :: xs1 - } - - /** Like `xs.lazyZip(xs.indices).map(f)`, but returns list `xs` itself - * - instead of a copy - if function `f` maps all elements of - * `xs` to themselves. - */ - def mapWithIndexConserve[U <: T](f: (T, Int) => U): List[U] = - - @tailrec - def addAll(buf: ListBuffer[T], from: List[T], until: List[T]): ListBuffer[T] = - if from eq until then buf else addAll(buf += from.head, from.tail, until) - - @tailrec - def loopWithBuffer(buf: ListBuffer[U], explore: List[T], idx: Int): List[U] = explore match - case Nil => buf.toList - case t :: rest => loopWithBuffer(buf += f(t, idx), rest, idx + 1) - - @tailrec - def loop(keep: List[T], explore: List[T], idx: Int): List[U] = explore match - case Nil => keep.asInstanceOf[List[U]] - case t :: rest => - val u = f(t, idx) - if u.asInstanceOf[AnyRef] eq t.asInstanceOf[AnyRef] then - loop(keep, rest, idx + 1) - else - val buf = addAll(new ListBuffer[T], keep, explore).asInstanceOf[ListBuffer[U]] - loopWithBuffer(buf += u, rest, idx + 1) - - loop(xs, xs, 0) - end mapWithIndexConserve - - /** True if two lists have the same length. Since calling length on linear sequences - * is Θ(n), it is an inadvisable way to test length equality. This method is Θ(n min m). 
- */ - final def hasSameLengthAs[U](ys: List[U]): Boolean = { - @tailrec def loop(xs: List[T], ys: List[U]): Boolean = - if (xs.isEmpty) ys.isEmpty - else ys.nonEmpty && loop(xs.tail, ys.tail) - loop(xs, ys) - } - - @tailrec final def eqElements(ys: List[AnyRef]): Boolean = xs match { - case x :: _ => - ys match { - case y :: _ => - x.asInstanceOf[AnyRef].eq(y) && - xs.tail.eqElements(ys.tail) - case _ => false - } - case nil => ys.isEmpty - } - - /** Union on lists seen as sets */ - def setUnion (ys: List[T]): List[T] = xs ::: ys.filterNot(xs contains _) - - extension [T, U](xss: List[List[T]]) - def nestedMap(f: T => U): List[List[U]] = xss match - case xs :: xss1 => xs.map(f) :: xss1.nestedMap(f) - case nil => Nil - def nestedMapConserve(f: T => U): List[List[U]] = - xss.mapconserve(_.mapconserve(f)) - def nestedZipWithConserve(yss: List[List[U]])(f: (T, U) => T): List[List[T]] = - xss.zipWithConserve(yss)((xs, ys) => xs.zipWithConserve(ys)(f)) - def nestedExists(p: T => Boolean): Boolean = xss match - case xs :: xss1 => xs.exists(p) || xss1.nestedExists(p) - case nil => false - end extension - - extension [T](xs: Seq[T]) - final def collectCC[U](pf: PartialFunction[T, U] @retains(caps.cap)): Seq[U] = - xs.collect(pf.asInstanceOf) - - extension [A, B](f: PartialFunction[A, B] @retains(caps.cap)) - def orElseCC(g: PartialFunction[A, B] @retains(caps.cap)): PartialFunction[A, B] @retains(f, g) = - f.orElse(g.asInstanceOf).asInstanceOf - - extension (text: Text) - def show(using Context): String = text.mkString(ctx.settings.pageWidth.value, ctx.settings.printLines.value) - - /** Test whether a list of strings representing phases contains - * a given phase. See [[config.CompilerCommand#explainAdvanced]] for the - * exact meaning of "contains" here. - */ - extension (names: List[String]) - def containsPhase(phase: Phase): Boolean = - names.nonEmpty && { - phase match { - case phase: MegaPhase => phase.miniPhases.exists(x => names.containsPhase(x)) - case _ => - names exists { name => - name == "all" || { - val strippedName = name.stripSuffix("+") - val logNextPhase = name != strippedName - phase.phaseName.startsWith(strippedName) || - (logNextPhase && phase.prev.phaseName.startsWith(strippedName)) - } - } - } - } - - extension [T](x: T) - def showing[U]( - op: WrappedResult[U] ?=> String, - printer: config.Printers.Printer = config.Printers.default)(using c: Conversion[T, U] | Null = null): T = { - // either the use of `$result` was driven by the expected type of `Shown` - // which led to the summoning of `Conversion[T, Shown]` (which we'll invoke) - // or no such conversion was found so we'll consume the result as it is instead - val obj = if c == null then x.asInstanceOf[U] else c(x) - printer.println(op(using WrappedResult(obj))) - x - } - - /** Instead of `toString` call `show` on `Showable` values, falling back to `toString` if an exception is raised. */ - def tryToShow(using Context): String = x match - case x: Showable => - try x.show - catch - case ex: CyclicReference => "... (caught cyclic reference) ..." - case NonFatal(ex) - if !ctx.mode.is(Mode.PrintShowExceptions) && !ctx.settings.YshowPrintErrors.value => - val msg = ex match - case te: TypeError => te.toMessage.message - case _ => ex.getMessage - s"[cannot display due to $msg, raw string = $x]" - case _ => String.valueOf(x).nn - - /** Returns the simple class name of `x`. 
*/ - def className: String = getClass.getSimpleName.nn - - extension [T](x: T) - def assertingErrorsReported(using Context): T = { - assert(ctx.reporter.errorsReported) - x - } - def assertingErrorsReported(msg: Message)(using Context): T = { - assert(ctx.reporter.errorsReported, msg) - x - } - - extension [T <: AnyRef](xs: ::[T]) - def derivedCons(x1: T, xs1: List[T]) = - if (xs.head eq x1) && (xs.tail eq xs1) then xs else x1 :: xs1 - - extension (sc: StringContext) - - /** General purpose string formatting */ - def i(args: Shown*)(using Context): String = - new StringFormatter(sc).assemble(args) - - /** Interpolator yielding an error message, which undergoes - * the formatting defined in Message. - */ - def em(args: Shown*)(using Context): NoExplanation = - NoExplanation(i(args*)) - - extension [T <: AnyRef](arr: Array[T]) - def binarySearch(x: T | Null): Int = java.util.Arrays.binarySearch(arr.asInstanceOf[Array[Object | Null]], x) - -} diff --git a/tests/pos-with-compiler-cc/dotc/core/Definitions.scala b/tests/pos-with-compiler-cc/dotc/core/Definitions.scala deleted file mode 100644 index 603088dd8f26..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/Definitions.scala +++ /dev/null @@ -1,2434 +0,0 @@ -package dotty.tools -package dotc -package core - -import scala.annotation.{threadUnsafe => tu} -import Types._, Contexts._, Symbols._, SymDenotations._, StdNames._, Names._, Phases._ -import Flags._, Scopes._, Decorators._, NameOps._, Periods._, NullOpsDecorator._ -import unpickleScala2.Scala2Unpickler.ensureConstructor -import scala.collection.mutable -import collection.mutable -import Denotations.{SingleDenotation, staticRef} -import util.{SimpleIdentityMap, SourceFile, NoSource} -import typer.ImportInfo.RootRef -import Comments.CommentsContext -import Comments.Comment -import util.Spans.NoSpan -import config.Feature -import Symbols.requiredModuleRef -import cc.{CapturingType, CaptureSet, EventuallyCapturingType} - -import scala.annotation.tailrec -import language.experimental.pureFunctions - -object Definitions { - - /** The maximum number of elements in a tuple or product. - * This should be removed once we go to hlists. - */ - val MaxTupleArity: Int = 22 - - /** The maximum arity N of a function type that's implemented - * as a trait `scala.FunctionN`. Functions of higher arity are possible, - * but are mapped in erasure to functions taking a single parameter of type - * Object[]. - * The limit 22 is chosen for Scala2x interop. It could be something - * else without affecting the set of programs that can be compiled. 
- */ - val MaxImplementedFunctionArity: Int = MaxTupleArity -} - -/** A class defining symbols and types of standard definitions - * - */ -class Definitions { - import Definitions._ - - private var initCtx: DetachedContext = _ - private given currentContext[Dummy_so_its_a_def]: DetachedContext = initCtx - - private def newPermanentSymbol[N <: Name](owner: Symbol, name: N, flags: FlagSet, info: Type) = - newSymbol(owner, name, flags | Permanent, info) - - private def newPermanentClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, infoFn: ClassSymbol => Type) = - newClassSymbol(owner, name, flags | Permanent | NoInits | Open, infoFn) - - private def enterCompleteClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, parents: List[TypeRef]): ClassSymbol = - enterCompleteClassSymbol(owner, name, flags, parents, newScope(owner.nestingLevel + 1)) - - private def enterCompleteClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, parents: List[TypeRef], decls: Scope) = - newCompleteClassSymbol(owner, name, flags | Permanent | NoInits | Open, parents, decls).entered - - private def enterTypeField(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope) = - scope.enter(newPermanentSymbol(cls, name, flags, TypeBounds.empty)) - - private def enterTypeParam(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope) = - enterTypeField(cls, name, flags | ClassTypeParamCreationFlags, scope) - - private def enterSyntheticTypeParam(cls: ClassSymbol, paramFlags: FlagSet, scope: MutableScope, suffix: String = "T0") = - enterTypeParam(cls, suffix.toTypeName, paramFlags, scope) - - // NOTE: Ideally we would write `parentConstrs: => Type*` but SIP-24 is only - // implemented in Dotty and not in Scala 2. - // See . - private def enterSpecialPolyClass(name: TypeName, paramFlags: FlagSet, parentConstrs: -> Seq[Type]): ClassSymbol = { - val completer = new LazyType { - def complete(denot: SymDenotation)(using Context): Unit = { - val cls = denot.asClass.classSymbol - val paramDecls = newScope - val typeParam = enterSyntheticTypeParam(cls, paramFlags, paramDecls) - def instantiate(tpe: Type) = - if (tpe.typeParams.nonEmpty) tpe.appliedTo(typeParam.typeRef) - else tpe - val parents = parentConstrs.toList map instantiate - denot.info = ClassInfo(ScalaPackageClass.thisType, cls, parents, paramDecls) - } - } - newPermanentClassSymbol(ScalaPackageClass, name, Artifact, completer).entered - } - - /** The trait FunctionN, ContextFunctionN, ErasedFunctionN or ErasedContextFunction, for some N - * @param name The name of the trait to be created - * - * FunctionN traits follow this template: - * - * trait FunctionN[-T0,...-T{N-1}, +R] extends Object { - * def apply($x0: T0, ..., $x{N_1}: T{N-1}): R - * } - * - * That is, they follow the template given for Function2..Function22 in the - * standard library, but without `tupled` and `curried` methods and without - * a `toString`. 
- * - * ContextFunctionN traits follow this template: - * - * trait ContextFunctionN[-T0,...,-T{N-1}, +R] extends Object { - * def apply(using $x0: T0, ..., $x{N_1}: T{N-1}): R - * } - * - * ErasedFunctionN traits follow this template: - * - * trait ErasedFunctionN[-T0,...,-T{N-1}, +R] extends Object { - * def apply(erased $x0: T0, ..., $x{N_1}: T{N-1}): R - * } - * - * ErasedContextFunctionN traits follow this template: - * - * trait ErasedContextFunctionN[-T0,...,-T{N-1}, +R] extends Object { - * def apply(using erased $x0: T0, ..., $x{N_1}: T{N-1}): R - * } - * - * ErasedFunctionN and ErasedContextFunctionN erase to Function0. - * - * ImpureXYZFunctionN follow this template: - * - * type ImpureXYZFunctionN[-T0,...,-T{N-1}, +R] = {*} XYZFunctionN[T0,...,T{N-1}, R] - */ - private def newFunctionNType(name: TypeName): Symbol = { - val impure = name.startsWith("Impure") - val completer = new LazyType { - def complete(denot: SymDenotation)(using Context): Unit = { - val arity = name.functionArity - if impure then - val argParamNames = List.tabulate(arity)(tpnme.syntheticTypeParamName) - val argVariances = List.fill(arity)(Contravariant) - val underlyingName = name.asSimpleName.drop(6) - val underlyingClass = ScalaPackageVal.requiredClass(underlyingName) - denot.info = TypeAlias( - HKTypeLambda(argParamNames :+ "R".toTypeName, argVariances :+ Covariant)( - tl => List.fill(arity + 1)(TypeBounds.empty), - tl => CapturingType(underlyingClass.typeRef.appliedTo(tl.paramRefs), - CaptureSet.universal) - )) - else - val cls = denot.asClass.classSymbol - val decls = newScope - val paramNamePrefix = tpnme.scala ++ str.NAME_JOIN ++ name ++ str.EXPAND_SEPARATOR - val argParamRefs = List.tabulate(arity) { i => - enterTypeParam(cls, paramNamePrefix ++ "T" ++ (i + 1).toString, Contravariant, decls).typeRef - } - val resParamRef = enterTypeParam(cls, paramNamePrefix ++ "R", Covariant, decls).typeRef - val methodType = MethodType.companion( - isContextual = name.isContextFunction, - isImplicit = false, - isErased = name.isErasedFunction) - decls.enter(newMethod(cls, nme.apply, methodType(argParamRefs, resParamRef), Deferred)) - denot.info = - ClassInfo(ScalaPackageClass.thisType, cls, ObjectType :: Nil, decls) - } - } - if impure then - newPermanentSymbol(ScalaPackageClass, name, EmptyFlags, completer) - else - newPermanentClassSymbol(ScalaPackageClass, name, Trait | NoInits, completer) - } - - private def newMethod(cls: ClassSymbol, name: TermName, info: Type, flags: FlagSet = EmptyFlags): TermSymbol = - newPermanentSymbol(cls, name, flags | Method, info).asTerm - - private def enterMethod(cls: ClassSymbol, name: TermName, info: Type, flags: FlagSet = EmptyFlags): TermSymbol = - newMethod(cls, name, info, flags).entered - - private def enterPermanentSymbol(name: Name, info: Type, flags: FlagSet = EmptyFlags): Symbol = - val sym = newPermanentSymbol(ScalaPackageClass, name, flags, info) - ScalaPackageClass.currentPackageDecls.enter(sym) - sym - - private def enterAliasType(name: TypeName, tpe: Type, flags: FlagSet = EmptyFlags): TypeSymbol = - enterPermanentSymbol(name, TypeAlias(tpe), flags).asType - - private def enterBinaryAlias(name: TypeName, op: (Type, Type) => Type): TypeSymbol = - enterAliasType(name, - HKTypeLambda(TypeBounds.empty :: TypeBounds.empty :: Nil)( - tl => op(tl.paramRefs(0), tl.paramRefs(1)))) - - private def enterPolyMethod(cls: ClassSymbol, name: TermName, typeParamCount: Int, - resultTypeFn: PolyType -> Type, - flags: FlagSet = EmptyFlags, - bounds: TypeBounds = TypeBounds.empty, - 
useCompleter: Boolean = false) = { - val tparamNames = PolyType.syntheticParamNames(typeParamCount) - val tparamInfos = tparamNames map (_ => bounds) - def ptype = PolyType(tparamNames)(_ => tparamInfos, resultTypeFn) - val info = - if (useCompleter) - new LazyType { - def complete(denot: SymDenotation)(using Context): Unit = - denot.info = ptype - } - else ptype - enterMethod(cls, name, info, flags) - } - - private def enterT1ParameterlessMethod(cls: ClassSymbol, name: TermName, resultTypeFn: PolyType -> Type, flags: FlagSet) = - enterPolyMethod(cls, name, 1, resultTypeFn, flags) - - private def mkArityArray(name: String, arity: Int, countFrom: Int): Array[TypeRef | Null] = { - val arr = new Array[TypeRef | Null](arity + 1) - for (i <- countFrom to arity) arr(i) = requiredClassRef(name + i) - arr - } - - private def completeClass(cls: ClassSymbol, ensureCtor: Boolean = true): ClassSymbol = { - if (ensureCtor) ensureConstructor(cls, cls.denot.asClass, EmptyScope) - if (cls.linkedClass.exists) cls.linkedClass.markAbsent() - cls - } - - @tu lazy val RootClass: ClassSymbol = newPackageSymbol( - NoSymbol, nme.ROOT, (root, rootcls) => ctx.base.rootLoader(root)).moduleClass.asClass - @tu lazy val RootPackage: TermSymbol = newSymbol( - NoSymbol, nme.ROOTPKG, PackageCreationFlags, TypeRef(NoPrefix, RootClass)) - - @tu lazy val EmptyPackageVal: TermSymbol = newPackageSymbol( - RootClass, nme.EMPTY_PACKAGE, (emptypkg, emptycls) => ctx.base.rootLoader(emptypkg)).entered - @tu lazy val EmptyPackageClass: ClassSymbol = EmptyPackageVal.moduleClass.asClass - - /** A package in which we can place all methods and types that are interpreted specially by the compiler */ - @tu lazy val OpsPackageVal: TermSymbol = newCompletePackageSymbol(RootClass, nme.OPS_PACKAGE).entered - @tu lazy val OpsPackageClass: ClassSymbol = OpsPackageVal.moduleClass.asClass - - @tu lazy val ScalaPackageVal: TermSymbol = requiredPackage(nme.scala) - @tu lazy val ScalaMathPackageVal: TermSymbol = requiredPackage("scala.math") - @tu lazy val ScalaPackageClass: ClassSymbol = { - val cls = ScalaPackageVal.moduleClass.asClass - cls.info.decls.openForMutations.useSynthesizer( - name => - if (name.isTypeName && name.isSyntheticFunction) newFunctionNType(name.asTypeName) - else NoSymbol) - cls - } - @tu lazy val ScalaPackageObject: Symbol = requiredModule("scala.package") - @tu lazy val ScalaRuntimePackageVal: TermSymbol = requiredPackage("scala.runtime") - @tu lazy val ScalaRuntimePackageClass: ClassSymbol = ScalaRuntimePackageVal.moduleClass.asClass - @tu lazy val JavaPackageVal: TermSymbol = requiredPackage(nme.java) - @tu lazy val JavaPackageClass: ClassSymbol = JavaPackageVal.moduleClass.asClass - @tu lazy val JavaLangPackageVal: TermSymbol = requiredPackage(jnme.JavaLang) - @tu lazy val JavaLangPackageClass: ClassSymbol = JavaLangPackageVal.moduleClass.asClass - - // fundamental modules - @tu lazy val SysPackage : Symbol = requiredModule("scala.sys.package") - @tu lazy val Sys_error: Symbol = SysPackage.moduleClass.requiredMethod(nme.error) - - @tu lazy val ScalaXmlPackageClass: Symbol = getPackageClassIfDefined("scala.xml") - - @tu lazy val CompiletimePackageClass: Symbol = requiredPackage("scala.compiletime").moduleClass - @tu lazy val Compiletime_codeOf: Symbol = CompiletimePackageClass.requiredMethod("codeOf") - @tu lazy val Compiletime_erasedValue : Symbol = CompiletimePackageClass.requiredMethod("erasedValue") - @tu lazy val Compiletime_uninitialized: Symbol = CompiletimePackageClass.requiredMethod("uninitialized") - @tu lazy 
val Compiletime_error : Symbol = CompiletimePackageClass.requiredMethod(nme.error) - @tu lazy val Compiletime_requireConst : Symbol = CompiletimePackageClass.requiredMethod("requireConst") - @tu lazy val Compiletime_constValue : Symbol = CompiletimePackageClass.requiredMethod("constValue") - @tu lazy val Compiletime_constValueOpt: Symbol = CompiletimePackageClass.requiredMethod("constValueOpt") - @tu lazy val Compiletime_summonFrom : Symbol = CompiletimePackageClass.requiredMethod("summonFrom") - @tu lazy val Compiletime_summonInline : Symbol = CompiletimePackageClass.requiredMethod("summonInline") - @tu lazy val CompiletimeTestingPackage: Symbol = requiredPackage("scala.compiletime.testing") - @tu lazy val CompiletimeTesting_typeChecks: Symbol = CompiletimeTestingPackage.requiredMethod("typeChecks") - @tu lazy val CompiletimeTesting_typeCheckErrors: Symbol = CompiletimeTestingPackage.requiredMethod("typeCheckErrors") - @tu lazy val CompiletimeTesting_ErrorClass: ClassSymbol = requiredClass("scala.compiletime.testing.Error") - @tu lazy val CompiletimeTesting_Error: Symbol = requiredModule("scala.compiletime.testing.Error") - @tu lazy val CompiletimeTesting_Error_apply = CompiletimeTesting_Error.requiredMethod(nme.apply) - @tu lazy val CompiletimeTesting_ErrorKind: Symbol = requiredModule("scala.compiletime.testing.ErrorKind") - @tu lazy val CompiletimeTesting_ErrorKind_Parser: Symbol = CompiletimeTesting_ErrorKind.requiredMethod("Parser") - @tu lazy val CompiletimeTesting_ErrorKind_Typer: Symbol = CompiletimeTesting_ErrorKind.requiredMethod("Typer") - @tu lazy val CompiletimeOpsPackage: Symbol = requiredPackage("scala.compiletime.ops") - @tu lazy val CompiletimeOpsAnyModuleClass: Symbol = requiredModule("scala.compiletime.ops.any").moduleClass - @tu lazy val CompiletimeOpsIntModuleClass: Symbol = requiredModule("scala.compiletime.ops.int").moduleClass - @tu lazy val CompiletimeOpsLongModuleClass: Symbol = requiredModule("scala.compiletime.ops.long").moduleClass - @tu lazy val CompiletimeOpsFloatModuleClass: Symbol = requiredModule("scala.compiletime.ops.float").moduleClass - @tu lazy val CompiletimeOpsDoubleModuleClass: Symbol = requiredModule("scala.compiletime.ops.double").moduleClass - @tu lazy val CompiletimeOpsStringModuleClass: Symbol = requiredModule("scala.compiletime.ops.string").moduleClass - @tu lazy val CompiletimeOpsBooleanModuleClass: Symbol = requiredModule("scala.compiletime.ops.boolean").moduleClass - - /** Note: We cannot have same named methods defined in Object and Any (and AnyVal, for that matter) - * because after erasure the Any and AnyVal references get remapped to the Object methods - * which would result in a double binding assertion failure. - * Instead we do the following: - * - * - Have some methods exist only in Any, and remap them with the Erasure denotation - * transformer to be owned by Object. - * - Have other methods exist only in Object. - * To achieve this, we synthesize all Any and Object methods; Object methods no longer get - * loaded from a classfile. 
- */ - @tu lazy val AnyClass: ClassSymbol = completeClass(enterCompleteClassSymbol(ScalaPackageClass, tpnme.Any, Abstract, Nil), ensureCtor = false) - def AnyType: TypeRef = AnyClass.typeRef - @tu lazy val MatchableClass: ClassSymbol = completeClass(enterCompleteClassSymbol(ScalaPackageClass, tpnme.Matchable, Trait, AnyType :: Nil), ensureCtor = false) - def MatchableType: TypeRef = MatchableClass.typeRef - @tu lazy val AnyValClass: ClassSymbol = - val res = completeClass(enterCompleteClassSymbol(ScalaPackageClass, tpnme.AnyVal, Abstract, List(AnyType, MatchableType))) - // Mark companion as absent, so that class does not get re-completed - val companion = ScalaPackageVal.info.decl(nme.AnyVal).symbol - companion.moduleClass.markAbsent() - companion.markAbsent() - res - - def AnyValType: TypeRef = AnyValClass.typeRef - - @tu lazy val Any_== : TermSymbol = enterMethod(AnyClass, nme.EQ, methOfAny(BooleanType), Final) - @tu lazy val Any_!= : TermSymbol = enterMethod(AnyClass, nme.NE, methOfAny(BooleanType), Final) - @tu lazy val Any_equals: TermSymbol = enterMethod(AnyClass, nme.equals_, methOfAny(BooleanType)) - @tu lazy val Any_hashCode: TermSymbol = enterMethod(AnyClass, nme.hashCode_, MethodType(Nil, IntType)) - @tu lazy val Any_toString: TermSymbol = enterMethod(AnyClass, nme.toString_, MethodType(Nil, StringType)) - @tu lazy val Any_## : TermSymbol = enterMethod(AnyClass, nme.HASHHASH, ExprType(IntType), Final) - @tu lazy val Any_isInstanceOf: TermSymbol = enterT1ParameterlessMethod(AnyClass, nme.isInstanceOf_, _ => BooleanType, Final) - @tu lazy val Any_asInstanceOf: TermSymbol = enterT1ParameterlessMethod(AnyClass, nme.asInstanceOf_, _.paramRefs(0), Final) - @tu lazy val Any_typeTest: TermSymbol = enterT1ParameterlessMethod(AnyClass, nme.isInstanceOfPM, _ => BooleanType, Final | SyntheticArtifact) - @tu lazy val Any_typeCast: TermSymbol = enterT1ParameterlessMethod(AnyClass, nme.asInstanceOfPM, _.paramRefs(0), Final | SyntheticArtifact | StableRealizable) - // generated by pattern matcher and explicit nulls, eliminated by erasure - - /** def getClass[A >: this.type](): Class[? <: A] */ - @tu lazy val Any_getClass: TermSymbol = - enterPolyMethod( - AnyClass, nme.getClass_, 1, - pt => MethodType(Nil, ClassClass.typeRef.appliedTo(TypeBounds.upper(pt.paramRefs(0)))), - Final, - bounds = TypeBounds.lower(AnyClass.thisType)) - - def AnyMethods: List[TermSymbol] = List(Any_==, Any_!=, Any_equals, Any_hashCode, - Any_toString, Any_##, Any_getClass, Any_isInstanceOf, Any_asInstanceOf, Any_typeTest, Any_typeCast) - - @tu lazy val ObjectClass: ClassSymbol = { - val cls = requiredClass("java.lang.Object") - assert(!cls.isCompleted, "race for completing java.lang.Object") - cls.info = ClassInfo(cls.owner.thisType, cls, List(AnyType, MatchableType), newScope) - cls.setFlag(NoInits | JavaDefined) - - ensureConstructor(cls, cls.denot.asClass, EmptyScope) - val companion = JavaLangPackageVal.info.decl(nme.Object).symbol.asTerm - NamerOps.makeConstructorCompanion(companion, cls) - cls - } - def ObjectType: TypeRef = ObjectClass.typeRef - - /** A type alias of Object used to represent any reference to Object in a Java - * signature, the secret sauce is that subtype checking treats it specially: - * - * tp <:< FromJavaObject - * - * is equivalent to: - * - * tp <:< Any - * - * This is useful to avoid usability problems when interacting with Java - * code where Object is the top type. 
This is safe because this type will - * only appear in signatures of Java definitions in positions where `Object` - * might appear, let's enumerate all possible cases this gives us: - * - * 1. At the top level: - * - * // A.java - * void meth1(Object arg) {} - * void meth2(T arg) {} // T implicitly extends Object - * - * // B.scala - * meth1(1) // OK - * meth2(1) // OK - * - * This is safe even though Int is not a subtype of Object, because Erasure - * will detect the mismatch and box the value type. - * - * 2. In a class type parameter: - * - * // A.java - * void meth3(scala.List arg) {} - * void meth4(scala.List arg) {} - * - * // B.scala - * meth3(List[Int](1)) // OK - * meth4(List[Int](1)) // OK - * - * At erasure, type parameters are removed and value types are boxed. - * - * 3. As the type parameter of an array: - * - * // A.java - * void meth5(Object[] arg) {} - * void meth6(T[] arg) {} - * - * // B.scala - * meth5(Array[Int](1)) // error: Array[Int] is not a subtype of Array[Object] - * meth6(Array[Int](1)) // error: Array[Int] is not a subtype of Array[T & Object] - * - * - * This is a bit more subtle: at erasure, Arrays keep their type parameter, - * and primitive Arrays are not subtypes of reference Arrays on the JVM, - * so we can't pass an Array of Int where a reference Array is expected. - * Array is invariant in Scala, so `meth5` is safe even if we use `FromJavaObject`, - * but generic Arrays are treated specially: we always add `& Object` (and here - * we mean the normal java.lang.Object type) to these types when they come from - * Java signatures (see `translateJavaArrayElementType`), this ensure that `meth6` - * is safe to use. - * - * 4. As the repeated argument of a varargs method: - * - * // A.java - * void meth7(Object... args) {} - * void meth8(T... args) {} - * - * // B.scala - * meth7(1) // OK (creates a reference array) - * meth8(1) // OK (creates a primitive array and copies it into a reference array at Erasure) - * val ai = Array[Int](1) - * meth7(ai: _*) // OK (will copy the array at Erasure) - * meth8(ai: _*) // OK (will copy the array at Erasure) - * - * Java repeated arguments are erased to arrays, so it would be safe to treat - * them in the same way: add an `& Object` to the parameter type to disallow - * passing primitives, but that would be very inconvenient as it is common to - * want to pass a primitive to an Object repeated argument (e.g. - * `String.format("foo: %d", 1)`). So instead we type them _without_ adding the - * `& Object` and let `ElimRepeated` and `Erasure` take care of doing any necessary adaptation - * (note that adapting a primitive array to a reference array requires - * copying the whole array, so this transformation only preserves semantics - * if the callee does not try to mutate the varargs array which is a reasonable - * assumption to make). - * - * - * This mechanism is similar to `ObjectTpeJavaRef` in Scala 2, except that we - * create a new symbol with its own name, this is needed because this type - * can show up in inferred types and therefore needs to be preserved when - * pickling so that unpickled trees pass `-Ycheck`. - * - * Note that by default we pretty-print `FromJavaObject` as `Object` or simply omit it - * if it's the sole upper-bound of a type parameter, use `-Yprint-debug` to explicitly - * display it. 
- */ - @tu lazy val FromJavaObjectSymbol: TypeSymbol = - newPermanentSymbol(OpsPackageClass, tpnme.FromJavaObject, JavaDefined, TypeAlias(ObjectType)).entered - def FromJavaObjectType: TypeRef = FromJavaObjectSymbol.typeRef - - @tu lazy val AnyRefAlias: TypeSymbol = enterAliasType(tpnme.AnyRef, ObjectType) - def AnyRefType: TypeRef = AnyRefAlias.typeRef - - @tu lazy val Object_eq: TermSymbol = enterMethod(ObjectClass, nme.eq, methOfAnyRef(BooleanType), Final) - @tu lazy val Object_ne: TermSymbol = enterMethod(ObjectClass, nme.ne, methOfAnyRef(BooleanType), Final) - @tu lazy val Object_synchronized: TermSymbol = enterPolyMethod(ObjectClass, nme.synchronized_, 1, - pt => MethodType(List(pt.paramRefs(0)), pt.paramRefs(0)), Final) - @tu lazy val Object_clone: TermSymbol = enterMethod(ObjectClass, nme.clone_, MethodType(Nil, ObjectType), Protected) - @tu lazy val Object_finalize: TermSymbol = enterMethod(ObjectClass, nme.finalize_, MethodType(Nil, UnitType), Protected) - @tu lazy val Object_notify: TermSymbol = enterMethod(ObjectClass, nme.notify_, MethodType(Nil, UnitType), Final) - @tu lazy val Object_notifyAll: TermSymbol = enterMethod(ObjectClass, nme.notifyAll_, MethodType(Nil, UnitType), Final) - @tu lazy val Object_wait: TermSymbol = enterMethod(ObjectClass, nme.wait_, MethodType(Nil, UnitType), Final) - @tu lazy val Object_waitL: TermSymbol = enterMethod(ObjectClass, nme.wait_, MethodType(LongType :: Nil, UnitType), Final) - @tu lazy val Object_waitLI: TermSymbol = enterMethod(ObjectClass, nme.wait_, MethodType(LongType :: IntType :: Nil, UnitType), Final) - - def ObjectMethods: List[TermSymbol] = List(Object_eq, Object_ne, Object_synchronized, Object_clone, - Object_finalize, Object_notify, Object_notifyAll, Object_wait, Object_waitL, Object_waitLI) - - /** Methods in Object and Any that do not have a side effect */ - @tu lazy val pureMethods: List[TermSymbol] = List(Any_==, Any_!=, Any_equals, Any_hashCode, - Any_toString, Any_##, Any_getClass, Any_isInstanceOf, Any_typeTest, Object_eq, Object_ne) - - @tu lazy val AnyKindClass: ClassSymbol = { - val cls = newCompleteClassSymbol(ScalaPackageClass, tpnme.AnyKind, AbstractFinal | Permanent, Nil, newScope(0)) - if (!ctx.settings.YnoKindPolymorphism.value) - // Enable kind-polymorphism by exposing scala.AnyKind - cls.entered - cls - } - def AnyKindType: TypeRef = AnyKindClass.typeRef - - @tu lazy val andType: TypeSymbol = enterBinaryAlias(tpnme.AND, AndType(_, _)) - @tu lazy val orType: TypeSymbol = enterBinaryAlias(tpnme.OR, OrType(_, _, soft = false)) - - /** Method representing a throw */ - @tu lazy val throwMethod: TermSymbol = enterMethod(OpsPackageClass, nme.THROWkw, - MethodType(List(ThrowableType), NothingType)) - - @tu lazy val NothingClass: ClassSymbol = enterCompleteClassSymbol( - ScalaPackageClass, tpnme.Nothing, AbstractFinal, List(AnyType)) - def NothingType: TypeRef = NothingClass.typeRef - @tu lazy val NullClass: ClassSymbol = { - // When explicit-nulls is enabled, Null becomes a direct subtype of Any and Matchable - val parents = if ctx.explicitNulls then AnyType :: MatchableType :: Nil else ObjectType :: Nil - enterCompleteClassSymbol(ScalaPackageClass, tpnme.Null, AbstractFinal, parents) - } - def NullType: TypeRef = NullClass.typeRef - - @tu lazy val InvokerModule = requiredModule("scala.runtime.coverage.Invoker") - @tu lazy val InvokedMethodRef = InvokerModule.requiredMethodRef("invoked") - - @tu lazy val ImplicitScrutineeTypeSym = - newPermanentSymbol(ScalaPackageClass, tpnme.IMPLICITkw, EmptyFlags, 
TypeBounds.empty).entered - def ImplicitScrutineeTypeRef: TypeRef = ImplicitScrutineeTypeSym.typeRef - - @tu lazy val ScalaPredefModule: Symbol = requiredModule("scala.Predef") - @tu lazy val Predef_conforms : Symbol = ScalaPredefModule.requiredMethod(nme.conforms_) - @tu lazy val Predef_classOf : Symbol = ScalaPredefModule.requiredMethod(nme.classOf) - @tu lazy val Predef_identity : Symbol = ScalaPredefModule.requiredMethod(nme.identity) - @tu lazy val Predef_undefined: Symbol = ScalaPredefModule.requiredMethod(nme.???) - @tu lazy val ScalaPredefModuleClass: ClassSymbol = ScalaPredefModule.moduleClass.asClass - - @tu lazy val SubTypeClass: ClassSymbol = requiredClass("scala.<:<") - @tu lazy val SubType_refl: Symbol = SubTypeClass.companionModule.requiredMethod(nme.refl) - - @tu lazy val DummyImplicitClass: ClassSymbol = requiredClass("scala.DummyImplicit") - - @tu lazy val ScalaRuntimeModule: Symbol = requiredModule("scala.runtime.ScalaRunTime") - def runtimeMethodRef(name: PreName): TermRef = ScalaRuntimeModule.requiredMethodRef(name) - def ScalaRuntime_drop: Symbol = runtimeMethodRef(nme.drop).symbol - @tu lazy val ScalaRuntime__hashCode: Symbol = ScalaRuntimeModule.requiredMethod(nme._hashCode_) - @tu lazy val ScalaRuntime_toArray: Symbol = ScalaRuntimeModule.requiredMethod(nme.toArray) - @tu lazy val ScalaRuntime_toObjectArray: Symbol = ScalaRuntimeModule.requiredMethod(nme.toObjectArray) - - @tu lazy val BoxesRunTimeModule: Symbol = requiredModule("scala.runtime.BoxesRunTime") - @tu lazy val BoxesRunTimeModule_externalEquals: Symbol = BoxesRunTimeModule.info.decl(nme.equals_).suchThat(toDenot(_).info.firstParamTypes.size == 2).symbol - @tu lazy val ScalaStaticsModule: Symbol = requiredModule("scala.runtime.Statics") - def staticsMethodRef(name: PreName): TermRef = ScalaStaticsModule.requiredMethodRef(name) - def staticsMethod(name: PreName): TermSymbol = ScalaStaticsModule.requiredMethod(name) - - @tu lazy val DottyArraysModule: Symbol = requiredModule("scala.runtime.Arrays") - def newGenericArrayMethod(using Context): TermSymbol = DottyArraysModule.requiredMethod("newGenericArray") - def newArrayMethod(using Context): TermSymbol = DottyArraysModule.requiredMethod("newArray") - - def getWrapVarargsArrayModule: Symbol = ScalaRuntimeModule - - // The set of all wrap{X, Ref}Array methods, where X is a value type - val WrapArrayMethods: PerRun[collection.Set[Symbol]] = new PerRun({ - val methodNames = ScalaValueTypes.map(ast.tpd.wrapArrayMethodName) `union` Set(nme.wrapRefArray) - methodNames.map(getWrapVarargsArrayModule.requiredMethod(_)) - }) - - @tu lazy val ListClass: Symbol = requiredClass("scala.collection.immutable.List") - @tu lazy val ListModule: Symbol = requiredModule("scala.collection.immutable.List") - @tu lazy val NilModule: Symbol = requiredModule("scala.collection.immutable.Nil") - @tu lazy val ConsClass: Symbol = requiredClass("scala.collection.immutable.::") - @tu lazy val SeqFactoryClass: Symbol = requiredClass("scala.collection.SeqFactory") - - @tu lazy val SingletonClass: ClassSymbol = - // needed as a synthetic class because Scala 2.x refers to it in classfiles - // but does not define it as an explicit class. 
- enterCompleteClassSymbol( - ScalaPackageClass, tpnme.Singleton, PureInterfaceCreationFlags | Final, - List(AnyType), EmptyScope) - @tu lazy val SingletonType: TypeRef = SingletonClass.typeRef - - @tu lazy val CollectionSeqType: TypeRef = requiredClassRef("scala.collection.Seq") - @tu lazy val SeqType: TypeRef = requiredClassRef("scala.collection.immutable.Seq") - def SeqClass(using Context): ClassSymbol = SeqType.symbol.asClass - @tu lazy val Seq_apply : Symbol = SeqClass.requiredMethod(nme.apply) - @tu lazy val Seq_head : Symbol = SeqClass.requiredMethod(nme.head) - @tu lazy val Seq_drop : Symbol = SeqClass.requiredMethod(nme.drop) - @tu lazy val Seq_lengthCompare: Symbol = SeqClass.requiredMethod(nme.lengthCompare, List(IntType)) - @tu lazy val Seq_length : Symbol = SeqClass.requiredMethod(nme.length) - @tu lazy val Seq_toSeq : Symbol = SeqClass.requiredMethod(nme.toSeq) - @tu lazy val SeqModule: Symbol = requiredModule("scala.collection.immutable.Seq") - - - @tu lazy val StringOps: Symbol = requiredClass("scala.collection.StringOps") - @tu lazy val StringOps_format: Symbol = StringOps.requiredMethod(nme.format) - - @tu lazy val ArrayType: TypeRef = requiredClassRef("scala.Array") - def ArrayClass(using Context): ClassSymbol = ArrayType.symbol.asClass - @tu lazy val Array_apply : Symbol = ArrayClass.requiredMethod(nme.apply) - @tu lazy val Array_update : Symbol = ArrayClass.requiredMethod(nme.update) - @tu lazy val Array_length : Symbol = ArrayClass.requiredMethod(nme.length) - @tu lazy val Array_clone : Symbol = ArrayClass.requiredMethod(nme.clone_) - @tu lazy val ArrayConstructor: Symbol = ArrayClass.requiredMethod(nme.CONSTRUCTOR) - - @tu lazy val ArrayModule: Symbol = requiredModule("scala.Array") - def ArrayModuleClass: Symbol = ArrayModule.moduleClass - - @tu lazy val IArrayModule: Symbol = requiredModule("scala.IArray") - def IArrayModuleClass: Symbol = IArrayModule.moduleClass - - @tu lazy val UnitType: TypeRef = valueTypeRef("scala.Unit", java.lang.Void.TYPE, UnitEnc, nme.specializedTypeNames.Void) - def UnitClass(using Context): ClassSymbol = UnitType.symbol.asClass - def UnitModuleClass(using Context): Symbol = UnitType.symbol.asClass.linkedClass - @tu lazy val BooleanType: TypeRef = valueTypeRef("scala.Boolean", java.lang.Boolean.TYPE, BooleanEnc, nme.specializedTypeNames.Boolean) - def BooleanClass(using Context): ClassSymbol = BooleanType.symbol.asClass - @tu lazy val Boolean_! : Symbol = BooleanClass.requiredMethod(nme.UNARY_!) - @tu lazy val Boolean_&& : Symbol = BooleanClass.requiredMethod(nme.ZAND) // ### harmonize required... 
calls - @tu lazy val Boolean_|| : Symbol = BooleanClass.requiredMethod(nme.ZOR) - @tu lazy val Boolean_== : Symbol = - BooleanClass.info.member(nme.EQ).suchThat(_.info.firstParamTypes match { - case List(pt) => pt.isRef(BooleanClass) - case _ => false - }).symbol - @tu lazy val Boolean_!= : Symbol = - BooleanClass.info.member(nme.NE).suchThat(_.info.firstParamTypes match { - case List(pt) => pt.isRef(BooleanClass) - case _ => false - }).symbol - - @tu lazy val ByteType: TypeRef = valueTypeRef("scala.Byte", java.lang.Byte.TYPE, ByteEnc, nme.specializedTypeNames.Byte) - def ByteClass(using Context): ClassSymbol = ByteType.symbol.asClass - @tu lazy val ShortType: TypeRef = valueTypeRef("scala.Short", java.lang.Short.TYPE, ShortEnc, nme.specializedTypeNames.Short) - def ShortClass(using Context): ClassSymbol = ShortType.symbol.asClass - @tu lazy val CharType: TypeRef = valueTypeRef("scala.Char", java.lang.Character.TYPE, CharEnc, nme.specializedTypeNames.Char) - def CharClass(using Context): ClassSymbol = CharType.symbol.asClass - @tu lazy val IntType: TypeRef = valueTypeRef("scala.Int", java.lang.Integer.TYPE, IntEnc, nme.specializedTypeNames.Int) - def IntClass(using Context): ClassSymbol = IntType.symbol.asClass - @tu lazy val Int_- : Symbol = IntClass.requiredMethod(nme.MINUS, List(IntType)) - @tu lazy val Int_+ : Symbol = IntClass.requiredMethod(nme.PLUS, List(IntType)) - @tu lazy val Int_/ : Symbol = IntClass.requiredMethod(nme.DIV, List(IntType)) - @tu lazy val Int_* : Symbol = IntClass.requiredMethod(nme.MUL, List(IntType)) - @tu lazy val Int_== : Symbol = IntClass.requiredMethod(nme.EQ, List(IntType)) - @tu lazy val Int_>= : Symbol = IntClass.requiredMethod(nme.GE, List(IntType)) - @tu lazy val Int_<= : Symbol = IntClass.requiredMethod(nme.LE, List(IntType)) - @tu lazy val LongType: TypeRef = valueTypeRef("scala.Long", java.lang.Long.TYPE, LongEnc, nme.specializedTypeNames.Long) - def LongClass(using Context): ClassSymbol = LongType.symbol.asClass - @tu lazy val Long_+ : Symbol = LongClass.requiredMethod(nme.PLUS, List(LongType)) - @tu lazy val Long_* : Symbol = LongClass.requiredMethod(nme.MUL, List(LongType)) - @tu lazy val Long_/ : Symbol = LongClass.requiredMethod(nme.DIV, List(LongType)) - - @tu lazy val FloatType: TypeRef = valueTypeRef("scala.Float", java.lang.Float.TYPE, FloatEnc, nme.specializedTypeNames.Float) - def FloatClass(using Context): ClassSymbol = FloatType.symbol.asClass - @tu lazy val DoubleType: TypeRef = valueTypeRef("scala.Double", java.lang.Double.TYPE, DoubleEnc, nme.specializedTypeNames.Double) - def DoubleClass(using Context): ClassSymbol = DoubleType.symbol.asClass - - @tu lazy val BoxedUnitClass: ClassSymbol = requiredClass("scala.runtime.BoxedUnit") - def BoxedUnit_UNIT(using Context): TermSymbol = BoxedUnitClass.linkedClass.requiredValue("UNIT") - def BoxedUnit_TYPE(using Context): TermSymbol = BoxedUnitClass.linkedClass.requiredValue("TYPE") - - @tu lazy val BoxedBooleanClass: ClassSymbol = requiredClass("java.lang.Boolean") - @tu lazy val BoxedByteClass : ClassSymbol = requiredClass("java.lang.Byte") - @tu lazy val BoxedShortClass : ClassSymbol = requiredClass("java.lang.Short") - @tu lazy val BoxedCharClass : ClassSymbol = requiredClass("java.lang.Character") - @tu lazy val BoxedIntClass : ClassSymbol = requiredClass("java.lang.Integer") - @tu lazy val BoxedLongClass : ClassSymbol = requiredClass("java.lang.Long") - @tu lazy val BoxedFloatClass : ClassSymbol = requiredClass("java.lang.Float") - @tu lazy val BoxedDoubleClass : ClassSymbol = 
requiredClass("java.lang.Double") - - @tu lazy val BoxedBooleanModule: TermSymbol = requiredModule("java.lang.Boolean") - @tu lazy val BoxedByteModule : TermSymbol = requiredModule("java.lang.Byte") - @tu lazy val BoxedShortModule : TermSymbol = requiredModule("java.lang.Short") - @tu lazy val BoxedCharModule : TermSymbol = requiredModule("java.lang.Character") - @tu lazy val BoxedIntModule : TermSymbol = requiredModule("java.lang.Integer") - @tu lazy val BoxedLongModule : TermSymbol = requiredModule("java.lang.Long") - @tu lazy val BoxedFloatModule : TermSymbol = requiredModule("java.lang.Float") - @tu lazy val BoxedDoubleModule : TermSymbol = requiredModule("java.lang.Double") - @tu lazy val BoxedUnitModule : TermSymbol = requiredModule("java.lang.Void") - - @tu lazy val ByNameParamClass2x: ClassSymbol = enterSpecialPolyClass(tpnme.BYNAME_PARAM_CLASS, Covariant, Seq(AnyType)) - - @tu lazy val RepeatedParamClass: ClassSymbol = enterSpecialPolyClass(tpnme.REPEATED_PARAM_CLASS, Covariant, Seq(ObjectType, SeqType)) - - @tu lazy val IntoType: TypeSymbol = enterAliasType(tpnme.INTO, HKTypeLambda(TypeBounds.empty :: Nil)(_.paramRefs(0))) - - // fundamental classes - @tu lazy val StringClass: ClassSymbol = requiredClass("java.lang.String") - def StringType: Type = StringClass.typeRef - @tu lazy val StringModule: Symbol = StringClass.linkedClass - @tu lazy val String_+ : TermSymbol = enterMethod(StringClass, nme.raw.PLUS, methOfAny(StringType), Final) - @tu lazy val String_valueOf_Object: Symbol = StringModule.info.member(nme.valueOf).suchThat(_.info.firstParamTypes match { - case List(pt) => pt.isAny || pt.stripNull.isAnyRef - case _ => false - }).symbol - - @tu lazy val JavaCloneableClass: ClassSymbol = requiredClass("java.lang.Cloneable") - @tu lazy val NullPointerExceptionClass: ClassSymbol = requiredClass("java.lang.NullPointerException") - @tu lazy val IndexOutOfBoundsException: ClassSymbol = requiredClass("java.lang.IndexOutOfBoundsException") - @tu lazy val ClassClass: ClassSymbol = requiredClass("java.lang.Class") - @tu lazy val BoxedNumberClass: ClassSymbol = requiredClass("java.lang.Number") - @tu lazy val ClassCastExceptionClass: ClassSymbol = requiredClass("java.lang.ClassCastException") - @tu lazy val ClassCastExceptionClass_stringConstructor: TermSymbol = ClassCastExceptionClass.info.member(nme.CONSTRUCTOR).suchThat(_.info.firstParamTypes match { - case List(pt) => - pt.stripNull.isRef(StringClass) - case _ => false - }).symbol.asTerm - @tu lazy val ArithmeticExceptionClass: ClassSymbol = requiredClass("java.lang.ArithmeticException") - @tu lazy val ArithmeticExceptionClass_stringConstructor: TermSymbol = ArithmeticExceptionClass.info.member(nme.CONSTRUCTOR).suchThat(_.info.firstParamTypes match { - case List(pt) => - pt.stripNull.isRef(StringClass) - case _ => false - }).symbol.asTerm - - @tu lazy val JavaSerializableClass: ClassSymbol = requiredClass("java.io.Serializable") - - @tu lazy val ComparableClass: ClassSymbol = requiredClass("java.lang.Comparable") - - @tu lazy val SystemClass: ClassSymbol = requiredClass("java.lang.System") - @tu lazy val SystemModule: Symbol = SystemClass.linkedClass - - @tu lazy val NoSuchElementExceptionClass = requiredClass("java.util.NoSuchElementException") - def NoSuchElementExceptionType = NoSuchElementExceptionClass.typeRef - @tu lazy val IllegalArgumentExceptionClass = requiredClass("java.lang.IllegalArgumentException") - def IllegalArgumentExceptionType = IllegalArgumentExceptionClass.typeRef - - // in scalac modified to have Any as parent - 
- @tu lazy val ThrowableType: TypeRef = requiredClassRef("java.lang.Throwable") - def ThrowableClass(using Context): ClassSymbol = ThrowableType.symbol.asClass - @tu lazy val ExceptionClass: ClassSymbol = requiredClass("java.lang.Exception") - @tu lazy val RuntimeExceptionClass: ClassSymbol = requiredClass("java.lang.RuntimeException") - - @tu lazy val SerializableType: TypeRef = JavaSerializableClass.typeRef - def SerializableClass(using Context): ClassSymbol = SerializableType.symbol.asClass - - @tu lazy val JavaBigIntegerClass: ClassSymbol = requiredClass("java.math.BigInteger") - @tu lazy val JavaBigDecimalClass: ClassSymbol = requiredClass("java.math.BigDecimal") - @tu lazy val JavaCalendarClass: ClassSymbol = requiredClass("java.util.Calendar") - @tu lazy val JavaDateClass: ClassSymbol = requiredClass("java.util.Date") - @tu lazy val JavaFormattableClass: ClassSymbol = requiredClass("java.util.Formattable") - - @tu lazy val JavaEnumClass: ClassSymbol = { - val cls = requiredClass("java.lang.Enum") - // jl.Enum has a single constructor protected(name: String, ordinal: Int). - // We remove the arguments from the primary constructor, and enter - // a new constructor symbol with 2 arguments, so that both - // `X extends jl.Enum[X]` and `X extends jl.Enum[X](name, ordinal)` - // pass typer and go through jl.Enum-specific checks in RefChecks. - cls.infoOrCompleter match { - case completer: ClassfileLoader => - cls.info = new ClassfileLoader(completer.classfile) { - override def complete(root: SymDenotation)(using Context): Unit = { - super.complete(root) - val constr = cls.primaryConstructor - val noArgInfo = constr.info match { - case info: PolyType => - info.resType match { - case meth: MethodType => - info.derivedLambdaType( - resType = meth.derivedLambdaType( - paramNames = Nil, paramInfos = Nil)) - } - } - val argConstr = constr.copy().entered - constr.info = noArgInfo - constr.termRef.recomputeDenot() - } - } - cls - } - } - def JavaEnumType = JavaEnumClass.typeRef - - @tu lazy val MethodHandleClass: ClassSymbol = requiredClass("java.lang.invoke.MethodHandle") - @tu lazy val MethodHandlesLookupClass: ClassSymbol = requiredClass("java.lang.invoke.MethodHandles.Lookup") - @tu lazy val VarHandleClass: ClassSymbol = requiredClass("java.lang.invoke.VarHandle") - - @tu lazy val StringBuilderClass: ClassSymbol = requiredClass("scala.collection.mutable.StringBuilder") - @tu lazy val MatchErrorClass : ClassSymbol = requiredClass("scala.MatchError") - @tu lazy val ConversionClass : ClassSymbol = requiredClass("scala.Conversion").typeRef.symbol.asClass - - @tu lazy val StringAddClass : ClassSymbol = requiredClass("scala.runtime.StringAdd") - @tu lazy val StringAdd_+ : Symbol = StringAddClass.requiredMethod(nme.raw.PLUS) - - @tu lazy val StringContextClass: ClassSymbol = requiredClass("scala.StringContext") - @tu lazy val StringContext_s : Symbol = StringContextClass.requiredMethod(nme.s) - @tu lazy val StringContext_raw: Symbol = StringContextClass.requiredMethod(nme.raw_) - @tu lazy val StringContext_f : Symbol = StringContextClass.requiredMethod(nme.f) - @tu lazy val StringContext_parts: Symbol = StringContextClass.requiredMethod(nme.parts) - @tu lazy val StringContextModule: Symbol = StringContextClass.companionModule - @tu lazy val StringContextModule_apply: Symbol = StringContextModule.requiredMethod(nme.apply) - @tu lazy val StringContextModule_standardInterpolator: Symbol = StringContextModule.requiredMethod(nme.standardInterpolator) - @tu lazy val StringContextModule_processEscapes: 
Symbol = StringContextModule.requiredMethod(nme.processEscapes) - - @tu lazy val PartialFunctionClass: ClassSymbol = requiredClass("scala.PartialFunction") - @tu lazy val PartialFunction_isDefinedAt: Symbol = PartialFunctionClass.requiredMethod(nme.isDefinedAt) - @tu lazy val PartialFunction_applyOrElse: Symbol = PartialFunctionClass.requiredMethod(nme.applyOrElse) - - @tu lazy val AbstractPartialFunctionClass: ClassSymbol = requiredClass("scala.runtime.AbstractPartialFunction") - @tu lazy val FunctionXXLClass: ClassSymbol = requiredClass("scala.runtime.FunctionXXL") - @tu lazy val ScalaSymbolClass: ClassSymbol = requiredClass("scala.Symbol") - @tu lazy val DynamicClass: ClassSymbol = requiredClass("scala.Dynamic") - @tu lazy val OptionClass: ClassSymbol = requiredClass("scala.Option") - @tu lazy val SomeClass: ClassSymbol = requiredClass("scala.Some") - @tu lazy val NoneModule: Symbol = requiredModule("scala.None") - - @tu lazy val EnumClass: ClassSymbol = requiredClass("scala.reflect.Enum") - @tu lazy val Enum_ordinal: Symbol = EnumClass.requiredMethod(nme.ordinal) - - @tu lazy val EnumValueSerializationProxyClass: ClassSymbol = requiredClass("scala.runtime.EnumValueSerializationProxy") - @tu lazy val EnumValueSerializationProxyConstructor: TermSymbol = - EnumValueSerializationProxyClass.requiredMethod(nme.CONSTRUCTOR, List(ClassType(TypeBounds.empty), IntType)) - - @tu lazy val ProductClass: ClassSymbol = requiredClass("scala.Product") - @tu lazy val Product_canEqual : Symbol = ProductClass.requiredMethod(nme.canEqual_) - @tu lazy val Product_productArity : Symbol = ProductClass.requiredMethod(nme.productArity) - @tu lazy val Product_productElement : Symbol = ProductClass.requiredMethod(nme.productElement) - @tu lazy val Product_productElementName: Symbol = ProductClass.requiredMethod(nme.productElementName) - @tu lazy val Product_productPrefix : Symbol = ProductClass.requiredMethod(nme.productPrefix) - - @tu lazy val IteratorClass: ClassSymbol = requiredClass("scala.collection.Iterator") - def IteratorModule(using Context): Symbol = IteratorClass.companionModule - - @tu lazy val ModuleSerializationProxyClass: ClassSymbol = requiredClass("scala.runtime.ModuleSerializationProxy") - @tu lazy val ModuleSerializationProxyConstructor: TermSymbol = - ModuleSerializationProxyClass.requiredMethod(nme.CONSTRUCTOR, List(ClassType(TypeBounds.empty))) - - @tu lazy val MirrorClass: ClassSymbol = requiredClass("scala.deriving.Mirror") - @tu lazy val Mirror_ProductClass: ClassSymbol = requiredClass("scala.deriving.Mirror.Product") - @tu lazy val Mirror_Product_fromProduct: Symbol = Mirror_ProductClass.requiredMethod(nme.fromProduct) - @tu lazy val Mirror_SumClass: ClassSymbol = requiredClass("scala.deriving.Mirror.Sum") - @tu lazy val Mirror_SingletonClass: ClassSymbol = requiredClass("scala.deriving.Mirror.Singleton") - @tu lazy val Mirror_SingletonProxyClass: ClassSymbol = requiredClass("scala.deriving.Mirror.SingletonProxy") - - @tu lazy val LanguageModule: Symbol = requiredModule("scala.language") - @tu lazy val LanguageModuleClass: Symbol = LanguageModule.moduleClass.asClass - @tu lazy val LanguageExperimentalModule: Symbol = requiredModule("scala.language.experimental") - @tu lazy val LanguageDeprecatedModule: Symbol = requiredModule("scala.language.deprecated") - @tu lazy val NonLocalReturnControlClass: ClassSymbol = requiredClass("scala.runtime.NonLocalReturnControl") - @tu lazy val SelectableClass: ClassSymbol = requiredClass("scala.Selectable") - @tu lazy val 
WithoutPreciseParameterTypesClass: Symbol = requiredClass("scala.Selectable.WithoutPreciseParameterTypes") - - @tu lazy val ManifestClass: ClassSymbol = requiredClass("scala.reflect.Manifest") - @tu lazy val ManifestFactoryModule: Symbol = requiredModule("scala.reflect.ManifestFactory") - @tu lazy val ClassManifestFactoryModule: Symbol = requiredModule("scala.reflect.ClassManifestFactory") - @tu lazy val OptManifestClass: ClassSymbol = requiredClass("scala.reflect.OptManifest") - @tu lazy val NoManifestModule: Symbol = requiredModule("scala.reflect.NoManifest") - - @tu lazy val ReflectPackageClass: Symbol = requiredPackage("scala.reflect.package").moduleClass - @tu lazy val ClassTagClass: ClassSymbol = requiredClass("scala.reflect.ClassTag") - @tu lazy val ClassTagModule: Symbol = ClassTagClass.companionModule - @tu lazy val ClassTagModule_apply: Symbol = ClassTagModule.requiredMethod(nme.apply) - - @tu lazy val TypeTestClass: ClassSymbol = requiredClass("scala.reflect.TypeTest") - @tu lazy val TypeTest_unapply: Symbol = TypeTestClass.requiredMethod(nme.unapply) - @tu lazy val TypeTestModule_identity: Symbol = TypeTestClass.companionModule.requiredMethod(nme.identity) - - @tu lazy val QuotedExprClass: ClassSymbol = requiredClass("scala.quoted.Expr") - - @tu lazy val QuotesClass: ClassSymbol = requiredClass("scala.quoted.Quotes") - @tu lazy val Quotes_reflect: Symbol = QuotesClass.requiredValue("reflect") - @tu lazy val Quotes_reflect_asTerm: Symbol = Quotes_reflect.requiredMethod("asTerm") - @tu lazy val Quotes_reflect_Apply: Symbol = Quotes_reflect.requiredValue("Apply") - @tu lazy val Quotes_reflect_Apply_apply: Symbol = Quotes_reflect_Apply.requiredMethod(nme.apply) - @tu lazy val Quotes_reflect_TypeApply: Symbol = Quotes_reflect.requiredValue("TypeApply") - @tu lazy val Quotes_reflect_TypeApply_apply: Symbol = Quotes_reflect_TypeApply.requiredMethod(nme.apply) - @tu lazy val Quotes_reflect_Assign: Symbol = Quotes_reflect.requiredValue("Assign") - @tu lazy val Quotes_reflect_Assign_apply: Symbol = Quotes_reflect_Assign.requiredMethod(nme.apply) - @tu lazy val Quotes_reflect_Inferred: Symbol = Quotes_reflect.requiredValue("Inferred") - @tu lazy val Quotes_reflect_Inferred_apply: Symbol = Quotes_reflect_Inferred.requiredMethod(nme.apply) - @tu lazy val Quotes_reflect_Literal: Symbol = Quotes_reflect.requiredValue("Literal") - @tu lazy val Quotes_reflect_Literal_apply: Symbol = Quotes_reflect_Literal.requiredMethod(nme.apply) - @tu lazy val Quotes_reflect_TreeMethods: Symbol = Quotes_reflect.requiredMethod("TreeMethods") - @tu lazy val Quotes_reflect_TreeMethods_asExpr: Symbol = Quotes_reflect_TreeMethods.requiredMethod("asExpr") - @tu lazy val Quotes_reflect_TypeRepr: Symbol = Quotes_reflect.requiredValue("TypeRepr") - @tu lazy val Quotes_reflect_TypeRepr_of: Symbol = Quotes_reflect_TypeRepr.requiredMethod("of") - @tu lazy val Quotes_reflect_TypeRepr_typeConstructorOf: Symbol = Quotes_reflect_TypeRepr.requiredMethod("typeConstructorOf") - @tu lazy val Quotes_reflect_TypeReprMethods: Symbol = Quotes_reflect.requiredValue("TypeReprMethods") - @tu lazy val Quotes_reflect_TypeReprMethods_asType: Symbol = Quotes_reflect_TypeReprMethods.requiredMethod("asType") - @tu lazy val Quotes_reflect_TypeTreeType: Symbol = Quotes_reflect.requiredType("TypeTree") - @tu lazy val Quotes_reflect_TermType: Symbol = Quotes_reflect.requiredType("Term") - @tu lazy val Quotes_reflect_BooleanConstant: Symbol = Quotes_reflect.requiredValue("BooleanConstant") - @tu lazy val Quotes_reflect_ByteConstant: Symbol = 
Quotes_reflect.requiredValue("ByteConstant") - @tu lazy val Quotes_reflect_ShortConstant: Symbol = Quotes_reflect.requiredValue("ShortConstant") - @tu lazy val Quotes_reflect_IntConstant: Symbol = Quotes_reflect.requiredValue("IntConstant") - @tu lazy val Quotes_reflect_LongConstant: Symbol = Quotes_reflect.requiredValue("LongConstant") - @tu lazy val Quotes_reflect_FloatConstant: Symbol = Quotes_reflect.requiredValue("FloatConstant") - @tu lazy val Quotes_reflect_DoubleConstant: Symbol = Quotes_reflect.requiredValue("DoubleConstant") - @tu lazy val Quotes_reflect_CharConstant: Symbol = Quotes_reflect.requiredValue("CharConstant") - @tu lazy val Quotes_reflect_StringConstant: Symbol = Quotes_reflect.requiredValue("StringConstant") - @tu lazy val Quotes_reflect_UnitConstant: Symbol = Quotes_reflect.requiredValue("UnitConstant") - @tu lazy val Quotes_reflect_NullConstant: Symbol = Quotes_reflect.requiredValue("NullConstant") - @tu lazy val Quotes_reflect_ClassOfConstant: Symbol = Quotes_reflect.requiredValue("ClassOfConstant") - - - @tu lazy val QuoteUnpicklerClass: ClassSymbol = requiredClass("scala.quoted.runtime.QuoteUnpickler") - @tu lazy val QuoteUnpickler_unpickleExprV2: Symbol = QuoteUnpicklerClass.requiredMethod("unpickleExprV2") - @tu lazy val QuoteUnpickler_unpickleTypeV2: Symbol = QuoteUnpicklerClass.requiredMethod("unpickleTypeV2") - - @tu lazy val QuoteMatchingClass: ClassSymbol = requiredClass("scala.quoted.runtime.QuoteMatching") - @tu lazy val QuoteMatching_ExprMatch: Symbol = QuoteMatchingClass.requiredMethod("ExprMatch") - @tu lazy val QuoteMatching_TypeMatch: Symbol = QuoteMatchingClass.requiredMethod("TypeMatch") - - @tu lazy val ToExprModule: Symbol = requiredModule("scala.quoted.ToExpr") - @tu lazy val ToExprModule_BooleanToExpr: Symbol = ToExprModule.requiredMethod("BooleanToExpr") - @tu lazy val ToExprModule_ByteToExpr: Symbol = ToExprModule.requiredMethod("ByteToExpr") - @tu lazy val ToExprModule_ShortToExpr: Symbol = ToExprModule.requiredMethod("ShortToExpr") - @tu lazy val ToExprModule_IntToExpr: Symbol = ToExprModule.requiredMethod("IntToExpr") - @tu lazy val ToExprModule_LongToExpr: Symbol = ToExprModule.requiredMethod("LongToExpr") - @tu lazy val ToExprModule_FloatToExpr: Symbol = ToExprModule.requiredMethod("FloatToExpr") - @tu lazy val ToExprModule_DoubleToExpr: Symbol = ToExprModule.requiredMethod("DoubleToExpr") - @tu lazy val ToExprModule_CharToExpr: Symbol = ToExprModule.requiredMethod("CharToExpr") - @tu lazy val ToExprModule_StringToExpr: Symbol = ToExprModule.requiredMethod("StringToExpr") - - @tu lazy val QuotedRuntimeModule: Symbol = requiredModule("scala.quoted.runtime.Expr") - @tu lazy val QuotedRuntime_exprQuote : Symbol = QuotedRuntimeModule.requiredMethod("quote") - @tu lazy val QuotedRuntime_exprSplice : Symbol = QuotedRuntimeModule.requiredMethod("splice") - @tu lazy val QuotedRuntime_exprNestedSplice : Symbol = QuotedRuntimeModule.requiredMethod("nestedSplice") - - @tu lazy val QuotedRuntime_SplicedTypeAnnot: ClassSymbol = requiredClass("scala.quoted.runtime.SplicedType") - - @tu lazy val QuotedRuntimePatterns: Symbol = requiredModule("scala.quoted.runtime.Patterns") - @tu lazy val QuotedRuntimePatterns_patternHole: Symbol = QuotedRuntimePatterns.requiredMethod("patternHole") - @tu lazy val QuotedRuntimePatterns_patternHigherOrderHole: Symbol = QuotedRuntimePatterns.requiredMethod("patternHigherOrderHole") - @tu lazy val QuotedRuntimePatterns_higherOrderHole: Symbol = QuotedRuntimePatterns.requiredMethod("higherOrderHole") - @tu lazy val 
QuotedRuntimePatterns_patternTypeAnnot: ClassSymbol = QuotedRuntimePatterns.requiredClass("patternType") - @tu lazy val QuotedRuntimePatterns_fromAboveAnnot: ClassSymbol = QuotedRuntimePatterns.requiredClass("fromAbove") - - @tu lazy val QuotedTypeClass: ClassSymbol = requiredClass("scala.quoted.Type") - @tu lazy val QuotedType_splice: Symbol = QuotedTypeClass.requiredType(tpnme.Underlying) - - @tu lazy val QuotedTypeModule: Symbol = QuotedTypeClass.companionModule - @tu lazy val QuotedTypeModule_of: Symbol = QuotedTypeModule.requiredMethod("of") - - @tu lazy val CanEqualClass: ClassSymbol = getClassIfDefined("scala.Eql").orElse(requiredClass("scala.CanEqual")).asClass - def CanEqual_canEqualAny(using Context): TermSymbol = - val methodName = if CanEqualClass.name == tpnme.Eql then nme.eqlAny else nme.canEqualAny - CanEqualClass.companionModule.requiredMethod(methodName) - - @tu lazy val CanThrowClass: ClassSymbol = requiredClass("scala.CanThrow") - @tu lazy val throwsAlias: Symbol = ScalaRuntimePackageVal.requiredType(tpnme.THROWS) - - @tu lazy val TypeBoxClass: ClassSymbol = requiredClass("scala.runtime.TypeBox") - @tu lazy val TypeBox_CAP: TypeSymbol = TypeBoxClass.requiredType(tpnme.CAP) - - @tu lazy val MatchCaseClass: ClassSymbol = requiredClass("scala.runtime.MatchCase") - @tu lazy val NotGivenClass: ClassSymbol = requiredClass("scala.util.NotGiven") - @tu lazy val NotGiven_value: Symbol = NotGivenClass.companionModule.requiredMethod(nme.value) - - @tu lazy val ValueOfClass: ClassSymbol = requiredClass("scala.ValueOf") - - @tu lazy val FromDigitsClass: ClassSymbol = requiredClass("scala.util.FromDigits") - @tu lazy val FromDigits_WithRadixClass: ClassSymbol = requiredClass("scala.util.FromDigits.WithRadix") - @tu lazy val FromDigits_DecimalClass: ClassSymbol = requiredClass("scala.util.FromDigits.Decimal") - @tu lazy val FromDigits_FloatingClass: ClassSymbol = requiredClass("scala.util.FromDigits.Floating") - - @tu lazy val XMLTopScopeModule: Symbol = requiredModule("scala.xml.TopScope") - - @tu lazy val MainAnnotationClass: ClassSymbol = requiredClass("scala.annotation.MainAnnotation") - @tu lazy val MainAnnotationInfo: ClassSymbol = requiredClass("scala.annotation.MainAnnotation.Info") - @tu lazy val MainAnnotationParameter: ClassSymbol = requiredClass("scala.annotation.MainAnnotation.Parameter") - @tu lazy val MainAnnotationParameterAnnotation: ClassSymbol = requiredClass("scala.annotation.MainAnnotation.ParameterAnnotation") - @tu lazy val MainAnnotationCommand: ClassSymbol = requiredClass("scala.annotation.MainAnnotation.Command") - - @tu lazy val CommandLineParserModule: Symbol = requiredModule("scala.util.CommandLineParser") - @tu lazy val CLP_ParseError: ClassSymbol = CommandLineParserModule.requiredClass("ParseError").typeRef.symbol.asClass - @tu lazy val CLP_parseArgument: Symbol = CommandLineParserModule.requiredMethod("parseArgument") - @tu lazy val CLP_parseRemainingArguments: Symbol = CommandLineParserModule.requiredMethod("parseRemainingArguments") - @tu lazy val CLP_showError: Symbol = CommandLineParserModule.requiredMethod("showError") - - @tu lazy val TupleTypeRef: TypeRef = requiredClassRef("scala.Tuple") - def TupleClass(using Context): ClassSymbol = TupleTypeRef.symbol.asClass - @tu lazy val Tuple_cons: Symbol = TupleClass.requiredMethod("*:") - @tu lazy val EmptyTupleModule: Symbol = requiredModule("scala.EmptyTuple") - @tu lazy val NonEmptyTupleTypeRef: TypeRef = requiredClassRef("scala.NonEmptyTuple") - def NonEmptyTupleClass(using Context): ClassSymbol = 
NonEmptyTupleTypeRef.symbol.asClass - lazy val NonEmptyTuple_tail: Symbol = NonEmptyTupleClass.requiredMethod("tail") - @tu lazy val PairClass: ClassSymbol = requiredClass("scala.*:") - - @tu lazy val TupleXXLClass: ClassSymbol = requiredClass("scala.runtime.TupleXXL") - def TupleXXLModule(using Context): Symbol = TupleXXLClass.companionModule - - def TupleXXL_fromIterator(using Context): Symbol = TupleXXLModule.requiredMethod("fromIterator") - - @tu lazy val RuntimeTupleMirrorTypeRef: TypeRef = requiredClassRef("scala.runtime.TupleMirror") - - @tu lazy val RuntimeTuplesModule: Symbol = requiredModule("scala.runtime.Tuples") - @tu lazy val RuntimeTuplesModuleClass: Symbol = RuntimeTuplesModule.moduleClass - @tu lazy val RuntimeTuples_consIterator: Symbol = RuntimeTuplesModule.requiredMethod("consIterator") - @tu lazy val RuntimeTuples_concatIterator: Symbol = RuntimeTuplesModule.requiredMethod("concatIterator") - @tu lazy val RuntimeTuples_apply: Symbol = RuntimeTuplesModule.requiredMethod("apply") - @tu lazy val RuntimeTuples_cons: Symbol = RuntimeTuplesModule.requiredMethod("cons") - @tu lazy val RuntimeTuples_size: Symbol = RuntimeTuplesModule.requiredMethod("size") - @tu lazy val RuntimeTuples_tail: Symbol = RuntimeTuplesModule.requiredMethod("tail") - @tu lazy val RuntimeTuples_concat: Symbol = RuntimeTuplesModule.requiredMethod("concat") - @tu lazy val RuntimeTuples_toArray: Symbol = RuntimeTuplesModule.requiredMethod("toArray") - @tu lazy val RuntimeTuples_productToArray: Symbol = RuntimeTuplesModule.requiredMethod("productToArray") - @tu lazy val RuntimeTuples_isInstanceOfTuple: Symbol = RuntimeTuplesModule.requiredMethod("isInstanceOfTuple") - @tu lazy val RuntimeTuples_isInstanceOfEmptyTuple: Symbol = RuntimeTuplesModule.requiredMethod("isInstanceOfEmptyTuple") - @tu lazy val RuntimeTuples_isInstanceOfNonEmptyTuple: Symbol = RuntimeTuplesModule.requiredMethod("isInstanceOfNonEmptyTuple") - - @tu lazy val TupledFunctionTypeRef: TypeRef = requiredClassRef("scala.util.TupledFunction") - def TupledFunctionClass(using Context): ClassSymbol = TupledFunctionTypeRef.symbol.asClass - def RuntimeTupleFunctionsModule(using Context): Symbol = requiredModule("scala.runtime.TupledFunctions") - - @tu lazy val CapsModule: Symbol = requiredModule("scala.caps") - @tu lazy val captureRoot: TermSymbol = CapsModule.requiredValue("*") - @tu lazy val CapsUnsafeModule: Symbol = requiredModule("scala.caps.unsafe") - @tu lazy val Caps_unsafeBox: Symbol = CapsUnsafeModule.requiredMethod("unsafeBox") - @tu lazy val Caps_unsafeUnbox: Symbol = CapsUnsafeModule.requiredMethod("unsafeUnbox") - @tu lazy val Caps_unsafeBoxFunArg: Symbol = CapsUnsafeModule.requiredMethod("unsafeBoxFunArg") - - // Annotation base classes - @tu lazy val AnnotationClass: ClassSymbol = requiredClass("scala.annotation.Annotation") - @tu lazy val StaticAnnotationClass: ClassSymbol = requiredClass("scala.annotation.StaticAnnotation") - @tu lazy val RefiningAnnotationClass: ClassSymbol = requiredClass("scala.annotation.RefiningAnnotation") - - // Annotation classes - @tu lazy val AllowConversionsAnnot: ClassSymbol = requiredClass("scala.annotation.allowConversions") - @tu lazy val AnnotationDefaultAnnot: ClassSymbol = requiredClass("scala.annotation.internal.AnnotationDefault") - @tu lazy val BeanPropertyAnnot: ClassSymbol = requiredClass("scala.beans.BeanProperty") - @tu lazy val BooleanBeanPropertyAnnot: ClassSymbol = requiredClass("scala.beans.BooleanBeanProperty") - @tu lazy val BodyAnnot: ClassSymbol = 
requiredClass("scala.annotation.internal.Body") - @tu lazy val CapabilityAnnot: ClassSymbol = requiredClass("scala.annotation.capability") - @tu lazy val ChildAnnot: ClassSymbol = requiredClass("scala.annotation.internal.Child") - @tu lazy val ContextResultCountAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ContextResultCount") - @tu lazy val ProvisionalSuperClassAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ProvisionalSuperClass") - @tu lazy val DeprecatedAnnot: ClassSymbol = requiredClass("scala.deprecated") - @tu lazy val DeprecatedOverridingAnnot: ClassSymbol = requiredClass("scala.deprecatedOverriding") - @tu lazy val ImplicitAmbiguousAnnot: ClassSymbol = requiredClass("scala.annotation.implicitAmbiguous") - @tu lazy val ImplicitNotFoundAnnot: ClassSymbol = requiredClass("scala.annotation.implicitNotFound") - @tu lazy val InlineParamAnnot: ClassSymbol = requiredClass("scala.annotation.internal.InlineParam") - @tu lazy val ErasedParamAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ErasedParam") - @tu lazy val InvariantBetweenAnnot: ClassSymbol = requiredClass("scala.annotation.internal.InvariantBetween") - @tu lazy val MainAnnot: ClassSymbol = requiredClass("scala.main") - @tu lazy val MappedAlternativeAnnot: ClassSymbol = requiredClass("scala.annotation.internal.MappedAlternative") - @tu lazy val MigrationAnnot: ClassSymbol = requiredClass("scala.annotation.migration") - @tu lazy val NowarnAnnot: ClassSymbol = requiredClass("scala.annotation.nowarn") - @tu lazy val TransparentTraitAnnot: ClassSymbol = requiredClass("scala.annotation.transparentTrait") - @tu lazy val NativeAnnot: ClassSymbol = requiredClass("scala.native") - @tu lazy val RepeatedAnnot: ClassSymbol = requiredClass("scala.annotation.internal.Repeated") - @tu lazy val SourceFileAnnot: ClassSymbol = requiredClass("scala.annotation.internal.SourceFile") - @tu lazy val ScalaSignatureAnnot: ClassSymbol = requiredClass("scala.reflect.ScalaSignature") - @tu lazy val ScalaLongSignatureAnnot: ClassSymbol = requiredClass("scala.reflect.ScalaLongSignature") - @tu lazy val ScalaStrictFPAnnot: ClassSymbol = requiredClass("scala.annotation.strictfp") - @tu lazy val ScalaStaticAnnot: ClassSymbol = requiredClass("scala.annotation.static") - @tu lazy val SerialVersionUIDAnnot: ClassSymbol = requiredClass("scala.SerialVersionUID") - @tu lazy val TailrecAnnot: ClassSymbol = requiredClass("scala.annotation.tailrec") - @tu lazy val ThreadUnsafeAnnot: ClassSymbol = requiredClass("scala.annotation.threadUnsafe") - @tu lazy val ConstructorOnlyAnnot: ClassSymbol = requiredClass("scala.annotation.constructorOnly") - @tu lazy val CompileTimeOnlyAnnot: ClassSymbol = requiredClass("scala.annotation.compileTimeOnly") - @tu lazy val SwitchAnnot: ClassSymbol = requiredClass("scala.annotation.switch") - @tu lazy val ExperimentalAnnot: ClassSymbol = requiredClass("scala.annotation.experimental") - @tu lazy val ThrowsAnnot: ClassSymbol = requiredClass("scala.throws") - @tu lazy val TransientAnnot: ClassSymbol = requiredClass("scala.transient") - @tu lazy val UncheckedAnnot: ClassSymbol = requiredClass("scala.unchecked") - @tu lazy val UncheckedStableAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedStable") - @tu lazy val UncheckedVarianceAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedVariance") - @tu lazy val VolatileAnnot: ClassSymbol = requiredClass("scala.volatile") - @tu lazy val WithPureFunsAnnot: ClassSymbol = 
requiredClass("scala.annotation.internal.WithPureFuns") - @tu lazy val FieldMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.field") - @tu lazy val GetterMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.getter") - @tu lazy val ParamMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.param") - @tu lazy val SetterMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.setter") - @tu lazy val ShowAsInfixAnnot: ClassSymbol = requiredClass("scala.annotation.showAsInfix") - @tu lazy val FunctionalInterfaceAnnot: ClassSymbol = requiredClass("java.lang.FunctionalInterface") - @tu lazy val TargetNameAnnot: ClassSymbol = requiredClass("scala.annotation.targetName") - @tu lazy val VarargsAnnot: ClassSymbol = requiredClass("scala.annotation.varargs") - @tu lazy val SinceAnnot: ClassSymbol = requiredClass("scala.annotation.since") - @tu lazy val RequiresCapabilityAnnot: ClassSymbol = requiredClass("scala.annotation.internal.requiresCapability") - @tu lazy val RetainsAnnot: ClassSymbol = requiredClass("scala.annotation.retains") - @tu lazy val RetainsByNameAnnot: ClassSymbol = requiredClass("scala.annotation.retainsByName") - - @tu lazy val JavaRepeatableAnnot: ClassSymbol = requiredClass("java.lang.annotation.Repeatable") - - // A list of meta-annotations that are relevant for fields and accessors - @tu lazy val FieldAccessorMetaAnnots: Set[Symbol] = - Set(FieldMetaAnnot, GetterMetaAnnot, ParamMetaAnnot, SetterMetaAnnot) - - // A list of annotations that are commonly used to indicate that a field/method argument or return - // type is not null. These annotations are used by the nullification logic in JavaNullInterop to - // improve the precision of type nullification. - // We don't require that any of these annotations be present in the class path, but we want to - // create Symbols for the ones that are present, so they can be checked during nullification. 
- @tu lazy val NotNullAnnots: List[ClassSymbol] = getClassesIfDefined( - "javax.annotation.Nonnull" :: - "javax.validation.constraints.NotNull" :: - "androidx.annotation.NonNull" :: - "android.support.annotation.NonNull" :: - "android.annotation.NonNull" :: - "com.android.annotations.NonNull" :: - "org.eclipse.jdt.annotation.NonNull" :: - "edu.umd.cs.findbugs.annotations.NonNull" :: - "org.checkerframework.checker.nullness.qual.NonNull" :: - "org.checkerframework.checker.nullness.compatqual.NonNullDecl" :: - "org.jetbrains.annotations.NotNull" :: - "org.springframework.lang.NonNull" :: - "org.springframework.lang.NonNullApi" :: - "org.springframework.lang.NonNullFields" :: - "lombok.NonNull" :: - "reactor.util.annotation.NonNull" :: - "reactor.util.annotation.NonNullApi" :: - "io.reactivex.annotations.NonNull" :: Nil) - - // convenient one-parameter method types - def methOfAny(tp: Type): MethodType = MethodType(List(AnyType), tp) - def methOfAnyVal(tp: Type): MethodType = MethodType(List(AnyValType), tp) - def methOfAnyRef(tp: Type): MethodType = MethodType(List(ObjectType), tp) - - // Derived types - - def RepeatedParamType: TypeRef = RepeatedParamClass.typeRef - - def ClassType(arg: Type)(using Context): Type = { - val ctype = ClassClass.typeRef - if (ctx.phase.erasedTypes) ctype else ctype.appliedTo(arg) - } - - /** The enumeration type, goven a value of the enumeration */ - def EnumType(sym: Symbol)(using Context): TypeRef = - // given (in java): "class A { enum E { VAL1 } }" - // - sym: the symbol of the actual enumeration value (VAL1) - // - .owner: the ModuleClassSymbol of the enumeration (object E) - // - .linkedClass: the ClassSymbol of the enumeration (class E) - sym.owner.linkedClass.typeRef - - object FunctionOf { - def apply(args: List[Type], resultType: Type, isContextual: Boolean = false, isErased: Boolean = false)(using Context): Type = - FunctionType(args.length, isContextual, isErased).appliedTo(args ::: resultType :: Nil) - def unapply(ft: Type)(using Context): Option[(List[Type], Type, Boolean, Boolean)] = { - val tsym = ft.typeSymbol - if isFunctionClass(tsym) && ft.isRef(tsym) then - val targs = ft.dealias.argInfos - if (targs.isEmpty) None - else Some(targs.init, targs.last, tsym.name.isContextFunction, tsym.name.isErasedFunction) - else None - } - } - - object PartialFunctionOf { - def apply(arg: Type, result: Type)(using Context): Type = - PartialFunctionClass.typeRef.appliedTo(arg :: result :: Nil) - def unapply(pft: Type)(using Context): Option[(Type, List[Type])] = - if (pft.isRef(PartialFunctionClass)) { - val targs = pft.dealias.argInfos - if (targs.length == 2) Some((targs.head, targs.tail)) else None - } - else None - } - - object ArrayOf { - def apply(elem: Type)(using Context): Type = - if (ctx.erasedTypes) JavaArrayType(elem) - else ArrayType.appliedTo(elem :: Nil) - def unapply(tp: Type)(using Context): Option[Type] = tp.dealias match { - case AppliedType(at, arg :: Nil) if at.isRef(ArrayType.symbol) => Some(arg) - case JavaArrayType(tp) if ctx.erasedTypes => Some(tp) - case _ => None - } - } - - object MatchCase { - def apply(pat: Type, body: Type)(using Context): Type = - MatchCaseClass.typeRef.appliedTo(pat, body) - def unapply(tp: Type)(using Context): Option[(Type, Type)] = tp match { - case AppliedType(tycon, pat :: body :: Nil) if tycon.isRef(MatchCaseClass) => - Some((pat, body)) - case _ => - None - } - def isInstance(tp: Type)(using Context): Boolean = tp match { - case AppliedType(tycon: TypeRef, _) => - tycon.name == tpnme.MatchCase && // 
necessary pre-filter to avoid forcing symbols - tycon.isRef(MatchCaseClass) - case _ => false - } - } - - /** An extractor for multi-dimensional arrays. - * Note that this will also extract the high bound if an - * element type is a wildcard upper-bounded by an array. E.g. - * - * Array[? <: Array[? <: Number]] - * - * would match - * - * MultiArrayOf(, 2) - */ - object MultiArrayOf { - def apply(elem: Type, ndims: Int)(using Context): Type = - if (ndims == 0) elem else ArrayOf(apply(elem, ndims - 1)) - def unapply(tp: Type)(using Context): Option[(Type, Int)] = tp match { - case ArrayOf(elemtp) => - def recur(elemtp: Type): Option[(Type, Int)] = elemtp.dealias match { - case tp @ TypeBounds(lo, hi @ MultiArrayOf(finalElemTp, n)) => - Some(finalElemTp, n) - case MultiArrayOf(finalElemTp, n) => Some(finalElemTp, n + 1) - case _ => Some(elemtp, 1) - } - recur(elemtp) - case _ => - None - } - } - - /** Extractor for context function types representing by-name parameters, of the form - * `() ?=> T`. - * Under purefunctions, this becomes `() ?-> T` or `{r1, ..., rN} () ?-> T`. - */ - object ByNameFunction: - def apply(tp: Type)(using Context): Type = tp match - case tp @ EventuallyCapturingType(tp1, refs) if tp.annot.symbol == RetainsByNameAnnot => - CapturingType(apply(tp1), refs) - case _ => - defn.ContextFunction0.typeRef.appliedTo(tp :: Nil) - def unapply(tp: Type)(using Context): Option[Type] = tp match - case tp @ AppliedType(tycon, arg :: Nil) if defn.isByNameFunctionClass(tycon.typeSymbol) => - Some(arg) - case tp @ AnnotatedType(parent, _) => - unapply(parent) - case _ => - None - - final def isByNameFunctionClass(sym: Symbol): Boolean = - sym eq ContextFunction0 - - def isByNameFunction(tp: Type)(using Context): Boolean = tp match - case ByNameFunction(_) => true - case _ => false - - final def isCompiletime_S(sym: Symbol)(using Context): Boolean = - sym.name == tpnme.S && sym.owner == CompiletimeOpsIntModuleClass - - private val compiletimePackageAnyTypes: Set[Name] = Set( - tpnme.Equals, tpnme.NotEquals, tpnme.IsConst, tpnme.ToString - ) - private val compiletimePackageNumericTypes: Set[Name] = Set( - tpnme.Plus, tpnme.Minus, tpnme.Times, tpnme.Div, tpnme.Mod, - tpnme.Lt, tpnme.Gt, tpnme.Ge, tpnme.Le, - tpnme.Abs, tpnme.Negate, tpnme.Min, tpnme.Max - ) - private val compiletimePackageIntTypes: Set[Name] = compiletimePackageNumericTypes ++ Set[Name]( - tpnme.ToString, // ToString is moved to ops.any and deprecated for ops.int - tpnme.NumberOfLeadingZeros, tpnme.ToLong, tpnme.ToFloat, tpnme.ToDouble, - tpnme.Xor, tpnme.BitwiseAnd, tpnme.BitwiseOr, tpnme.ASR, tpnme.LSL, tpnme.LSR - ) - private val compiletimePackageLongTypes: Set[Name] = compiletimePackageNumericTypes ++ Set[Name]( - tpnme.NumberOfLeadingZeros, tpnme.ToInt, tpnme.ToFloat, tpnme.ToDouble, - tpnme.Xor, tpnme.BitwiseAnd, tpnme.BitwiseOr, tpnme.ASR, tpnme.LSL, tpnme.LSR - ) - private val compiletimePackageFloatTypes: Set[Name] = compiletimePackageNumericTypes ++ Set[Name]( - tpnme.ToInt, tpnme.ToLong, tpnme.ToDouble - ) - private val compiletimePackageDoubleTypes: Set[Name] = compiletimePackageNumericTypes ++ Set[Name]( - tpnme.ToInt, tpnme.ToLong, tpnme.ToFloat - ) - private val compiletimePackageBooleanTypes: Set[Name] = Set(tpnme.Not, tpnme.Xor, tpnme.And, tpnme.Or) - private val compiletimePackageStringTypes: Set[Name] = Set( - tpnme.Plus, tpnme.Length, tpnme.Substring, tpnme.Matches, tpnme.CharAt - ) - private val compiletimePackageOpTypes: Set[Name] = - Set(tpnme.S) - ++ compiletimePackageAnyTypes - ++ 
compiletimePackageIntTypes - ++ compiletimePackageLongTypes - ++ compiletimePackageFloatTypes - ++ compiletimePackageDoubleTypes - ++ compiletimePackageBooleanTypes - ++ compiletimePackageStringTypes - - final def isCompiletimeAppliedType(sym: Symbol)(using Context): Boolean = - compiletimePackageOpTypes.contains(sym.name) - && ( - isCompiletime_S(sym) - || sym.owner == CompiletimeOpsAnyModuleClass && compiletimePackageAnyTypes.contains(sym.name) - || sym.owner == CompiletimeOpsIntModuleClass && compiletimePackageIntTypes.contains(sym.name) - || sym.owner == CompiletimeOpsLongModuleClass && compiletimePackageLongTypes.contains(sym.name) - || sym.owner == CompiletimeOpsFloatModuleClass && compiletimePackageFloatTypes.contains(sym.name) - || sym.owner == CompiletimeOpsDoubleModuleClass && compiletimePackageDoubleTypes.contains(sym.name) - || sym.owner == CompiletimeOpsBooleanModuleClass && compiletimePackageBooleanTypes.contains(sym.name) - || sym.owner == CompiletimeOpsStringModuleClass && compiletimePackageStringTypes.contains(sym.name) - ) - - // ----- Scala-2 library patches -------------------------------------- - - /** The `scala.runtime.stdLibPacthes` package contains objects - * that contain defnitions that get added as members to standard library - * objects with the same name. - */ - @tu lazy val StdLibPatchesPackage: TermSymbol = requiredPackage("scala.runtime.stdLibPatches") - @tu private lazy val ScalaPredefModuleClassPatch: Symbol = getModuleIfDefined("scala.runtime.stdLibPatches.Predef").moduleClass - @tu private lazy val LanguageModuleClassPatch: Symbol = getModuleIfDefined("scala.runtime.stdLibPatches.language").moduleClass - - /** If `sym` is a patched library class, the source file of its patch class, - * otherwise `NoSource` - */ - def patchSource(sym: Symbol)(using Context): SourceFile = - if sym == ScalaPredefModuleClass then ScalaPredefModuleClassPatch.source - else if sym == LanguageModuleClass then LanguageModuleClassPatch.source - else NoSource - - /** A finalizer that patches standard library classes. - * It copies all non-private, non-synthetic definitions from `patchCls` - * to `denot` while changing their owners to `denot`. Before that it deletes - * any definitions of `denot` that have the same name as one of the copied - * definitions. - * - * If an object is present in both the original class and the patch class, - * it is not overwritten. Instead its members are copied recursively. - * - * To avpid running into cycles on bootstrap, patching happens only if `patchCls` - * is read from a classfile. 
- */ - def patchStdLibClass(denot: ClassDenotation)(using Context): Unit = - def patch2(denot: ClassDenotation, patchCls: Symbol): Unit = - val scope = denot.info.decls.openForMutations - - def recurse(patch: Symbol) = patch.is(Module) && scope.lookup(patch.name).exists - - def makeClassSymbol(patch: Symbol, parents: List[Type], selfInfo: TypeOrSymbol) = - newClassSymbol( - owner = denot.symbol, - name = patch.name.asTypeName, - flags = patch.flags, - // need to rebuild a fresh ClassInfo - infoFn = cls => ClassInfo( - prefix = denot.symbol.thisType, - cls = cls, - declaredParents = parents, // assume parents in patch don't refer to symbols in the patch - decls = newScope, - selfInfo = - if patch.is(Module) - then TermRef(denot.symbol.thisType, patch.name.sourceModuleName) - else selfInfo // assume patch self type annotation does not refer to symbols in the patch - ), - privateWithin = patch.privateWithin, - coord = denot.symbol.coord, - assocFile = denot.symbol.associatedFile - ) - - def makeNonClassSymbol(patch: Symbol) = - if patch.is(Inline) then - // Inline symbols contain trees in annotations, which is coupled - // with the underlying symbol. - // Changing owner for inline symbols is a simple workaround. - patch.denot = patch.denot.copySymDenotation(owner = denot.symbol) - patch - else - // change `info` which might contain reference to the patch - patch.copy( - owner = denot.symbol, - info = - if patch.is(Module) - then TypeRef(denot.symbol.thisType, patch.name.moduleClassName) - else patch.info // assume non-object info does not refer to symbols in the patch - ) - - if patchCls.exists then - val patches = patchCls.info.decls.filter(patch => - !patch.isConstructor && !patch.isOneOf(PrivateOrSynthetic)) - for patch <- patches if !recurse(patch) do - val e = scope.lookupEntry(patch.name) - if e != null then scope.unlink(e) - for patch <- patches do - patch.ensureCompleted() - if !recurse(patch) then - val sym = - patch.info match - case ClassInfo(_, _, parents, _, selfInfo) => - makeClassSymbol(patch, parents, selfInfo) - case _ => - makeNonClassSymbol(patch) - end match - sym.annotations = patch.annotations - scope.enter(sym) - if patch.isClass then - patch2(scope.lookup(patch.name).asClass, patch) - - def patchWith(patchCls: Symbol) = - denot.sourceModule.info = denot.typeRef // we run into a cyclic reference when patching if this line is omitted - patch2(denot, patchCls) - - if denot.name == tpnme.Predef.moduleClassName && denot.symbol == ScalaPredefModuleClass then - patchWith(ScalaPredefModuleClassPatch) - else if denot.name == tpnme.language.moduleClassName && denot.symbol == LanguageModuleClass then - patchWith(LanguageModuleClassPatch) - end patchStdLibClass - - // ----- Symbol sets --------------------------------------------------- - - @tu lazy val topClasses: Set[Symbol] = Set(AnyClass, MatchableClass, ObjectClass, AnyValClass) - - @tu lazy val untestableClasses: Set[Symbol] = Set(NothingClass, NullClass, SingletonClass) - - /** Base classes that are assumed to be pure for the purposes of capture checking. - * Every class inheriting from a pure baseclass is pure. 
-   */
-  @tu lazy val pureBaseClasses = Set(defn.AnyValClass, defn.ThrowableClass)
-
-  /** Non-inheritable lasses that are assumed to be pure for the purposes of capture checking,
-   */
-  @tu lazy val pureSimpleClasses = Set(StringClass, NothingClass, NullClass)
-
-  @tu lazy val AbstractFunctionType: Array[TypeRef] = mkArityArray("scala.runtime.AbstractFunction", MaxImplementedFunctionArity, 0).asInstanceOf[Array[TypeRef]]
-  val AbstractFunctionClassPerRun: PerRun[Array[Symbol]] = new PerRun(AbstractFunctionType.map(_.symbol.asClass))
-  def AbstractFunctionClass(n: Int)(using Context): Symbol = AbstractFunctionClassPerRun()(using ctx)(n)
-
-  @tu lazy val caseClassSynthesized: List[Symbol] = List(
-    Any_hashCode, Any_equals, Any_toString, Product_canEqual, Product_productArity,
-    Product_productPrefix, Product_productElement, Product_productElementName)
-
-  val LazyHolder: PerRun[Map[Symbol, Symbol]] = new PerRun({
-    def holderImpl(holderType: String) = requiredClass("scala.runtime." + holderType)
-    Map[Symbol, Symbol](
-      IntClass -> holderImpl("LazyInt"),
-      LongClass -> holderImpl("LazyLong"),
-      BooleanClass -> holderImpl("LazyBoolean"),
-      FloatClass -> holderImpl("LazyFloat"),
-      DoubleClass -> holderImpl("LazyDouble"),
-      ByteClass -> holderImpl("LazyByte"),
-      CharClass -> holderImpl("LazyChar"),
-      ShortClass -> holderImpl("LazyShort")
-    )
-    .withDefaultValue(holderImpl("LazyRef"))
-  })
-
-  @tu lazy val TupleType: Array[TypeRef | Null] = mkArityArray("scala.Tuple", MaxTupleArity, 1)
-
-  def isSpecializedTuple(cls: Symbol)(using Context): Boolean =
-    cls.isClass && TupleSpecializedClasses.exists(tupleCls => cls.name.isSpecializedNameOf(tupleCls.name))
-
-  def SpecializedTuple(base: Symbol, args: List[Type])(using Context): Symbol =
-    base.owner.requiredClass(base.name.specializedName(args))
-
-  /** Cached function types of arbitary arities.
-   *  Function types are created on demand with newFunctionNTrait, which is
-   *  called from a synthesizer installed in ScalaPackageClass.
- */ - private class FunType(prefix: String): - private var classRefs: Array[TypeRef | Null] = new Array(22) - def apply(n: Int): TypeRef = - while n >= classRefs.length do - val classRefs1 = new Array[TypeRef | Null](classRefs.length * 2) - Array.copy(classRefs, 0, classRefs1, 0, classRefs.length) - classRefs = classRefs1 - val funName = s"scala.$prefix$n" - if classRefs(n) == null then - classRefs(n) = - if prefix.startsWith("Impure") - then staticRef(funName.toTypeName).symbol.typeRef - else requiredClassRef(funName) - classRefs(n).nn - end FunType - - private def funTypeIdx(isContextual: Boolean, isErased: Boolean, isImpure: Boolean): Int = - (if isContextual then 1 else 0) - + (if isErased then 2 else 0) - + (if isImpure then 4 else 0) - - private val funTypeArray: IArray[FunType] = - val arr = Array.ofDim[FunType](8) - val choices = List(false, true) - for contxt <- choices; erasd <- choices; impure <- choices do - var str = "Function" - if contxt then str = "Context" + str - if erasd then str = "Erased" + str - if impure then str = "Impure" + str - arr(funTypeIdx(contxt, erasd, impure)) = FunType(str) - IArray.unsafeFromArray(arr) - - def FunctionSymbol(n: Int, isContextual: Boolean = false, isErased: Boolean = false, isImpure: Boolean = false)(using Context): Symbol = - funTypeArray(funTypeIdx(isContextual, isErased, isImpure))(n).symbol - - @tu lazy val Function0_apply: Symbol = Function0.requiredMethod(nme.apply) - @tu lazy val ContextFunction0_apply: Symbol = ContextFunction0.requiredMethod(nme.apply) - - @tu lazy val Function0: Symbol = FunctionSymbol(0) - @tu lazy val Function1: Symbol = FunctionSymbol(1) - @tu lazy val Function2: Symbol = FunctionSymbol(2) - @tu lazy val ContextFunction0: Symbol = FunctionSymbol(0, isContextual = true) - - def FunctionType(n: Int, isContextual: Boolean = false, isErased: Boolean = false, isImpure: Boolean = false)(using Context): TypeRef = - FunctionSymbol(n, isContextual && !ctx.erasedTypes, isErased, isImpure).typeRef - - lazy val PolyFunctionClass = requiredClass("scala.PolyFunction") - def PolyFunctionType = PolyFunctionClass.typeRef - - /** If `cls` is a class in the scala package, its name, otherwise EmptyTypeName */ - def scalaClassName(cls: Symbol)(using Context): TypeName = cls.denot match - case clsd: ClassDenotation if clsd.owner eq ScalaPackageClass => - clsd.name.asInstanceOf[TypeName] - case _ => - EmptyTypeName - - /** If type `ref` refers to a class in the scala package, its name, otherwise EmptyTypeName */ - def scalaClassName(ref: Type)(using Context): TypeName = scalaClassName(ref.classSymbol) - - private def isVarArityClass(cls: Symbol, prefix: String) = - cls.isClass - && cls.owner.eq(ScalaPackageClass) - && cls.name.testSimple(name => - name.startsWith(prefix) - && name.length > prefix.length - && digitsOnlyAfter(name, prefix.length)) - - private def digitsOnlyAfter(name: SimpleName, idx: Int): Boolean = - idx == name.length || name(idx).isDigit && digitsOnlyAfter(name, idx + 1) - - def isBottomClass(cls: Symbol): Boolean = - if ctx.mode.is(Mode.SafeNulls) && !ctx.phase.erasedTypes - then cls == NothingClass - else isBottomClassAfterErasure(cls) - - def isBottomClassAfterErasure(cls: Symbol): Boolean = cls == NothingClass || cls == NullClass - - /** Is any function class where - * - FunctionXXL - * - FunctionN for N >= 0 - * - ContextFunctionN for N >= 0 - * - ErasedFunctionN for N > 0 - * - ErasedContextFunctionN for N > 0 - */ - def isFunctionClass(cls: Symbol): Boolean = scalaClassName(cls).isFunction - - /** Is a 
function class, or an impure function type alias */ - def isFunctionSymbol(sym: Symbol): Boolean = - sym.isType && (sym.owner eq ScalaPackageClass) && sym.name.isFunction - - /** Is a function class where - * - FunctionN for N >= 0 and N != XXL - */ - def isPlainFunctionClass(cls: Symbol) = isVarArityClass(cls, str.Function) - - /** Is an context function class. - * - ContextFunctionN for N >= 0 - * - ErasedContextFunctionN for N > 0 - */ - def isContextFunctionClass(cls: Symbol): Boolean = scalaClassName(cls).isContextFunction - - /** Is an erased function class. - * - ErasedFunctionN for N > 0 - * - ErasedContextFunctionN for N > 0 - */ - def isErasedFunctionClass(cls: Symbol): Boolean = scalaClassName(cls).isErasedFunction - - /** Is either FunctionXXL or a class that will be erased to FunctionXXL - * - FunctionXXL - * - FunctionN for N >= 22 - * - ContextFunctionN for N >= 22 - */ - def isXXLFunctionClass(cls: Symbol): Boolean = { - val name = scalaClassName(cls) - (name eq tpnme.FunctionXXL) || name.functionArity > MaxImplementedFunctionArity - } - - /** Is a synthetic function class - * - FunctionN for N > 22 - * - ContextFunctionN for N >= 0 - * - ErasedFunctionN for N > 0 - * - ErasedContextFunctionN for N > 0 - */ - def isSyntheticFunctionClass(cls: Symbol): Boolean = scalaClassName(cls).isSyntheticFunction - - def isAbstractFunctionClass(cls: Symbol): Boolean = isVarArityClass(cls, str.AbstractFunction) - def isTupleClass(cls: Symbol): Boolean = isVarArityClass(cls, str.Tuple) - def isProductClass(cls: Symbol): Boolean = isVarArityClass(cls, str.Product) - - def isBoxedUnitClass(cls: Symbol): Boolean = - cls.isClass && (cls.owner eq ScalaRuntimePackageClass) && cls.name == tpnme.BoxedUnit - - /** Returns the erased type of the function class `cls` - * - FunctionN for N > 22 becomes FunctionXXL - * - FunctionN for 22 > N >= 0 remains as FunctionN - * - ContextFunctionN for N > 22 becomes FunctionXXL - * - ContextFunctionN for N <= 22 becomes FunctionN - * - ErasedFunctionN becomes Function0 - * - ImplicitErasedFunctionN becomes Function0 - * - anything else becomes a NoType - */ - def functionTypeErasure(cls: Symbol): Type = - val arity = scalaClassName(cls).functionArity - if cls.name.isErasedFunction then FunctionType(0) - else if arity > 22 then FunctionXXLClass.typeRef - else if arity >= 0 then FunctionType(arity) - else NoType - - private val JavaImportFns: List[RootRef] = List( - RootRef(() => JavaLangPackageVal.termRef) - ) - - private val ScalaImportFns: List[RootRef] = - JavaImportFns :+ - RootRef(() => ScalaPackageVal.termRef) - - private val PredefImportFns: RootRef = - RootRef(() => ScalaPredefModule.termRef, isPredef=true) - - @tu private lazy val JavaRootImportFns: List[RootRef] = - if ctx.settings.YnoImports.value then Nil - else JavaImportFns - - @tu private lazy val ScalaRootImportFns: List[RootRef] = - if ctx.settings.YnoImports.value then Nil - else if ctx.settings.YnoPredef.value then ScalaImportFns - else ScalaImportFns :+ PredefImportFns - - @tu private lazy val JavaRootImportTypes: List[TermRef] = JavaRootImportFns.map(_.refFn()) - @tu private lazy val ScalaRootImportTypes: List[TermRef] = ScalaRootImportFns.map(_.refFn()) - @tu private lazy val JavaUnqualifiedOwnerTypes: Set[NamedType] = unqualifiedTypes(JavaRootImportTypes) - @tu private lazy val ScalaUnqualifiedOwnerTypes: Set[NamedType] = unqualifiedTypes(ScalaRootImportTypes) - - /** Are we compiling a java source file? 
*/ - private def isJavaContext(using Context): Boolean = - ctx.compilationUnit.isJava - - private def unqualifiedTypes(refs: List[TermRef]) = - val types = refs.toSet[NamedType] - types ++ types.map(_.symbol.moduleClass.typeRef) - - /** Lazy references to the root imports */ - def rootImportFns(using Context): List[RootRef] = - if isJavaContext then JavaRootImportFns - else ScalaRootImportFns - - /** Root types imported by default */ - def rootImportTypes(using Context): List[TermRef] = - if isJavaContext then JavaRootImportTypes - else ScalaRootImportTypes - - /** Modules whose members are in the default namespace and their module classes */ - def unqualifiedOwnerTypes(using Context): Set[NamedType] = - if isJavaContext then JavaUnqualifiedOwnerTypes - else ScalaUnqualifiedOwnerTypes - - /** Names of the root import symbols that can be hidden by other imports */ - @tu lazy val ShadowableImportNames: Set[TermName] = Set("Predef".toTermName) - - /** Class symbols for which no class exist at runtime */ - @tu lazy val NotRuntimeClasses: Set[Symbol] = Set(AnyClass, MatchableClass, AnyValClass, NullClass, NothingClass) - - @tu lazy val SpecialClassTagClasses: Set[Symbol] = Set(UnitClass, AnyClass, AnyValClass) - - @tu lazy val SpecialManifestClasses: Set[Symbol] = Set(AnyClass, AnyValClass, ObjectClass, NullClass, NothingClass) - - /** Classes that are known not to have an initializer irrespective of - * whether NoInits is set. Note: FunctionXXLClass is in this set - * because if it is compiled by Scala2, it does not get a NoInit flag. - * But since it is introduced only at erasure, there's no chance - * for augmentScala2Traits to do anything on a class that inherits it. So - * it also misses an implementation class, which means that the usual scheme - * of calling a superclass init in the implementation class of a Scala2 - * trait gets screwed up. Therefore, it is mandatory that FunctionXXL - * is treated as a NoInit trait. - */ - @tu lazy val NoInitClasses: Set[Symbol] = NotRuntimeClasses + FunctionXXLClass - - def isPolymorphicAfterErasure(sym: Symbol): Boolean = - (sym eq Any_isInstanceOf) || (sym eq Any_asInstanceOf) || (sym eq Object_synchronized) - - /** Is this type a `TupleN` type? 
- * - * @return true if the dealiased type of `tp` is `TupleN[T1, T2, ..., Tn]` - */ - def isTupleNType(tp: Type)(using Context): Boolean = { - val tp1 = tp.dealias - val arity = tp1.argInfos.length - arity <= MaxTupleArity && { - val tupletp = TupleType(arity) - tupletp != null && tp1.isRef(tupletp.symbol) - } - } - - def tupleType(elems: List[Type]): Type = { - val arity = elems.length - if 0 < arity && arity <= MaxTupleArity then - val tupletp = TupleType(arity) - if tupletp != null then tupletp.appliedTo(elems) - else TypeOps.nestedPairs(elems) - else TypeOps.nestedPairs(elems) - } - - def tupleTypes(tp: Type, bound: Int = Int.MaxValue)(using Context): Option[List[Type]] = { - @tailrec def rec(tp: Type, acc: List[Type], bound: Int): Option[List[Type]] = tp.normalized.dealias match { - case _ if bound < 0 => Some(acc.reverse) - case tp: AppliedType if PairClass == tp.classSymbol => rec(tp.args(1), tp.args.head :: acc, bound - 1) - case tp: AppliedType if isTupleNType(tp) => Some(acc.reverse ::: tp.args) - case tp: TermRef if tp.symbol == defn.EmptyTupleModule => Some(acc.reverse) - case _ => None - } - rec(tp.stripTypeVar, Nil, bound) - } - - def isProductSubType(tp: Type)(using Context): Boolean = tp.derivesFrom(ProductClass) - - /** Is `tp` (an alias) of either a scala.FunctionN or a scala.ContextFunctionN - * instance? - */ - def isNonRefinedFunction(tp: Type)(using Context): Boolean = - val arity = functionArity(tp) - val sym = tp.dealias.typeSymbol - - arity >= 0 - && isFunctionClass(sym) - && tp.isRef( - FunctionType(arity, sym.name.isContextFunction, sym.name.isErasedFunction).typeSymbol, - skipRefined = false) - end isNonRefinedFunction - - /** Is `tp` a representation of a (possibly dependent) function type or an alias of such? */ - def isFunctionType(tp: Type)(using Context): Boolean = - isNonRefinedFunction(tp.dropDependentRefinement) - - def isFunctionOrPolyType(tp: Type)(using Context): Boolean = - isFunctionType(tp) || (tp.typeSymbol eq defn.PolyFunctionClass) - - private def withSpecMethods(cls: ClassSymbol, bases: List[Name], paramTypes: Set[TypeRef]) = - for base <- bases; tp <- paramTypes do - cls.enter(newSymbol(cls, base.specializedName(List(tp)), Method, ExprType(tp))) - cls - - @tu lazy val Tuple1: ClassSymbol = withSpecMethods(requiredClass("scala.Tuple1"), List(nme._1), Tuple1SpecializedParamTypes) - @tu lazy val Tuple2: ClassSymbol = withSpecMethods(requiredClass("scala.Tuple2"), List(nme._1, nme._2), Tuple2SpecializedParamTypes) - - @tu lazy val TupleSpecializedClasses: Set[Symbol] = Set(Tuple1, Tuple2) - @tu lazy val Tuple1SpecializedParamTypes: Set[TypeRef] = Set(IntType, LongType, DoubleType) - @tu lazy val Tuple2SpecializedParamTypes: Set[TypeRef] = Set(IntType, LongType, DoubleType, CharType, BooleanType) - @tu lazy val Tuple1SpecializedParamClasses: PerRun[Set[Symbol]] = new PerRun(Tuple1SpecializedParamTypes.map(_.symbol)) - @tu lazy val Tuple2SpecializedParamClasses: PerRun[Set[Symbol]] = new PerRun(Tuple2SpecializedParamTypes.map(_.symbol)) - - // Specialized type parameters defined for scala.Function{0,1,2}. 
- @tu lazy val Function1SpecializedParamTypes: collection.Set[TypeRef] = - Set(IntType, LongType, FloatType, DoubleType) - @tu lazy val Function2SpecializedParamTypes: collection.Set[TypeRef] = - Set(IntType, LongType, DoubleType) - @tu lazy val Function0SpecializedReturnTypes: collection.Set[TypeRef] = - ScalaNumericValueTypeList.toSet + UnitType + BooleanType - @tu lazy val Function1SpecializedReturnTypes: collection.Set[TypeRef] = - Set(UnitType, BooleanType, IntType, FloatType, LongType, DoubleType) - @tu lazy val Function2SpecializedReturnTypes: collection.Set[TypeRef] = - Function1SpecializedReturnTypes - - @tu lazy val Function1SpecializedParamClasses: PerRun[collection.Set[Symbol]] = - new PerRun(Function1SpecializedParamTypes.map(_.symbol)) - @tu lazy val Function2SpecializedParamClasses: PerRun[collection.Set[Symbol]] = - new PerRun(Function2SpecializedParamTypes.map(_.symbol)) - @tu lazy val Function0SpecializedReturnClasses: PerRun[collection.Set[Symbol]] = - new PerRun(Function0SpecializedReturnTypes.map(_.symbol)) - @tu lazy val Function1SpecializedReturnClasses: PerRun[collection.Set[Symbol]] = - new PerRun(Function1SpecializedReturnTypes.map(_.symbol)) - @tu lazy val Function2SpecializedReturnClasses: PerRun[collection.Set[Symbol]] = - new PerRun(Function2SpecializedReturnTypes.map(_.symbol)) - - def isSpecializableTuple(base: Symbol, args: List[Type])(using Context): Boolean = - args.length <= 2 && base.isClass && TupleSpecializedClasses.exists(base.asClass.derivesFrom) && args.match - case List(x) => Tuple1SpecializedParamClasses().contains(x.classSymbol) - case List(x, y) => Tuple2SpecializedParamClasses().contains(x.classSymbol) && Tuple2SpecializedParamClasses().contains(y.classSymbol) - case _ => false - && base.owner.denot.info.member(base.name.specializedName(args)).exists // when dotc compiles the stdlib there are no specialised classes - - def isSpecializableFunction(cls: ClassSymbol, paramTypes: List[Type], retType: Type)(using Context): Boolean = - paramTypes.length <= 2 - && (cls.derivesFrom(FunctionSymbol(paramTypes.length)) || isByNameFunctionClass(cls)) - && isSpecializableFunctionSAM(paramTypes, retType) - - /** If the Single Abstract Method of a Function class has this type, is it specializable? 
*/ - def isSpecializableFunctionSAM(paramTypes: List[Type], retType: Type)(using Context): Boolean = - paramTypes.length <= 2 && (paramTypes match { - case Nil => - Function0SpecializedReturnClasses().contains(retType.typeSymbol) - case List(paramType0) => - Function1SpecializedParamClasses().contains(paramType0.typeSymbol) && - Function1SpecializedReturnClasses().contains(retType.typeSymbol) - case List(paramType0, paramType1) => - Function2SpecializedParamClasses().contains(paramType0.typeSymbol) && - Function2SpecializedParamClasses().contains(paramType1.typeSymbol) && - Function2SpecializedReturnClasses().contains(retType.typeSymbol) - case _ => - false - }) - - @tu lazy val Function0SpecializedApplyNames: collection.Set[TermName] = - for r <- Function0SpecializedReturnTypes - yield nme.apply.specializedFunction(r, Nil).asTermName - - @tu lazy val Function1SpecializedApplyNames: collection.Set[TermName] = - for - r <- Function1SpecializedReturnTypes - t1 <- Function1SpecializedParamTypes - yield - nme.apply.specializedFunction(r, List(t1)).asTermName - - @tu lazy val Function2SpecializedApplyNames: collection.Set[TermName] = - for - r <- Function2SpecializedReturnTypes - t1 <- Function2SpecializedParamTypes - t2 <- Function2SpecializedParamTypes - yield - nme.apply.specializedFunction(r, List(t1, t2)).asTermName - - @tu lazy val FunctionSpecializedApplyNames: collection.Set[Name] = - Function0SpecializedApplyNames ++ Function1SpecializedApplyNames ++ Function2SpecializedApplyNames - - def functionArity(tp: Type)(using Context): Int = tp.dropDependentRefinement.dealias.argInfos.length - 1 - - /** Return underlying context function type (i.e. instance of an ContextFunctionN class) - * or NoType if none exists. The following types are considered as underlying types: - * - the alias of an alias type - * - the instance or origin of a TypeVar (i.e. the result of a stripTypeVar) - * - the upper bound of a TypeParamRef in the current constraint - */ - def asContextFunctionType(tp: Type)(using Context): Type = - tp.stripTypeVar.dealias match - case tp1: TypeParamRef if ctx.typerState.constraint.contains(tp1) => - asContextFunctionType(TypeComparer.bounds(tp1).hiBound) - case tp1 => - if tp1.typeSymbol.name.isContextFunction && isFunctionType(tp1) then tp1 - else NoType - - /** Is `tp` an context function type? */ - def isContextFunctionType(tp: Type)(using Context): Boolean = - asContextFunctionType(tp).exists - - /** An extractor for context function types `As ?=> B`, possibly with - * dependent refinements. Optionally returns a triple consisting of the argument - * types `As`, the result type `B` and a whether the type is an erased context function. 
- */ - object ContextFunctionType: - def unapply(tp: Type)(using Context): Option[(List[Type], Type, Boolean)] = - if ctx.erasedTypes then - atPhase(erasurePhase)(unapply(tp)) - else - val tp1 = asContextFunctionType(tp) - if tp1.exists then - val args = tp1.dropDependentRefinement.argInfos - Some((args.init, args.last, tp1.typeSymbol.name.isErasedFunction)) - else None - - def isErasedFunctionType(tp: Type)(using Context): Boolean = - tp.dealias.typeSymbol.name.isErasedFunction && isFunctionType(tp) - - /** A whitelist of Scala-2 classes that are known to be pure */ - def isAssuredNoInits(sym: Symbol): Boolean = - (sym `eq` SomeClass) || isTupleClass(sym) - - /** If `cls` is Tuple1..Tuple22, add the corresponding *: type as last parent to `parents` */ - def adjustForTuple(cls: ClassSymbol, tparams: List[TypeSymbol], parents: List[Type]): List[Type] = { - if !isTupleClass(cls) then parents - else if tparams.isEmpty then parents :+ TupleTypeRef - else - assert(parents.head.typeSymbol == ObjectClass) - TypeOps.nestedPairs(tparams.map(_.typeRef)) :: parents.tail - } - - /** If it is BoxedUnit, remove `java.io.Serializable` from `parents`. */ - def adjustForBoxedUnit(cls: ClassSymbol, parents: List[Type]): List[Type] = - if (isBoxedUnitClass(cls)) parents.filter(_.typeSymbol != JavaSerializableClass) - else parents - - private val HasProblematicGetClass: Set[Name] = Set( - tpnme.AnyVal, tpnme.Byte, tpnme.Short, tpnme.Char, tpnme.Int, tpnme.Long, tpnme.Float, tpnme.Double, - tpnme.Unit, tpnme.Boolean) - - /** When typing a primitive value class or AnyVal, we ignore the `getClass` - * member: it's supposed to be an override of the `getClass` defined on `Any`, - * but in dotty `Any#getClass` is polymorphic so it ends up being an overload. - * This is especially problematic because it means that when writing: - * - * 1.asInstanceOf[Int & AnyRef].getClass - * - * the `getClass` that returns `Class[Int]` defined in Int can be selected, - * but this call is specified to return `classOf[Integer]`, see - * tests/run/t5568.scala. - * - * FIXME: remove all the `getClass` methods defined in the standard library - * so we don't have to hot-patch it like this. - */ - def hasProblematicGetClass(className: Name): Boolean = - HasProblematicGetClass.contains(className) - - /** Is synthesized symbol with alphanumeric name allowed to be used as an infix operator? */ - def isInfix(sym: Symbol)(using Context): Boolean = - (sym eq Object_eq) || (sym eq Object_ne) - - @tu lazy val assumedTransparentNames: Map[Name, Set[Symbol]] = - // add these for now, until we had a chance to retrofit 2.13 stdlib - // we should do a more through sweep through it then. 
- val strs = Map( - "Any" -> Set("scala"), - "AnyVal" -> Set("scala"), - "Matchable" -> Set("scala"), - "Product" -> Set("scala"), - "Object" -> Set("java.lang"), - "Comparable" -> Set("java.lang"), - "Serializable" -> Set("java.io"), - "BitSetOps" -> Set("scala.collection"), - "IndexedSeqOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), - "IterableOnceOps" -> Set("scala.collection"), - "IterableOps" -> Set("scala.collection"), - "LinearSeqOps" -> Set("scala.collection", "scala.collection.immutable"), - "MapOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), - "SeqOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), - "SetOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), - "SortedMapOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), - "SortedOps" -> Set("scala.collection"), - "SortedSetOps" -> Set("scala.collection", "scala.collection.mutable", "scala.collection.immutable"), - "StrictOptimizedIterableOps" -> Set("scala.collection"), - "StrictOptimizedLinearSeqOps" -> Set("scala.collection"), - "StrictOptimizedMapOps" -> Set("scala.collection", "scala.collection.immutable"), - "StrictOptimizedSeqOps" -> Set("scala.collection", "scala.collection.immutable"), - "StrictOptimizedSetOps" -> Set("scala.collection", "scala.collection.immutable"), - "StrictOptimizedSortedMapOps" -> Set("scala.collection", "scala.collection.immutable"), - "StrictOptimizedSortedSetOps" -> Set("scala.collection", "scala.collection.immutable"), - "ArrayDequeOps" -> Set("scala.collection.mutable"), - "DefaultSerializable" -> Set("scala.collection.generic"), - "IsIterable" -> Set("scala.collection.generic"), - "IsIterableLowPriority" -> Set("scala.collection.generic"), - "IsIterableOnce" -> Set("scala.collection.generic"), - "IsIterableOnceLowPriority" -> Set("scala.collection.generic"), - "IsMap" -> Set("scala.collection.generic"), - "IsSeq" -> Set("scala.collection.generic")) - strs.map { case (simple, pkgs) => ( - simple.toTypeName, - pkgs.map(pkg => staticRef(pkg.toTermName, isPackage = true).symbol.moduleClass) - ) - } - - def isAssumedTransparent(sym: Symbol): Boolean = - assumedTransparentNames.get(sym.name) match - case Some(pkgs) => pkgs.contains(sym.owner) - case none => false - - // ----- primitive value class machinery ------------------------------------------ - - class PerRun[T](generate: Context ?=> T) { - private var current: RunId = NoRunId - private var cached: T = _ - def apply()(using Context): T = { - if (current != ctx.runId) { - cached = generate - current = ctx.runId - } - cached - } - } - - @tu lazy val ScalaNumericValueTypeList: List[TypeRef] = List( - ByteType, ShortType, CharType, IntType, LongType, FloatType, DoubleType) - - @tu private lazy val ScalaNumericValueTypes: collection.Set[TypeRef] = ScalaNumericValueTypeList.toSet - @tu private lazy val ScalaValueTypes: collection.Set[TypeRef] = ScalaNumericValueTypes `union` Set(UnitType, BooleanType) - - val ScalaNumericValueClasses: PerRun[collection.Set[Symbol]] = new PerRun(ScalaNumericValueTypes.map(_.symbol)) - val ScalaValueClasses: PerRun[collection.Set[Symbol]] = new PerRun(ScalaValueTypes.map(_.symbol)) - - val ScalaBoxedClasses: PerRun[collection.Set[Symbol]] = new PerRun( - Set(BoxedByteClass, BoxedShortClass, BoxedCharClass, BoxedIntClass, BoxedLongClass, BoxedFloatClass, BoxedDoubleClass, BoxedUnitClass, BoxedBooleanClass) - ) - - 
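The `PerRun` class deleted just above memoizes a computed value for the duration of a single compiler run and recomputes it when `ctx.runId` changes. As a rough, standalone illustration of that idea outside the compiler, here is a minimal sketch; `Run`, `PerRunCache`, and `perRunDemo` are names invented for this example and do not appear in the deleted sources.

```scala
// Minimal sketch of a per-run cache, analogous in spirit to the PerRun class
// deleted above. `Run` stands in for the compiler's run identifier; all names
// here are invented for the example.
final case class Run(id: Int)

final class PerRunCache[T](generate: Run => T):
  private var lastRunId: Int = Int.MinValue // sentinel: no run observed yet
  private var cached: Option[T] = None
  def apply(run: Run): T =
    if lastRunId != run.id || cached.isEmpty then
      cached = Some(generate(run)) // recompute at most once per run id
      lastRunId = run.id
    cached.get

@main def perRunDemo(): Unit =
  var recomputations = 0
  val boxedNames = PerRunCache { run =>
    recomputations += 1
    Set(s"symbols recomputed for run ${run.id}")
  }
  val run1 = Run(1)
  boxedNames(run1)   // computed
  boxedNames(run1)   // served from the cache
  boxedNames(Run(2)) // a new run id forces recomputation
  assert(recomputations == 2)
```

In the deleted Definitions.scala the cached values are sets of class symbols (for example `ScalaValueClasses` and `ScalaBoxedClasses`), which are presumably only meaningful within a single run, hence a per-run cache rather than a plain lazy val.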
private val valueTypeEnc = mutable.Map[TypeName, PrimitiveClassEnc]() - private val typeTags = mutable.Map[TypeName, Name]().withDefaultValue(nme.specializedTypeNames.Object) - -// private val unboxedTypeRef = mutable.Map[TypeName, TypeRef]() -// private val javaTypeToValueTypeRef = mutable.Map[Class[?], TypeRef]() -// private val valueTypeNamesToJavaType = mutable.Map[TypeName, Class[?]]() - - private def valueTypeRef(name: String, jtype: Class[?], enc: Int, tag: Name): TypeRef = { - val vcls = requiredClassRef(name) - valueTypeEnc(vcls.name) = enc - typeTags(vcls.name) = tag -// unboxedTypeRef(boxed.name) = vcls -// javaTypeToValueTypeRef(jtype) = vcls -// valueTypeNamesToJavaType(vcls.name) = jtype - vcls - } - - /** The type of the boxed class corresponding to primitive value type `tp`. */ - def boxedType(tp: Type)(using Context): TypeRef = { - val cls = tp.classSymbol - if (cls eq ByteClass) BoxedByteClass - else if (cls eq ShortClass) BoxedShortClass - else if (cls eq CharClass) BoxedCharClass - else if (cls eq IntClass) BoxedIntClass - else if (cls eq LongClass) BoxedLongClass - else if (cls eq FloatClass) BoxedFloatClass - else if (cls eq DoubleClass) BoxedDoubleClass - else if (cls eq UnitClass) BoxedUnitClass - else if (cls eq BooleanClass) BoxedBooleanClass - else sys.error(s"Not a primitive value type: $tp") - }.typeRef - - def unboxedType(tp: Type)(using Context): TypeRef = { - val cls = tp.classSymbol - if (cls eq BoxedByteClass) ByteType - else if (cls eq BoxedShortClass) ShortType - else if (cls eq BoxedCharClass) CharType - else if (cls eq BoxedIntClass) IntType - else if (cls eq BoxedLongClass) LongType - else if (cls eq BoxedFloatClass) FloatType - else if (cls eq BoxedDoubleClass) DoubleType - else if (cls eq BoxedUnitClass) UnitType - else if (cls eq BoxedBooleanClass) BooleanType - else sys.error(s"Not a boxed primitive value type: $tp") - } - - /** The JVM tag for `tp` if it's a primitive, `java.lang.Object` otherwise. 
*/ - def typeTag(tp: Type)(using Context): Name = typeTags(scalaClassName(tp)) - -// /** The `Class[?]` of a primitive value type name */ -// def valueTypeNameToJavaType(name: TypeName)(using Context): Option[Class[?]] = -// valueTypeNamesToJavaType.get(if (name.firstPart eq nme.scala) name.lastPart.toTypeName else name) - - type PrimitiveClassEnc = Int - - val ByteEnc: Int = 2 - val ShortEnc: Int = ByteEnc * 3 - val CharEnc: Int = 5 - val IntEnc: Int = ShortEnc * CharEnc - val LongEnc: Int = IntEnc * 7 - val FloatEnc: Int = LongEnc * 11 - val DoubleEnc: Int = FloatEnc * 13 - val BooleanEnc: Int = 17 - val UnitEnc: Int = 19 - - def isValueSubType(tref1: TypeRef, tref2: TypeRef)(using Context): Boolean = - valueTypeEnc(tref2.name) % valueTypeEnc(tref1.name) == 0 - def isValueSubClass(sym1: Symbol, sym2: Symbol): Boolean = - valueTypeEnc(sym2.asClass.name) % valueTypeEnc(sym1.asClass.name) == 0 - - @tu lazy val specialErasure: SimpleIdentityMap[Symbol, ClassSymbol] = - SimpleIdentityMap.empty[Symbol] - .updated(AnyClass, ObjectClass) - .updated(MatchableClass, ObjectClass) - .updated(AnyValClass, ObjectClass) - .updated(SingletonClass, ObjectClass) - .updated(TupleClass, ProductClass) - .updated(NonEmptyTupleClass, ProductClass) - .updated(PairClass, ObjectClass) - - // ----- Initialization --------------------------------------------------- - - /** Lists core classes that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */ - @tu lazy val syntheticScalaClasses: List[TypeSymbol] = - List( - AnyClass, - MatchableClass, - AnyRefAlias, - AnyKindClass, - andType, - orType, - RepeatedParamClass, - ByNameParamClass2x, - IntoType, - AnyValClass, - NullClass, - NothingClass, - SingletonClass) - - @tu lazy val syntheticCoreClasses: List[Symbol] = syntheticScalaClasses ++ List( - EmptyPackageVal, - OpsPackageClass) - - /** Lists core methods that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */ - @tu lazy val syntheticCoreMethods: List[TermSymbol] = - AnyMethods ++ ObjectMethods ++ List(String_+, throwMethod) - - @tu lazy val reservedScalaClassNames: Set[Name] = syntheticScalaClasses.map(_.name).toSet - - private var isInitialized = false - - def init()(using ctx: DetachedContext): Unit = { - this.initCtx = ctx - if (!isInitialized) { - // force initialization of every symbol that is synthesized or hijacked by the compiler - val forced = - syntheticCoreClasses ++ syntheticCoreMethods ++ ScalaValueClasses() :+ JavaEnumClass - isInitialized = true - } - addSyntheticSymbolsComments - } - - /** Definitions used in Lazy Vals implementation */ - val LazyValsModuleName = "scala.runtime.LazyVals" - @tu lazy val LazyValsModule = requiredModule(LazyValsModuleName) - @tu lazy val LazyValsWaitingState = requiredClass(s"$LazyValsModuleName.Waiting") - @tu lazy val LazyValsControlState = requiredClass(s"$LazyValsModuleName.LazyValControlState") - - def addSyntheticSymbolsComments(using Context): Unit = - def add(sym: Symbol, doc: String) = ctx.docCtx.foreach(_.addDocstring(sym, Some(Comment(NoSpan, doc)))) - - add(AnyClass, - """/** Class `Any` is the root of the Scala class hierarchy. Every class in a Scala - | * execution environment inherits directly or indirectly from this class. - | * - | * Starting with Scala 2.10 it is possible to directly extend `Any` using ''universal traits''. - | * A ''universal trait'' is a trait that extends `Any`, only has `def`s as members, and does no initialization. 
- | * - | * The main use case for universal traits is to allow basic inheritance of methods for [[scala.AnyVal value classes]]. - | * For example, - | * - | * {{{ - | * trait Printable extends Any { - | * def print(): Unit = println(this) - | * } - | * class Wrapper(val underlying: Int) extends AnyVal with Printable - | * - | * val w = new Wrapper(3) - | * w.print() - | * }}} - | * - | * See the [[https://docs.scala-lang.org/overviews/core/value-classes.html Value Classes and Universal Traits]] for more - | * details on the interplay of universal traits and value classes. - | */ - """.stripMargin) - - add(Any_==, - """/** Test two objects for equality. - | * The expression `x == that` is equivalent to `if (x eq null) that eq null else x.equals(that)`. - | * - | * @param that the object to compare against this object for equality. - | * @return `true` if the receiver object is equivalent to the argument; `false` otherwise. - | */ - """.stripMargin) - - add(Any_!=, - """/** Test two objects for inequality. - | * - | * @param that the object to compare against this object for equality. - | * @return `true` if !(this == that), `false` otherwise. - | */ - """.stripMargin) - - add(Any_equals, - """/** Compares the receiver object (`this`) with the argument object (`that`) for equivalence. - | * - | * Any implementation of this method should be an [[https://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]]: - | * - | * - It is reflexive: for any instance `x` of type `Any`, `x.equals(x)` should return `true`. - | * - It is symmetric: for any instances `x` and `y` of type `Any`, `x.equals(y)` should return `true` if and - | * only if `y.equals(x)` returns `true`. - | * - It is transitive: for any instances `x`, `y`, and `z` of type `Any` if `x.equals(y)` returns `true` and - | * `y.equals(z)` returns `true`, then `x.equals(z)` should return `true`. - | * - | * If you override this method, you should verify that your implementation remains an equivalence relation. - | * Additionally, when overriding this method it is usually necessary to override `hashCode` to ensure that - | * objects which are "equal" (`o1.equals(o2)` returns `true`) hash to the same [[scala.Int]]. - | * (`o1.hashCode.equals(o2.hashCode)`). - | * - | * @param that the object to compare against this object for equality. - | * @return `true` if the receiver object is equivalent to the argument; `false` otherwise. - | */ - """.stripMargin) - - add(Any_hashCode, - """/** Calculate a hash code value for the object. - | * - | * The default hashing algorithm is platform dependent. - | * - | * Note that it is allowed for two objects to have identical hash codes (`o1.hashCode.equals(o2.hashCode)`) yet - | * not be equal (`o1.equals(o2)` returns `false`). A degenerate implementation could always return `0`. - | * However, it is required that if two objects are equal (`o1.equals(o2)` returns `true`) that they have - | * identical hash codes (`o1.hashCode.equals(o2.hashCode)`). Therefore, when overriding this method, be sure - | * to verify that the behavior is consistent with the `equals` method. - | * - | * @return the hash code value for this object. - | */ - """.stripMargin) - - add(Any_toString, - """/** Returns a string representation of the object. - | * - | * The default representation is platform dependent. - | * - | * @return a string representation of the object. - | */ - """.stripMargin) - - add(Any_##, - """/** Equivalent to `x.hashCode` except for boxed numeric types and `null`. 
- | * For numerics, it returns a hash value which is consistent - | * with value equality: if two value type instances compare - | * as true, then ## will produce the same hash value for each - | * of them. - | * For `null` returns a hashcode where `null.hashCode` throws a - | * `NullPointerException`. - | * - | * @return a hash value consistent with == - | */ - """.stripMargin) - - add(Any_isInstanceOf, - """/** Test whether the dynamic type of the receiver object is `T0`. - | * - | * Note that the result of the test is modulo Scala's erasure semantics. - | * Therefore the expression `1.isInstanceOf[String]` will return `false`, while the - | * expression `List(1).isInstanceOf[List[String]]` will return `true`. - | * In the latter example, because the type argument is erased as part of compilation it is - | * not possible to check whether the contents of the list are of the specified type. - | * - | * @return `true` if the receiver object is an instance of erasure of type `T0`; `false` otherwise. - | */ - """.stripMargin) - - add(Any_asInstanceOf, - """/** Cast the receiver object to be of type `T0`. - | * - | * Note that the success of a cast at runtime is modulo Scala's erasure semantics. - | * Therefore the expression `1.asInstanceOf[String]` will throw a `ClassCastException` at - | * runtime, while the expression `List(1).asInstanceOf[List[String]]` will not. - | * In the latter example, because the type argument is erased as part of compilation it is - | * not possible to check whether the contents of the list are of the requested type. - | * - | * @throws ClassCastException if the receiver object is not an instance of the erasure of type `T0`. - | * @return the receiver object. - | */ - """.stripMargin) - - add(Any_getClass, - """/** Returns the runtime class representation of the object. - | * - | * @return a class object corresponding to the runtime type of the receiver. - | */ - """.stripMargin) - - add(MatchableClass, - """/** The base trait of types that can be safely pattern matched against. - | * - | * See [[https://docs.scala-lang.org/scala3/reference/other-new-features/matchable.html]]. - | */ - """.stripMargin) - - add(AnyRefAlias, - """/** Class `AnyRef` is the root class of all ''reference types''. - | * All types except the value types descend from this class. - | */ - """.stripMargin) - - add(Object_eq, - """/** Tests whether the argument (`that`) is a reference to the receiver object (`this`). - | * - | * The `eq` method implements an [[https://en.wikipedia.org/wiki/Equivalence_relation equivalence relation]] on - | * non-null instances of `AnyRef`, and has three additional properties: - | * - | * - It is consistent: for any non-null instances `x` and `y` of type `AnyRef`, multiple invocations of - | * `x.eq(y)` consistently returns `true` or consistently returns `false`. - | * - For any non-null instance `x` of type `AnyRef`, `x.eq(null)` and `null.eq(x)` returns `false`. - | * - `null.eq(null)` returns `true`. - | * - | * When overriding the `equals` or `hashCode` methods, it is important to ensure that their behavior is - | * consistent with reference equality. Therefore, if two objects are references to each other (`o1 eq o2`), they - | * should be equal to each other (`o1 == o2`) and they should hash to the same value (`o1.hashCode == o2.hashCode`). - | * - | * @param that the object to compare against this object for reference equality. - | * @return `true` if the argument is a reference to the receiver object; `false` otherwise. 
- | */ - """.stripMargin) - - add(Object_ne, - """/** Equivalent to `!(this eq that)`. - | * - | * @param that the object to compare against this object for reference equality. - | * @return `true` if the argument is not a reference to the receiver object; `false` otherwise. - | */ - """.stripMargin) - - add(Object_synchronized, - """/** Executes the code in `body` with an exclusive lock on `this`. - | * - | * @param body the code to execute - | * @return the result of `body` - | */ - """.stripMargin) - - add(Object_clone, - """/** Create a copy of the receiver object. - | * - | * The default implementation of the `clone` method is platform dependent. - | * - | * @note not specified by SLS as a member of AnyRef - | * @return a copy of the receiver object. - | */ - """.stripMargin) - - add(Object_finalize, - """/** Called by the garbage collector on the receiver object when there - | * are no more references to the object. - | * - | * The details of when and if the `finalize` method is invoked, as - | * well as the interaction between `finalize` and non-local returns - | * and exceptions, are all platform dependent. - | * - | * @note not specified by SLS as a member of AnyRef - | */ - """.stripMargin) - - add(Object_notify, - """/** Wakes up a single thread that is waiting on the receiver object's monitor. - | * - | * @note not specified by SLS as a member of AnyRef - | */ - """.stripMargin) - - add(Object_notifyAll, - """/** Wakes up all threads that are waiting on the receiver object's monitor. - | * - | * @note not specified by SLS as a member of AnyRef - | */ - """.stripMargin) - - add(Object_wait, - """/** See [[https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#wait--]]. - | * - | * @note not specified by SLS as a member of AnyRef - | */ - """.stripMargin) - - add(Object_waitL, - """/** See [[https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#wait-long-]]. - | * - | * @param timeout the maximum time to wait in milliseconds. - | * @note not specified by SLS as a member of AnyRef - | */ - """.stripMargin) - - add(Object_waitLI, - """/** See [[https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#wait-long-int-]] - | * - | * @param timeout the maximum time to wait in milliseconds. - | * @param nanos additional time, in nanoseconds range 0-999999. - | * @note not specified by SLS as a member of AnyRef - | */ - """.stripMargin) - - add(AnyKindClass, - """/** The super-type of all types. - | * - | * See [[https://docs.scala-lang.org/scala3/reference/other-new-features/kind-polymorphism.html]]. - | */ - """.stripMargin) - - add(andType, - """/** The intersection of two types. - | * - | * See [[https://docs.scala-lang.org/scala3/reference/new-types/intersection-types.html]]. - | */ - """.stripMargin) - - add(orType, - """/** The union of two types. - | * - | * See [[https://docs.scala-lang.org/scala3/reference/new-types/union-types.html]]. - | */ - """.stripMargin) - - add(AnyValClass, - """/** `AnyVal` is the root class of all ''value types'', which describe values - | * not implemented as objects in the underlying host system. Value classes - | * are specified in Scala Language Specification, section 12.2. - | * - | * The standard implementation includes nine `AnyVal` subtypes: - | * - | * [[scala.Double]], [[scala.Float]], [[scala.Long]], [[scala.Int]], [[scala.Char]], - | * [[scala.Short]], and [[scala.Byte]] are the ''numeric value types''. - | * - | * [[scala.Unit]] and [[scala.Boolean]] are the ''non-numeric value types''. 
- | * - | * Other groupings: - | * - | * - The ''subrange types'' are [[scala.Byte]], [[scala.Short]], and [[scala.Char]]. - | * - The ''integer types'' include the subrange types as well as [[scala.Int]] and [[scala.Long]]. - | * - The ''floating point types'' are [[scala.Float]] and [[scala.Double]]. - | * - | * Prior to Scala 2.10, `AnyVal` was a sealed trait. Beginning with Scala 2.10, - | * however, it is possible to define a subclass of `AnyVal` called a ''user-defined value class'' - | * which is treated specially by the compiler. Properly-defined user value classes provide a way - | * to improve performance on user-defined types by avoiding object allocation at runtime, and by - | * replacing virtual method invocations with static method invocations. - | * - | * User-defined value classes which avoid object allocation... - | * - | * - must have a single `val` parameter that is the underlying runtime representation. - | * - can define `def`s, but no `val`s, `var`s, or nested `traits`s, `class`es or `object`s. - | * - typically extend no other trait apart from `AnyVal`. - | * - cannot be used in type tests or pattern matching. - | * - may not override `equals` or `hashCode` methods. - | * - | * A minimal example: - | * {{{ - | * class Wrapper(val underlying: Int) extends AnyVal { - | * def foo: Wrapper = new Wrapper(underlying * 19) - | * } - | * }}} - | * - | * It's important to note that user-defined value classes are limited, and in some circumstances, - | * still must allocate a value class instance at runtime. These limitations and circumstances are - | * explained in greater detail in the [[https://docs.scala-lang.org/overviews/core/value-classes.html Value Classes and Universal Traits]]. - | */ - """.stripMargin) - - add(NullClass, - """/** `Null` is - together with [[scala.Nothing]] - at the bottom of the Scala type hierarchy. - | * - | * `Null` is the type of the `null` literal. It is a subtype of every type - | * except those of value classes. Value classes are subclasses of [[AnyVal]], which includes - | * primitive types such as [[Int]], [[Boolean]], and user-defined value classes. - | * - | * Since `Null` is not a subtype of value types, `null` is not a member of any such type. - | * For instance, it is not possible to assign `null` to a variable of type [[scala.Int]]. - | */ - """.stripMargin) - - add(NothingClass, - """/** `Nothing` is - together with [[scala.Null]] - at the bottom of Scala's type hierarchy. - | * - | * `Nothing` is a subtype of every other type (including [[scala.Null]]); there exist - | * ''no instances'' of this type. Although type `Nothing` is uninhabited, it is - | * nevertheless useful in several ways. For instance, the Scala library defines a value - | * [[scala.collection.immutable.Nil]] of type `List[Nothing]`. Because lists are covariant in Scala, - | * this makes [[scala.collection.immutable.Nil]] an instance of `List[T]`, for any element of type `T`. - | * - | * Another usage for Nothing is the return type for methods which never return normally. - | * One example is method error in [[scala.sys]], which always throws an exception. - | */ - """.stripMargin) - - add(SingletonClass, - """/** `Singleton` is used by the compiler as a supertype for singleton types. This includes literal types, - | * as they are also singleton types. 
- | * - | * {{{ - | * scala> object A { val x = 42 } - | * defined object A - | * - | * scala> implicitly[A.type <:< Singleton] - | * res12: A.type <:< Singleton = generalized constraint - | * - | * scala> implicitly[A.x.type <:< Singleton] - | * res13: A.x.type <:< Singleton = generalized constraint - | * - | * scala> implicitly[42 <:< Singleton] - | * res14: 42 <:< Singleton = generalized constraint - | * - | * scala> implicitly[Int <:< Singleton] - | * ^ - | * error: Cannot prove that Int <:< Singleton. - | * }}} - | * - | * `Singleton` has a special meaning when it appears as an upper bound on a formal type - | * parameter. Normally, type inference in Scala widens singleton types to the underlying - | * non-singleton type. When a type parameter has an explicit upper bound of `Singleton`, - | * the compiler infers a singleton type. - | * - | * {{{ - | * scala> def check42[T](x: T)(implicit ev: T =:= 42): T = x - | * check42: [T](x: T)(implicit ev: T =:= 42)T - | * - | * scala> val x1 = check42(42) - | * ^ - | * error: Cannot prove that Int =:= 42. - | * - | * scala> def singleCheck42[T <: Singleton](x: T)(implicit ev: T =:= 42): T = x - | * singleCheck42: [T <: Singleton](x: T)(implicit ev: T =:= 42)T - | * - | * scala> val x2 = singleCheck42(42) - | * x2: Int = 42 - | * }}} - | * - | * See also [[https://docs.scala-lang.org/sips/42.type.html SIP-23 about Literal-based Singleton Types]]. - | */ - """.stripMargin) -} diff --git a/tests/pos-with-compiler-cc/dotc/core/DenotTransformers.scala b/tests/pos-with-compiler-cc/dotc/core/DenotTransformers.scala deleted file mode 100644 index 6690cae3a142..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/DenotTransformers.scala +++ /dev/null @@ -1,82 +0,0 @@ -package dotty.tools.dotc -package core - -import Periods._ -import SymDenotations._ -import Contexts._ -import Types._ -import Symbols._ -import Denotations._ -import Phases._ - -object DenotTransformers { - - /** A transformer group contains a sequence of transformers, - * ordered by the phase where they apply. Transformers are added - * to a group via `install`. - */ - - /** A transformer transforms denotations at a given phase */ - trait DenotTransformer extends Phase { - - /** The last phase during which the transformed denotations are valid */ - def lastPhaseId(using Context): Int = ctx.base.nextDenotTransformerId(id + 1) - - /** The validity period of the transformed denotations in the given context */ - def validFor(using Context): Period = - Period(ctx.runId, id + 1, lastPhaseId) - - /** The transformation method */ - def transform(ref: SingleDenotation)(using Context): SingleDenotation - } - - /** A transformer that only transforms the info field of denotations */ - trait InfoTransformer extends DenotTransformer { - - def transformInfo(tp: Type, sym: Symbol)(using Context): Type - - def transform(ref: SingleDenotation)(using Context): SingleDenotation = { - val sym = ref.symbol - if (sym.exists && !infoMayChange(sym)) ref - else { - val info1 = transformInfo(ref.info, ref.symbol) - if (info1 eq ref.info) ref - else ref match { - case ref: SymDenotation => - ref.copySymDenotation(info = info1).copyCaches(ref, ctx.phase.next) - case _ => - ref.derivedSingleDenotation(ref.symbol, info1) - } - } - } - - /** Denotations with a symbol where `infoMayChange` is false are guaranteed to be - * unaffected by this transform, so `transformInfo` need not be run. This - * can save time, and more importantly, can help avoid forcing symbol completers. 
- */ - protected def infoMayChange(sym: Symbol)(using Context): Boolean = true - } - - /** A transformer that only transforms SymDenotations. - * Note: Infos of non-sym denotations are left as is. So the transformer should - * be used before erasure only if this is not a problem. After erasure, all - * denotations are SymDenotations, so SymTransformers can be used freely. - */ - trait SymTransformer extends DenotTransformer { - - def transformSym(sym: SymDenotation)(using Context): SymDenotation - - def transform(ref: SingleDenotation)(using Context): SingleDenotation = ref match { - case ref: SymDenotation => transformSym(ref) - case _ => ref - } - } - - /** A `DenotTransformer` trait that has the identity as its `transform` method. - * You might want to inherit from this trait so that new denotations can be - * installed using `installAfter` and `enteredAfter` at the end of the phase. - */ - trait IdentityDenotTransformer extends DenotTransformer { - def transform(ref: SingleDenotation)(using Context): SingleDenotation = ref - } -} diff --git a/tests/pos-with-compiler-cc/dotc/core/Denotations.scala b/tests/pos-with-compiler-cc/dotc/core/Denotations.scala deleted file mode 100644 index 9db285975a0a..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/Denotations.scala +++ /dev/null @@ -1,1376 +0,0 @@ -package dotty.tools -package dotc -package core - -import SymDenotations.{ SymDenotation, ClassDenotation, NoDenotation, LazyType, stillValid, acceptStale, traceInvalid } -import Contexts._ -import Names._ -import NameKinds._ -import StdNames._ -import Symbols.NoSymbol -import Symbols._ -import Types._ -import Periods._ -import Flags._ -import DenotTransformers._ -import Decorators._ -import Signature.MatchDegree._ -import printing.Texts._ -import printing.Printer -import io.AbstractFile -import config.Config -import config.Printers.overload -import util.common._ -import typer.ProtoTypes.NoViewsAllowed -import collection.mutable.ListBuffer -import language.experimental.pureFunctions - -/** Denotations represent the meaning of symbols and named types. - * The following diagram shows how the principal types of denotations - * and their denoting entities relate to each other. Lines ending in - * a down-arrow `v` are member methods. The two methods shown in the diagram are - * "symbol" and "deref". Both methods are parameterized by the current context, - * and are effectively indexed by current period. - * - * Lines ending in a horizontal line mean subtyping (right is a subtype of left). - * - * NamedType - * | Symbol---------ClassSymbol - * | | | - * | denot | denot | denot - * v v v - * Denotation-+-----SingleDenotation-+------SymDenotation-+----ClassDenotation - * | | - * +-----MultiDenotation | - * | - * +--UniqueRefDenotation - * +--JointRefDenotation - * - * Here's a short summary of the classes in this diagram. 
- * - * NamedType A type consisting of a prefix type and a name, with fields - * prefix: Type - * name: Name - * It has two subtypes: TermRef and TypeRef - * Symbol A label for a definition or declaration in one compiler run - * ClassSymbol A symbol representing a class - * Denotation The meaning of a named type or symbol during a period - * MultiDenotation A denotation representing several overloaded members - * SingleDenotation A denotation representing a non-overloaded member or definition, with main fields - * symbol: Symbol - * info: Type - * UniqueRefDenotation A denotation referring to a single definition with some member type - * JointRefDenotation A denotation referring to a member that could resolve to several definitions - * SymDenotation A denotation representing a single definition with its original type, with main fields - * name: Name - * owner: Symbol - * flags: Flags - * privateWithin: Symbol - * annotations: List[Annotation] - * ClassDenotation A denotation representing a single class definition. - */ -object Denotations { - - implicit def eqDenotation: CanEqual[Denotation, Denotation] = CanEqual.derived - - /** A PreDenotation represents a group of single denotations or a single multi-denotation - * It is used as an optimization to avoid forming MultiDenotations too eagerly. - */ - abstract class PreDenotation extends Pure { - - /** A denotation in the group exists */ - def exists: Boolean - - /** First/last denotation in the group */ - def first: Denotation - def last: Denotation - - /** Convert to full denotation by &-ing all elements */ - def toDenot(pre: Type)(using Context): Denotation - - /** Group contains a denotation that refers to given symbol */ - def containsSym(sym: Symbol): Boolean - - /** Group contains a denotation with the same signature as `other` */ - def matches(other: SingleDenotation)(using Context): Boolean - - /** Keep only those denotations in this group which satisfy predicate `p`. */ - def filterWithPredicate(p: SingleDenotation => Boolean): PreDenotation - - /** Keep only those denotations in this group which have a signature - * that's not already defined by `denots`. - */ - def filterDisjoint(denots: PreDenotation)(using Context): PreDenotation - - /** Keep only those inherited members M of this predenotation for which the following is true - * - M is not marked Private - * - If M has a unique symbol, it does not appear in `prevDenots`. - * - M's signature as seen from prefix `pre` does not appear in `ownDenots` - * Return the denotation as seen from `pre`. - * Called from SymDenotations.computeMember. There, `ownDenots` are the denotations found in - * the base class, which shadow any inherited denotations with the same signature. - * `prevDenots` are the denotations that are defined in the class or inherited from - * a base type which comes earlier in the linearization. - */ - def mapInherited(ownDenots: PreDenotation, prevDenots: PreDenotation, pre: Type)(using Context): PreDenotation - - /** Keep only those denotations in this group that have all of the flags in `required`, - * but none of the flags in `excluded`. - */ - def filterWithFlags(required: FlagSet, excluded: FlagSet)(using Context): PreDenotation - - /** Map `f` over all single denotations and aggregate the results with `g`. 
*/ - def aggregate[T](f: SingleDenotation => T, g: (T, T) => T): T - - private var cachedPrefix: Type = _ - private var cachedAsSeenFrom: AsSeenFromResult = _ - private var validAsSeenFrom: Period = Nowhere - - type AsSeenFromResult <: PreDenotation - - /** The denotation with info(s) as seen from prefix type */ - def asSeenFrom(pre: Type)(using Context): AsSeenFromResult = - if (Config.cacheAsSeenFrom) { - if ((cachedPrefix ne pre) || ctx.period != validAsSeenFrom) { - cachedAsSeenFrom = computeAsSeenFrom(pre) - cachedPrefix = pre - validAsSeenFrom = if (pre.isProvisional) Nowhere else ctx.period - } - cachedAsSeenFrom - } - else computeAsSeenFrom(pre) - - protected def computeAsSeenFrom(pre: Type)(using Context): AsSeenFromResult - - /** The union of two groups. */ - def union(that: PreDenotation): PreDenotation = - if (!this.exists) that - else if (!that.exists) this - else DenotUnion(this, that) - } - - /** A denotation is the result of resolving - * a name (either simple identifier or select) during a given period. - * - * Denotations can be combined with `&` and `|`. - * & is conjunction, | is disjunction. - * - * `&` will create an overloaded denotation from two - * non-overloaded denotations if their signatures differ. - * Analogously `|` of two denotations with different signatures will give - * an empty denotation `NoDenotation`. - * - * A denotation might refer to `NoSymbol`. This is the case if the denotation - * was produced from a disjunction of two denotations with different symbols - * and there was no common symbol in a superclass that could substitute for - * both symbols. Here is an example: - * - * Say, we have: - * - * class A { def f: A } - * class B { def f: B } - * val x: A | B = if (test) new A else new B - * val y = x.f - * - * Then the denotation of `y` is `SingleDenotation(NoSymbol, A | B)`. - * - * @param symbol The referencing symbol, or NoSymbol is none exists - */ - abstract class Denotation(val symbol: Symbol, protected var myInfo: Type) extends PreDenotation with printing.Showable { - type AsSeenFromResult <: Denotation - - /** The type info. - * The info is an instance of TypeType iff this is a type denotation - * Uncompleted denotations set myInfo to a LazyType. - */ - final def info(using Context): Type = { - def completeInfo = { // Written this way so that `info` is small enough to be inlined - this.asInstanceOf[SymDenotation].completeFrom(myInfo.asInstanceOf[LazyType]); info - } - if (myInfo.isInstanceOf[LazyType]) completeInfo else myInfo - } - - /** The type info, or, if this is a SymDenotation where the symbol - * is not yet completed, the completer - */ - def infoOrCompleter: Type - - /** The period during which this denotation is valid. */ - def validFor: Period - - /** Is this a reference to a type symbol? */ - def isType: Boolean - - /** Is this a reference to a term symbol? */ - def isTerm: Boolean = !isType - - /** Is this denotation overloaded? */ - final def isOverloaded: Boolean = isInstanceOf[MultiDenotation] - - /** Denotation points to unique symbol; false for overloaded denotations - * and JointRef denotations. - */ - def hasUniqueSym: Boolean - - /** The name of the denotation */ - def name(using Context): Name - - /** The signature of the denotation. */ - def signature(using Context): Signature - - /** Resolve overloaded denotation to pick the ones with the given signature - * when seen from prefix `site`. - * @param relaxed When true, consider only parameter signatures for a match. 
- */ - def atSignature(sig: Signature, targetName: Name, site: Type = NoPrefix, relaxed: Boolean = false)(using Context): Denotation - - /** The variant of this denotation that's current in the given context. - * If no such denotation exists, returns the denotation with each alternative - * at its first point of definition. - */ - def current(using Context): Denotation - - /** Is this denotation different from NoDenotation or an ErrorDenotation? */ - def exists: Boolean = true - - /** A denotation with the info of this denotation transformed using `f` */ - def mapInfo(f: Type => Type)(using Context): Denotation - - /** If this denotation does not exist, fallback to alternative */ - inline def orElse(inline that: Denotation): Denotation = if (this.exists) this else that - - /** The set of alternative single-denotations making up this denotation */ - final def alternatives: List[SingleDenotation] = altsWith(alwaysTrue) - - /** The alternatives of this denotation that satisfy the predicate `p`. */ - def altsWith(p: Symbol => Boolean): List[SingleDenotation] - - /** The unique alternative of this denotation that satisfies the predicate `p`, - * or NoDenotation if no satisfying alternative exists. - * @throws TypeError if there is at more than one alternative that satisfies `p`. - */ - def suchThat(p: Symbol => Boolean)(using Context): SingleDenotation - - override def filterWithPredicate(p: SingleDenotation => Boolean): Denotation - - /** If this is a SingleDenotation, return it, otherwise throw a TypeError */ - def checkUnique(using Context): SingleDenotation = suchThat(alwaysTrue) - - /** Does this denotation have an alternative that satisfies the predicate `p`? */ - def hasAltWith(p: SingleDenotation => Boolean): Boolean - - /** The denotation made up from the alternatives of this denotation that - * are accessible from prefix `pre`, or NoDenotation if no accessible alternative exists. - */ - def accessibleFrom(pre: Type, superAccess: Boolean = false)(using Context): Denotation - - /** Find member of this denotation with given `name`, all `required` - * flags and no `excluded` flag, and produce a denotation that contains the type of the member - * as seen from given prefix `pre`. - */ - def findMember(name: Name, pre: Type, required: FlagSet, excluded: FlagSet)(using Context): Denotation = - info.findMember(name, pre, required, excluded) - - /** If this denotation is overloaded, filter with given predicate. - * If result is still overloaded throw a TypeError. - * Note: disambiguate is slightly different from suchThat in that - * single-denotations that do not satisfy the predicate are left alone - * (whereas suchThat would map them to NoDenotation). - */ - inline def disambiguate(inline p: Symbol => Boolean)(using Context): SingleDenotation = this match { - case sdenot: SingleDenotation => sdenot - case mdenot => suchThat(p) orElse NoQualifyingRef(alternatives) - } - - /** Return symbol in this denotation that satisfies the given predicate. - * if generateStubs is specified, return a stubsymbol if denotation is a missing ref. - * Throw a `TypeError` if predicate fails to disambiguate symbol or no alternative matches. 
- */ - def requiredSymbol(kind: String, - name: Name, - site: Denotation = NoDenotation, - args: List[Type] = Nil, - source: AbstractFile | Null = null, - generateStubs: Boolean = true) - (p: Symbol => Boolean) - (using Context): Symbol = - disambiguate(p) match { - case m @ MissingRef(ownerd, name) if generateStubs => - if ctx.settings.YdebugMissingRefs.value then m.ex.printStackTrace() - newStubSymbol(ownerd.symbol, name, source) - case NoDenotation | _: NoQualifyingRef | _: MissingRef => - def argStr = if (args.isEmpty) "" else i" matching ($args%, %)" - val msg = - if site.exists then em"$site does not have a member $kind $name$argStr" - else em"missing: $kind $name$argStr" - throw TypeError(msg) - case denot => - denot.symbol - } - - def requiredMethod(pname: PreName)(using Context): TermSymbol = { - val name = pname.toTermName - info.member(name).requiredSymbol("method", name, this)(_.is(Method)).asTerm - } - def requiredMethodRef(name: PreName)(using Context): TermRef = - requiredMethod(name).termRef - - def requiredMethod(pname: PreName, argTypes: List[Type])(using Context): TermSymbol = { - val name = pname.toTermName - info.member(name).requiredSymbol("method", name, this, argTypes) { x => - x.is(Method) && { - x.info.paramInfoss match { - case paramInfos :: Nil => paramInfos.corresponds(argTypes)(_ =:= _) - case _ => false - } - } - }.asTerm - } - def requiredMethodRef(name: PreName, argTypes: List[Type])(using Context): TermRef = - requiredMethod(name, argTypes).termRef - - def requiredValue(pname: PreName)(using Context): TermSymbol = { - val name = pname.toTermName - info.member(name).requiredSymbol("field or getter", name, this)(_.info.isParameterless).asTerm - } - def requiredValueRef(name: PreName)(using Context): TermRef = - requiredValue(name).termRef - - def requiredClass(pname: PreName)(using Context): ClassSymbol = { - val name = pname.toTypeName - info.member(name).requiredSymbol("class", name, this)(_.isClass).asClass - } - - def requiredType(pname: PreName)(using Context): TypeSymbol = { - val name = pname.toTypeName - info.member(name).requiredSymbol("type", name, this)(_.isType).asType - } - - /** The alternative of this denotation that has a type matching `targetType` when seen - * as a member of type `site` and that has a target name matching `targetName`, or - * `NoDenotation` if none exists. - */ - def matchingDenotation(site: Type, targetType: Type, targetName: Name)(using Context): SingleDenotation = { - def qualifies(sym: Symbol) = - site.memberInfo(sym).matchesLoosely(targetType) && sym.hasTargetName(targetName) - if (isOverloaded) - atSignature(targetType.signature, targetName, site, relaxed = true) match { - case sd: SingleDenotation => sd.matchingDenotation(site, targetType, targetName) - case md => md.suchThat(qualifies(_)) - } - else if (exists && !qualifies(symbol)) NoDenotation - else asSingleDenotation - } - - /** Form a denotation by conjoining with denotation `that`. - * - * NoDenotations are dropped. MultiDenotations are handled by merging - * parts with same signatures. SingleDenotations with equal signatures - * are joined by following this sequence of steps: - * - * 1. If exactly one the denotations has an inaccessible symbol, pick the other one. - * 2. Otherwise, if one of the infos overrides the other one, and the associated - * symbol does not score strictly lower than the other one, - * pick the associated denotation. - * 3. 
Otherwise, if the two infos can be combined with `infoMeet`, pick that as - * result info, and pick the symbol that scores higher as result symbol, - * or pick `sym1` as a tie breaker. The picked info and symbol are combined - * in a JointDenotation. - * 4. Otherwise, if one of the two symbols scores strongly higher than the - * other one, pick the associated denotation. - * 5. Otherwise return a multi-denotation consisting of both denotations. - * - * Symbol scoring is determined according to the following ranking - * where earlier criteria trump later ones. Cases marked with (*) - * give a strong score advantage, the others a weak one. - * - * 1. The symbol exists, and the other one does not. (*) - * 2. The symbol is not a bridge, but the other one is. (*) - * 3. The symbol is concrete, and the other one is deferred - * 4. The symbol appears before the other in the linearization of `pre` - * 5. The symbol's visibility is strictly greater than the other one's. - * 6. The symbol is a method, but the other one is not. - */ - def meet(that: Denotation, pre: Type, safeIntersection: Boolean = false)(using Context): Denotation = { - /** Try to merge denot1 and denot2 without adding a new signature. */ - def mergeDenot(denot1: Denotation, denot2: SingleDenotation): Denotation = denot1 match { - case denot1 @ MultiDenotation(denot11, denot12) => - val d1 = mergeDenot(denot11, denot2) - if (d1.exists) denot1.derivedUnionDenotation(d1, denot12) - else { - val d2 = mergeDenot(denot12, denot2) - if (d2.exists) denot1.derivedUnionDenotation(denot11, d2) - else NoDenotation - } - case denot1: SingleDenotation => - if (denot1 eq denot2) denot1 - else if denot1.matches(denot2) then mergeSingleDenot(denot1, denot2) - else NoDenotation - } - - /** Try to merge single-denotations. */ - def mergeSingleDenot(denot1: SingleDenotation, denot2: SingleDenotation): Denotation = - val info1 = denot1.info - val info2 = denot2.info - val sym1 = denot1.symbol - val sym2 = denot2.symbol - - /** Does `owner1` come before `owner2` in the linearization of `pre`? */ - def linearScore(owner1: Symbol, owner2: Symbol): Int = - - def searchBaseClasses(bcs: List[ClassSymbol]): Int = bcs match - case bc :: bcs1 => - if bc eq owner1 then 1 - else if bc eq owner2 then -1 - else searchBaseClasses(bcs1) - case Nil => 0 - - if owner1 eq owner2 then 0 - else if owner1.derivesFrom(owner2) then 1 - else if owner2.derivesFrom(owner1) then -1 - else searchBaseClasses(pre.baseClasses) - end linearScore - - /** Similar to SymDenotation#accessBoundary, but without the special cases. */ - def accessBoundary(sym: Symbol) = - if (sym.is(Private)) sym.owner - else sym.privateWithin.orElse( - if (sym.is(Protected)) sym.owner.enclosingPackageClass - else defn.RootClass) - - def isHidden(sym: Symbol) = sym.exists && !sym.isAccessibleFrom(pre) - // In typer phase filter out denotations with symbols that are not - // accessible. After typer, this is not possible since we cannot guarantee - // that the current owner is set correctly. See pos/14660.scala. - val hidden1 = isHidden(sym1) && ctx.isTyper - val hidden2 = isHidden(sym2) && ctx.isTyper - if hidden1 && !hidden2 then denot2 - else if hidden2 && !hidden1 then denot1 - else - // The score that determines which symbol to pick for the result denotation. - // A value > 0 means pick `sym1`, < 0 means pick `sym2`. - // A value of +/- 2 means pick one of the denotations as a tie-breaker - // if a common info does not exist. 
- val symScore: Int = - if !sym1.exists then -2 - else if !sym2.exists then 2 - else if sym1.is(Bridge) && !sym2.is(Bridge) then -2 - else if sym2.is(Bridge) && !sym1.is(Bridge) then 2 - else if !sym1.isAsConcrete(sym2) then -1 - else if !sym2.isAsConcrete(sym1) then 1 - else - val linScore = linearScore(sym1.owner, sym2.owner) - if linScore != 0 then linScore - else - val boundary1 = accessBoundary(sym1) - val boundary2 = accessBoundary(sym2) - if boundary1.isProperlyContainedIn(boundary2) then -1 - else if boundary2.isProperlyContainedIn(boundary1) then 1 - else if sym2.is(Method) && !sym1.is(Method) then -1 - else if sym1.is(Method) && !sym2.is(Method) then 1 - else 0 - - val relaxedOverriding = ctx.explicitNulls && (sym1.is(JavaDefined) || sym2.is(JavaDefined)) - val matchLoosely = sym1.matchNullaryLoosely || sym2.matchNullaryLoosely - - if symScore <= 0 && info2.overrides(info1, relaxedOverriding, matchLoosely, checkClassInfo = false) then - denot2 - else if symScore >= 0 && info1.overrides(info2, relaxedOverriding, matchLoosely, checkClassInfo = false) then - denot1 - else - val jointInfo = infoMeet(info1, info2, safeIntersection) - if jointInfo.exists then - val sym = if symScore >= 0 then sym1 else sym2 - JointRefDenotation(sym, jointInfo, denot1.validFor & denot2.validFor, pre, denot1.isRefinedMethod || denot2.isRefinedMethod) - else if symScore == 2 then denot1 - else if symScore == -2 then denot2 - else - overload.println(i"overloaded with same signature: ${sym1.showLocated}: $info1 / ${sym2.showLocated}: $info2, info = ${info1.getClass}, ${info2.getClass}, $jointInfo") - MultiDenotation(denot1, denot2) - end mergeSingleDenot - - if (this eq that) this - else if (!this.exists) that - else if (!that.exists) this - else that match { - case that: SingleDenotation => - val r = mergeDenot(this, that) - if (r.exists) r else MultiDenotation(this, that) - case that @ MultiDenotation(denot1, denot2) => - this.meet(denot1, pre).meet(denot2, pre) - } - } - - final def asSingleDenotation: SingleDenotation = asInstanceOf[SingleDenotation] - final def asSymDenotation: SymDenotation = asInstanceOf[SymDenotation] - - def toText(printer: Printer): Text = printer.toText(this) - - // ------ PreDenotation ops ---------------------------------------------- - - final def toDenot(pre: Type)(using Context): Denotation = this - final def containsSym(sym: Symbol): Boolean = hasUniqueSym && (symbol eq sym) - } - - // ------ Info meets ---------------------------------------------------- - - /** Merge parameter names of lambda types. If names in corresponding positions match, keep them, - * otherwise generate new synthetic names. - */ - private def mergeParamNames(tp1: LambdaType, tp2: LambdaType): List[tp1.ThisName] = - (for ((name1, name2, idx) <- tp1.paramNames.lazyZip(tp2.paramNames).lazyZip(tp1.paramNames.indices)) - yield if (name1 == name2) name1 else tp1.companion.syntheticParamName(idx)).toList - - /** Normally, `tp1 & tp2`, with extra care taken to return `tp1` or `tp2` directly if that's - * a valid answer. Special cases for matching methods and classes, with - * the possibility of returning NoType. Special handling of ExprTypes, where mixed - * intersections widen the ExprType away. 
- */ - def infoMeet(tp1: Type, tp2: Type, safeIntersection: Boolean)(using Context): Type = - if tp1 eq tp2 then tp1 - else tp1 match - case tp1: TypeBounds => - tp2 match - case tp2: TypeBounds => if safeIntersection then tp1 safe_& tp2 else tp1 & tp2 - case tp2: ClassInfo => tp2 - case _ => NoType - case tp1: ClassInfo => - tp2 match - case tp2: ClassInfo if tp1.cls eq tp2.cls => tp1.derivedClassInfo(tp1.prefix & tp2.prefix) - case tp2: TypeBounds => tp1 - case _ => NoType - case tp1: MethodType => - tp2 match - case tp2: MethodType - if TypeComparer.matchingMethodParams(tp1, tp2) - && tp1.isImplicitMethod == tp2.isImplicitMethod - && tp1.isErasedMethod == tp2.isErasedMethod => - val resType = infoMeet(tp1.resType, tp2.resType.subst(tp2, tp1), safeIntersection) - if resType.exists then - tp1.derivedLambdaType(mergeParamNames(tp1, tp2), tp1.paramInfos, resType) - else NoType - case _ => NoType - case tp1: PolyType => - tp2 match - case tp2: PolyType if tp1.paramNames.hasSameLengthAs(tp2.paramNames) => - val resType = infoMeet(tp1.resType, tp2.resType.subst(tp2, tp1), safeIntersection) - if resType.exists then - tp1.derivedLambdaType( - mergeParamNames(tp1, tp2), - tp1.paramInfos.zipWithConserve(tp2.paramInfos)( _ & _ ), - resType) - else NoType - case _ => NoType - case ExprType(rtp1) => - tp2 match - case ExprType(rtp2) => ExprType(rtp1 & rtp2) - case _ => infoMeet(rtp1, tp2, safeIntersection) - case _ => - tp2 match - case _: MethodType | _: PolyType => NoType - case _ => tp1 & tp2.widenExpr - end infoMeet - - /** A non-overloaded denotation */ - abstract class SingleDenotation(symbol: Symbol, initInfo: Type) extends Denotation(symbol, initInfo) { - protected def newLikeThis(symbol: Symbol, info: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation - - final def name(using Context): Name = symbol.name - - /** For SymDenotation, this is NoPrefix. For other denotations this is the prefix - * under which the denotation was constructed. - * - * Note that `asSeenFrom` might return a `SymDenotation` and therefore in - * general one cannot rely on `prefix` being set, see - * `Config.reuseSymDenotations` for details. - */ - def prefix: Type = NoPrefix - - /** True if the info of this denotation comes from a refinement. */ - def isRefinedMethod: Boolean = false - - /** For SymDenotations, the language-specific signature of the info, depending on - * where the symbol is defined. For non-SymDenotations, the Scala 3 - * signature. - * - * Invariants: - * - Before erasure, the signature of a denotation is always equal to the - * signature of its corresponding initial denotation. - * - Two distinct overloads will have SymDenotations with distinct - * signatures (the SELECTin tag in Tasty relies on this to refer to an - * overload unambiguously). Note that this only applies to - * SymDenotations, in general we cannot assume that distinct - * SingleDenotations will have distinct signatures (cf #9050). - */ - final def signature(using Context): Signature = - signature(sourceLanguage = if isType || !this.isInstanceOf[SymDenotation] then SourceLanguage.Scala3 else SourceLanguage(symbol)) - - /** Overload of `signature` which lets the caller pick the language used - * to compute the signature of the info. Useful to match denotations defined in - * different classes (see `matchesLoosely`). 
- */ - def signature(sourceLanguage: SourceLanguage)(using Context): Signature = - if (isType) Signature.NotAMethod // don't force info if this is a type denotation - else info match { - case info: MethodOrPoly => - try info.signature(sourceLanguage) - catch { // !!! DEBUG - case scala.util.control.NonFatal(ex) => - report.echo(s"cannot take signature of $info") - throw ex - } - case _ => Signature.NotAMethod - } - - def derivedSingleDenotation(symbol: Symbol, info: Type, pre: Type = this.prefix, isRefinedMethod: Boolean = this.isRefinedMethod)(using Context): SingleDenotation = - if ((symbol eq this.symbol) && (info eq this.info) && (pre eq this.prefix) && (isRefinedMethod == this.isRefinedMethod)) this - else newLikeThis(symbol, info, pre, isRefinedMethod) - - def mapInfo(f: Type => Type)(using Context): SingleDenotation = - derivedSingleDenotation(symbol, f(info)) - - inline def orElse(inline that: SingleDenotation): SingleDenotation = if (this.exists) this else that - - def altsWith(p: Symbol => Boolean): List[SingleDenotation] = - if (exists && p(symbol)) this :: Nil else Nil - - def suchThat(p: Symbol => Boolean)(using Context): SingleDenotation = - if (exists && p(symbol)) this else NoDenotation - - def hasAltWith(p: SingleDenotation => Boolean): Boolean = - exists && p(this) - - def accessibleFrom(pre: Type, superAccess: Boolean)(using Context): Denotation = - if (!symbol.exists || symbol.isAccessibleFrom(pre, superAccess)) this else NoDenotation - - def atSignature(sig: Signature, targetName: Name, site: Type, relaxed: Boolean)(using Context): SingleDenotation = - val situated = if site == NoPrefix then this else asSeenFrom(site) - val sigMatches = sig.matchDegree(situated.signature) match - case FullMatch => - true - case MethodNotAMethodMatch => - // See comment in `matches` - relaxed && !symbol.is(JavaDefined) - case ParamMatch => - relaxed - case noMatch => - false - if sigMatches && symbol.hasTargetName(targetName) then this else NoDenotation - - def matchesImportBound(bound: Type)(using Context): Boolean = - if bound.isRef(defn.NothingClass) then false - else if bound.isAny then true - else NoViewsAllowed.normalizedCompatible(info, bound, keepConstraint = false) - - // ------ Transformations ----------------------------------------- - - private var myValidFor: Period = Nowhere - - def validFor: Period = myValidFor - def validFor_=(p: Period): Unit = { - myValidFor = p - symbol.invalidateDenotCache() - } - - /** The next SingleDenotation in this run, with wrap-around from last to first. - * - * There may be several `SingleDenotation`s with different validity - * representing the same underlying definition at different phases. - * These are called a "flock". Flock members are generated by - * @See current. Flock members are connected in a ring - * with their `nextInRun` fields. - * - * There are the following invariants concerning flock members - * - * 1) validity periods are non-overlapping - * 2) the union of all validity periods is a contiguous - * interval. - */ - protected var nextInRun: SingleDenotation = this - - /** The version of this SingleDenotation that was valid in the first phase - * of this run. 
- */ - def initial: SingleDenotation = - if (validFor.firstPhaseId <= 1) this - else { - var current = nextInRun - while (current.validFor.code > this.myValidFor.code) current = current.nextInRun - current - } - - def history: List[SingleDenotation] = { - val b = new ListBuffer[SingleDenotation] - var current = initial - while ({ - b += (current) - current = current.nextInRun - current ne initial - }) - () - b.toList - } - - /** Invalidate all caches and fields that depend on base classes and their contents */ - def invalidateInheritedInfo(): Unit = () - - private def updateValidity()(using Context): this.type = { - assert( - ctx.runId >= validFor.runId - || ctx.settings.YtestPickler.value // mixing test pickler with debug printing can travel back in time - || ctx.mode.is(Mode.Printing) // no use to be picky when printing error messages - || symbol.isOneOf(ValidForeverFlags), - s"denotation $this invalid in run ${ctx.runId}. ValidFor: $validFor") - var d: SingleDenotation = this - while ({ - d.validFor = Period(ctx.runId, d.validFor.firstPhaseId, d.validFor.lastPhaseId) - d.invalidateInheritedInfo() - d = d.nextInRun - d ne this - }) - () - this - } - - /** Move validity period of this denotation to a new run. Throw a StaleSymbol error - * if denotation is no longer valid. - * However, StaleSymbol error is not thrown in the following situations: - * - * - If acceptStale returns true (e.g. because we are in the IDE), - * update the symbol to the new version if it exists, or return - * the old version otherwise. - * - If the symbol did not have a denotation that was defined at the current phase - * return a NoDenotation instead. - */ - private def bringForward()(using Context): SingleDenotation = { - this match { - case symd: SymDenotation => - if (stillValid(symd)) return updateValidity() - if acceptStale(symd) && symd.initial.validFor.firstPhaseId <= ctx.lastPhaseId then - // New run might have fewer phases than old, so symbol might no longer be - // visible at all. TabCompleteTests have examples where this happens. - return symd.currentSymbol.denot.orElse(symd).updateValidity() - case _ => - } - if (!symbol.exists) return updateValidity() - if (!coveredInterval.containsPhaseId(ctx.phaseId)) return NoDenotation - if (ctx.debug) traceInvalid(this) - staleSymbolError - } - - /** The next defined denotation (following `nextInRun`) or an arbitrary - * undefined denotation, if all denotations in a `nextinRun` cycle are - * undefined. - */ - private def nextDefined: SingleDenotation = { - var p1 = this - var p2 = nextInRun - while (p1.validFor == Nowhere && (p1 ne p2)) { - p1 = p1.nextInRun - p2 = p2.nextInRun.nextInRun - } - p1 - } - - /** Skip any denotations that have been removed by an installAfter or that - * are otherwise undefined. - */ - def skipRemoved(using Context): SingleDenotation = - if (myValidFor.code <= 0) nextDefined else this - - /** Produce a denotation that is valid for the given context. - * Usually called when !(validFor contains ctx.period) - * (even though this is not a precondition). - * If the runId of the context is the same as runId of this denotation, - * the right flock member is located, or, if it does not exist yet, - * created by invoking a transformer (@See Transformers). - * If the runId's differ, but this denotation is a SymDenotation - * and its toplevel owner class or module - * is still a member of its enclosing package, then the whole flock - * is brought forward to be valid in the new runId. 
Otherwise - * the symbol is stale, which constitutes an internal error. - */ - def current(using Context): SingleDenotation = - util.Stats.record("current") - val currentPeriod = ctx.period - val valid = myValidFor - - def assertNotPackage(d: SingleDenotation, transformer: DenotTransformer) = d match - case d: ClassDenotation => - assert(!d.is(Package), s"illegal transformation of package denotation by transformer $transformer") - case _ => - - def escapeToNext = nextDefined.ensuring(_.validFor != Nowhere) - - def toNewRun = - util.Stats.record("current.bringForward") - if exists then initial.bringForward().current else this - - def goForward = - var cur = this - // search for containing period as long as nextInRun increases. - var next = nextInRun - while next.validFor.code > valid.code && !(next.validFor contains currentPeriod) do - cur = next - next = next.nextInRun - if next.validFor.code > valid.code then - // in this case, next.validFor contains currentPeriod - cur = next - cur - else - //println(s"might need new denot for $cur, valid for ${cur.validFor} at $currentPeriod") - // not found, cur points to highest existing variant - val nextTransformerId = ctx.base.nextDenotTransformerId(cur.validFor.lastPhaseId) - if currentPeriod.lastPhaseId <= nextTransformerId then - cur.validFor = Period(currentPeriod.runId, cur.validFor.firstPhaseId, nextTransformerId) - else - var startPid = nextTransformerId + 1 - val transformer = ctx.base.denotTransformers(nextTransformerId) - //println(s"transforming $this with $transformer") - val savedPeriod = ctx.period - val mutCtx = ctx.asInstanceOf[FreshContext] - try - mutCtx.setPhase(transformer) - next = transformer.transform(cur) - // We temporarily update the context with the new phase instead of creating a - // new one. This is done for performance. We cut down on about 30% of context - // creations that way, and also avoid phase caches in contexts to get large. - // To work correctly, we need to demand that the context with the new phase - // is not retained in the result. - catch case ex: CyclicReference => - // println(s"error while transforming $this") - throw ex - finally - mutCtx.setPeriod(savedPeriod) - if next eq cur then - startPid = cur.validFor.firstPhaseId - else - assertNotPackage(next, transformer) - next.insertAfter(cur) - cur = next - cur.validFor = Period(currentPeriod.runId, startPid, transformer.lastPhaseId) - //printPeriods(cur) - //println(s"new denot: $cur, valid for ${cur.validFor}") - cur.current // multiple transformations could be required - end goForward - - def goBack: SingleDenotation = - // currentPeriod < end of valid; in this case a version must exist - // but to be defensive we check for infinite loop anyway - var cur = this - var cnt = 0 - while !(cur.validFor contains currentPeriod) do - //println(s"searching: $cur at $currentPeriod, valid for ${cur.validFor}") - cur = cur.nextInRun - // Note: One might be tempted to add a `prev` field to get to the new denotation - // more directly here. I tried that, but it degrades rather than improves - // performance: Test setup: Compile everything in dotc and immediate subdirectories - // 10 times. Best out of 10: 18154ms with `prev` field, 17777ms without. - cnt += 1 - if cnt > MaxPossiblePhaseId then - return atPhase(coveredInterval.firstPhaseId)(current) - cur - end goBack - - if valid.code <= 0 then - // can happen if we sit on a stale denotation which has been replaced - // wholesale by an installAfter; in this case, proceed to the next - // denotation and try again. 
- escapeToNext - else if valid.runId != currentPeriod.runId then - toNewRun - else if currentPeriod.code > valid.code then - goForward - else - goBack - end current - - private def demandOutsideDefinedMsg(using Context): String = - s"demanding denotation of $this at phase ${ctx.phase}(${ctx.phaseId}) outside defined interval: defined periods are${definedPeriodsString}" - - /** Install this denotation to be the result of the given denotation transformer. - * This is the implementation of the same-named method in SymDenotations. - * It's placed here because it needs access to private fields of SingleDenotation. - * @pre Can only be called in `phase.next`. - */ - protected def installAfter(phase: DenotTransformer)(using Context): Unit = { - val targetId = phase.next.id - if (ctx.phaseId != targetId) atPhase(phase.next)(installAfter(phase)) - else { - val current = symbol.current - // println(s"installing $this after $phase/${phase.id}, valid = ${current.validFor}") - // printPeriods(current) - this.validFor = Period(ctx.runId, targetId, current.validFor.lastPhaseId) - if (current.validFor.firstPhaseId >= targetId) - current.replaceWith(this) - else { - current.validFor = Period(ctx.runId, current.validFor.firstPhaseId, targetId - 1) - insertAfter(current) - } - } - // printPeriods(this) - } - - /** Apply a transformation `f` to all denotations in this group that start at or after - * given phase. Denotations are replaced while keeping the same validity periods. - */ - protected def transformAfter(phase: DenotTransformer, f: SymDenotation => SymDenotation)(using Context): Unit = { - var current = symbol.current - while (current.validFor.firstPhaseId < phase.id && (current.nextInRun.validFor.code > current.validFor.code)) - current = current.nextInRun - var hasNext = true - while ((current.validFor.firstPhaseId >= phase.id) && hasNext) { - val current1: SingleDenotation = f(current.asSymDenotation) - if (current1 ne current) { - current1.validFor = current.validFor - current.replaceWith(current1) - } - hasNext = current1.nextInRun.validFor.code > current1.validFor.code - current = current1.nextInRun - } - } - - /** Insert this denotation so that it follows `prev`. */ - private def insertAfter(prev: SingleDenotation) = { - this.nextInRun = prev.nextInRun - prev.nextInRun = this - } - - /** Insert this denotation instead of `old`. - * Also ensure that `old` refers with `nextInRun` to this denotation - * and set its `validFor` field to `Nowhere`. This is necessary so that - * references to the old denotation can be brought forward via `current` - * to a valid denotation. - * - * The code to achieve this is subtle in that it works correctly - * whether the replaced denotation is the only one in its cycle or not. - */ - private[dotc] def replaceWith(newd: SingleDenotation): Unit = { - var prev = this - while (prev.nextInRun ne this) prev = prev.nextInRun - // order of next two assignments is important! - prev.nextInRun = newd - newd.nextInRun = nextInRun - validFor = Nowhere - nextInRun = newd - } - - def staleSymbolError(using Context): Nothing = - inDetachedContext: - throw new StaleSymbol(staleSymbolMsg) - - def staleSymbolMsg(using Context): String = { - def ownerMsg = this match { - case denot: SymDenotation => s"in ${denot.owner}" - case _ => "" - } - s"stale symbol; $this#${symbol.id} $ownerMsg, defined in ${myValidFor}, is referred to in run ${ctx.period}" - } - - /** The period (interval of phases) for which there exists - * a valid denotation in this flock. 
- */ - def coveredInterval(using Context): Period = { - var cur = this - var cnt = 0 - var interval = validFor - while ({ - cur = cur.nextInRun - cnt += 1 - assert(cnt <= MaxPossiblePhaseId, demandOutsideDefinedMsg) - interval |= cur.validFor - cur ne this - }) - () - interval - } - - /** Show declaration string; useful for showing declarations - * as seen from subclasses. - */ - def showDcl(using Context): String = ctx.printer.dclText(this).show - - override def toString: String = - if (symbol == NoSymbol) symbol.toString - else s"" - - def definedPeriodsString: String = { - var sb = new StringBuilder() - var cur = this - var cnt = 0 - while ({ - sb.append(" " + cur.validFor) - cur = cur.nextInRun - cnt += 1 - if (cnt > MaxPossiblePhaseId) { sb.append(" ..."); cur = this } - cur ne this - }) - () - sb.toString - } - - // ------ PreDenotation ops ---------------------------------------------- - - final def first: SingleDenotation = this - final def last: SingleDenotation = this - - def matches(other: SingleDenotation)(using Context): Boolean = - symbol.hasTargetName(other.symbol.targetName) - && matchesLoosely(other) - - /** `matches` without a target name check. - * - * For definitions coming from different languages, we pick a common - * language to compute their signatures. This allows us for example to - * override some Java definitions from Scala even if they have a different - * erasure (see i8615b, i9109b), Erasure takes care of adding any necessary - * bridge to make this work at runtime. - */ - def matchesLoosely(other: SingleDenotation, alwaysCompareTypes: Boolean = false)(using Context): Boolean = - if isType then true - else - val thisLanguage = SourceLanguage(symbol) - val otherLanguage = SourceLanguage(other.symbol) - val commonLanguage = SourceLanguage.commonLanguage(thisLanguage, otherLanguage) - val sig = signature(commonLanguage) - val otherSig = other.signature(commonLanguage) - sig.matchDegree(otherSig) match - case FullMatch => - !alwaysCompareTypes || info.matches(other.info) - case MethodNotAMethodMatch => - !ctx.erasedTypes && { - // A Scala zero-parameter method and a Scala non-method always match. - if !thisLanguage.isJava && !otherLanguage.isJava then - true - // Java allows defining both a field and a zero-parameter method with the same name, - // so they must not match. - else if thisLanguage.isJava && otherLanguage.isJava then - false - // A Java field never matches a Scala method. 
- else if thisLanguage.isJava then - symbol.is(Method) - else // otherLanguage.isJava - other.symbol.is(Method) - } - case ParamMatch => - // The signatures do not tell us enough to be sure about matching - !ctx.erasedTypes && info.matches(other.info) - case noMatch => - false - - def mapInherited(ownDenots: PreDenotation, prevDenots: PreDenotation, pre: Type)(using Context): SingleDenotation = - if hasUniqueSym && prevDenots.containsSym(symbol) then NoDenotation - else if isType then filterDisjoint(ownDenots).asSeenFrom(pre) - else asSeenFrom(pre).filterDisjoint(ownDenots) - - def filterWithPredicate(p: SingleDenotation => Boolean): SingleDenotation = - if (p(this)) this else NoDenotation - def filterDisjoint(denots: PreDenotation)(using Context): SingleDenotation = - if (denots.exists && denots.matches(this)) NoDenotation else this - def filterWithFlags(required: FlagSet, excluded: FlagSet)(using Context): SingleDenotation = - val realExcluded = if ctx.isAfterTyper then excluded else excluded | Invisible - def symd: SymDenotation = this match - case symd: SymDenotation => symd - case _ => symbol.denot - if !required.isEmpty && !symd.isAllOf(required) - || symd.isOneOf(realExcluded) then NoDenotation - else this - def aggregate[T](f: SingleDenotation => T, g: (T, T) => T): T = f(this) - - type AsSeenFromResult = SingleDenotation - - protected def computeAsSeenFrom(pre: Type)(using Context): SingleDenotation = { - val symbol = this.symbol - val owner = this match { - case thisd: SymDenotation => thisd.owner - case _ => if (symbol.exists) symbol.owner else NoSymbol - } - - /** The derived denotation with the given `info` transformed with `asSeenFrom`. - * - * As a performance hack, we might reuse an existing SymDenotation, - * instead of creating a new denotation with a given `prefix`, - * see `Config.reuseSymDenotations`. - */ - def derived(info: Type) = - /** Do we need to return a denotation with a prefix set? */ - def needsPrefix = - // For opaque types, the prefix is used in `ElimOpaques#transform`, - // without this i7159.scala would fail when compiled from tasty. - symbol.is(Opaque) - - val derivedInfo = info.asSeenFrom(pre, owner) - if Config.reuseSymDenotations && this.isInstanceOf[SymDenotation] - && (derivedInfo eq info) && !needsPrefix then - this - else - derivedSingleDenotation(symbol, derivedInfo, pre) - end derived - - // Tt could happen that we see the symbol with prefix `this` as a member a different class - // through a self type and that it then has a different info. In this case we have to go - // through the asSeenFrom to switch the type back. Test case is pos/i9352.scala. - def hasOriginalInfo: Boolean = this match - case sd: SymDenotation => true - case _ => info eq symbol.info - - def ownerIsPrefix = pre match - case pre: ThisType => pre.sameThis(owner.thisType) - case _ => false - - if !owner.membersNeedAsSeenFrom(pre) && (!ownerIsPrefix || hasOriginalInfo) - || symbol.is(NonMember) - then this - else if symbol.isAllOf(ClassTypeParam) then - val arg = symbol.typeRef.argForParam(pre, widenAbstract = true) - if arg.exists - then derivedSingleDenotation(symbol, normalizedArgBounds(arg.bounds), pre) - else derived(symbol.info) - else derived(symbol.info) - } - - /** The argument bounds, possibly intersected with the parameter's info TypeBounds, - * if the latter is not F-bounded and does not refer to other type parameters - * of the same class, and the intersection is provably nonempty. 
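As an aside, a minimal standalone sketch of the rule described in the comment above, with plain integer intervals standing in for TypeBounds (all names here are illustrative, not dotc API): the meet of the declared parameter bounds and the argument bounds is kept only when it is provably nonempty, otherwise the argument bounds are used unchanged.

object NormalizedBoundsSketch:
  // Illustrative stand-in for TypeBounds: an inclusive integer interval.
  final case class Bounds(lo: Int, hi: Int):
    def &(that: Bounds): Bounds = Bounds(lo max that.lo, hi min that.hi)

  def normalized(paramBounds: Bounds, argBounds: Bounds): Bounds =
    val combined = paramBounds & argBounds
    if combined.lo <= combined.hi then combined // provably nonempty: keep the meet
    else argBounds                              // otherwise fall back to the argument bounds

  @main def normalizedBoundsDemo(): Unit =
    println(normalized(Bounds(0, 10), Bounds(3, 20))) // Bounds(3,10)
    println(normalized(Bounds(0, 2), Bounds(5, 20)))  // empty meet, so Bounds(5,20)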
- */ - private def normalizedArgBounds(argBounds: TypeBounds)(using Context): TypeBounds = - if symbol.isCompleted && !hasBoundsDependingOnParamsOf(symbol.owner) then - val combined @ TypeBounds(lo, hi) = symbol.info.bounds & argBounds - if (lo frozen_<:< hi) then combined - else argBounds - else argBounds - - private def hasBoundsDependingOnParamsOf(cls: Symbol)(using Context): Boolean = - val acc = new TypeAccumulator[Boolean]: - def apply(x: Boolean, tp: Type): Boolean = tp match - case _: LazyRef => true - case tp: TypeRef - if tp.symbol.isAllOf(ClassTypeParam) && tp.symbol.owner == cls => true - case _ => foldOver(x, tp) - acc(false, symbol.info) - } - - abstract class NonSymSingleDenotation(symbol: Symbol, initInfo: Type, override val prefix: Type) extends SingleDenotation(symbol, initInfo) { - def infoOrCompleter: Type = initInfo - def isType: Boolean = infoOrCompleter.isInstanceOf[TypeType] - } - - class UniqueRefDenotation( - symbol: Symbol, - initInfo: Type, - initValidFor: Period, - prefix: Type) extends NonSymSingleDenotation(symbol, initInfo, prefix) { - validFor = initValidFor - override def hasUniqueSym: Boolean = true - protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation = - if isRefinedMethod then - new JointRefDenotation(s, i, validFor, pre, isRefinedMethod) - else - new UniqueRefDenotation(s, i, validFor, pre) - } - - class JointRefDenotation( - symbol: Symbol, - initInfo: Type, - initValidFor: Period, - prefix: Type, - override val isRefinedMethod: Boolean) extends NonSymSingleDenotation(symbol, initInfo, prefix) { - validFor = initValidFor - override def hasUniqueSym: Boolean = false - protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation = - new JointRefDenotation(s, i, validFor, pre, isRefinedMethod) - } - - class ErrorDenotation(using DetachedContext) extends NonSymSingleDenotation(NoSymbol, NoType, NoType) { - override def exists: Boolean = false - override def hasUniqueSym: Boolean = false - validFor = Period.allInRun(ctx.runId) - protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation = - this - } - - /** An error denotation that provides more info about the missing reference. - * Produced by staticRef, consumed by requiredSymbol. - */ - case class MissingRef(val owner: SingleDenotation, name: Name)(using DetachedContext) extends ErrorDenotation { - val ex: Exception = new Exception // DEBUG - } - - /** An error denotation that provides more info about alternatives - * that were found but that do not qualify. - * Produced by staticRef, consumed by requiredSymbol. 
- */ - case class NoQualifyingRef(alts: List[SingleDenotation])(using DetachedContext) extends ErrorDenotation - - /** A double definition - */ - def isDoubleDef(sym1: Symbol, sym2: Symbol)(using Context): Boolean = - (sym1.exists && sym2.exists && - (sym1 `ne` sym2) && (sym1.effectiveOwner `eq` sym2.effectiveOwner) && - !sym1.is(Bridge) && !sym2.is(Bridge)) - - // --- Overloaded denotations and predenotations ------------------------------------------------- - - trait MultiPreDenotation extends PreDenotation { - def denot1: PreDenotation - def denot2: PreDenotation - - assert(denot1.exists && denot2.exists, s"Union of non-existing denotations ($denot1) and ($denot2)") - def first: Denotation = denot1.first - def last: Denotation = denot2.last - def matches(other: SingleDenotation)(using Context): Boolean = - denot1.matches(other) || denot2.matches(other) - def mapInherited(owndenot: PreDenotation, prevdenot: PreDenotation, pre: Type)(using Context): PreDenotation = - derivedUnion(denot1.mapInherited(owndenot, prevdenot, pre), denot2.mapInherited(owndenot, prevdenot, pre)) - def filterWithPredicate(p: SingleDenotation => Boolean): PreDenotation = - derivedUnion(denot1 filterWithPredicate p, denot2 filterWithPredicate p) - def filterDisjoint(denot: PreDenotation)(using Context): PreDenotation = - derivedUnion(denot1 filterDisjoint denot, denot2 filterDisjoint denot) - def filterWithFlags(required: FlagSet, excluded: FlagSet)(using Context): PreDenotation = - derivedUnion(denot1.filterWithFlags(required, excluded), denot2.filterWithFlags(required, excluded)) - def aggregate[T](f: SingleDenotation => T, g: (T, T) => T): T = - g(denot1.aggregate(f, g), denot2.aggregate(f, g)) - protected def derivedUnion(denot1: PreDenotation, denot2: PreDenotation) = - if ((denot1 eq this.denot1) && (denot2 eq this.denot2)) this - else denot1 union denot2 - } - - final case class DenotUnion(denot1: PreDenotation, denot2: PreDenotation) extends MultiPreDenotation { - def exists: Boolean = true - def toDenot(pre: Type)(using Context): Denotation = - denot1.toDenot(pre).meet(denot2.toDenot(pre), pre) - def containsSym(sym: Symbol): Boolean = - (denot1 containsSym sym) || (denot2 containsSym sym) - type AsSeenFromResult = PreDenotation - def computeAsSeenFrom(pre: Type)(using Context): PreDenotation = - derivedUnion(denot1.asSeenFrom(pre), denot2.asSeenFrom(pre)) - } - - /** An overloaded denotation consisting of the alternatives of both given denotations. 
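For orientation, a hedged toy model (not dotc code, every name below is made up) of what an overloaded denotation means operationally: all alternatives are retained, and selecting one by a predicate only succeeds when exactly one alternative qualifies, mirroring the TypeError raised by suchThat in the definition that follows.

object OverloadSketch:
  // Each alternative is represented here by an illustrative signature string.
  final case class MultiAlt(alternatives: List[String]):
    def suchThat(p: String => Boolean): String =
      alternatives.filter(p) match
        case single :: Nil => single
        case Nil           => sys.error("no matching alternative")
        case several       => sys.error(s"failure to disambiguate: $several")

  @main def overloadDemo(): Unit =
    val ref = MultiAlt(List("apply(x: Int): String", "apply(x: String): String"))
    println(ref.suchThat(_.contains("x: Int"))) // the unique match is returned
    // ref.suchThat(_.startsWith("apply"))      // would fail: still ambiguous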
- */ - case class MultiDenotation(denot1: Denotation, denot2: Denotation) extends Denotation(NoSymbol, NoType) with MultiPreDenotation { - final def infoOrCompleter: Type = multiHasNot("info") - final def validFor: Period = denot1.validFor & denot2.validFor - final def isType: Boolean = false - final def hasUniqueSym: Boolean = false - final def name(using Context): Name = denot1.name - final def signature(using Context): Signature = Signature.OverloadedSignature - def atSignature(sig: Signature, targetName: Name, site: Type, relaxed: Boolean)(using Context): Denotation = - if (sig eq Signature.OverloadedSignature) this - else derivedUnionDenotation( - denot1.atSignature(sig, targetName, site, relaxed), - denot2.atSignature(sig, targetName, site, relaxed)) - def current(using Context): Denotation = - derivedUnionDenotation(denot1.current, denot2.current) - def altsWith(p: Symbol => Boolean): List[SingleDenotation] = - denot1.altsWith(p) ++ denot2.altsWith(p) - def suchThat(p: Symbol => Boolean)(using Context): SingleDenotation = { - val sd1 = denot1.suchThat(p) - val sd2 = denot2.suchThat(p) - if sd1.exists then - if sd2.exists then - throw TypeError( - em"""Failure to disambiguate overloaded reference with - | ${denot1.symbol.showLocated}: ${denot1.info} and - | ${denot2.symbol.showLocated}: ${denot2.info}""") - else sd1 - else sd2 - } - override def filterWithPredicate(p: SingleDenotation => Boolean): Denotation = - derivedUnionDenotation(denot1.filterWithPredicate(p), denot2.filterWithPredicate(p)) - def hasAltWith(p: SingleDenotation => Boolean): Boolean = - denot1.hasAltWith(p) || denot2.hasAltWith(p) - def accessibleFrom(pre: Type, superAccess: Boolean)(using Context): Denotation = { - val d1 = denot1 accessibleFrom (pre, superAccess) - val d2 = denot2 accessibleFrom (pre, superAccess) - if (!d1.exists) d2 - else if (!d2.exists) d1 - else derivedUnionDenotation(d1, d2) - } - def mapInfo(f: Type => Type)(using Context): Denotation = - derivedUnionDenotation(denot1.mapInfo(f), denot2.mapInfo(f)) - def derivedUnionDenotation(d1: Denotation, d2: Denotation): Denotation = - if ((d1 eq denot1) && (d2 eq denot2)) this - else if (!d1.exists) d2 - else if (!d2.exists) d1 - else MultiDenotation(d1, d2) - type AsSeenFromResult = Denotation - def computeAsSeenFrom(pre: Type)(using Context): Denotation = - derivedUnionDenotation(denot1.asSeenFrom(pre), denot2.asSeenFrom(pre)) - override def toString: String = alternatives.mkString(" ") - - private def multiHasNot(op: String): Nothing = - throw new UnsupportedOperationException( - s"multi-denotation with alternatives $alternatives does not implement operation $op") - } - - /** The current denotation of the static reference given by path, - * or a MissingRef or NoQualifyingRef instance, if it does not exist. 
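A hedged usage sketch of the entry point defined below, assuming a compiler Context is in scope and the usual dotc imports are available; `lookupList` is a made-up helper name used only for illustration.

import dotty.tools.dotc.core.Contexts.Context
import dotty.tools.dotc.core.Denotations.staticRef
import dotty.tools.dotc.core.Names.termName

// Resolves a fully qualified name to its current denotation and requires that it exists.
def lookupList(using Context) =
  val denot = staticRef(termName("scala.collection.immutable.List"))
  if denot.exists then denot.symbol
  else sys.error("scala.collection.immutable.List could not be resolved")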
- * if generateStubs is set, generates stubs for missing top-level symbols - */ - def staticRef(path: Name, generateStubs: Boolean = true, isPackage: Boolean = false)(using Context): Denotation = { - def select(prefix: Denotation, selector: Name): Denotation = { - val owner = prefix.disambiguate(_.info.isParameterless) - def isPackageFromCoreLibMissing: Boolean = - // if the scala package is missing, the stdlib must be missing - owner.symbol == defn.RootClass && selector == nme.scala - if (owner.exists) { - val result = if (isPackage) owner.info.decl(selector) else owner.info.member(selector) - if (result.exists) result - else if (isPackageFromCoreLibMissing) throw new MissingCoreLibraryException(selector.toString) - else { - val alt = - if (generateStubs) missingHook(owner.symbol.moduleClass, selector) - else NoSymbol - if (alt.exists) alt.denot - else MissingRef(owner, selector) - } - } - else owner - } - def recur( - path: Name, - wrap: TermName -> Name = identity[Name] // !cc! default argument needs to be instantiated, error if [Name] is dropped - ): Denotation = path match { - case path: TypeName => - recur(path.toTermName, n => n.toTypeName) - case ModuleClassName(underlying) => - recur(underlying, n => wrap(ModuleClassName(n))) - case QualifiedName(prefix, selector) => - select(recur(prefix), wrap(selector)) - case qn @ AnyQualifiedName(prefix, _) => - recur(prefix, n => wrap(qn.info.mkString(n).toTermName)) - case path: SimpleName => - def recurSimple(len: Int, wrap: TermName -> Name): Denotation = { - val point = path.lastIndexOf('.', len - 1) - val selector = wrap(path.slice(point + 1, len).asTermName) - val prefix = - if (point > 0) recurSimple(point, identity) - else if (selector.isTermName) defn.RootClass.denot - else defn.EmptyPackageClass.denot - select(prefix, selector) - } - recurSimple(path.length, wrap) - } - - val run = ctx.run - if run == null then recur(path) - else run.staticRefs.getOrElseUpdate(path, recur(path)) - } - - /** If we are looking for a non-existing term name in a package, - * assume it is a package for which we do not have a directory and - * enter it. - */ - def missingHook(owner: Symbol, name: Name)(using Context): Symbol = - if (owner.is(Package) && name.isTermName) - newCompletePackageSymbol(owner, name.asTermName).entered - else - NoSymbol - - /** An exception for accessing symbols that are no longer valid in current run */ - class StaleSymbol(msg: -> String) extends Exception { - util.Stats.record("stale symbol") - override def getMessage(): String = msg - } -} diff --git a/tests/pos-with-compiler-cc/dotc/core/Flags.scala b/tests/pos-with-compiler-cc/dotc/core/Flags.scala deleted file mode 100644 index f23dce020f10..000000000000 --- a/tests/pos-with-compiler-cc/dotc/core/Flags.scala +++ /dev/null @@ -1,612 +0,0 @@ -package dotty.tools.dotc -package core - -object Flags { - - object opaques { - - /** A FlagSet represents a set of flags. Flags are encoded as follows: - * The first two bits indicate whether a flag set applies to terms, - * to types, or to both. Bits 2..63 are available for properties - * and can be doubly used for terms and types. 
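Before the opaque-type definitions that follow, a small self-contained sketch of the encoding just described (illustrative constants and flag values, not the real ones): the two low bits record whether a flag applies to terms and/or types, the remaining bits carry the property, and both the membership test and the union respect that split.

object FlagEncodingSketch:
  private val TERMS = 1L << 0     // applies to terms
  private val TYPES = 1L << 1     // applies to types
  private val KIND  = TERMS | TYPES

  // Illustrative flags: one term-only property, one property valid for both kinds.
  val MethodLike  = TERMS | (1L << 7)
  val PrivateLike = KIND  | (1L << 2)

  // A set contains a flag only if they share a kind bit AND a property bit.
  def is(set: Long, flag: Long): Boolean =
    val common = set & flag
    (common & KIND) != 0 && (common & ~KIND) != 0

  // Union keeps the intersection of the kind bits and the union of the properties.
  def union(x: Long, y: Long): Long =
    val kinds = x & y & KIND
    require(kinds != 0, "illegal flagset combination")
    kinds | ((x | y) & ~KIND)

  @main def flagEncodingDemo(): Unit =
    val combined = union(MethodLike, PrivateLike)   // applies to terms only
    println(is(combined, MethodLike))               // true
    println(is(combined, TYPES | (1L << 2)))        // false: no common kind bit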
- */ - opaque type FlagSet = Long - def FlagSet(bits: Long): FlagSet = bits - def toBits(fs: FlagSet): Long = fs - - /** A flag set consisting of a single flag */ - opaque type Flag <: FlagSet = Long - private[Flags] def Flag(bits: Long): Flag = bits - } - export opaques.FlagSet - - type Flag = opaques.Flag - - extension (x: FlagSet) { - - inline def bits: Long = opaques.toBits(x) - - /** The union of the given flag sets. - * Combining two FlagSets with `|` will give a FlagSet - * that has the intersection of the applicability to terms/types - * of the two flag sets. It is checked that the intersection is not empty. - */ - def | (y: FlagSet): FlagSet = - if (x.bits == 0) y - else if (y.bits == 0) x - else { - val tbits = x.bits & y.bits & KINDFLAGS - if (tbits == 0) - assert(false, s"illegal flagset combination: ${x.flagsString} and ${y.flagsString}") - FlagSet(tbits | ((x.bits | y.bits) & ~KINDFLAGS)) - } - - /** The intersection of the given flag sets */ - def & (y: FlagSet): FlagSet = FlagSet(x.bits & y.bits) - - /** The intersection of a flag set with the complement of another flag set */ - def &~ (y: FlagSet): FlagSet = { - val tbits = x.bits & KINDFLAGS - if ((tbits & y.bits) == 0) x - else FlagSet(tbits | ((x.bits & ~y.bits) & ~KINDFLAGS)) - } - - def ^ (y: FlagSet) = - FlagSet((x.bits | y.bits) & KINDFLAGS | (x.bits ^ y.bits) & ~KINDFLAGS) - - /** Does the given flag set contain the given flag? - * This means that both the kind flags and the carrier bits have non-empty intersection. - */ - def is (flag: Flag): Boolean = { - val fs = x.bits & flag.bits - (fs & KINDFLAGS) != 0 && (fs & ~KINDFLAGS) != 0 - } - - /** Does the given flag set contain the given flag - * and at the same time contain none of the flags in the `butNot` set? - */ - def is (flag: Flag, butNot: FlagSet): Boolean = x.is(flag) && !x.isOneOf(butNot) - - /** Does the given flag set have a non-empty intersection with another flag set? - * This means that both the kind flags and the carrier bits have non-empty intersection. - */ - def isOneOf (flags: FlagSet): Boolean = { - val fs = x.bits & flags.bits - (fs & KINDFLAGS) != 0 && (fs & ~KINDFLAGS) != 0 - } - - /** Does the given flag set have a non-empty intersection with another flag set, - * and at the same time contain none of the flags in the `butNot` set? - */ - def isOneOf (flags: FlagSet, butNot: FlagSet): Boolean = x.isOneOf(flags) && !x.isOneOf(butNot) - - /** Does a given flag set have all of the flags of another flag set? - * Pre: The intersection of the term/type flags of both sets must be non-empty. - */ - def isAllOf (flags: FlagSet): Boolean = { - val fs = x.bits & flags.bits - ((fs & KINDFLAGS) != 0 || flags.bits == 0) && - (fs >>> TYPESHIFT) == (flags.bits >>> TYPESHIFT) - } - - /** Does a given flag set have all of the flags in another flag set - * and at the same time contain none of the flags in the `butNot` set? - * Pre: The intersection of the term/type flags of both sets must be non-empty. - */ - def isAllOf (flags: FlagSet, butNot: FlagSet): Boolean = x.isAllOf(flags) && !x.isOneOf(butNot) - - def isEmpty: Boolean = (x.bits & ~KINDFLAGS) == 0 - - /** Is a given flag set a subset of another flag set? */ - def <= (y: FlagSet): Boolean = (x.bits & y.bits) == x.bits - - /** Does the given flag set apply to terms? */ - def isTermFlags: Boolean = (x.bits & TERMS) != 0 - - /** Does the given flag set apply to terms? 
*/ - def isTypeFlags: Boolean = (x.bits & TYPES) != 0 - - /** The given flag set with all flags transposed to be type flags */ - def toTypeFlags: FlagSet = if (x.bits == 0) x else FlagSet(x.bits & ~KINDFLAGS | TYPES) - - /** The given flag set with all flags transposed to be term flags */ - def toTermFlags: FlagSet = if (x.bits == 0) x else FlagSet(x.bits & ~KINDFLAGS | TERMS) - - /** The given flag set with all flags transposed to be common flags */ - def toCommonFlags: FlagSet = if (x.bits == 0) x else FlagSet(x.bits | KINDFLAGS) - - /** The number of non-kind flags in the given flag set */ - def numFlags: Int = java.lang.Long.bitCount(x.bits & ~KINDFLAGS) - - /** The lowest non-kind bit set in the given flag set */ - def firstBit: Int = java.lang.Long.numberOfTrailingZeros(x.bits & ~KINDFLAGS) - - /** The list of non-empty names of flags with given index idx that are set in the given flag set */ - private def flagString(idx: Int): List[String] = - if ((x.bits & (1L << idx)) == 0) Nil - else { - def halfString(kind: Int) = - if ((x.bits & (1L << kind)) != 0) flagName(idx)(kind) else "" - val termFS = halfString(TERMindex) - val typeFS = halfString(TYPEindex) - val strs = termFS :: (if (termFS == typeFS) Nil else typeFS :: Nil) - strs filter (_.nonEmpty) - } - - /** The list of non-empty names of flags that are set in the given flag set */ - def flagStrings(privateWithin: String = ""): Seq[String] = { - var rawStrings = (2 to MaxFlag).flatMap(x.flagString(_)) // DOTTY problem: cannot drop with (_) - if (!privateWithin.isEmpty && !x.is(Protected)) - rawStrings = rawStrings :+ "private" - val scopeStr = if (x.is(Local)) "this" else privateWithin - if (scopeStr != "") - rawStrings.filter(_ != "").map { - case "private" => s"private[$scopeStr]" - case "protected" => s"protected[$scopeStr]" - case str => str - } - else rawStrings - } - - /** The string representation of the given flag set */ - def flagsString: String = x.flagStrings("").mkString(" ") - } - - // Temporary while extension names are in flux - def or(x1: FlagSet, x2: FlagSet) = x1 | x2 - def and(x1: FlagSet, x2: FlagSet) = x1 & x2 - - def termFlagSet(x: Long) = FlagSet(TERMS | x) - - private inline val TYPESHIFT = 2 - private inline val TERMindex = 0 - private inline val TYPEindex = 1 - private inline val TERMS = 1 << TERMindex - private inline val TYPES = 1 << TYPEindex - private inline val KINDFLAGS = TERMS | TYPES - - private inline val FirstFlag = 2 - private inline val FirstNotPickledFlag = 48 - private inline val MaxFlag = 63 - - private val flagName = Array.fill(64, 2)("") - - private def isDefinedAsFlag(idx: Int) = flagName(idx).exists(_.nonEmpty) - - /** The flag set containing all defined flags of either kind whose bits - * lie in the given range - */ - private def flagRange(start: Int, end: Int) = - FlagSet((start until end).foldLeft(KINDFLAGS.toLong) ((bits, idx) => - if (isDefinedAsFlag(idx)) bits | (1L << idx) else bits)) - - /** The union of all flags in given flag set */ - def union(flagss: FlagSet*): FlagSet = { - var flag = EmptyFlags - for (f <- flagss) - flag |= f - flag - } - - def commonFlags(flagss: FlagSet*): FlagSet = union(flagss.map(_.toCommonFlags): _*) - - /** The empty flag set */ - val EmptyFlags: FlagSet = FlagSet(0) - - /** The undefined flag set */ - val UndefinedFlags: FlagSet = FlagSet(~KINDFLAGS) - - /** Three flags with given index between 2 and 63. - * The first applies to both terms and types. the second is a term flag, and - * the third is a type flag. 
Installs given name(s) as the name(s) of the flags. - * @param name The name to be used for the term flag - * @param typeName The name to be used for the type flag, if it is different from `name`. - */ - private def newFlags(index: Int, name: String, typeName: String = ""): (Flag, Flag, Flag) = { - flagName(index)(TERMindex) = name - flagName(index)(TYPEindex) = if (typeName.isEmpty) name else typeName - val bits = 1L << index - (opaques.Flag(KINDFLAGS | bits), opaques.Flag(TERMS | bits), opaques.Flag(TYPES | bits)) - } - - // ----------------- Available flags ----------------------------------------------------- - - /** Labeled with `private` modifier */ - val (Private @ _, PrivateTerm @ _, PrivateType @ _) = newFlags(2, "private") - - /** Labeled with `protected` modifier */ - val (Protected @ _, _, _) = newFlags(3, "protected") - - /** Labeled with `override` modifier */ - val (Override @ _, _, _) = newFlags(4, "override") - - /** A declared, but not defined member */ - val (Deferred @ _, DeferredTerm @ _, DeferredType @ _) = newFlags(5, "") - - /** Labeled with `final` modifier */ - val (Final @ _, _, _) = newFlags(6, "final") - - /** A method symbol / a super trait */ - val (_, Method @ _, _) = newFlags(7, "") - - /** A (term or type) parameter to a class or method */ - val (Param @ _, TermParam @ _, TypeParam @ _) = newFlags(8, "") - - /** Labeled with `implicit` modifier (implicit value) */ - val (Implicit @ _, ImplicitVal @ _, _) = newFlags(9, "implicit") - - /** Labeled with `lazy` (a lazy val) / a trait */ - val (LazyOrTrait @ _, Lazy @ _, Trait @ _) = newFlags(10, "lazy", "") - - /** A value or variable accessor (getter or setter) */ - val (AccessorOrSealed @ _, Accessor @ _, Sealed @ _) = newFlags(11, "", "sealed") - - /** A mutable var, an open class */ - val (MutableOrOpen @ __, Mutable @ _, Open @ _) = newFlags(12, "mutable", "open") - - /** Symbol is local to current class (i.e. private[this] or protected[this] - * pre: Private or Protected are also set - */ - val (Local @ _, _, _) = newFlags(13, "") - - /** A field generated for a primary constructor parameter (no matter if it's a 'val' or not), - * or an accessor of such a field. - */ - val (_, ParamAccessor @ _, _) = newFlags(14, "") - - /** A value or class implementing a module */ - val (Module @ _, ModuleVal @ _, ModuleClass @ _) = newFlags(15, "module") - - /** A value or class representing a package */ - val (Package @ _, PackageVal @ _, PackageClass @ _) = newFlags(16, "") - - /** A case class or its companion object - * Note: Case is also used to indicate that a symbol is bound by a pattern. - */ - val (Case @ _, CaseVal @ _, CaseClass @ _) = newFlags(17, "case") - - /** A compiler-generated symbol, which is visible for type-checking - * (compare with artifact) - */ - val (Synthetic @ _, _, _) = newFlags(18, "") - - /** Labelled with `inline` modifier */ - val (Inline @ _, _, _) = newFlags(19, "inline") - - /** An outer accessor / a covariant type variable */ - val (OuterOrCovariant @ _, OuterAccessor @ _, Covariant @ _) = newFlags(20, "", "") - - /** The label of a labeled block / a contravariant type variable */ - val (LabelOrContravariant @ _, Label @ _, Contravariant @ _) = newFlags(21, "