diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 00000000..6d63ee37 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,3 @@ +* @Paebbels + +/.github/ @Paebbels diff --git a/.github/dependabot.yml b/.github/dependabot.yml index e91b107c..92e46e01 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -10,7 +10,5 @@ updates: - Dependencies assignees: - Paebbels - reviewers: - - Paebbels schedule: interval: "daily" # Checks on Monday trough Friday. diff --git a/.github/workflows/ApplicationTesting.yml b/.github/workflows/ApplicationTesting.yml index b8b15b00..14429c32 100644 --- a/.github/workflows/ApplicationTesting.yml +++ b/.github/workflows/ApplicationTesting.yml @@ -4,7 +4,7 @@ # Unai Martinez-Corral # # # # ==================================================================================================================== # -# Copyright 2020-2024 The pyTooling Authors # +# Copyright 2020-2025 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. 
# @@ -134,20 +134,21 @@ jobs: packages = { "coverage": "python-coverage:p", - "docstr_coverage": "python-pyyaml:p", + "docstr_coverage": "python-pyyaml:p python-types-pyyaml:p", "igraph": "igraph:p", "jinja2": "python-markupsafe:p", "lxml": "python-lxml:p", "numpy": "python-numpy:p", "markupsafe": "python-markupsafe:p", "pip": "python-pip:p", - "pyyaml": "python-pyyaml:p", + "pyyaml": "python-pyyaml:p python-types-pyyaml:p", "ruamel.yaml": "python-ruamel-yaml:p python-ruamel.yaml.clib:p", "sphinx": "python-markupsafe:p", "tomli": "python-tomli:p", "wheel": "python-wheel:p", "pyEDAA.ProjectModel": "python-ruamel-yaml:p python-ruamel.yaml.clib:p python-lxml:p", "pyEDAA.Reports": "python-ruamel-yaml:p python-ruamel.yaml.clib:p python-lxml:p", + "sphinx-reports": "python-markupsafe:p python-pyyaml:p python-types-pyyaml:p", } subPackages = { "pytooling": { diff --git a/.github/workflows/ArtifactCleanUp.yml b/.github/workflows/ArtifactCleanUp.yml index b19e833b..f082e0bb 100644 --- a/.github/workflows/ArtifactCleanUp.yml +++ b/.github/workflows/ArtifactCleanUp.yml @@ -4,7 +4,7 @@ # Unai Martinez-Corral # # # # ==================================================================================================================== # -# Copyright 2020-2024 The pyTooling Authors # +# Copyright 2020-2025 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. 
# diff --git a/.github/workflows/BuildTheDocs.yml b/.github/workflows/BuildTheDocs.yml index dcf87828..391d22c3 100644 --- a/.github/workflows/BuildTheDocs.yml +++ b/.github/workflows/BuildTheDocs.yml @@ -4,7 +4,7 @@ # Unai Martinez-Corral # # # # ==================================================================================================================== # -# Copyright 2020-2024 The pyTooling Authors # +# Copyright 2020-2025 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. # @@ -38,7 +38,7 @@ jobs: steps: - name: '❗ Deprecation message' - run: printf "%s\n" "::warning title=Deprecated::'BuildTheDocs.yml' is not maintained anymore. Please switch to 'SphinxDocumentation.yml', 'LaTeXDocumentation.yml' and 'ExtractConfiguration.yml'." + run: printf "::warning title=%s::%s\n" "Deprecated" "'BuildTheDocs.yml' is not maintained anymore. Please switch to 'SphinxDocumentation.yml', 'LaTeXDocumentation.yml' and 'ExtractConfiguration.yml'." - name: ⏬ Checkout repository uses: actions/checkout@v4 diff --git a/.github/workflows/CheckDocumentation.yml b/.github/workflows/CheckDocumentation.yml index 21e1bd8c..6b6205ff 100644 --- a/.github/workflows/CheckDocumentation.yml +++ b/.github/workflows/CheckDocumentation.yml @@ -3,7 +3,7 @@ # Patrick Lehmann # # # # ==================================================================================================================== # -# Copyright 2020-2024 The pyTooling Authors # +# Copyright 2020-2025 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. 
# @@ -64,9 +64,20 @@ jobs: - name: Run 'interrogate' Documentation Coverage Check continue-on-error: true run: | - interrogate -c pyproject.toml --fail-under=${{ inputs.fail_under }} && printf "%s\n" "::error title=interrogate::Insufficient documentation quality (goal: ${{ inputs.fail_under }})" + set +e + + interrogate -c pyproject.toml --fail-under=${{ inputs.fail_under }} ${{ inputs.directory }} + if [[ $? -ne 0 ]]; then + printf "::error title=%s::%s\n" "interrogate" "Insufficient documentation quality (goal: ${{ inputs.fail_under }})" + fi - name: Run 'docstr_coverage' Documentation Coverage Check continue-on-error: true run: | - docstr-coverage -v 2 --fail-under=${{ inputs.fail_under }} ${{ inputs.directory }} && printf "%s\n" "::error title=docstr-coverage::Insufficient documentation quality (goal: ${{ inputs.fail_under }})" + set +e + + docstr-coverage --fail-under=${{ inputs.fail_under }} ${{ inputs.directory }} + if [[ $? -ne 0 ]]; then + printf "%s\n" "::error title=docstr-coverage::Insufficient documentation quality (goal: ${{ inputs.fail_under }})" + fi + diff --git a/.github/workflows/CompletePipeline.yml b/.github/workflows/CompletePipeline.yml index 03551185..530a2532 100644 --- a/.github/workflows/CompletePipeline.yml +++ b/.github/workflows/CompletePipeline.yml @@ -3,7 +3,7 @@ # Patrick Lehmann # # # # ==================================================================================================================== # -# Copyright 2020-2024 The pyTooling Authors # +# Copyright 2020-2025 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. # @@ -96,23 +96,23 @@ on: codecov: description: 'Publish merged coverage and unittest reports to Codecov.' required: false - default: false - type: boolean + default: 'false' + type: string codacy: description: 'Publish merged coverage report to Codacy.' 
required: false - default: false - type: boolean + default: 'false' + type: string dorny: description: 'Publish merged unittest report via Dorny Test-Reporter.' required: false - default: false - type: boolean + default: 'false' + type: string cleanup: description: 'Cleanup artifacts afterwards.' required: false - default: true - type: boolean + default: 'true' + type: string secrets: PYPI_TOKEN: description: "Token for pushing releases to PyPI." @@ -120,11 +120,14 @@ on: CODECOV_TOKEN: description: "Token for pushing coverage and unittest results to Codecov." required: false - CODACY_PROJECT_TOKEN: + CODACY_TOKEN: description: "Token for pushing coverage results to Codacy." required: false jobs: + Prepare: + uses: pyTooling/Actions/.github/workflows/PrepareJob.yml@dev + ConfigParams: uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@dev with: @@ -138,22 +141,34 @@ jobs: package_name: ${{ inputs.package_name }} python_version: ${{ inputs.unittest_python_version }} python_version_list: ${{ inputs.unittest_python_version_list }} - system_list: ${{ inputs.unittest_system_list }} - include_list: ${{ inputs.unittest_include_list }} - exclude_list: ${{ inputs.unittest_exclude_list }} - disable_list: ${{ inputs.unittest_disable_list }} + system_list: ${{ inputs.unittest_system_list }} + include_list: ${{ inputs.unittest_include_list }} + exclude_list: ${{ inputs.unittest_exclude_list }} + disable_list: ${{ inputs.unittest_disable_list }} - AppTestingParams: +# AppTestingParams: +# uses: pyTooling/Actions/.github/workflows/Parameters.yml@dev +# with: +# package_namespace: ${{ inputs.package_namespace }} +# package_name: ${{ inputs.package_name }} +# python_version: ${{ inputs.apptest_python_version }} +# python_version_list: ${{ inputs.apptest_python_version_list }} +# system_list: ${{ inputs.apptest_system_list }} +# include_list: ${{ inputs.apptest_include_list }} +# exclude_list: ${{ inputs.apptest_exclude_list }} +# disable_list: ${{ 
inputs.apptest_disable_list }} + + InstallParams: uses: pyTooling/Actions/.github/workflows/Parameters.yml@dev with: package_namespace: ${{ inputs.package_namespace }} package_name: ${{ inputs.package_name }} - python_version: ${{ inputs.apptest_python_version }} - python_version_list: ${{ inputs.apptest_python_version_list }} - system_list: ${{ inputs.apptest_system_list }} - include_list: ${{ inputs.apptest_include_list }} - exclude_list: ${{ inputs.apptest_exclude_list }} - disable_list: ${{ inputs.apptest_disable_list }} + python_version: ${{ inputs.unittest_python_version }} + python_version_list: '' + system_list: ${{ inputs.unittest_system_list }} + include_list: ${{ inputs.unittest_include_list }} + exclude_list: ${{ inputs.unittest_exclude_list }} + disable_list: ${{ inputs.unittest_disable_list }} UnitTesting: uses: pyTooling/Actions/.github/workflows/UnitTesting.yml@dev @@ -180,7 +195,7 @@ jobs: commands: | ${{ needs.ConfigParams.outputs.mypy_prepare_command }} mypy --html-report report/typing -p ${{ needs.ConfigParams.outputs.package_fullname }} - html_report: 'report/typing' + html_report: 'report/typing' html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }} DocCoverage: @@ -190,18 +205,29 @@ jobs: - UnitTestingParams with: python_version: ${{ needs.UnitTestingParams.outputs.python_version }} - directory: ${{ inputs.package_namespace }}/${{ inputs.package_name }} -# fail_below: 70 + directory: ${{ needs.ConfigParams.outputs.package_directory }} Package: uses: pyTooling/Actions/.github/workflows/Package.yml@dev needs: - UnitTestingParams - - UnitTesting +# - UnitTesting with: python_version: ${{ needs.UnitTestingParams.outputs.python_version }} artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }} + Install: + uses: pyTooling/Actions/.github/workflows/InstallPackage.yml@dev + needs: + - ConfigParams + - UnitTestingParams + - InstallParams + - Package + with: + jobs: ${{ 
needs.InstallParams.outputs.python_jobs }} + wheel: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }} + package_name: ${{ needs.ConfigParams.outputs.package_fullname }} + # AppTesting: # uses: pyTooling/Actions/.github/workflows/ApplicationTesting.yml@dev # needs: @@ -233,7 +259,7 @@ jobs: codacy: ${{ inputs.codacy }} secrets: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} - CODACY_TOKEN: ${{ secrets.CODACY_PROJECT_TOKEN }} + CODACY_TOKEN: ${{ secrets.CODACY_TOKEN }} PublishTestResults: uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@dev @@ -242,15 +268,13 @@ jobs: - UnitTestingParams - UnitTesting with: - additional_merge_args: '-d "--pytest=rewrite-dunder-init;reduce-depth:pytest.tests.unit"' - testsuite-summary-name: ${{ inputs.package_name }} + testsuite-summary-name: ${{ needs.ConfigParams.outputs.package_fullname }} merged_junit_filename: ${{ needs.ConfigParams.outputs.unittest_merged_report_xml_filename }} merged_junit_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }} dorny: ${{ inputs.dorny }} codecov: ${{ inputs.codecov }} - secrets: - CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} # VerifyDocs: # uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@dev @@ -281,8 +305,7 @@ jobs: - UnitTestingParams - PublishCoverageResults - PublishTestResults - - Documentation - if: ${{ inputs.cleanup }} + if: inputs.cleanup == 'true' with: sqlite_coverage_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}- xml_unittest_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}- @@ -310,26 +333,57 @@ jobs: coverage: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }} typing: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }} - ReleasePage: - uses: pyTooling/Actions/.github/workflows/Release.yml@dev 
- if: startsWith(github.ref, 'refs/tags') + TriggerTaggedRelease: + uses: pyTooling/Actions/.github/workflows/TagReleaseCommit.yml@dev needs: + - Prepare + - UnitTesting + - Install +# - AppTesting +# - StaticTypeCheck - Package + - PublishToGitHubPages + if: needs.Prepare.outputs.is_release_commit == 'true' + permissions: + contents: write # required for create tag + actions: write # required for trigger workflow + with: + version: ${{ needs.Prepare.outputs.version }} + auto_tag: ${{ needs.Prepare.outputs.is_release_commit }} + secrets: inherit + + ReleasePage: + uses: pyTooling/Actions/.github/workflows/PublishReleaseNotes.yml@dev + needs: + - Prepare + - UnitTesting + - Install # - AppTesting +# - StaticTypeCheck + - Package - PublishToGitHubPages + if: needs.Prepare.outputs.is_release_tag == 'true' + permissions: + contents: write + actions: write + with: + tag: ${{ needs.Prepare.outputs.version }} + secrets: inherit PublishOnPyPI: uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@dev - if: startsWith(github.ref, 'refs/tags') needs: + - Prepare + UnitTestingParams + - Package + ReleasePage + if: needs.Prepare.outputs.is_release_tag == 'true' with: python_version: ${{ needs.UnitTestingParams.outputs.python_version }} requirements: -r dist/requirements.txt artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }} secrets: - PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} + PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} ArtifactCleanUp: uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@dev @@ -344,7 +398,7 @@ jobs: - PublishToGitHubPages # - PublishOnPyPI - IntermediateCleanUp - if: ${{ inputs.cleanup }} + if: inputs.cleanup == 'true' with: package: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }} remaining: | diff --git a/.github/workflows/CoverageCollection.yml b/.github/workflows/CoverageCollection.yml index a9df9dc4..e6cdde2e 100644 --- a/.github/workflows/CoverageCollection.yml +++ 
b/.github/workflows/CoverageCollection.yml @@ -4,7 +4,7 @@ # Unai Martinez-Corral # # # # ==================================================================================================================== # -# Copyright 2020-2024 The pyTooling Authors # +# Copyright 2020-2025 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. # @@ -72,7 +72,7 @@ jobs: steps: - name: '❗ Deprecation message' - run: printf "%s\n" "::warning title=Deprecated::'CoverageCollection.yml' is not maintained anymore. Please switch to 'UnitTesting.yml', 'PublishCoverageResults.yml' and 'PublishTestResults.yml'." + run: printf "::warning title=%s::%s\n" "Deprecated" "'CoverageCollection.yml' is not maintained anymore. Please switch to 'UnitTesting.yml', 'PublishCoverageResults.yml' and 'PublishTestResults.yml'." - name: ⏬ Checkout repository uses: actions/checkout@v4 diff --git a/.github/workflows/ExtractConfiguration.yml b/.github/workflows/ExtractConfiguration.yml index 29e9379e..f7137084 100644 --- a/.github/workflows/ExtractConfiguration.yml +++ b/.github/workflows/ExtractConfiguration.yml @@ -3,7 +3,7 @@ # Patrick Lehmann # # # # ==================================================================================================================== # -# Copyright 2020-2024 The pyTooling Authors # +# Copyright 2020-2025 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. 
# @@ -145,6 +145,12 @@ jobs: namespace = "${{ inputs.package_namespace }}".strip() name = "${{ inputs.package_name }}".strip() + print(dedent(f"""\ + INPUTS: + package_namespace: {namespace} + package_name: {name} + """)) + if namespace == "" or namespace == ".": fullname = f"{name}" directory = f"{name}" @@ -154,6 +160,13 @@ jobs: directory = f"{namespace}/{name}" mypy_prepare_command = f"touch {namespace}/__init__.py" + print(dedent(f"""\ + OUTPUTS: + package_fullname: {fullname} + package_directory: {directory} + mypy_prepare_command: {mypy_prepare_command} + """)) + github_output = Path(getenv("GITHUB_OUTPUT")) print(f"GITHUB_OUTPUT: {github_output}") with github_output.open("a+", encoding="utf-8") as f: @@ -234,4 +247,11 @@ jobs: coverage_report_json={coverageJSONFile.as_posix()} """)) - print(f"DEBUG:\n unittest xml: {unittestXMLFile}\n merged unittest xml: {mergedUnittestXMLFile}\n coverage html: {coverageHTMLDirectory}\n coverage xml: {coverageXMLFile}\n coverage json: {coverageJSONFile}") + print(dedent(f"""\ + DEBUG: + unittest xml: {unittestXMLFile} + merged unittest xml: {mergedUnittestXMLFile} + coverage html: {coverageHTMLDirectory} + coverage xml: {coverageXMLFile} + coverage json: {coverageJSONFile} + """)) diff --git a/.github/workflows/InstallPackage.yml b/.github/workflows/InstallPackage.yml new file mode 100644 index 00000000..9f0ee5dd --- /dev/null +++ b/.github/workflows/InstallPackage.yml @@ -0,0 +1,130 @@ +# ==================================================================================================================== # +# Authors: # +# Patrick Lehmann # +# # +# ==================================================================================================================== # +# Copyright 2025-2025 The pyTooling Authors # +# # +# Licensed under the Apache License, Version 2.0 (the "License"); # +# you may not use this file except in compliance with the License. 
# +# You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software # +# distributed under the License is distributed on an "AS IS" BASIS, # +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # +# See the License for the specific language governing permissions and # +# limitations under the License. # +# # +# SPDX-License-Identifier: Apache-2.0 # +# ==================================================================================================================== # +name: Install Package + +on: + workflow_call: + inputs: + jobs: + description: 'JSON list with environment fields, telling the system and Python versions to run tests with.' + required: true + type: string + wheel: + description: "Wheel package as input artifact." + required: true + type: string + package_name: + description: "Name of the Python package." + required: true + type: string + +jobs: + PackageInstallation: + name: ${{ matrix.sysicon }} ${{ matrix.pyicon }} Package installation using Python ${{ matrix.python }} + runs-on: ${{ matrix.runs-on }} + + strategy: + fail-fast: false + matrix: + include: ${{ fromJson(inputs.jobs) }} + + defaults: + run: + shell: ${{ matrix.shell }} + + steps: + - name: 📥 Download artifacts '${{ inputs.wheel }}' from 'Package' job + uses: pyTooling/download-artifact@v4 + with: + name: ${{ inputs.wheel }} + path: install + + - name: '🟦 Setup MSYS2 for ${{ matrix.runtime }}' + uses: msys2/setup-msys2@v2 + if: matrix.system == 'msys2' + with: + msystem: ${{ matrix.runtime }} + update: true + pacboy: >- + python-pip:p python-wheel:p + python-lxml:p + python-markupsafe:p + python-pyyaml:p python-types-pyyaml:p + python-ruamel-yaml:p python-ruamel.yaml.clib:p + python-tomli:p + + - name: 🐍 Setup Python ${{ matrix.python }} + uses: actions/setup-python@v5 + if: matrix.system != 'msys2' + with: + python-version: ${{ matrix.python }} + + - name: 
🔧 Install wheel and pip dependencies (native) + if: matrix.system != 'msys2' + run: | + python -m pip install --disable-pip-version-check -U wheel + + - name: 🔧 Install wheel from artifact (Ubuntu/macOS) + if: matrix.system != 'windows' + run: | + python -m pip install --disable-pip-version-check -U install/*.whl + + - name: 🔧 Install wheel from artifact (Windows) + if: matrix.system == 'windows' + run: | + python -m pip install -v --disable-pip-version-check (Get-Item .\install\*.whl).FullName + + - name: 📦 Run application tests (Ubuntu/macOS) + if: matrix.system != 'windows' + run: | + set +e + + ANSI_LIGHT_RED=$'\x1b[91m' + ANSI_LIGHT_GREEN=$'\x1b[92m' + ANSI_NOCOLOR=$'\x1b[0m' + + printf "Import package and checking package version ...\n " + python3 - << EOF | tee ImportTest.log | grep -E "^Package version:\s+[0-9]+\.[0-9]+\.[0-9]+" + from ${{ inputs.package_name }} import __version__ + + print(f"Package version: {__version__}") + EOF + if [[ $? -eq 0 ]]; then + printf " ${ANSI_LIGHT_GREEN}[PASSED]${ANSI_NOCOLOR}\n" + else + printf " ${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n" + printf "::error title=%s::%s\n" "InstallPackage" "Couldn't check package version of '${{ inputs.package_name }}'." 
+ exit 1 + fi + + - name: 📦 Run application tests (Windows) + if: matrix.system == 'windows' + run: | + $result=$(python -c "from ${{ inputs.package_name }} import __version__; print(f""Package version: {__version__}"")") + Write-Host $result + if ($result -match "Package version:\s+\d+\.\d+\.\d+") { + Write-Host -ForegroundColor Green "[PASSED]" + } else { + Write-Host -ForegroundColor Red "[FAILED]" + Write-Host ("::error title={0}::{1}" -f "InstallPackage", "Couldn't check package version of '${{ inputs.package_name }}'.") + exit 1 + } diff --git a/.github/workflows/IntermediateCleanUp.yml b/.github/workflows/IntermediateCleanUp.yml index 1ba9295f..b3ca6e74 100644 --- a/.github/workflows/IntermediateCleanUp.yml +++ b/.github/workflows/IntermediateCleanUp.yml @@ -3,7 +3,7 @@ # Patrick Lehmann # # # # ==================================================================================================================== # -# Copyright 2020-2024 The pyTooling Authors # +# Copyright 2020-2025 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. # diff --git a/.github/workflows/LaTeXDocumentation.yml b/.github/workflows/LaTeXDocumentation.yml index 9027bc2c..c197b498 100644 --- a/.github/workflows/LaTeXDocumentation.yml +++ b/.github/workflows/LaTeXDocumentation.yml @@ -3,7 +3,7 @@ # Patrick Lehmann # # # # ==================================================================================================================== # -# Copyright 2020-2024 The pyTooling Authors # +# Copyright 2020-2025 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. 
# diff --git a/.github/workflows/NightlyRelease.yml b/.github/workflows/NightlyRelease.yml index 260fb8b2..eada2f7a 100644 --- a/.github/workflows/NightlyRelease.yml +++ b/.github/workflows/NightlyRelease.yml @@ -3,7 +3,7 @@ # Patrick Lehmann # # # # ==================================================================================================================== # -# Copyright 2020-2024 The pyTooling Authors # +# Copyright 2020-2025 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. # @@ -96,7 +96,7 @@ jobs: continue-on-error: ${{ inputs.can-fail }} permissions: contents: write - actions: write + actions: write # attestations: write steps: @@ -130,7 +130,7 @@ jobs: else printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}" printf " %s\n" "${ANSI_LIGHT_RED}Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}" - printf "%s\n" "::error title=InternalError::Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'." + printf "::error title=%s::%s\n" "InternalError" "Couldn't delete release '${{ inputs.nightly_name }}' -> Error: '${message}'." exit 1 fi @@ -190,7 +190,7 @@ jobs: else printf "%s\n" "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}" printf " %s\n" "${ANSI_LIGHT_RED}Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'.${ANSI_NOCOLOR}" - printf "%s\n" "::error title=InternalError::Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'." + printf "::error title=%s::%s\n" "InternalError" "Couldn't create release '${{ inputs.nightly_name }}' -> Error: '${message}'." exit 1 fi @@ -289,7 +289,7 @@ jobs: printf " %s" "Checked asset for duplicates ... 
" if [[ -n "${assetFilenames[$asset]}" ]]; then printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" - printf "%s\n" "::error title=DuplicateAsset::Asset '${asset}' from artifact '${artifact}' was already uploaded to release '${{ inputs.nightly_name }}'." + printf "::error title=%s::%s\n" "DuplicateAsset" "Asset '${asset}' from artifact '${artifact}' was already uploaded to release '${{ inputs.nightly_name }}'." ERRORS=$((ERRORS + 1)) continue else @@ -309,7 +309,7 @@ jobs: else printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" printf " %s\n" "${ANSI_LIGHT_RED}Couldn't download artifact '${artifact}'.${ANSI_NOCOLOR}" - printf "%s\n" "::error title=ArtifactNotFound::Couldn't download artifact '${artifact}'." + printf "::error title=%s::%s\n" "ArtifactNotFound" "Couldn't download artifact '${artifact}'." ERRORS=$((ERRORS + 1)) continue fi @@ -361,7 +361,7 @@ jobs: else printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" printf " %s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zip file '${asset}'.${ANSI_NOCOLOR}" - printf "%s\n" "::error title=CompressionError::Couldn't compress '${artifact}' to zip file '${asset}'." + printf "::error title=%s::%s\n" "CompressionError" "Couldn't compress '${artifact}' to zip file '${asset}'." ERRORS=$((ERRORS + 1)) continue fi @@ -390,7 +390,7 @@ jobs: else printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" printf " %s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to tgz file '${asset}'.${ANSI_NOCOLOR}" - printf "%s\n" "::error title=CompressionError::Couldn't compress '${artifact}' to tgz file '${asset}'." + printf "::error title=%s::%s\n" "CompressionError" "Couldn't compress '${artifact}' to tgz file '${asset}'." 
ERRORS=$((ERRORS + 1)) continue fi @@ -419,7 +419,7 @@ jobs: else printf " %s\n" "Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" printf " %s\n" "${ANSI_LIGHT_RED}Couldn't compress '${artifact}' to zst file '${asset}'.${ANSI_NOCOLOR}" - printf "%s\n" "::error title=CompressionError::Couldn't compress '${artifact}' to zst file '${asset}'." + printf "::error title=%s::%s\n" "CompressionError" "Couldn't compress '${artifact}' to zst file '${asset}'." ERRORS=$((ERRORS + 1)) continue fi @@ -429,7 +429,7 @@ jobs: else printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" printf " %s\n" "${ANSI_LIGHT_RED}Couldn't find asset '${asset}' in artifact '${artifact}'.${ANSI_NOCOLOR}" - printf "%s\n" "::error title=FileNotFound::Couldn't find asset '${asset}' in artifact '${artifact}'." + printf "::error title=%s::%s\n" "FileNotFound" "Couldn't find asset '${asset}' in artifact '${artifact}'." ERRORS=$((ERRORS + 1)) continue fi @@ -469,7 +469,7 @@ jobs: else printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" printf " %s\n" "${ANSI_LIGHT_RED}Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}" - printf "%s\n" "::error title=UploadError::Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'." + printf "::error title=%s::%s\n" "UploadError" "Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.nightly_name }}'." ERRORS=$((ERRORS + 1)) continue fi @@ -492,7 +492,7 @@ jobs: else printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" printf " %s\n" "${ANSI_LIGHT_RED}Couldn't upload asset '${{ inputs.inventory-json }}' to release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}" - printf "%s\n" "::error title=UploadError::Couldn't upload asset '${{ inputs.inventory-json }}' to release '${{ inputs.nightly_name }}'." + printf "::error title=%s::%s\n" "UploadError" "Couldn't upload asset '${{ inputs.inventory-json }}' to release '${{ inputs.nightly_name }}'." 
ERRORS=$((ERRORS + 1)) continue fi @@ -526,5 +526,5 @@ jobs: else printf "%s\n" "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}" printf " %s\n" "${ANSI_LIGHT_RED}Couldn't remove draft-state from release '${{ inputs.nightly_name }}'.${ANSI_NOCOLOR}" - printf "%s\n" "::error title=ReleasePage::Couldn't remove draft-state from release '${{ inputs.nightly_name }}'." + printf "::error title=%s::%s\n" "ReleasePage" "Couldn't remove draft-state from release '${{ inputs.nightly_name }}'." fi diff --git a/.github/workflows/Package.yml b/.github/workflows/Package.yml index a3618d4d..950969be 100644 --- a/.github/workflows/Package.yml +++ b/.github/workflows/Package.yml @@ -4,7 +4,7 @@ # Unai Martinez-Corral # # # # ==================================================================================================================== # -# Copyright 2020-2024 The pyTooling Authors # +# Copyright 2020-2025 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. # diff --git a/.github/workflows/Parameters.yml b/.github/workflows/Parameters.yml index 2e827290..d3d2c0b6 100644 --- a/.github/workflows/Parameters.yml +++ b/.github/workflows/Parameters.yml @@ -4,7 +4,7 @@ # Unai Martinez-Corral # # # # ==================================================================================================================== # -# Copyright 2020-2024 The pyTooling Authors # +# Copyright 2020-2025 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. # @@ -95,6 +95,11 @@ on: required: false default: 'macos-14' type: string + pipeline-delay: + description: 'Slow down this job, to delay the startup of the GitHub Action pipeline.' 
+ required: false + default: 0 + type: number outputs: python_version: @@ -121,6 +126,12 @@ jobs: params: ${{ steps.params.outputs.params }} steps: + - name: Generate a startup delay of ${{ inputs.pipeline-delay }} seconds + id: delay + if: inputs.pipeline-delay >= 0 + run: | + sleep ${{ inputs.pipeline-delay }} + - name: Generate 'params' and 'python_jobs' id: params shell: python diff --git a/.github/workflows/PrepareJob.yml b/.github/workflows/PrepareJob.yml new file mode 100644 index 00000000..64a7d881 --- /dev/null +++ b/.github/workflows/PrepareJob.yml @@ -0,0 +1,334 @@ +name: Prepare Variables + +on: + workflow_call: + inputs: + ubuntu_image: + description: 'Name of the Ubuntu image.' + required: false + default: 'ubuntu-24.04' + type: string + main_branch: + description: 'Name of the branch containing releases.' + required: false + default: 'main' + type: string + development_branch: + description: 'Name of the development branch containing features.' + required: false + default: 'dev' + type: string + release_branch: + description: 'Name of the branch containing releases and nightly builds.' + required: false + default: 'main' + type: string + nightly_tag_pattern: + description: 'Pattern for nightly tags on the release branch.' + required: false + default: 'nightly' + type: string + release_tag_pattern: + description: 'Pattern for release tags on the release branch. Usually: vXX.YY.ZZ' + required: false + default: '(v|r)?[0-9]+(\.[0-9]+){0,2}(-(dev|alpha|beta|rc)([0-9]*))?' 
+ type: string + + outputs: + on_main_branch: + description: "" + value: ${{ jobs.Prepare.outputs.on_main_branch }} + on_dev_branch: + description: "" + value: ${{ jobs.Prepare.outputs.on_dev_branch }} + on_release_branch: + description: "" + value: ${{ jobs.Prepare.outputs.on_release_branch }} + is_regular_commit: + description: "" + value: ${{ jobs.Prepare.outputs.is_regular_commit }} + is_merge_commit: + description: "" + value: ${{ jobs.Prepare.outputs.is_merge_commit }} + is_release_commit: + description: "" + value: ${{ jobs.Prepare.outputs.is_release_commit }} + is_nightly_tag: + description: "" + value: ${{ jobs.Prepare.outputs.is_nightly_tag }} + is_release_tag: + description: "" + value: ${{ jobs.Prepare.outputs.is_release_tag }} + ref_kind: + description: "" + value: ${{ jobs.Prepare.outputs.ref_kind }} + branch: + description: "" + value: ${{ jobs.Prepare.outputs.branch }} + tag: + description: "" + value: ${{ jobs.Prepare.outputs.tag }} + version: + description: "" + value: ${{ jobs.Prepare.outputs.version }} + pr_title: + description: "" + value: ${{ jobs.Prepare.outputs.pr_title }} + pr_number: + description: "" + value: ${{ jobs.Prepare.outputs.pr_number }} +# pr_mergedby: +# description: "" +# value: ${{ jobs.Prepare.outputs.pr_mergedby }} +# pr_mergedat: +# description: "" +# value: ${{ jobs.Prepare.outputs.pr_mergedat }} + +jobs: + Prepare: + name: Extract Information + runs-on: ubuntu-24.04 + outputs: + on_main_branch: ${{ steps.Classify.outputs.on_main_branch }} + on_dev_branch: ${{ steps.Classify.outputs.on_dev_branch }} + on_release_branch: ${{ steps.Classify.outputs.on_release_branch }} + is_regular_commit: ${{ steps.Classify.outputs.is_regular_commit }} + is_merge_commit: ${{ steps.Classify.outputs.is_merge_commit }} + is_release_commit: ${{ steps.Classify.outputs.is_release_commit }} + is_nightly_tag: ${{ steps.Classify.outputs.is_nightly_tag }} + is_release_tag: ${{ steps.Classify.outputs.is_release_tag }} + ref_kind: ${{ 
steps.Classify.outputs.ref_kind }} + branch: ${{ steps.Classify.outputs.branch }} + tag: ${{ steps.Classify.outputs.tag }} + version: ${{ steps.Classify.outputs.version || steps.FindPullRequest.outputs.pr_version }} +# release_version: ${{ steps.FindPullRequest.outputs.release_version }} + pr_title: ${{ steps.FindPullRequest.outputs.pr_title }} + pr_number: ${{ steps.FindPullRequest.outputs.pr_number }} + + steps: + - name: ⏬ Checkout repository + uses: actions/checkout@v4 + with: + # The command 'git describe' (used for version) needs the history. + fetch-depth: 0 + + - name: 🖉 GitHub context information + run: | + printf "%s\n" "github.event_name: ${{ github.event_name }}" + printf "%s\n" "github.actor: ${{ github.actor }}" + printf "%s\n" "github.ref: ${{ github.ref }}" + printf "%s\n" "github.base_ref: ${{ github.base_ref }}" + printf "%s\n" "github.head_ref: ${{ github.head_ref }}" + printf "%s\n" "github.sha: ${{ github.sha }}" + + - name: 🖉 Classify commit + id: Classify + run: | + set +e + + ANSI_LIGHT_RED=$'\x1b[91m' + ANSI_LIGHT_GREEN=$'\x1b[92m' + ANSI_LIGHT_YELLOW=$'\x1b[93m' + ANSI_LIGHT_BLUE=$'\x1b[94m' + ANSI_NOCOLOR=$'\x1b[0m' + + ref="${{ github.ref }}" + on_main_branch="false" + on_dev_branch="false" + on_release_branch="false" + is_regular_commit="false" + is_merge_commit="false" + is_release_commit="false" + is_nightly_tag="false" + is_release_tag="false" + ref_kind="unknown" + branch="" + tag="" + version="" + + if [[ "${ref:0:11}" == "refs/heads/" ]]; then + ref_kind="branch" + branch="${ref:11}" + + printf "Commit check:\n" + + if [[ "${branch}" == "${{ inputs.main_branch }}" ]]; then + on_main_branch="true" + + if [[ -z "$(git rev-list -1 --merges ${{ github.sha }}~1..${{ github.sha }})" ]]; then + is_regular_commit="true" + printf " ${ANSI_LIGHT_YELLOW}regular " + else + is_merge_commit="true" + printf " ${ANSI_LIGHT_GREEN}merge " + fi + printf "commit${ANSI_NOCOLOR} on main branch ${ANSI_LIGHT_BLUE}'%s'${ANSI_NOCOLOR}\n" "${{ 
inputs.main_branch }}" + fi + + if [[ "${branch}" == "${{ inputs.development_branch }}" ]]; then + on_dev_branch="true" + + if [[ -z "$(git rev-list -1 --merges ${{ github.sha }}~1..${{ github.sha }})" ]]; then + is_regular_commit="true" + printf " ${ANSI_LIGHT_YELLOW}regular " + else + is_merge_commit="true" + printf " ${ANSI_LIGHT_GREEN}merge " + fi + printf "commit${ANSI_NOCOLOR} on development branch ${ANSI_LIGHT_BLUE}'%s'${ANSI_NOCOLOR}\n" "${{ inputs.development_branch }}" + fi + + if [[ "${branch}" == "${{ inputs.release_branch }}" ]]; then + on_release_branch="true" + + if [[ -z "$(git rev-list -1 --merges ${{ github.sha }}~1..${{ github.sha }})" ]]; then + is_regular_commit="true" + printf " ${ANSI_LIGHT_YELLOW}regular " + else + is_release_commit="true" + printf " ${ANSI_LIGHT_GREEN}release " + fi + printf "commit${ANSI_NOCOLOR} on release branch ${ANSI_LIGHT_BLUE}'%s'${ANSI_NOCOLOR}\n" "${{ inputs.release_branch }}" + fi + elif [[ "${ref:0:10}" == "refs/tags/" ]]; then + ref_kind="tag" + tag="${ref:10}" + + printf "Tag check:\n" + + printf " Check if tag is on release branch '%s' ... " "${{ inputs.release_branch }}" + git branch --remotes --contains $(git rev-parse --verify "tags/${tag}~0") | grep "origin/${{ inputs.release_branch }}" > /dev/null + if [[ $? -eq 0 ]]; then + printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n" + else + printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n" + printf "${ANSI_LIGHT_RED}Tag '%s' isn't on branch '%s'.${ANSI_NOCOLOR}\n" "${tag}" "${{ inputs.release_branch }}" + printf "::error title=TagCheck::Tag '%s' isn't on branch '%s'.\n" "${tag}" "${{ inputs.release_branch }}" + exit 1 + fi + + NIGHTLY_TAG_PATTERN='^${{ inputs.nightly_tag_pattern }}$' + RELEASE_TAG_PATTERN='^${{ inputs.release_tag_pattern }}$' + printf " Check tag name against regexp '%s' ... 
" "${RELEASE_TAG_PATTERN}"
+            if [[ "${tag}" =~ $NIGHTLY_TAG_PATTERN ]]; then
+              printf "${ANSI_LIGHT_GREEN}[NIGHTLY]${ANSI_NOCOLOR}\n"
+              is_nightly_tag="true"
+            elif [[ "${tag}" =~ $RELEASE_TAG_PATTERN ]]; then
+              printf "${ANSI_LIGHT_GREEN}[RELEASE]${ANSI_NOCOLOR}\n"
+              version="${tag}"
+              is_release_tag="true"
+            else
+              printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n"
+              printf "${ANSI_LIGHT_RED}Tag name '%s' doesn't conform to regexp${ANSI_NOCOLOR}\n" "${tag}"
+              printf "  ${ANSI_LIGHT_RED}nightly tag: %s${ANSI_NOCOLOR}\n" "${NIGHTLY_TAG_PATTERN}"
+              printf "  ${ANSI_LIGHT_RED}release tag: %s${ANSI_NOCOLOR}\n" "${RELEASE_TAG_PATTERN}"
+              printf "::error title=RexExpCheck::Tag name '%s' doesn't conform to regexp '%s' nor '%s'.\n" "${tag}" "${NIGHTLY_TAG_PATTERN}" "${RELEASE_TAG_PATTERN}"
+              exit 1
+            fi
+          else
+            printf "${ANSI_LIGHT_RED}Unknown Git reference '%s'.${ANSI_NOCOLOR}\n" "${{ github.ref }}"
+            printf "::error title=Classify Commit::Unknown Git reference '%s'.\n" "${{ github.ref }}"
+            exit 1
+          fi
+
+          tee --append "${GITHUB_OUTPUT}" <<EOF
+          on_main_branch=${on_main_branch}
+          on_dev_branch=${on_dev_branch}
+          on_release_branch=${on_release_branch}
+          is_regular_commit=${is_regular_commit}
+          is_merge_commit=${is_merge_commit}
+          is_release_commit=${is_release_commit}
+          is_nightly_tag=${is_nightly_tag}
+          is_release_tag=${is_release_tag}
+          ref_kind=${ref_kind}
+          branch=${branch}
+          tag=${tag}
+          version=${version}
+          EOF
+
+      - name: 🔁 Find merged PullRequest from second parent of current SHA (${{ github.sha }})
+        id: FindPullRequest
+        if: steps.Classify.outputs.is_merge_commit == 'true'
+        run: |
+          set +e
+
+          ANSI_LIGHT_RED=$'\x1b[91m'
+          ANSI_LIGHT_GREEN=$'\x1b[92m'
+          ANSI_LIGHT_YELLOW=$'\x1b[93m'
+          ANSI_LIGHT_BLUE=$'\x1b[94m'
+          ANSI_NOCOLOR=$'\x1b[0m'
+
+          export GH_TOKEN=${{ github.token }}
+
+          printf "Read second parent of current SHA (%s) ... " "${{ github.ref }}"
+          FATHER_SHA=$(git rev-parse ${{ github.ref }}^2)
+          if [[ $? -ne 0 || "${FATHER_SHA}" == "" ]]; then
+            printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n"
+            printf "${ANSI_LIGHT_RED}Couldn't read second parent (father) of '%s'.${ANSI_NOCOLOR}\n" "${{ github.ref }}^2"
+            printf "::error title=GitCommitHistoryError::Couldn't read second parent (father) of '%s'. -> %s\n" "${{ github.ref }}^2" "${FATHER_SHA}"
+            exit 1
+          else
+            printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
+          fi
+
+          printf "Search Pull Request to '%s' and branch containing SHA %s ... " "${{ inputs.release_branch }}" "${FATHER_SHA}"
+          PULL_REQUESTS=$(gh pr list --base "${{ inputs.release_branch }}" --search "${FATHER_SHA}" --state "merged" --json "title,number,mergedBy,mergedAt")
+          if [[ $? -ne 0 || "${PULL_REQUESTS}" == "" ]]; then
+            printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n"
+            printf "${ANSI_LIGHT_RED}Couldn't find a merged Pull Request to '%s'. -> %s${ANSI_NOCOLOR}\n" "${{ inputs.release_branch }}" "${PULL_REQUESTS}"
+            printf "::error title=PullRequest::Couldn't find a merged Pull Request to '%s'. -> %s\n" "${{ inputs.release_branch }}" "${PULL_REQUESTS}"
+            exit 1
+          else
+            printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
+
+            PR_TITLE="$( printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].title")"
+            PR_NUMBER="$( printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].number")"
+            PR_MERGED_BY="$(printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].mergedBy.login")"
+            PR_MERGED_AT="$(printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].mergedAt")"
+
+            printf "${ANSI_LIGHT_BLUE}Found Pull Request:${ANSI_NOCOLOR}\n"
+            printf "  %s\n" "Title: ${PR_TITLE}"
+            printf "  %s\n" "Number: ${PR_NUMBER}"
+            printf "  %s\n" "MergedBy: ${PR_MERGED_BY}"
+            printf "  %s\n" "MergedAt: ${PR_MERGED_AT} ($(date -d"${PR_MERGED_AT}" '+%d.%m.%Y - %H:%M:%S'))"
+          fi
+
+          RELEASE_TAG_PATTERN='^${{ inputs.release_tag_pattern }}$'
+          printf "Check Pull Request title against regexp '%s' ... 
" "${RELEASE_TAG_PATTERN}" + if [[ "${PR_TITLE}" =~ $RELEASE_TAG_PATTERN ]]; then + printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n" + RELEASE_VERSION="${PR_TITLE}" + else + printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n" + printf "${ANSI_LIGHT_RED}Pull Request title '%s' doesn't conform to regexp '%s'.${ANSI_NOCOLOR}\n" "${PR_TITLE}" "${RELEASE_TAG_PATTERN}" + printf "::error title=RexExpCheck::Pull Request title '%s' doesn't conform to regexp '%s'.\n" "${PR_TITLE}" "${RELEASE_TAG_PATTERN}" + exit 1 + fi + + printf "Release tag: ${ANSI_LIGHT_GREEN}%s${ANSI_NOCOLOR}\n" "${RELEASE_VERSION}" + tee --append "${GITHUB_OUTPUT}" <<EOF + pr_version=${RELEASE_VERSION} + pr_title=${PR_TITLE} + pr_number=${PR_NUMBER} + pr_mergedby=${PR_MERGED_BY} + pr_mergedat=${PR_MERGED_AT} + EOF + + - name: Debug + run: | + printf "on_main_branch: %s\n" "${{ steps.Classify.outputs.on_main_branch }}" + printf "on_dev_branch: %s\n" "${{ steps.Classify.outputs.on_dev_branch }}" + printf "on_release_branch: %s\n" "${{ steps.Classify.outputs.on_release_branch }}" + printf "is_regular_commit: %s\n" "${{ steps.Classify.outputs.is_regular_commit }}" + printf "is_merge_commit: %s\n" "${{ steps.Classify.outputs.is_merge_commit }}" + printf "is_release_commit: %s\n" "${{ steps.Classify.outputs.is_release_commit }}" + printf "is_nightly_tag: %s\n" "${{ steps.Classify.outputs.is_nightly_tag }}" + printf "is_release_tag: %s\n" "${{ steps.Classify.outputs.is_release_tag }}" + printf "ref_kind: %s\n" "${{ steps.Classify.outputs.ref_kind }}" + printf "branch: %s\n" "${{ steps.Classify.outputs.branch }}" + printf "tag: %s\n" "${{ steps.Classify.outputs.tag }}" + printf "version from tag: %s\n" "${{ steps.Classify.outputs.version }}" + printf "version from pr: %s\n" "${{ steps.FindPullRequest.outputs.pr_version }}" + printf "version: %s\n" "${{ steps.Classify.outputs.version || steps.FindPullRequest.outputs.pr_version }}" + printf "pr title: %s\n" "${{ steps.FindPullRequest.outputs.pr_title }}" + 
printf "pr number: %s\n" "${{ steps.FindPullRequest.outputs.pr_number }}" diff --git a/.github/workflows/PublishCoverageResults.yml b/.github/workflows/PublishCoverageResults.yml index bafa84ae..08f0611f 100644 --- a/.github/workflows/PublishCoverageResults.yml +++ b/.github/workflows/PublishCoverageResults.yml @@ -3,7 +3,7 @@ # Patrick Lehmann # # # # ==================================================================================================================== # -# Copyright 2020-2024 The pyTooling Authors # +# Copyright 2020-2025 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. # @@ -86,20 +86,20 @@ on: codecov: description: 'Publish merged coverage report to Codecov.' required: false - default: false - type: boolean + default: 'false' + type: string codacy: description: 'Publish merged coverage report to Codacy.' required: false - default: false - type: boolean + default: 'false' + type: string secrets: CODECOV_TOKEN: description: 'Token to push result to Codecov.' - required: true + required: false CODACY_TOKEN: description: 'Token to push result to Codacy.' 
- required: true + required: false jobs: PublishCoverageResults: @@ -200,7 +200,8 @@ jobs: - name: 📊 Publish code coverage at CodeCov uses: codecov/codecov-action@v5 - if: inputs.codecov + id: codecov + if: inputs.codecov == 'true' continue-on-error: true with: token: ${{ secrets.CODECOV_TOKEN }} @@ -212,8 +213,22 @@ jobs: - name: 📉 Publish code coverage at Codacy uses: codacy/codacy-coverage-reporter-action@v1 - if: inputs.codacy + id: codacy + if: inputs.codacy == 'true' continue-on-error: true with: project-token: ${{ secrets.CODACY_TOKEN }} coverage-reports: ${{ inputs.coverage_report_xml_directory }}/${{ inputs.coverage_report_xml_filename }} + + - name: Generate error messages + run: | + if [[ "${{ steps.codecov.outcome }}" == "failure" ]]; then + printf "::error title=%s::%s\n" "Publish Code Coverage Results / Codecov" "Failed to publish code coverage results." + else + printf "Codecov: No errors to report.\n" + fi + if [[ "${{ steps.codacy.outcome }}" == "failure" ]]; then + printf "::error title=%s::%s\n" "Publish Code Coverage Results / Codacy" "Failed to publish code coverage results." + else + printf "Codacy: No errors to report.\n" + fi diff --git a/.github/workflows/PublishOnPyPI.yml b/.github/workflows/PublishOnPyPI.yml index 0eb37350..5f179661 100644 --- a/.github/workflows/PublishOnPyPI.yml +++ b/.github/workflows/PublishOnPyPI.yml @@ -4,7 +4,7 @@ # Unai Martinez-Corral # # # # ==================================================================================================================== # -# Copyright 2020-2024 The pyTooling Authors # +# Copyright 2020-2025 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. 
# @@ -50,7 +50,6 @@ on: required: false jobs: - PublishOnPyPI: name: 🚀 Publish to PyPI runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}" diff --git a/.github/workflows/PublishReleaseNotes.yml b/.github/workflows/PublishReleaseNotes.yml new file mode 100644 index 00000000..43e98fbb --- /dev/null +++ b/.github/workflows/PublishReleaseNotes.yml @@ -0,0 +1,801 @@ +# ==================================================================================================================== # +# Authors: # +# Patrick Lehmann # +# # +# ==================================================================================================================== # +# Copyright 2020-2025 The pyTooling Authors # +# # +# Licensed under the Apache License, Version 2.0 (the "License"); # +# you may not use this file except in compliance with the License. # +# You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software # +# distributed under the License is distributed on an "AS IS" BASIS, # +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # +# See the License for the specific language governing permissions and # +# limitations under the License. # +# # +# SPDX-License-Identifier: Apache-2.0 # +# ==================================================================================================================== # +name: Release + +on: + workflow_call: + inputs: + ubuntu_image: + description: 'Name of the Ubuntu image.' + required: false + default: 'ubuntu-24.04' + type: string + release_branch: + description: 'Name of the branch containing releases.' + required: false + default: 'main' + type: string + mode: + description: 'Release mode: nightly or release.' + required: false + default: 'release' + type: string + tag: + description: 'Name of the release (tag).' + required: false + default: '' + type: string + title: + description: 'Title of the release.' 
+ required: false + default: '' + type: string + description: + description: 'Multi-line description of the release.' + required: false + default: '' + type: string + description_file: + description: 'Description of the release from a Markdown file.' + required: false + default: '' + type: string + description_footer: + description: 'Footer line(s) in every release.' + required: false + default: | + + -------- + Published from [%%gh_workflow_name%%](%%gh_server%%/%%gh_owner_repo%%/actions/runs/%%gh_runid%%) workflow triggered by %%gh_actor%% on %%datetime%%. + + This automatic release was created by [pyTooling/Actions](http://github.com/pyTooling/Actions)::Release.yml + type: string + draft: + description: 'Specify if this is a draft.' + required: false + default: false + type: boolean + prerelease: + description: 'Specify if this is a pre-release.' + required: false + default: false + type: boolean + latest: + description: 'Specify if this is the latest release.' + required: false + default: false + type: boolean + replacements: + description: 'Multi-line string containing search=replace patterns.' + required: false + default: '' + type: string + assets: + description: 'Multi-line string containing artifact:file:title asset descriptions.' + required: false + type: string + default: '' + inventory-json: + type: string + required: false + default: '' + inventory-version: + type: string + required: false + default: '' + inventory-categories: + type: string + required: false + default: '' + tarball-name: + type: string + required: false + default: '__pyTooling_upload_artifact__.tar' + can-fail: + type: boolean + required: false + default: false + outputs: + release-page: + description: "URL to the release page." 
+ value: ${{ jobs.Release.outputs.release-page }} + +jobs: + Release: + name: 📝 Create or Update Release Page on GitHub + runs-on: ${{ inputs.ubuntu_image }} + continue-on-error: ${{ inputs.can-fail }} + permissions: + contents: write + actions: write +# attestations: write + outputs: + release-page: ${{ steps.removeDraft.outputs.release_page }} + + steps: + - name: ⏬ Checkout repository + uses: actions/checkout@v4 + with: + # The command 'git describe' (used for version) needs the history. + fetch-depth: 0 + + - name: 🔧 Install zstd + run: sudo apt-get install -y --no-install-recommends zstd + + - name: 📑 Prepare + id: prepare + run: | + set +e + + ANSI_LIGHT_RED=$'\x1b[91m' + ANSI_LIGHT_GREEN=$'\x1b[92m' + ANSI_LIGHT_YELLOW=$'\x1b[93m' + ANSI_LIGHT_BLUE=$'\x1b[94m' + ANSI_NOCOLOR=$'\x1b[0m' + + printf "Release mode: ${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "${{ inputs.mode }}" + case "${{ inputs.mode }}" in + "release") + ;; + "nightly") + printf "→ Allow deletion and recreation of existing release pages for rolling releases (nightly releases)\n" + ;; + *) + printf "Unknown mode '%s'\n" "${{ inputs.mode }}" + printf "::error title=%s::%s\n" "InternalError" "Unknown mode '${{ inputs.mode }}'." + exit 1 + esac + + - name: 📑 Delete (old) Release Page + id: deleteReleasePage + if: inputs.mode == 'nightly' + run: | + set +e + + ANSI_LIGHT_RED=$'\x1b[91m' + ANSI_LIGHT_GREEN=$'\x1b[92m' + ANSI_LIGHT_YELLOW=$'\x1b[93m' + ANSI_LIGHT_BLUE=$'\x1b[94m' + ANSI_NOCOLOR=$'\x1b[0m' + + export GH_TOKEN=${{ github.token }} + + printf "Deleting release '%s' ... " "${{ inputs.tag }}" + message="$(gh release delete ${{ inputs.tag }} --yes 2>&1)" + if [[ $? 
-eq 0 ]]; then
+            printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
+          elif [[ "${message}" == "release not found" ]]; then
+            printf "${ANSI_LIGHT_YELLOW}[NOT FOUND]${ANSI_NOCOLOR}\n"
+          else
+            printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n"
+            printf "  ${ANSI_LIGHT_RED}Couldn't delete release '%s' -> Error: '%s'.${ANSI_NOCOLOR}\n" "${{ inputs.tag }}" "${message}"
+            printf "::error title=%s::%s\n" "InternalError" "Couldn't delete release '${{ inputs.tag }}' -> Error: '${message}'."
+            exit 1
+          fi
+
+      - name: 📑 Assemble Release Notes
+        id: createReleaseNotes
+        run: |
+          set +e
+
+          ANSI_LIGHT_RED=$'\x1b[91m'
+          ANSI_LIGHT_GREEN=$'\x1b[92m'
+          ANSI_LIGHT_YELLOW=$'\x1b[93m'
+          ANSI_LIGHT_BLUE=$'\x1b[94m'
+          ANSI_NOCOLOR=$'\x1b[0m'
+
+          export GH_TOKEN=${{ github.token }}
+
+          # Save release description (from parameter in a file)
+          head -c -1 <<'EOF' > __DESCRIPTION__.md
+          ${{ inputs.description }}
+          EOF
+
+          # Save release footer (from parameter in a file)
+          head -c -1 <<'EOF' > __FOOTER__.md
+          ${{ inputs.description_footer }}
+          EOF
+
+          # Download Markdown from PullRequest
+          #   Readout second parent's SHA
+          #   Search PR with that SHA
+          #   Load description of that PR
+          printf "Read second parent of current SHA (%s) ... " "${{ github.ref }}"
+          FATHER_SHA=$(git rev-parse ${{ github.ref }}^2 -- 2> /dev/null)
+          if [[ $? -ne 0 || "${FATHER_SHA}" == "" ]]; then
+            printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n"
+            printf "→ ${ANSI_LIGHT_YELLOW}Skipped readout of pull request description. This is not a merge commit.${ANSI_NOCOLOR}\n"
+          else
+            printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n"
+
+            printf "Search Pull Request to '%s' and branch containing SHA %s ... " "${{ inputs.release_branch }}" "${FATHER_SHA}"
+            PULL_REQUESTS=$(gh pr list --base "${{ inputs.release_branch }}" --search "${FATHER_SHA}" --state "merged" --json "title,number,mergedBy,mergedAt,body")
+            if [[ $? 
-ne 0 || "${PULL_REQUESTS}" == "" ]]; then + printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n" + printf "${ANSI_LIGHT_RED}Couldn't find a merged Pull Request to '%s'. -> %s${ANSI_NOCOLOR}\n" "${{ inputs.release_branch }}" "${PULL_REQUESTS}" + printf "::error title=PullRequest::Couldn't find a merged Pull Request to '%s'. -> %s\n" "${{ inputs.release_branch }}" "${PULL_REQUESTS}" + exit 1 + else + printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n" + + PR_TITLE="$( printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].title")" + PR_NUMBER="$( printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].number")" + PR_BODY="$( printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].body")" + PR_MERGED_BY="$(printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].mergedBy.login")" + PR_MERGED_AT="$(printf "%s\n" "${PULL_REQUESTS}" | jq --raw-output ".[0].mergedAt")" + + printf "Found Pull Request:\n" + printf " %s\n" "Title: ${PR_TITLE}" + printf " %s\n" "Number: ${PR_NUMBER}" + printf " %s\n" "MergedBy: ${PR_MERGED_BY}" + printf " %s\n" "MergedAt: ${PR_MERGED_AT} ($(date -d"${PR_MERGED_AT}" '+%d.%m.%Y - %H:%M:%S'))" + fi + + echo "${PR_BODY}" > __PULLREQUEST__.md + fi + + # Check if a release description file should be used and exists. + if [[ "${{ inputs.description_file }}" != "" ]]; then + if [[ ! -f "${{ inputs.description_file }}" ]]; then + printf "${ANSI_LIGHT_RED}Release description file '%s' not found.${ANSI_NOCOLOR}\n" "${{ inputs.description_file }}" + printf "::error title=%s::%s\n" "FileNotFound" "Release description file '${{ inputs.description_file }}' not found." 
+ exit 1 + elif [[ -s "${{ inputs.description_file }}" ]]; then + printf "Use '%s' as main release description.\n" "${{ inputs.description_file }}" + cp -v "${{ inputs.description_file }}" __NOTES__.md + else + printf "${ANSI_LIGHT_RED}Release description file '%s' is empty.${ANSI_NOCOLOR}\n" "${{ inputs.description_file }}" + printf "::error title=%s::%s\n" "FileNotFound" "Release description file '${{ inputs.description_file }}' is empty." + exit 1 + fi + # Check if the main release description is provided by a template parameter + elif [[ -s __DESCRIPTION__.md ]]; then + printf "Use '__DESCRIPTION__.md' as main release description.\n" + mv -v __DESCRIPTION__.md __NOTES__.md + # Check if the pull request serves as the main release description text. + elif [[ -s __PULLREQUEST__.md ]]; then + printf "Use '__PULLREQUEST__.md' as main release description.\n" + mv -v __PULLREQUEST__.md __NOTES__.md + + printf "Append '%%%%FOOTER%%%%' to '__NOTES__.md'.\n" + printf "\n%%%%FOOTER%%%%\n" >> __NOTES__.md + else + printf "${ANSI_LIGHT_RED}No release description specified (file, parameter, PR text).${ANSI_NOCOLOR}\n" + printf "::error title=%s::%s\n" "MissingDescription" "No release description specified (file, parameter, PR text)." 
+            exit 1
+          fi
+
+          # Read release notes main file for placeholder substitution
+          NOTES=$(<__NOTES__.md)
+
+          # Inline description
+          if [[ -s __DESCRIPTION__.md ]]; then
+            NOTES="${NOTES//%%DESCRIPTION%%/$(<__DESCRIPTION__.md)}"
+          fi
+
+          # Inline PullRequest and increase headline levels
+          if [[ -s __PULLREQUEST__.md ]]; then
+            while [[ "${NOTES}" =~ %%(PULLREQUEST(\+[0-3])?)%% ]]; do
+              case "${BASH_REMATCH[1]}" in
+                "PULLREQUEST+0" | "PULLREQUEST")
+                  NOTES="${NOTES//${BASH_REMATCH[0]}/$(<__PULLREQUEST__.md)}"
+                  ;;
+                "PULLREQUEST+1")
+                  NOTES="${NOTES//${BASH_REMATCH[0]}/$(cat __PULLREQUEST__.md | sed -E 's/^(#+) /\1# /gm;t')}"
+                  ;;
+                "PULLREQUEST+2")
+                  NOTES="${NOTES//${BASH_REMATCH[0]}/$(cat __PULLREQUEST__.md | sed -E 's/^(#+) /\1## /gm;t')}"
+                  ;;
+                "PULLREQUEST+3")
+                  NOTES="${NOTES//${BASH_REMATCH[0]}/$(cat __PULLREQUEST__.md | sed -E 's/^(#+) /\1### /gm;t')}"
+                  ;;
+              esac
+            done
+          fi
+
+          # inline Footer
+          if [[ -s __FOOTER__.md ]]; then
+            NOTES="${NOTES//%%FOOTER%%/$(<__FOOTER__.md)}"
+          fi
+
+          # Apply replacements
+          while IFS=$'\r\n' read -r patternLine; do
+            # skip empty lines
+            [[ "$patternLine" == "" ]] && continue
+
+            pattern="%${patternLine%%=*}%"
+            replacement="${patternLine#*=}"
+            NOTES="${NOTES//$pattern/$replacement}"
+          done <<<'${{ inputs.replacements }}'
+
+          # Workarounds for stupid GitHub variables
+          owner_repo="${{ github.repository }}"
+          repo=${owner_repo##*/}
+
+          # Replace special identifiers
+          NOTES="${NOTES//%%gh_server%%/${{ github.server_url }}}"
+          NOTES="${NOTES//%%gh_workflow_name%%/${{ github.workflow }}}"
+          NOTES="${NOTES//%%gh_owner%%/${{ github.repository_owner }}}"
+          NOTES="${NOTES//%%gh_repo%%/${repo}}"
+          NOTES="${NOTES//%%gh_owner_repo%%/${{ github.repository }}}"
+          #NOTES="${NOTES//%%gh_pages%%/https://${{ github.repository_owner }}.github.io/${repo}/}"
+          NOTES="${NOTES//%%gh_runid%%/${{ github.run_id }}}"
+          NOTES="${NOTES//%%gh_actor%%/${{ github.actor }}}"
+          NOTES="${NOTES//%%gh_sha%%/${{ github.sha }}}"
+          
NOTES="${NOTES//%%date%%/$(date '+%Y-%m-%d')}" + NOTES="${NOTES//%%time%%/$(date '+%H:%M:%S %Z')}" + NOTES="${NOTES//%%datetime%%/$(date '+%Y-%m-%d %H:%M:%S %Z')}" + + # Write final release notes to file + echo "${NOTES}" > __NOTES__.md + + # Display partial contents for debugging + if [[ -s __DESCRIPTION__.md ]]; then + printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__DESCRIPTION__.md' ($(stat --printf="%s" "__DESCRIPTION__.md") B) ...." + cat __DESCRIPTION__.md + printf "::endgroup::\n" + else + printf "${ANSI_LIGHT_YELLOW}No '__DESCRIPTION__.md' found.${ANSI_NOCOLOR}\n" + fi + if [[ -s __PULLREQUEST__.md ]]; then + printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__PULLREQUEST__.md' ($(stat --printf="%s" "__PULLREQUEST__.md") B) ...." + cat __PULLREQUEST__.md + printf "::endgroup::\n" + else + printf "${ANSI_LIGHT_YELLOW}No '__PULLREQUEST__.md' found.${ANSI_NOCOLOR}\n" + fi + if [[ -s __FOOTER__.md ]]; then + printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__FOOTER__.md' ($(stat --printf="%s" "__FOOTER__.md") B) ...." + cat __FOOTER__.md + printf "::endgroup::\n" + else + printf "${ANSI_LIGHT_YELLOW}No '__FOOTER__.md' found.${ANSI_NOCOLOR}\n" + fi + + # Print final release notes + printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Content of '__NOTES__.md' ($(stat --printf="%s" "__NOTES__.md") B) ...." 
+ cat __NOTES__.md + printf "::endgroup::\n" + + - name: 📑 Create new Release Page + id: createReleasePage + if: inputs.mode == 'release' + run: | + set +e + + ANSI_LIGHT_RED=$'\x1b[91m' + ANSI_LIGHT_GREEN=$'\x1b[92m' + ANSI_LIGHT_YELLOW=$'\x1b[93m' + ANSI_LIGHT_BLUE=$'\x1b[94m' + ANSI_NOCOLOR=$'\x1b[0m' + + export GH_TOKEN=${{ github.token }} + + if [[ "${{ inputs.prerelease }}" == "true" ]]; then + addPreRelease="--prerelease" + fi + + if [[ "${{ inputs.latest }}" == "false" ]]; then + addLatest="--latest=false" + fi + + if [[ "${{ inputs.title }}" != "" ]]; then + addTitle=("--title" "${{ inputs.title }}") + fi + + if [[ -s __NOTES__.md ]]; then + addNotes=("--notes-file" "__NOTES__.md") + fi + + printf "Creating release '%s' ... " "${{ inputs.tag }}" + message="$(gh release create "${{ inputs.tag }}" --verify-tag --draft $addPreRelease $addLatest "${addTitle[@]}" "${addNotes[@]}" 2>&1)" + if [[ $? -eq 0 ]]; then + printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n" + printf " Release page: %s\n" "${message}" + else + printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n" + printf " ${ANSI_LIGHT_RED}Couldn't create release '%s' -> Error: '%s'.${ANSI_NOCOLOR}\n" "${{ inputs.tag }}" "${message}" + printf "::error title=%s::%s\n" "InternalError" "Couldn't create release '${{ inputs.tag }}' -> Error: '${message}'." 
+ exit 1 + fi + + - name: 📑 Recreate Release Page + id: recreateReleasePage + if: inputs.mode == 'nightly' + run: | + set +e + + ANSI_LIGHT_RED=$'\x1b[91m' + ANSI_LIGHT_GREEN=$'\x1b[92m' + ANSI_LIGHT_YELLOW=$'\x1b[93m' + ANSI_LIGHT_BLUE=$'\x1b[94m' + ANSI_NOCOLOR=$'\x1b[0m' + + export GH_TOKEN=${{ github.token }} + + addDraft="--draft" + if [[ "${{ inputs.prerelease }}" == "true" ]]; then + addPreRelease="--prerelease" + fi + + if [[ "${{ inputs.latest }}" == "false" ]]; then + addLatest="--latest=false" + fi + + if [[ "${{ inputs.title }}" != "" ]]; then + addTitle=("--title" "${{ inputs.title }}") + fi + + if [[ -s __NOTES__.md ]]; then + addNotes=("--notes-file" "__NOTES__.md") + fi + + printf "Creating release '%s' ... " "${{ inputs.tag }}" + message="$(gh release create "${{ inputs.tag }}" --verify-tag --draft $addPreRelease $addLatest "${addTitle[@]}" "${addNotes[@]}" 2>&1)" + if [[ $? -eq 0 ]]; then + printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n" + printf " Release page: %s\n" "${message}" + else + printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n" + printf " ${ANSI_LIGHT_RED}Couldn't recreate release '%s' -> Error: '%s'.${ANSI_NOCOLOR}\n" "${{ inputs.tag }}" "${message}" + printf "::error title=%s::%s\n" "InternalError" "Couldn't recreate release '${{ inputs.tag }}' -> Error: '${message}'." 
+ exit 1 + fi + + - name: 📥 Download artifacts and upload as assets + id: uploadAssets + run: | + set +e + + ANSI_LIGHT_RED=$'\x1b[91m' + ANSI_LIGHT_GREEN=$'\x1b[92m' + ANSI_LIGHT_YELLOW=$'\x1b[93m' + ANSI_LIGHT_BLUE=$'\x1b[94m' + ANSI_NOCOLOR=$'\x1b[0m' + + export GH_TOKEN=${{ github.token }} + + Replace() { + line="$1" + while IFS=$'\r\n' read -r patternLine; do + # skip empty lines + [[ "$patternLine" == "" ]] && continue + + pattern="${patternLine%%=*}" + replacement="${patternLine#*=}" + line="${line//"%$pattern%"/"$replacement"}" + done <<<'${{ inputs.replacements }}' + printf "%s\n" "$line" + } + + # Create JSON inventory + if [[ "${{ inputs.inventory-json }}" != "" ]]; then + VERSION="1.0" + + # Split categories by ',' into a Bash array. + # See https://stackoverflow.com/a/45201229/3719459 + if [[ "${{ inputs.inventory-categories }}" != "" ]]; then + readarray -td, inventoryCategories <<<"${{ inputs.inventory-categories }}," + unset 'inventoryCategories[-1]' + declare -p inventoryCategories + else + inventoryCategories="" + fi + + jsonInventory=$(jq -c -n \ + --arg version "${VERSION}" \ + --arg date "$(date +"%Y-%m-%dT%H-%M-%S%:z")" \ + --argjson jsonMeta "$(jq -c -n \ + --arg tag "${{ inputs.tag }}" \ + --arg version "${{ inputs.inventory-version }}" \ + --arg hash "${{ github.sha }}" \ + --arg repo "${{ github.server_url }}/${{ github.repository }}" \ + --arg release "${{ github.server_url }}/${{ github.repository }}/releases/download/${{ inputs.tag }}" \ + --argjson categories "$(jq -c -n \ + '$ARGS.positional' \ + --args "${inventoryCategories[@]}" \ + )" \ + '{"tag": $tag, "version": $version, "git-hash": $hash, "repository-url": $repo, "release-url": $release, "categories": $categories}' \ + )" \ + '{"version": 1.0, "timestamp": $date, "meta": $jsonMeta, "files": {}}' + ) + fi + + ERRORS=0 + # A dictionary of 0/1 to avoid duplicate downloads + declare -A downloadedArtifacts + # A dictionary to check for duplicate asset files in release + declare -A 
assetFilenames + while IFS=$'\r\n' read -r assetLine; do + if [[ "${assetLine}" == "" || "${assetLine:0:1}" == "#" ]]; then + continue + fi + + # split assetLine colon separated triple: artifact:asset:title + artifact="${assetLine%%:*}" + assetLine="${assetLine#*:}" + asset="${assetLine%%:*}" + assetLine="${assetLine#*:}" + if [[ "${{ inputs.inventory-json }}" == "" ]]; then + categories="" + title="${assetLine##*:}" + else + categories="${assetLine%%:*}" + title="${assetLine##*:}" + fi + + # remove leading whitespace + asset="${asset#"${asset%%[![:space:]]*}"}" + categories="${categories#"${categories%%[![:space:]]*}"}" + title="${title#"${title%%[![:space:]]*}"}" + + # apply replacements + asset="$(Replace "${asset}")" + title="$(Replace "${title}")" + + printf "Publish asset '%s' from artifact '%s' with title '%s'\n" "${asset}" "${artifact}" "${title}" + printf " Checked asset for duplicates ... " + if [[ -n "${assetFilenames[$asset]}" ]]; then + printf "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}\n" + printf "::error title=%s::%s\n" "DuplicateAsset" "Asset '${asset}' from artifact '${artifact}' was already uploaded to release '${{ inputs.tag }}'." + ERRORS=$((ERRORS + 1)) + continue + else + printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n" + assetFilenames[$asset]=1 + fi + + # Download artifact by artifact name + if [[ -n "${downloadedArtifacts[$artifact]}" ]]; then + printf " downloading '%s' ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}\n" "${artifact}" + else + echo " downloading '${artifact}' ... " + printf " gh run download $GITHUB_RUN_ID --dir \"%s\" --name \"%s\" " "${artifact}" "${artifact}" + gh run download $GITHUB_RUN_ID --dir "${artifact}" --name "${artifact}" + if [[ $? 
-eq 0 ]]; then + printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n" + else + printf "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}\n" + printf " ${ANSI_LIGHT_RED}Couldn't download artifact '%s'.${ANSI_NOCOLOR}\n" "${artifact}" + printf "::error title=%s::%s\n" "ArtifactNotFound" "Couldn't download artifact '${artifact}'." + ERRORS=$((ERRORS + 1)) + continue + fi + downloadedArtifacts[$artifact]=1 + + printf " Checking for embedded tarball ... " + if [[ -f "${artifact}/${{ inputs.tarball-name }}" ]]; then + printf "${ANSI_LIGHT_GREEN}[FOUND]${ANSI_NOCOLOR}\n" + + pushd "${artifact}" > /dev/null + + printf " Extracting embedded tarball ... " + tar -xf "${{ inputs.tarball-name }}" + if [[ $? -ne 0 ]]; then + printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n" + else + printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n" + fi + + printf " Removing temporary tarball ... " + rm -f "${{ inputs.tarball-name }}" + if [[ $? -ne 0 ]]; then + printf "${ANSI_LIGHT_RED}[FAILED]${ANSI_NOCOLOR}\n" + else + printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n" + fi + + popd > /dev/null + else + printf "${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}\n" + fi + fi + + # Check if artifact should be compressed (zip, tgz) or if asset was part of the downloaded artifact. + printf " checking asset '%s' ... " "${artifact}/${asset}" + if [[ "${asset}" == !*.zip ]]; then + printf "${ANSI_LIGHT_GREEN}[ZIP]${ANSI_NOCOLOR}\n" + asset="${asset##*!}" + printf "::group:: %s\n" "Compressing artifact '${artifact}' to '${asset}' ..." + ( + cd "${artifact}" && \ + zip -r "../${asset}" * + ) + retCode=$? 
+ printf "::endgroup::\n" + if [[ $retCode -eq 0 ]]; then + printf " Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n" + uploadFile="${asset}" + else + printf " Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}\n" + printf " ${ANSI_LIGHT_RED}Couldn't compress '%s' to zip file '%s'.${ANSI_NOCOLOR}\n" "${artifact}" "${asset}" + printf "::error title=%s::%s\n" "CompressionError" "Couldn't compress '${artifact}' to zip file '${asset}'." + ERRORS=$((ERRORS + 1)) + continue + fi + elif [[ "${asset}" == !*.tgz || "${asset}" == !*.tar.gz || "${asset}" == \$*.tgz || "${asset}" == \$*.tar.gz ]]; then + printf "${ANSI_LIGHT_GREEN}[TAR/GZ]${ANSI_NOCOLOR}\n" + + if [[ "${asset:0:1}" == "\$" ]]; then + asset="${asset##*$}" + dirName="${asset%.*}" + printf " Compressing artifact '%s' to '%s' ...\n" "${artifact}" "${asset}" + tar -c --gzip --owner=0 --group=0 --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" . + retCode=$? + else + asset="${asset##*!}" + printf " Compressing artifact '%s' to '%s' ...\n" "${artifact}" "${asset}" + ( + cd "${artifact}" && \ + tar -c --gzip --owner=0 --group=0 --file="../${asset}" * + ) + retCode=$? + fi + + if [[ $retCode -eq 0 ]]; then + printf " Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n" + uploadFile="${asset}" + else + printf " Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}\n" + printf " ${ANSI_LIGHT_RED}Couldn't compress '%s' to tgz file '%s'.${ANSI_NOCOLOR}\n" "${artifact}" "${asset}" + printf "::error title=%s::%s\n" "CompressionError" "Couldn't compress '${artifact}' to tgz file '${asset}'." 
+ ERRORS=$((ERRORS + 1)) + continue + fi + elif [[ "${asset}" == !*.tzst || "${asset}" == !*.tar.zst || "${asset}" == \$*.tzst || "${asset}" == \$*.tar.zst ]]; then + printf "${ANSI_LIGHT_GREEN}[ZST]${ANSI_NOCOLOR}\n" + + if [[ "${asset:0:1}" == "\$" ]]; then + asset="${asset##*$}" + dirName="${asset%.*}" + printf " Compressing artifact '%s' to '%s' ...\n" "${artifact}" "${asset}" + tar -c --zstd --owner=0 --group=0 --file="${asset}" --directory="${artifact}" --transform "s|^\.|${dirName%.tar}|" . + retCode=$? + else + asset="${asset##*!}" + printf " Compressing artifact '%s' to '%s' ...\n" "${artifact}" "${asset}" + ( + cd "${artifact}" && \ + tar -c --zstd --owner=0 --group=0 --file="../${asset}" * + ) + retCode=$? + fi + + if [[ $retCode -eq 0 ]]; then + printf " Compression ${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n" + uploadFile="${asset}" + else + printf " Compression ${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}\n" + printf " ${ANSI_LIGHT_RED}Couldn't compress '%s' to zst file '%s'.${ANSI_NOCOLOR}\n" "${artifact}" "${asset}" + printf "::error title=%s::%s\n" "CompressionError" "Couldn't compress '${artifact}' to zst file '${asset}'." + ERRORS=$((ERRORS + 1)) + continue + fi + elif [[ -e "${artifact}/${asset}" ]]; then + printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n" + uploadFile="${artifact}/${asset}" + else + printf "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}\n" + printf " ${ANSI_LIGHT_RED}Couldn't find asset '%s' in artifact '%s'.${ANSI_NOCOLOR}\n" "${asset}" "${artifact}" + printf "::error title=%s::%s\n" "FileNotFound" "Couldn't find asset '${asset}' in artifact '${artifact}'." 
+ ERRORS=$((ERRORS + 1)) + continue + fi + + # Add asset to JSON inventory + if [[ "${{ inputs.inventory-json }}" != "" ]]; then + if [[ "${categories}" != "${title}" ]]; then + printf " adding file '%s' with '%s' to JSON inventory ...\n" "${uploadFile#*/}" "${categories//;/ → }" + category="" + jsonEntry=$(jq -c -n \ + --arg title "${title}" \ + --arg file "${uploadFile#*/}" \ + '{"file": $file, "title": $title}' \ + ) + + while [[ "${categories}" != "${category}" ]]; do + category="${categories##*,}" + categories="${categories%,*}" + jsonEntry=$(jq -c -n --arg cat "${category}" --argjson value "${jsonEntry}" '{$cat: $value}') + done + + jsonInventory=$(jq -c -n \ + --argjson inventory "${jsonInventory}" \ + --argjson file "${jsonEntry}" \ + '$inventory * {"files": $file}' \ + ) + else + printf " adding file '%s' to JSON inventory ... ${ANSI_LIGHT_YELLOW}[SKIPPED]${ANSI_NOCOLOR}\n" "${uploadFile#*/}" + fi + fi + + # Upload asset to existing release page + printf " uploading asset '%s' from '%s' with title '%s' ... " "${asset}" "${uploadFile}" "${title}" + gh release upload ${{ inputs.tag }} "${uploadFile}#${title}" --clobber + if [[ $? -eq 0 ]]; then + printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n" + else + printf "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}\n" + printf " ${ANSI_LIGHT_RED}Couldn't upload asset '%s' from '%s' to release '%s'.${ANSI_NOCOLOR}\n" "${asset}" "${uploadFile}" "${{ inputs.tag }}" + printf "::error title=%s::%s\n" "UploadError" "Couldn't upload asset '${asset}' from '${uploadFile}' to release '${{ inputs.tag }}'." + ERRORS=$((ERRORS + 1)) + continue + fi + done <<<'${{ inputs.assets }}' + + if [[ "${{ inputs.inventory-json }}" != "" ]]; then + inventoryTitle="Release Inventory (JSON)" + + printf "Publish asset '%s' with title '%s'\n" "${{ inputs.inventory-json }}" "${inventoryTitle}" + printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Writing JSON inventory to '${{ inputs.inventory-json }}' ...." 
+ printf "%s\n" "$(jq -n --argjson inventory "${jsonInventory}" '$inventory')" > "${{ inputs.inventory-json }}" + cat "${{ inputs.inventory-json }}" + printf "::endgroup::\n" + + # Upload inventory asset to existing release page + printf " uploading asset '%s' title '%s' ... " "${{ inputs.inventory-json }}" "${inventoryTitle}" + gh release upload ${{ inputs.tag }} "${{ inputs.inventory-json }}#${inventoryTitle}" --clobber + if [[ $? -eq 0 ]]; then + printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n" + else + printf "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}\n" + printf " ${ANSI_LIGHT_RED}Couldn't upload asset '%s' to release '%s'.${ANSI_NOCOLOR}\n" "${{ inputs.inventory-json }}" "${{ inputs.tag }}" + printf "::error title=%s::%s\n" "UploadError" "Couldn't upload asset '${{ inputs.inventory-json }}' to release '${{ inputs.tag }}'." + ERRORS=$((ERRORS + 1)) + fi + fi + + printf "::group::${ANSI_LIGHT_BLUE}%s${ANSI_NOCOLOR}\n" "Inspecting downloaded artifacts ..." + tree -pash -L 3 . + printf "::endgroup::\n" + + if [[ $ERRORS -ne 0 ]]; then + printf "${ANSI_LIGHT_RED}%s errors detected in previous steps.${ANSI_NOCOLOR}\n" "${ERRORS}" + exit 1 + fi + + - name: 📑 Remove draft state from Release Page + id: removeDraft + if: ${{ ! inputs.draft }} + run: | + set +e + + ANSI_LIGHT_RED=$'\x1b[91m' + ANSI_LIGHT_GREEN=$'\x1b[92m' + ANSI_NOCOLOR=$'\x1b[0m' + + export GH_TOKEN=${{ github.token }} + + # Remove draft-state from release page + printf "Remove draft-state from release '%s' ... " "${{ inputs.tag }}" + releasePage=$(gh release edit --draft=false "${{ inputs.tag }}") + if [[ $?
-eq 0 ]]; then + printf "${ANSI_LIGHT_GREEN}[OK]${ANSI_NOCOLOR}\n" + printf " Release page: %s\n" "${releasePage}" + + printf "release_page=%s\n" "${releasePage}" >> "${GITHUB_OUTPUT}" + else + printf "${ANSI_LIGHT_RED}[ERROR]${ANSI_NOCOLOR}\n" + printf " ${ANSI_LIGHT_RED}Couldn't remove draft-state from release '%s'.${ANSI_NOCOLOR}\n" "${{ inputs.tag }}" + printf "::error title=%s::%s\n" "ReleasePage" "Couldn't remove draft-state from release '${{ inputs.tag }}'." + fi diff --git a/.github/workflows/PublishTestResults.yml b/.github/workflows/PublishTestResults.yml index de1a4503..c09f56c7 100644 --- a/.github/workflows/PublishTestResults.yml +++ b/.github/workflows/PublishTestResults.yml @@ -4,7 +4,7 @@ # Unai Martinez-Corral # # # # ==================================================================================================================== # -# Copyright 2020-2024 The pyTooling Authors # +# Copyright 2020-2025 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. # @@ -57,8 +57,8 @@ on: publish: description: 'Publish test report summary via Dorny Test-Reporter' required: false - default: true - type: boolean + default: 'true' + type: string report_title: description: 'Title of the summary report in the pipeline''s sidebar' required: false @@ -67,17 +67,22 @@ on: dorny: description: 'Publish merged unittest results via Dorny Test-Reporter.' required: false - default: true - type: boolean + default: 'true' + type: string codecov: description: 'Publish merged unittest results to Codecov.' required: false - default: true - type: boolean + default: 'false' + type: string + codecov_flags: + description: 'Flags applied to the upload to Codecov' + required: false + default: 'unittest' + type: string secrets: CODECOV_TOKEN: description: 'Token to push result to Codecov.' 
- required: true + required: false jobs: PublishTestResults: @@ -117,7 +122,7 @@ jobs: - name: 📊 Publish Unit Test Results uses: dorny/test-reporter@v2 - if: (inputs.dorny || inputs.publish) && inputs.report_title != '' + if: ( inputs.dorny == 'true' || inputs.publish == 'true' ) && inputs.report_title != '' with: name: ${{ inputs.report_title }} path: ${{ inputs.merged_junit_filename }} @@ -125,13 +130,24 @@ jobs: - name: 📊 Publish unittest results at CodeCov uses: codecov/test-results-action@v1 - if: inputs.codecov + id: codecov + if: inputs.codecov == 'true' + continue-on-error: true with: token: ${{ secrets.CODECOV_TOKEN }} disable_search: true files: ${{ inputs.merged_junit_filename }} + flags: ${{ inputs.codecov_flags }} fail_ci_if_error: true + - name: Generate error messages + run: | + if [[ "${{ steps.codecov.outcome }}" == "failure" ]]; then + printf "::error title=%s::%s\n" "Publish Unit Test Results / Codecov" "Failed to publish unittest results." + else + printf "Codecov: No errors to report.\n" + fi + - name: 📤 Upload merged 'JUnit Test Summary' artifact uses: pyTooling/upload-artifact@v4 if: inputs.merged_junit_artifact != '' diff --git a/.github/workflows/PublishToGitHubPages.yml b/.github/workflows/PublishToGitHubPages.yml index 020aefa8..ca7e165f 100644 --- a/.github/workflows/PublishToGitHubPages.yml +++ b/.github/workflows/PublishToGitHubPages.yml @@ -4,7 +4,7 @@ # Unai Martinez-Corral # # # # ==================================================================================================================== # -# Copyright 2020-2024 The pyTooling Authors # +# Copyright 2020-2025 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. 
# diff --git a/.github/workflows/SphinxDocumentation.yml b/.github/workflows/SphinxDocumentation.yml index 3cf20890..d61c5e24 100644 --- a/.github/workflows/SphinxDocumentation.yml +++ b/.github/workflows/SphinxDocumentation.yml @@ -3,7 +3,7 @@ # Patrick Lehmann # # # # ==================================================================================================================== # -# Copyright 2020-2024 The pyTooling Authors # +# Copyright 2020-2025 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. # diff --git a/.github/workflows/StaticTypeCheck.yml b/.github/workflows/StaticTypeCheck.yml index 02571fb6..5bb62173 100644 --- a/.github/workflows/StaticTypeCheck.yml +++ b/.github/workflows/StaticTypeCheck.yml @@ -4,7 +4,7 @@ # Unai Martinez-Corral # # # # ==================================================================================================================== # -# Copyright 2020-2024 The pyTooling Authors # +# Copyright 2020-2025 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. # diff --git a/.github/workflows/Release.yml b/.github/workflows/TagReleaseCommit.yml similarity index 60% rename from .github/workflows/Release.yml rename to .github/workflows/TagReleaseCommit.yml index 1dea8d19..d1101646 100644 --- a/.github/workflows/Release.yml +++ b/.github/workflows/TagReleaseCommit.yml @@ -4,7 +4,7 @@ # Unai Martinez-Corral # # # # ==================================================================================================================== # -# Copyright 2020-2024 The pyTooling Authors # +# Copyright 2020-2025 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. 
# @@ -20,74 +20,65 @@ # # # SPDX-License-Identifier: Apache-2.0 # # ==================================================================================================================== # -name: Release +name: Auto Tag on: workflow_call: inputs: - ubuntu_image_version: - description: 'Ubuntu image version.' + ubuntu_image: + description: 'Name of the Ubuntu image.' required: false - default: '24.04' + default: 'ubuntu-24.04' + type: string + version: + description: 'Version used as tag name.' + required: true + type: string + auto_tag: + description: 'Automatically add and push a tag.' + required: true + type: string + workflow: + description: 'Workflow to start after adding a tag.' + required: false + default: 'Pipeline.yml' type: string jobs: - Release: - name: 📝 Create 'Release Page' on GitHub - runs-on: "ubuntu-${{ inputs.ubuntu_image_version }}" - - steps: - - name: 🔁 Extract Git tag from GITHUB_REF - id: getVariables - run: | - GIT_TAG=${GITHUB_REF#refs/*/} - RELEASE_VERSION=${GIT_TAG#v} - RELEASE_DATETIME="$(date --utc '+%d.%m.%Y - %H:%M:%S')" - # write to step outputs - printf "%s\n" "gitTag=${GIT_TAG}" >> $GITHUB_OUTPUT - printf "%s\n" "version=${RELEASE_VERSION}" >> $GITHUB_OUTPUT - printf "%s\n" "datetime=${RELEASE_DATETIME}" >> $GITHUB_OUTPUT - - - name: 📑 Create Release Page - uses: actions/create-release@v1 - id: createReleasePage - env: - GITHUB_TOKEN: ${{ github.token }} - with: - tag_name: ${{ steps.getVariables.outputs.gitTag }} -# release_name: ${{ steps.getVariables.outputs.gitTag }} - body: | - **Automated Release created on: ${{ steps.getVariables.outputs.datetime }}** - - # New Features - - * tbd - * tbd + AutoTag: + name: "🏷 Create tag '${{ inputs.version}}' on GitHub" + runs-on: ${{ inputs.ubuntu_image }} + if: inputs.auto_tag == 'true' - # Changes +# if: github.ref == 'refs/heads/${{ inputs.release_branch }}' - * tbd - * tbd + permissions: + contents: write # required for tag creation + actions: write # required to start a new pipeline - # 
Bug Fixes - - * tbd - * tbd - - # Documentation - - * tbd - * tbd - - # Unit Tests - - * tbd - * tbd - - ---------- - # Related Issues and Pull-Requests - - * tbd - * tbd - draft: true - prerelease: false + steps: + - name: 🏷 Create release tag '${{ inputs.version }}' + uses: actions/github-script@v7 + id: createReleaseTag +# if: inputs.auto_tag == 'true' + with: + script: | + github.rest.git.createRef({ + owner: context.repo.owner, + repo: context.repo.repo, + ref: 'refs/tags/${{ inputs.version }}', + sha: context.sha + }) + + - name: Trigger Workflow + uses: actions/github-script@v7 + id: runReleaseTag +# if: inputs.auto_tag == 'true' + with: + script: | + github.rest.actions.createWorkflowDispatch({ + owner: context.repo.owner, + repo: context.repo.repo, + workflow_id: '${{ inputs.workflow }}', + ref: '${{ inputs.version }}' + }) diff --git a/.github/workflows/UnitTesting.yml b/.github/workflows/UnitTesting.yml index eaf2f564..d3483e34 100644 --- a/.github/workflows/UnitTesting.yml +++ b/.github/workflows/UnitTesting.yml @@ -4,7 +4,7 @@ # Unai Martinez-Corral # # # # ==================================================================================================================== # -# Copyright 2020-2024 The pyTooling Authors # +# Copyright 2020-2025 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License.
# @@ -225,14 +225,14 @@ jobs: packages = { "coverage": "python-coverage:p", - "docstr_coverage": "python-pyaml:p", + "docstr_coverage": "python-pyaml:p python-types-pyyaml:p", "igraph": "igraph:p", "jinja2": "python-markupsafe:p", "lxml": "python-lxml:p", "numpy": "python-numpy:p", "markupsafe": "python-markupsafe:p", "pip": "python-pip:p", - "pyyaml": "python-pyyaml:p", + "pyyaml": "python-pyyaml:p python-types-pyyaml:p", "ruamel.yaml": "python-ruamel-yaml:p", # "ruamel.yaml": "python-ruamel-yaml:p python-ruamel.yaml.clib:p", "sphinx": "python-markupsafe:p", @@ -240,6 +240,7 @@ jobs: "wheel": "python-wheel:p", "pyedaa.projectmodel": "python-ruamel-yaml:p python-ruamel.yaml.clib:p python-lxml:p", "pyedaa.reports": "python-ruamel-yaml:p python-ruamel.yaml.clib:p python-lxml:p", + "sphinx-reports": "python-markupsafe:p python-pyaml:p python-types-pyyaml:p", } subPackages = { "pytooling": { @@ -342,6 +343,7 @@ jobs: - name: ✅ Run unit tests (Ubuntu/macOS) if: matrix.system != 'windows' + continue-on-error: true run: | export ENVIRONMENT_NAME="${{ matrix.envname }}" export PYTHONPATH=$(pwd) diff --git a/.github/workflows/VerifyDocs.yml b/.github/workflows/VerifyDocs.yml index dad2d535..bb53f873 100644 --- a/.github/workflows/VerifyDocs.yml +++ b/.github/workflows/VerifyDocs.yml @@ -4,7 +4,7 @@ # Unai Martinez-Corral # # # # ==================================================================================================================== # -# Copyright 2020-2024 The pyTooling Authors # +# Copyright 2020-2025 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. 
# diff --git a/.github/workflows/_Checking_JobTemplates.yml b/.github/workflows/_Checking_JobTemplates.yml index 8995492b..0f713908 100644 --- a/.github/workflows/_Checking_JobTemplates.yml +++ b/.github/workflows/_Checking_JobTemplates.yml @@ -5,11 +5,20 @@ on: workflow_dispatch: jobs: + Prepare: + uses: pyTooling/Actions/.github/workflows/PrepareJob.yml@dev + ConfigParams: uses: pyTooling/Actions/.github/workflows/ExtractConfiguration.yml@dev with: package_name: pyDummy + InstallParams: + uses: pyTooling/Actions/.github/workflows/Parameters.yml@dev + with: + package_name: pyDummy + python_version_list: '' + UnitTestingParams: uses: pyTooling/Actions/.github/workflows/Parameters.yml@dev with: @@ -69,8 +78,8 @@ jobs: python_version: ${{ needs.UnitTestingParams.outputs.python_version }} commands: | ${{ needs.ConfigParams.outputs.mypy_prepare_command }} - mypy --html-report htmlmypy -p ${{ needs.ConfigParams.outputs.package_fullname }} - html_report: 'htmlmypy' + mypy --html-report report/typing -p ${{ needs.ConfigParams.outputs.package_fullname }} + html_report: 'report/typing' html_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }} DocCoverage: @@ -80,19 +89,31 @@ jobs: - UnitTestingParams with: python_version: ${{ needs.UnitTestingParams.outputs.python_version }} - directory : ${{ needs.ConfigParams.outputs.package_directors }} + directory : ${{ needs.ConfigParams.outputs.package_directory }} # fail_below: 70 Package: uses: pyTooling/Actions/.github/workflows/Package.yml@dev needs: - UnitTestingParams - - UnitTesting - - PlatformTesting +# - UnitTesting +# - PlatformTesting with: python_version: ${{ needs.UnitTestingParams.outputs.python_version }} artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }} + Install: + uses: pyTooling/Actions/.github/workflows/InstallPackage.yml@dev + needs: + - ConfigParams + - UnitTestingParams + - InstallParams + - Package + with: + jobs: ${{ 
needs.InstallParams.outputs.python_jobs }} + wheel: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }} + package_name: ${{ needs.ConfigParams.outputs.package_fullname }} + PublishCoverageResults: uses: pyTooling/Actions/.github/workflows/PublishCoverageResults.yml@dev needs: @@ -112,9 +133,7 @@ jobs: coverage_report_html_directory: ${{ needs.ConfigParams.outputs.coverage_report_html_directory }} codecov: true codacy: true - secrets: - CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} - CODACY_TOKEN: ${{ secrets.CODACY_PROJECT_TOKEN }} + secrets: inherit PublishTestResults: uses: pyTooling/Actions/.github/workflows/PublishTestResults.yml@dev @@ -129,8 +148,7 @@ jobs: merged_junit_artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }} codecov: true dorny: true - secrets: - CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + secrets: inherit # VerifyDocs: # uses: pyTooling/Actions/.github/workflows/VerifyDocs.yml@dev @@ -161,7 +179,6 @@ jobs: - UnitTestingParams - PublishCoverageResults - PublishTestResults - - Documentation with: sqlite_coverage_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_sqlite }}- xml_unittest_artifacts_prefix: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).unittesting_xml }}- @@ -185,33 +202,58 @@ jobs: - PublishCoverageResults - StaticTypeCheck with: - doc: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }} + doc: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).documentation_html }} coverage: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).codecoverage_html }} - typing: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }} + typing: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).statictyping_html }} + + TriggerTaggedRelease: + uses: pyTooling/Actions/.github/workflows/TagReleaseCommit.yml@dev + needs: + - Prepare + - UnitTesting + - 
PlatformTesting + - Install +# - StaticTypeCheck + - Package + - PublishToGitHubPages + permissions: + contents: write # required for create tag + actions: write # required for trigger workflow + with: + version: ${{ needs.Prepare.outputs.version }} + auto_tag: ${{ needs.Prepare.outputs.is_release_commit }} + secrets: inherit ReleasePage: - uses: pyTooling/Actions/.github/workflows/Release.yml@dev - if: startsWith(github.ref, 'refs/tags') + uses: pyTooling/Actions/.github/workflows/PublishReleaseNotes.yml@dev needs: + - Prepare - UnitTesting - PlatformTesting + - Install # - StaticTypeCheck - Package - PublishToGitHubPages + if: needs.Prepare.outputs.is_release_tag == 'true' + permissions: + contents: write + actions: write + with: + tag: ${{ needs.Prepare.outputs.version }} + secrets: inherit PublishOnPyPI: uses: pyTooling/Actions/.github/workflows/PublishOnPyPI.yml@dev - if: startsWith(github.ref, 'refs/tags') needs: - UnitTestingParams - ReleasePage # - Package + if: needs.Prepare.outputs.is_release_tag == 'true' with: python_version: ${{ needs.UnitTestingParams.outputs.python_version }} requirements: -r dist/requirements.txt artifact: ${{ fromJson(needs.UnitTestingParams.outputs.artifact_names).package_all }} - secrets: - PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} + secrets: inherit ArtifactCleanUp: uses: pyTooling/Actions/.github/workflows/ArtifactCleanUp.yml@dev diff --git a/.github/workflows/_Checking_NamespacePackage_Pipeline.yml b/.github/workflows/_Checking_NamespacePackage_Pipeline.yml index 46a19dcf..a0eb01bb 100644 --- a/.github/workflows/_Checking_NamespacePackage_Pipeline.yml +++ b/.github/workflows/_Checking_NamespacePackage_Pipeline.yml @@ -14,6 +14,6 @@ jobs: codacy: true dorny: true secrets: - PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} - CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} - CODACY_PROJECT_TOKEN: ${{ secrets.CODACY_PROJECT_TOKEN }} + PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + CODACY_TOKEN: ${{ 
secrets.CODACY_TOKEN }} diff --git a/.github/workflows/_Checking_Nightly.yml b/.github/workflows/_Checking_Nightly.yml index 57fcc9ee..e1e012d2 100644 --- a/.github/workflows/_Checking_Nightly.yml +++ b/.github/workflows/_Checking_Nightly.yml @@ -45,11 +45,9 @@ jobs: uses: ./.github/workflows/NightlyRelease.yml needs: - Build - secrets: inherit permissions: contents: write - actions: write -# attestations: write + actions: write with: can-fail: true prerelease: true @@ -78,16 +76,15 @@ jobs: document:$archive7.tar.gz: Archive 7 - tar.gz + dir document:$archive8.tzst: Archive 8 - tzst + dir document:$archive9.tar.zst:Archive 9 - tar.zst + dir + secrets: inherit NightlyPageWithInventory: uses: ./.github/workflows/NightlyRelease.yml needs: - Build - secrets: inherit permissions: contents: write - actions: write -# attestations: write + actions: write with: can-fail: true replacements: | @@ -121,3 +118,4 @@ jobs: document:$archive7.tar.gz: Archive 7 - tar.gz + dir document:$archive8.tzst: Archive 8 - tzst + dir document:$archive9.tar.zst: Archive 9 - tar.zst + dir + secrets: inherit diff --git a/.github/workflows/_Checking_SimplePackage_Pipeline.yml b/.github/workflows/_Checking_SimplePackage_Pipeline.yml index 19f9573b..9c683b8c 100644 --- a/.github/workflows/_Checking_SimplePackage_Pipeline.yml +++ b/.github/workflows/_Checking_SimplePackage_Pipeline.yml @@ -14,6 +14,6 @@ jobs: dorny: true cleanup: false secrets: - PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} - CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} - CODACY_PROJECT_TOKEN: ${{ secrets.CODACY_PROJECT_TOKEN }} + PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + CODACY_TOKEN: ${{ secrets.CODACY_TOKEN }} diff --git a/README.md b/README.md index 499e97c0..ec7dc161 100644 --- a/README.md +++ b/README.md @@ -77,7 +77,7 @@ As shown in the screenshots above, the expected order is: [**NightlyRelease**](.github/workflows/NightlyRelease.yml): publish GitHub Release. 
- [**Release**](.github/workflows/Release.yml): publish GitHub Release. + [**PublishReleaseNotes**](.github/workflows/PublishReleaseNotes.yml): publish GitHub Release. - **Documentation:** [**SphinxDocumentation**](.github/workflows/PublishCoverageResults.yml): create HTML and LaTeX documentation using Sphinx. diff --git a/doc/JobTemplate/CoverageCollection.rst b/doc/JobTemplate/CoverageCollection.rst index 8b9c3dc6..4749ce32 100644 --- a/doc/JobTemplate/CoverageCollection.rst +++ b/doc/JobTemplate/CoverageCollection.rst @@ -55,8 +55,7 @@ Simple Example uses: pyTooling/Actions/.github/workflows/CoverageCollection.yml@r0 with: artifact: Coverage - secrets: - codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }} + secrets: inherit Complex Example =============== @@ -71,8 +70,7 @@ Complex Example with: python_version: ${{ needs.Params.outputs.python_version }} artifact: ${{ fromJson(needs.Params.outputs.artifact_names).codecoverage_html }} - secrets: - codacy_token: ${{ secrets.CODACY_PROJECT_TOKEN }} + secrets: inherit Parameters ********** diff --git a/doc/conf.py b/doc/conf.py index 327b3caa..e386788b 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -1,20 +1,29 @@ # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
-from importlib.util import find_spec from sys import path as sys_path from os.path import abspath from pathlib import Path -from json import loads from pyTooling.Packaging import extractVersionInformation +# ============================================================================== +# Project configuration +# ============================================================================== +githubNamespace = "pyTooling" +githubProject = "Actions" +pythonProject = "pyDummy" +directoryName = pythonProject.replace('.', '/') + + +# ============================================================================== +# Project paths +# ============================================================================== ROOT = Path(__file__).resolve().parent sys_path.insert(0, abspath(".")) sys_path.insert(0, abspath("..")) -sys_path.insert(0, abspath("../pyDummy")) -# sys_path.insert(0, abspath("_extensions")) +sys_path.insert(0, abspath(f"../{directoryName}")) # ============================================================================== @@ -23,11 +32,7 @@ # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. -githubNamespace = "pyTooling" -githubProject = "Actions" -project = "pyDummy" - -packageInformationFile = Path(f"../{project}/__init__.py") +packageInformationFile = Path(f"../{directoryName}/__init__.py") versionInformation = extractVersionInformation(packageInformationFile) author = versionInformation.Author @@ -105,7 +110,7 @@ # Python settings # ============================================================================== modindex_common_prefix = [ - f"{project}." + f"{pythonProject}." 
] # ============================================================================== @@ -257,13 +262,13 @@ # ============================================================================== # report_unittest_testsuites = { # "src": { -# "name": f"{project}", +# "name": f"{pythonProject}", # "xml_report": "../report/unit/unittest.xml", # } # } # report_codecov_packages = { # "src": { -# "name": f"{project}", +# "name": f"{pythonProject}", # "json_report": "../report/coverage/coverage.json", # "fail_below": 80, # "levels": "default" @@ -271,8 +276,8 @@ # } # report_doccov_packages = { # "src": { -# "name": f"{project}", -# "directory": f"../{project}", +# "name": f"{pythonProject}", +# "directory": f"../{directoryName}", # "fail_below": 80, # "levels": "default" # } @@ -289,17 +294,17 @@ # AutoAPI.Sphinx # ============================================================================== autoapi_modules = { - f"{project}": { + f"{pythonProject}": { "template": "package", - "output": project, + "output": pythonProject, "override": True } } -for directory in [mod for mod in Path(f"../{project}").iterdir() if mod.is_dir() and mod.name != "__pycache__"]: - print(f"Adding module rule for '{project}.{directory.name}'") - autoapi_modules[f"{project}.{directory.name}"] = { +for directory in [mod for mod in Path(f"../{directoryName}").iterdir() if mod.is_dir() and mod.name != "__pycache__"]: + print(f"Adding module rule for '{pythonProject}.{directory.name}'") + autoapi_modules[f"{pythonProject}.{directory.name}"] = { "template": "module", - "output": project, + "output": pythonProject, "override": True } diff --git a/doc/coverage/index.rst b/doc/coverage/index.rst index ef8d044d..02fd7323 100644 --- a/doc/coverage/index.rst +++ b/doc/coverage/index.rst @@ -4,4 +4,4 @@ Code Coverage Report Code coverage report generated with `pytest <https://github.com/pytest-dev/pytest>`__ and `Coverage.py <https://github.com/nedbat/coveragepy/tree/master>`__. .. 
#report:code-coverage:: - :packageid: src + :reportid: src diff --git a/doc/make.bat b/doc/make.bat index 39e6f087..2525c960 100644 --- a/doc/make.bat +++ b/doc/make.bat @@ -5,7 +5,7 @@ pushd %~dp0 REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=sphinx-build + set SPHINXBUILD=py -3.13 -m sphinx.cmd.build ) set SOURCEDIR=. set BUILDDIR=_build diff --git a/doc/requirements.txt b/doc/requirements.txt index c2de8963..709b41b6 100644 --- a/doc/requirements.txt +++ b/doc/requirements.txt @@ -1,6 +1,6 @@ -r ../requirements.txt -pyTooling ~= 8.4 +pyTooling ~= 8.5 # Enforce latest version on ReadTheDocs sphinx ~= 8.2 @@ -15,5 +15,5 @@ sphinxcontrib-mermaid ~= 1.0 autoapi >= 2.0.1 sphinx_design ~= 0.6.1 sphinx-copybutton >= 0.5.2 -sphinx_autodoc_typehints ~= 3.1 -sphinx_reports ~= 0.7 +sphinx_autodoc_typehints ~= 3.2 +sphinx_reports ~= 0.9 diff --git a/pyDummy/__init__.py b/pyDummy/__init__.py index c453bd03..2b2eff5c 100644 --- a/pyDummy/__init__.py +++ b/pyDummy/__init__.py @@ -36,7 +36,7 @@ __email__ = "Paebbels@gmail.com" __copyright__ = "2017-2025, Patrick Lehmann" __license__ = "Apache License, Version 2.0" -__version__ = "0.4.4" +__version__ = "0.4.5" __keywords__ = ["GitHub Actions"] __issue_tracker__ = "https://GitHub.com/pyTooling/Actions/issues" @@ -53,9 +53,9 @@ class Base: _value: int #: An internal value. def __init__(self) -> None: - """ - Initializes the base-class. - """ + # """ + # Initializes the base-class. + # """ self._value = 0 @readonly @@ -75,9 +75,9 @@ class Application(Base): """ def __init__(self) -> None: - """ - Initializes the dummy application. - """ + # """ + # Initializes the dummy application. 
+ # """ super().__init__() platform = Platform() diff --git a/pyproject.toml b/pyproject.toml index 6d28fca0..eb223ab4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,8 +1,8 @@ [build-system] requires = [ - "setuptools ~= 78.1", + "setuptools >= 80.0", "wheel ~= 0.45", - "pyTooling ~= 8.4" + "pyTooling ~= 8.5" ] build-backend = "setuptools.build_meta" diff --git a/releaser/action.yml b/releaser/action.yml index d36d0122..62068a4e 100644 --- a/releaser/action.yml +++ b/releaser/action.yml @@ -3,7 +3,7 @@ # Unai Martinez-Corral # # # # ==================================================================================================================== # -# Copyright 2020-2024 The pyTooling Authors # +# Copyright 2020-2025 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. # diff --git a/releaser/composite/action.yml b/releaser/composite/action.yml index bc1f1801..3f4e6387 100644 --- a/releaser/composite/action.yml +++ b/releaser/composite/action.yml @@ -3,7 +3,7 @@ # Unai Martinez-Corral # # # # ==================================================================================================================== # -# Copyright 2020-2024 The pyTooling Authors # +# Copyright 2020-2025 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. # diff --git a/requirements.txt b/requirements.txt index 3d2b3a74..3781d6d9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1 +1 @@ -pyTooling ~= 8.4 +pyTooling ~= 8.5 diff --git a/run.ps1 b/run.ps1 index e18144ff..f55babc3 100644 --- a/run.ps1 +++ b/run.ps1 @@ -88,7 +88,7 @@ if ($build) rm -Force .\build\bdist.win-amd64 rm -Force .\build\lib Write-Host -ForegroundColor Yellow "[live][BUILD] Building $PackageName package as wheel ..." 
- py -3.13 -m build --wheel + py -3.13 -m build --wheel --no-isolation Write-Host -ForegroundColor Yellow "[live][BUILD] Building wheel finished" } diff --git a/setup.py b/setup.py index af9a65c8..f6b637a9 100644 --- a/setup.py +++ b/setup.py @@ -39,13 +39,16 @@ packageDirectory = packageName packageInformationFile = Path(f"{packageDirectory}/__init__.py") -setup(**DescribePythonPackageHostedOnGitHub( - packageName=packageName, - description="pyDummy is a test package to verify GitHub actions for Python projects.", - gitHubNamespace=gitHubNamespace, - unittestRequirementsFile=Path("tests/requirements.txt"), - sourceFileWithVersion=packageInformationFile, - dataFiles={ - packageName: ["py.typed"] - } -)) +setup( + **DescribePythonPackageHostedOnGitHub( + packageName=packageName, + description="pyDummy is a test package to verify GitHub actions for Python projects.", + gitHubNamespace=gitHubNamespace, + unittestRequirementsFile=Path("tests/requirements.txt"), + sourceFileWithVersion=packageInformationFile, + dataFiles={ + packageName: ["py.typed"] + }, + debug=True + ) +) diff --git a/tests/requirements.txt b/tests/requirements.txt index e44d7b60..f83dca0a 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -1,13 +1,13 @@ -r ../requirements.txt # Coverage collection -Coverage ~= 7.8 +Coverage ~= 7.9 # Test Runner -pytest ~= 8.3 -pytest-cov ~= 6.1 +pytest ~= 8.4 +pytest-cov ~= 6.2 # Static Type Checking -mypy ~= 1.15 +mypy ~= 1.16 typing_extensions ~= 4.13 -lxml ~= 5.3 +lxml ~= 5.4 diff --git a/with-post-step/action.yml b/with-post-step/action.yml index 69c2a6ef..bd9337ab 100644 --- a/with-post-step/action.yml +++ b/with-post-step/action.yml @@ -4,7 +4,7 @@ # Unai Martinez-Corral # # # # ==================================================================================================================== # -# Copyright 2020-2024 The pyTooling Authors # +# Copyright 2020-2025 The pyTooling Authors # # # # Licensed under the Apache License, Version 2.0 
(the "License"); # # you may not use this file except in compliance with the License. #