diff --git a/.eggs/README.txt b/.eggs/README.txt new file mode 100644 index 0000000..5d01668 --- /dev/null +++ b/.eggs/README.txt @@ -0,0 +1,6 @@ +This directory contains eggs that were downloaded by setuptools to build, test, and run plug-ins. + +This directory caches those eggs to prevent repeated downloads. + +However, it is safe to delete this directory. + diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/EGG-INFO/LICENSE b/.eggs/setuptools_scm-8.0.4-py3.11.egg/EGG-INFO/LICENSE new file mode 100644 index 0000000..89de354 --- /dev/null +++ b/.eggs/setuptools_scm-8.0.4-py3.11.egg/EGG-INFO/LICENSE @@ -0,0 +1,17 @@ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/EGG-INFO/PKG-INFO b/.eggs/setuptools_scm-8.0.4-py3.11.egg/EGG-INFO/PKG-INFO new file mode 100644 index 0000000..184f32c --- /dev/null +++ b/.eggs/setuptools_scm-8.0.4-py3.11.egg/EGG-INFO/PKG-INFO @@ -0,0 +1,164 @@ +Metadata-Version: 2.1 +Name: setuptools-scm +Version: 8.0.4 +Summary: the blessed package to manage your versions by scm tags +Author-email: Ronny Pfannschmidt +License: Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. 
+ +Project-URL: documentation, https://setuptools-scm.readthedocs.io/ +Project-URL: repository, https://github.com/pypa/setuptools_scm/ +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Software Development :: Version Control +Classifier: Topic :: System :: Software Distribution +Classifier: Topic :: Utilities +Requires-Python: >=3.8 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: packaging >=20 +Requires-Dist: setuptools +Requires-Dist: typing-extensions +Requires-Dist: tomli >=1 ; python_version < "3.11" +Provides-Extra: docs +Requires-Dist: entangled-cli[rich] ; extra == 'docs' +Requires-Dist: mkdocs ; extra == 'docs' +Requires-Dist: mkdocs-entangled-plugin ; extra == 'docs' +Requires-Dist: mkdocs-material ; extra == 'docs' +Requires-Dist: mkdocstrings[python] ; extra == 'docs' +Requires-Dist: pygments ; extra == 'docs' +Provides-Extra: rich +Requires-Dist: rich ; extra == 'rich' +Provides-Extra: test +Requires-Dist: build ; extra == 'test' +Requires-Dist: pytest ; extra == 'test' +Requires-Dist: rich ; extra == 'test' +Requires-Dist: wheel ; extra == 'test' +Provides-Extra: toml + +# setuptools_scm +[![github ci](https://github.com/pypa/setuptools_scm/workflows/python%20tests+artifacts+release/badge.svg)](https://github.com/pypa/setuptools_scm/actions) +[![Documentation Status](https://readthedocs.org/projects/setuptools-scm/badge/?version=latest)](https://setuptools-scm.readthedocs.io/en/latest/?badge=latest) 
+[![tidelift](https://tidelift.com/badges/package/pypi/setuptools-scm) ](https://tidelift.com/subscription/pkg/pypi-setuptools-scm?utm_source=pypi-setuptools-scm&utm_medium=readme) + +## about + +[setuptools-scm] extracts Python package versions from `git` or +`hg` metadata instead of declaring them as the version argument +or in an SCM managed file. + +Additionally, [setuptools-scm] provides setuptools +with a list of files that are managed by the SCM
+(i.e. it automatically adds **all of** the SCM-managed files to the sdist).
+Unwanted files must be excluded via `MANIFEST.in`. + + +## `pyproject.toml` usage + +The preferred way to configure [setuptools-scm] is to author +settings in a `tool.setuptools_scm` section of `pyproject.toml`. + +This feature requires setuptools 60 or later. +First, ensure that [setuptools-scm] is present during the project's +build step by specifying it as one of the build requirements. + +```toml +[build-system] +requires = [ + "setuptools>=60", + "setuptools-scm>=8.0"] +``` + +That will be sufficient to require [setuptools-scm] for projects +that support [PEP 518] like [pip] and [build]. + +[pip]: https://pypi.org/project/pip +[build]: https://pypi.org/project/build +[PEP 518]: https://peps.python.org/pep-0518/ + + +To enable version inference, you need to set the version +dynamically in the `project` section of `pyproject.toml`: + +```toml title="pyproject.toml" +[project] +# version = "0.0.1" # Remove any existing version parameter. +dynamic = ["version"] +[tool.setuptools_scm] +``` + +Additionally, a version file can be written by specifying: + +```toml title="pyproject.toml" +[tool.setuptools_scm] +version_file = "pkg/_version.py" +``` + +Where `pkg` is the name of your package. + +If you need to confirm which version string is being generated or debug the configuration, +you can install [setuptools-scm] directly in your working environment and run: + +```console +$ python -m setuptools_scm +# To explore other options, try: +$ python -m setuptools_scm --help +``` + +For further configuration see the [documentation]. + +[setuptools-scm]: https://github.com/pypa/setuptools_scm +[documentation]: https://setuptools-scm.readthedocs.io/ + + +## Interaction with Enterprise Distributions + +Some enterprise distributions like RHEL7 +ship rather old setuptools versions. + +In those cases its typically possible to build by using an sdist against `setuptools_scm<2.0`. 
+As those old setuptools versions lack sensible types for versions, +modern [setuptools-scm] is unable to support them sensibly. + +It's strongly recommended to build a wheel artifact using modern Python and setuptools, +then installing the artifact instead of trying to run against old setuptools versions. + + +## Code of Conduct + + +Everyone interacting in the [setuptools-scm] project's codebases, issue +trackers, chat rooms, and mailing lists is expected to follow the +[PSF Code of Conduct]. + +[PSF Code of Conduct]: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md + + +## Security Contact + +To report a security vulnerability, please use the +[Tidelift security contact](https://tidelift.com/security). +Tidelift will coordinate the fix and disclosure. diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/EGG-INFO/RECORD b/.eggs/setuptools_scm-8.0.4-py3.11.egg/EGG-INFO/RECORD new file mode 100644 index 0000000..ebcb4ba --- /dev/null +++ b/.eggs/setuptools_scm-8.0.4-py3.11.egg/EGG-INFO/RECORD @@ -0,0 +1,35 @@ +setuptools_scm/.git_archival.txt,sha256=7hhQtCsXxyEnQcc8hjZYFcJAwBDbwynYLopeiYEn6m0,125 +setuptools_scm/__init__.py,sha256=bM4TUtVIcPgNzNkmKN6N-wXs9A06cJfPHVYgIHuY4r4,861 +setuptools_scm/__main__.py,sha256=PUkvI9W-hylvxv5sd1sosqWXEliN3d30x7BIKVEAQaQ,98 +setuptools_scm/_cli.py,sha256=gsBkEmv7SKu8nZBYpXerzCZ1zZg7yEAQHMrEVbJ_q5M,2890 +setuptools_scm/_config.py,sha256=29pxSz30qLob5F5RX9v_0tahovFw5aGx5A3cX1a6foM,4964 +setuptools_scm/_entrypoints.py,sha256=zwJOqeb7sjC9vJtiANojUov40Xe8mpdZNhxWDCdbBsU,3793 +setuptools_scm/_get_version_impl.py,sha256=FhHrF5K4Nlgt9c9UsULQzUsqj0PzGwx6H6ZBQjYUWcw,5942 +setuptools_scm/_log.py,sha256=5GQzzFBBoldPlBSEdzdWHJvq-6O0x74WkYkCVkTghTY,2135 +setuptools_scm/_modify_version.py,sha256=3dGCx9wzIeRuxZkV69_tw7TqCXAfFyi1BpIQpyPItF8,1747 +setuptools_scm/_overrides.py,sha256=iA3XDa-RYKKGLgYYb3hkmmh4YHovhkBF404AmZ_8hxE,1654 +setuptools_scm/_run_cmd.py,sha256=jcP20Qp4izQMLU3AI5GtbjfnEoLt3LLFrFigSm0UUSs,5912 
+setuptools_scm/_types.py,sha256=WvfAIDbCOr2_IQPztoLEO-Qhsvhz7JGk8HaEpqTp1bM,604 +setuptools_scm/_version_cls.py,sha256=G5d8sJXPmHRrfrTtmYWt2RVVnGlp3xvnE183EWVlOOc,2925 +setuptools_scm/discover.py,sha256=TGlKeP-uLSWmBLIGL6es4etKfbvOUdjDjcxgZxYrLxk,2026 +setuptools_scm/fallbacks.py,sha256=59W02TAzq7iIZ1uSRSZH7ELwY958KU4yNh7v5-0FCEs,1447 +setuptools_scm/git.py,sha256=DO8Dk6rlMTKR143AgdCm6c2WCEp6OiOn5UAul6uB5fM,10365 +setuptools_scm/hg.py,sha256=LracAVnGGluu4xk28JtgpZxc5wR9s3CMGIaX40Eyhng,6190 +setuptools_scm/hg_git.py,sha256=CCZm1ErjeS0sPkKC2COjT_d4omZK21FrTiIg4GwH5LA,4545 +setuptools_scm/integration.py,sha256=a0c6-lIBSzKgNJybvE76jkKL8sdJuHtkvAx3IwTPkK0,805 +setuptools_scm/scm_workdir.py,sha256=oreoRhJfvhhxVGIhDPWH8-dg9umsmZM0sV2oRIzGXc8,327 +setuptools_scm/version.py,sha256=klVnCWFwY7OlTNPvSum3VhTSlBBcMwDpIcb7n_Ndprw,13835 +setuptools_scm/_file_finders/__init__.py,sha256=Gc7CcjwzlC7jXstfQysYl5wv-x8RalTAnzieM6mSAVU,3686 +setuptools_scm/_file_finders/git.py,sha256=XpYe2h80g_nOFxZR3E2gwma_geOT4CE8oP2T33dQzPI,4179 +setuptools_scm/_file_finders/hg.py,sha256=V6T9s91xigTtn1TRlwqnrgZV4eHuqixovF0CwMI3lXQ,2256 +setuptools_scm/_integration/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +setuptools_scm/_integration/dump_version.py,sha256=ixcET9z3-oNdx6mM-yYbV9xktHsdPfCmHCCKGRA0_eo,2584 +setuptools_scm/_integration/pyproject_reading.py,sha256=6PEog4x2TofynEnN4rkjcmfIMys49nO4TtNdt6wLLzg,2589 +setuptools_scm/_integration/setuptools.py,sha256=kdM2C4jtAchJQCQnxWj_x1FoeD1B5SO9OF6to2oxr4c,3420 +setuptools_scm/_integration/toml.py,sha256=P3MBO01jBeaEYeaqR_vfDiaDaytOhLvYmzSISoJ7EUg,1379 +setuptools_scm-8.0.4.dist-info/LICENSE,sha256=iYB6zyMJvShfAzQE7nhYFgLzzZuBmhasLw5fYP9KRz4,1023 +setuptools_scm-8.0.4.dist-info/METADATA,sha256=XktL5dowfcRZfNPKaf4lPx83_gRFXVLZwqhFLxC17k8,6403 +setuptools_scm-8.0.4.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92 +setuptools_scm-8.0.4.dist-info/entry_points.txt,sha256=TxfQoR1oJCfa_OWTNvozyEymcj-Avnq8kE3RLO7cN44,1725 
+setuptools_scm-8.0.4.dist-info/top_level.txt,sha256=kiu-91q3_rJLUoc2wl8_lC4cIlpgtgdD_4NaChF4hOA,15 +setuptools_scm-8.0.4.dist-info/RECORD,, diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/EGG-INFO/WHEEL b/.eggs/setuptools_scm-8.0.4-py3.11.egg/EGG-INFO/WHEEL new file mode 100644 index 0000000..7e68873 --- /dev/null +++ b/.eggs/setuptools_scm-8.0.4-py3.11.egg/EGG-INFO/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.41.2) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/EGG-INFO/entry_points.txt b/.eggs/setuptools_scm-8.0.4-py3.11.egg/EGG-INFO/entry_points.txt new file mode 100644 index 0000000..f19235d --- /dev/null +++ b/.eggs/setuptools_scm-8.0.4-py3.11.egg/EGG-INFO/entry_points.txt @@ -0,0 +1,41 @@ +[distutils.setup_keywords] +use_scm_version = setuptools_scm._integration.setuptools:version_keyword + +[setuptools.file_finders] +setuptools_scm = setuptools_scm._file_finders:find_files + +[setuptools.finalize_distribution_options] +setuptools_scm = setuptools_scm._integration.setuptools:infer_version + +[setuptools_scm.files_command] +.git = setuptools_scm._file_finders.git:git_find_files +.hg = setuptools_scm._file_finders.hg:hg_find_files + +[setuptools_scm.files_command_fallback] +.git_archival.txt = setuptools_scm._file_finders.git:git_archive_find_files +.hg_archival.txt = setuptools_scm._file_finders.hg:hg_archive_find_files + +[setuptools_scm.local_scheme] +dirty-tag = setuptools_scm.version:get_local_dirty_tag +no-local-version = setuptools_scm.version:get_no_local_node +node-and-date = setuptools_scm.version:get_local_node_and_date +node-and-timestamp = setuptools_scm.version:get_local_node_and_timestamp + +[setuptools_scm.parse_scm] +.git = setuptools_scm.git:parse +.hg = setuptools_scm.hg:parse + +[setuptools_scm.parse_scm_fallback] +.git_archival.txt = setuptools_scm.git:parse_archival +.hg_archival.txt = setuptools_scm.hg:parse_archival +PKG-INFO = 
setuptools_scm.fallbacks:parse_pkginfo +pyproject.toml = setuptools_scm.fallbacks:fallback_version +setup.py = setuptools_scm.fallbacks:fallback_version + +[setuptools_scm.version_scheme] +calver-by-date = setuptools_scm.version:calver_by_date +guess-next-dev = setuptools_scm.version:guess_next_dev_version +no-guess-dev = setuptools_scm.version:no_guess_dev_version +post-release = setuptools_scm.version:postrelease_version +python-simplified-semver = setuptools_scm.version:simplified_semver_version +release-branch-semver = setuptools_scm.version:release_branch_semver_version diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/EGG-INFO/requires.txt b/.eggs/setuptools_scm-8.0.4-py3.11.egg/EGG-INFO/requires.txt new file mode 100644 index 0000000..818a19b --- /dev/null +++ b/.eggs/setuptools_scm-8.0.4-py3.11.egg/EGG-INFO/requires.txt @@ -0,0 +1,22 @@ +packaging>=20 +setuptools +typing-extensions + +[docs] +entangled-cli[rich] +mkdocs +mkdocs-entangled-plugin +mkdocs-material +mkdocstrings[python] +pygments + +[rich] +rich + +[test] +build +pytest +rich +wheel + +[toml] diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/EGG-INFO/top_level.txt b/.eggs/setuptools_scm-8.0.4-py3.11.egg/EGG-INFO/top_level.txt new file mode 100644 index 0000000..cba8d88 --- /dev/null +++ b/.eggs/setuptools_scm-8.0.4-py3.11.egg/EGG-INFO/top_level.txt @@ -0,0 +1 @@ +setuptools_scm diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/.git_archival.txt b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/.git_archival.txt new file mode 100644 index 0000000..8fb235d --- /dev/null +++ b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/.git_archival.txt @@ -0,0 +1,4 @@ +node: $Format:%H$ +node-date: $Format:%cI$ +describe-name: $Format:%(describe:tags=true,match=*[0-9]*)$ +ref-names: $Format:%D$ diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__init__.py b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__init__.py new file mode 100644 index 0000000..aa40ab3 
--- /dev/null +++ b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__init__.py @@ -0,0 +1,30 @@ +""" +:copyright: 2010-2023 by Ronny Pfannschmidt +:license: MIT +""" +from __future__ import annotations + +from ._config import Configuration +from ._config import DEFAULT_LOCAL_SCHEME # soft deprecated +from ._config import DEFAULT_VERSION_SCHEME # soft deprecated +from ._get_version_impl import _get_version # soft deprecated +from ._get_version_impl import get_version # soft deprecated +from ._integration.dump_version import dump_version # soft deprecated +from ._version_cls import NonNormalizedVersion +from ._version_cls import Version +from .version import ScmVersion + + +# Public API +__all__ = [ + # soft deprecated imports, left for backward compatibility + "get_version", + "_get_version", + "dump_version", + "DEFAULT_VERSION_SCHEME", + "DEFAULT_LOCAL_SCHEME", + "Configuration", + "Version", + "ScmVersion", + "NonNormalizedVersion", +] diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__main__.py b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__main__.py new file mode 100644 index 0000000..dab6068 --- /dev/null +++ b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__main__.py @@ -0,0 +1,6 @@ +from __future__ import annotations + +from ._cli import main + +if __name__ == "__main__": + main() diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/__init__.cpython-311.pyc b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..a515684 Binary files /dev/null and b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/__init__.cpython-311.pyc differ diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/_config.cpython-311.pyc b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/_config.cpython-311.pyc new file mode 100644 index 0000000..59b60bf Binary files /dev/null and 
b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/_config.cpython-311.pyc differ diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/_entrypoints.cpython-311.pyc b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/_entrypoints.cpython-311.pyc new file mode 100644 index 0000000..eb03c91 Binary files /dev/null and b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/_entrypoints.cpython-311.pyc differ diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/_get_version_impl.cpython-311.pyc b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/_get_version_impl.cpython-311.pyc new file mode 100644 index 0000000..54d585b Binary files /dev/null and b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/_get_version_impl.cpython-311.pyc differ diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/_log.cpython-311.pyc b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/_log.cpython-311.pyc new file mode 100644 index 0000000..76571f8 Binary files /dev/null and b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/_log.cpython-311.pyc differ diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/_modify_version.cpython-311.pyc b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/_modify_version.cpython-311.pyc new file mode 100644 index 0000000..3270290 Binary files /dev/null and b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/_modify_version.cpython-311.pyc differ diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/_overrides.cpython-311.pyc b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/_overrides.cpython-311.pyc new file mode 100644 index 0000000..64a5e3a Binary files /dev/null and b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/_overrides.cpython-311.pyc differ diff --git 
a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/_run_cmd.cpython-311.pyc b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/_run_cmd.cpython-311.pyc new file mode 100644 index 0000000..de6636a Binary files /dev/null and b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/_run_cmd.cpython-311.pyc differ diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/_types.cpython-311.pyc b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/_types.cpython-311.pyc new file mode 100644 index 0000000..1fa66bb Binary files /dev/null and b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/_types.cpython-311.pyc differ diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/_version_cls.cpython-311.pyc b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/_version_cls.cpython-311.pyc new file mode 100644 index 0000000..c1e1eee Binary files /dev/null and b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/_version_cls.cpython-311.pyc differ diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/discover.cpython-311.pyc b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/discover.cpython-311.pyc new file mode 100644 index 0000000..92503c4 Binary files /dev/null and b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/discover.cpython-311.pyc differ diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/git.cpython-311.pyc b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/git.cpython-311.pyc new file mode 100644 index 0000000..336bc40 Binary files /dev/null and b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/git.cpython-311.pyc differ diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/integration.cpython-311.pyc 
b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/integration.cpython-311.pyc new file mode 100644 index 0000000..c29b320 Binary files /dev/null and b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/integration.cpython-311.pyc differ diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/scm_workdir.cpython-311.pyc b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/scm_workdir.cpython-311.pyc new file mode 100644 index 0000000..3d60ccf Binary files /dev/null and b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/scm_workdir.cpython-311.pyc differ diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/version.cpython-311.pyc b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/version.cpython-311.pyc new file mode 100644 index 0000000..2842b66 Binary files /dev/null and b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/__pycache__/version.cpython-311.pyc differ diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_cli.py b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_cli.py new file mode 100644 index 0000000..66099b1 --- /dev/null +++ b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_cli.py @@ -0,0 +1,85 @@ +from __future__ import annotations + +import argparse +import os +import sys + +from setuptools_scm import Configuration +from setuptools_scm._file_finders import find_files +from setuptools_scm._get_version_impl import _get_version +from setuptools_scm.discover import walk_potential_roots + + +def main(args: list[str] | None = None) -> None: + opts = _get_cli_opts(args) + inferred_root: str = opts.root or "." 
+ + pyproject = opts.config or _find_pyproject(inferred_root) + + try: + config = Configuration.from_file( + pyproject, + root=(os.path.abspath(opts.root) if opts.root is not None else None), + ) + except (LookupError, FileNotFoundError) as ex: + # no pyproject.toml OR no [tool.setuptools_scm] + print( + f"Warning: could not use {os.path.relpath(pyproject)}," + " using default configuration.\n" + f" Reason: {ex}.", + file=sys.stderr, + ) + config = Configuration(inferred_root) + + version = _get_version(config, force_write_version_files=False) + if version is None: + raise SystemExit("ERROR: no version found for", opts) + if opts.strip_dev: + version = version.partition(".dev")[0] + print(version) + + if opts.command == "ls": + for fname in find_files(config.root): + print(fname) + + +def _get_cli_opts(args: list[str] | None) -> argparse.Namespace: + prog = "python -m setuptools_scm" + desc = "Print project version according to SCM metadata" + parser = argparse.ArgumentParser(prog, description=desc) + # By default, help for `--help` starts with lower case, so we keep the pattern: + parser.add_argument( + "-r", + "--root", + default=None, + help='directory managed by the SCM, default: inferred from config file, or "."', + ) + parser.add_argument( + "-c", + "--config", + default=None, + metavar="PATH", + help="path to 'pyproject.toml' with setuptools_scm config, " + "default: looked up in the current or parent directories", + ) + parser.add_argument( + "--strip-dev", + action="store_true", + help="remove the dev/local parts of the version before printing the version", + ) + sub = parser.add_subparsers(title="extra commands", dest="command", metavar="") + # We avoid `metavar` to prevent printing repetitive information + desc = "List files managed by the SCM" + sub.add_parser("ls", help=desc[0].lower() + desc[1:], description=desc) + return parser.parse_args(args) + + +def _find_pyproject(parent: str) -> str: + for directory in 
walk_potential_roots(os.path.abspath(parent)): + pyproject = os.path.join(directory, "pyproject.toml") + if os.path.isfile(pyproject): + return pyproject + + return os.path.abspath( + "pyproject.toml" + ) # use default name to trigger the default errors diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_config.py b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_config.py new file mode 100644 index 0000000..5e5feb1 --- /dev/null +++ b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_config.py @@ -0,0 +1,151 @@ +""" configuration """ +from __future__ import annotations + +import dataclasses +import os +import re +import warnings +from pathlib import Path +from typing import Any +from typing import Pattern +from typing import Protocol + +from . import _log +from . import _types as _t +from ._integration.pyproject_reading import ( + get_args_for_pyproject as _get_args_for_pyproject, +) +from ._integration.pyproject_reading import read_pyproject as _read_pyproject +from ._overrides import read_toml_overrides +from ._version_cls import _validate_version_cls +from ._version_cls import _VersionT +from ._version_cls import Version as _Version + +log = _log.log.getChild("config") + +DEFAULT_TAG_REGEX = re.compile( + r"^(?:[\w-]+-)?(?P[vV]?\d+(?:\.\d+){0,2}[^\+]*)(?:\+.*)?$" +) +"""default tag regex that tries to match PEP440 style versions +with prefix consisting of dashed words""" + +DEFAULT_VERSION_SCHEME = "guess-next-dev" +DEFAULT_LOCAL_SCHEME = "node-and-date" + + +def _check_tag_regex(value: str | Pattern[str] | None) -> Pattern[str]: + if not value: + regex = DEFAULT_TAG_REGEX + else: + regex = re.compile(value) + + group_names = regex.groupindex.keys() + if regex.groups == 0 or (regex.groups > 1 and "version" not in group_names): + warnings.warn( + "Expected tag_regex to contain a single match group or a group named" + " 'version' to identify the version part of any tag." 
+ ) + + return regex + + +class ParseFunction(Protocol): + def __call__( + self, root: _t.PathT, *, config: Configuration + ) -> _t.SCMVERSION | None: + ... + + +def _check_absolute_root(root: _t.PathT, relative_to: _t.PathT | None) -> str: + log.debug("check absolute root=%s relative_to=%s", root, relative_to) + if relative_to: + if ( + os.path.isabs(root) + and os.path.isabs(relative_to) + and not os.path.commonpath([root, relative_to]) == root + ): + warnings.warn( + f"absolute root path '{root}' overrides relative_to '{relative_to}'" + ) + if os.path.isdir(relative_to): + warnings.warn( + "relative_to is expected to be a file," + f" its the directory {relative_to}\n" + "assuming the parent directory was passed" + ) + log.debug("dir %s", relative_to) + root = os.path.join(relative_to, root) + else: + log.debug("file %s", relative_to) + root = os.path.join(os.path.dirname(relative_to), root) + return os.path.abspath(root) + + +@dataclasses.dataclass +class Configuration: + """Global configuration model""" + + relative_to: _t.PathT | None = None + root: _t.PathT = "." + version_scheme: _t.VERSION_SCHEME = DEFAULT_VERSION_SCHEME + local_scheme: _t.VERSION_SCHEME = DEFAULT_LOCAL_SCHEME + tag_regex: Pattern[str] = DEFAULT_TAG_REGEX + parentdir_prefix_version: str | None = None + fallback_version: str | None = None + fallback_root: _t.PathT = "." 
+ write_to: _t.PathT | None = None + write_to_template: str | None = None + version_file: _t.PathT | None = None + version_file_template: str | None = None + parse: ParseFunction | None = None + git_describe_command: _t.CMD_TYPE | None = None + dist_name: str | None = None + version_cls: type[_VersionT] = _Version + search_parent_directories: bool = False + + parent: _t.PathT | None = None + + @property + def absolute_root(self) -> str: + return _check_absolute_root(self.root, self.relative_to) + + @classmethod + def from_file( + cls, + name: str | os.PathLike[str] = "pyproject.toml", + dist_name: str | None = None, + _require_section: bool = True, + **kwargs: Any, + ) -> Configuration: + """ + Read Configuration from pyproject.toml (or similar). + Raises exceptions when file is not found or toml is + not installed or the file has invalid format or does + not contain the [tool.setuptools_scm] section. + """ + + pyproject_data = _read_pyproject(Path(name), require_section=_require_section) + args = _get_args_for_pyproject(pyproject_data, dist_name, kwargs) + + args.update(read_toml_overrides(args["dist_name"])) + relative_to = args.pop("relative_to", name) + return cls.from_data(relative_to=relative_to, data=args) + + @classmethod + def from_data( + cls, relative_to: str | os.PathLike[str], data: dict[str, Any] + ) -> Configuration: + """ + given configuration data + create a config instance after validating tag regex/version class + """ + tag_regex = _check_tag_regex(data.pop("tag_regex", None)) + version_cls = _validate_version_cls( + data.pop("version_cls", None), data.pop("normalize", True) + ) + return cls( + relative_to, + version_cls=version_cls, + tag_regex=tag_regex, + **data, + ) diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_entrypoints.py b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_entrypoints.py new file mode 100644 index 0000000..50c9182 --- /dev/null +++ 
b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_entrypoints.py @@ -0,0 +1,139 @@ +from __future__ import annotations + +import sys +from typing import Any +from typing import Callable +from typing import cast +from typing import Iterator +from typing import overload +from typing import TYPE_CHECKING + +from . import _log +from . import version + +if TYPE_CHECKING: + from . import _types as _t + from ._config import Configuration, ParseFunction + + +from importlib.metadata import EntryPoint as EntryPoint + + +if sys.version_info[:2] < (3, 10): + from importlib.metadata import entry_points as legacy_entry_points + + class EntryPoints: + _groupdata: list[EntryPoint] + + def __init__(self, groupdata: list[EntryPoint]) -> None: + self._groupdata = groupdata + + def select(self, name: str) -> EntryPoints: + return EntryPoints([x for x in self._groupdata if x.name == name]) + + def __iter__(self) -> Iterator[EntryPoint]: + return iter(self._groupdata) + + def entry_points(group: str) -> EntryPoints: + return EntryPoints(legacy_entry_points()[group]) + +else: + from importlib.metadata import entry_points, EntryPoints + + +log = _log.log.getChild("entrypoints") + + +def version_from_entrypoint( + config: Configuration, *, entrypoint: str, root: _t.PathT +) -> version.ScmVersion | None: + from .discover import iter_matching_entrypoints + + log.debug("version_from_ep %s in %s", entrypoint, root) + for ep in iter_matching_entrypoints(root, entrypoint, config): + fn: ParseFunction = ep.load() + maybe_version: version.ScmVersion | None = fn(root, config=config) + log.debug("%s found %r", ep, maybe_version) + if maybe_version is not None: + return maybe_version + return None + + +def iter_entry_points(group: str, name: str | None = None) -> Iterator[EntryPoint]: + eps: EntryPoints = entry_points(group=group) + res = eps if name is None else eps.select(name=name) + + return iter(res) + + +def _get_ep(group: str, name: str) -> Any | None: + for ep in 
iter_entry_points(group, name): + log.debug("ep found: %s", ep.name) + return ep.load() + else: + return None + + +def _get_from_object_reference_str(path: str, group: str) -> Any | None: + # todo: remove for importlib native spelling + ep = EntryPoint(path, path, group) + try: + return ep.load() + except (AttributeError, ModuleNotFoundError): + return None + + +def _iter_version_schemes( + entrypoint: str, + scheme_value: _t.VERSION_SCHEMES, + _memo: set[object] | None = None, +) -> Iterator[Callable[[version.ScmVersion], str]]: + if _memo is None: + _memo = set() + if isinstance(scheme_value, str): + scheme_value = cast( + "_t.VERSION_SCHEMES", + _get_ep(entrypoint, scheme_value) + or _get_from_object_reference_str(scheme_value, entrypoint), + ) + + if isinstance(scheme_value, (list, tuple)): + for variant in scheme_value: + if variant not in _memo: + _memo.add(variant) + yield from _iter_version_schemes(entrypoint, variant, _memo=_memo) + elif callable(scheme_value): + yield scheme_value + + +@overload +def _call_version_scheme( + version: version.ScmVersion, + entrypoint: str, + given_value: _t.VERSION_SCHEMES, + default: str, +) -> str: + ... + + +@overload +def _call_version_scheme( + version: version.ScmVersion, + entrypoint: str, + given_value: _t.VERSION_SCHEMES, + default: None, +) -> str | None: + ... 
+ + +def _call_version_scheme( + version: version.ScmVersion, + entrypoint: str, + given_value: _t.VERSION_SCHEMES, + default: str | None, +) -> str | None: + for scheme in _iter_version_schemes(entrypoint, given_value): + result = scheme(version) + if result is not None: + return result + return default diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_file_finders/__init__.py b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_file_finders/__init__.py new file mode 100644 index 0000000..591aa90 --- /dev/null +++ b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_file_finders/__init__.py @@ -0,0 +1,106 @@ +from __future__ import annotations + +import itertools +import os +from typing import Callable +from typing import TYPE_CHECKING + +from .. import _log +from .. import _types as _t +from .._entrypoints import iter_entry_points + +if TYPE_CHECKING: + from typing_extensions import TypeGuard + + +log = _log.log.getChild("file_finder") + + +def scm_find_files( + path: _t.PathT, + scm_files: set[str], + scm_dirs: set[str], + force_all_files: bool = False, +) -> list[str]: + """ setuptools compatible file finder that follows symlinks + + - path: the root directory from which to search + - scm_files: set of scm controlled files and symlinks + (including symlinks to directories) + - scm_dirs: set of scm controlled directories + (including directories containing no scm controlled files) + - force_all_files: ignore ``scm_files`` and ``scm_dirs`` and list everything. 
+ + scm_files and scm_dirs must be absolute with symlinks resolved (realpath), + with normalized case (normcase) + + Spec here: http://setuptools.readthedocs.io/en/latest/setuptools.html#\ + adding-support-for-revision-control-systems + """ + realpath = os.path.normcase(os.path.realpath(path)) + seen: set[str] = set() + res: list[str] = [] + for dirpath, dirnames, filenames in os.walk(realpath, followlinks=True): + # dirpath with symlinks resolved + realdirpath = os.path.normcase(os.path.realpath(dirpath)) + + def _link_not_in_scm(n: str, realdirpath: str = realdirpath) -> bool: + fn = os.path.join(realdirpath, os.path.normcase(n)) + return os.path.islink(fn) and fn not in scm_files + + if not force_all_files and realdirpath not in scm_dirs: + # directory not in scm, don't walk it's content + dirnames[:] = [] + continue + if os.path.islink(dirpath) and not os.path.relpath( + realdirpath, realpath + ).startswith(os.pardir): + # a symlink to a directory not outside path: + # we keep it in the result and don't walk its content + res.append(os.path.join(path, os.path.relpath(dirpath, path))) + dirnames[:] = [] + continue + if realdirpath in seen: + # symlink loop protection + dirnames[:] = [] + continue + dirnames[:] = [ + dn for dn in dirnames if force_all_files or not _link_not_in_scm(dn) + ] + for filename in filenames: + if not force_all_files and _link_not_in_scm(filename): + continue + # dirpath + filename with symlinks preserved + fullfilename = os.path.join(dirpath, filename) + is_tracked = os.path.normcase(os.path.realpath(fullfilename)) in scm_files + if force_all_files or is_tracked: + res.append(os.path.join(path, os.path.relpath(fullfilename, realpath))) + seen.add(realdirpath) + return res + + +def is_toplevel_acceptable(toplevel: str | None) -> TypeGuard[str]: + """ """ + if toplevel is None: + return False + + ignored: list[str] = os.environ.get("SETUPTOOLS_SCM_IGNORE_VCS_ROOTS", "").split( + os.pathsep + ) + ignored = [os.path.normcase(p) for p in 
ignored] + + log.debug("toplevel: %r\n ignored %s", toplevel, ignored) + + return toplevel not in ignored + + +def find_files(path: _t.PathT = "") -> list[str]: + for ep in itertools.chain( + iter_entry_points("setuptools_scm.files_command"), + iter_entry_points("setuptools_scm.files_command_fallback"), + ): + command: Callable[[_t.PathT], list[str]] = ep.load() + res: list[str] = command(path) + if res: + return res + return [] diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_file_finders/__pycache__/__init__.cpython-311.pyc b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_file_finders/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..f015dae Binary files /dev/null and b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_file_finders/__pycache__/__init__.cpython-311.pyc differ diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_file_finders/__pycache__/git.cpython-311.pyc b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_file_finders/__pycache__/git.cpython-311.pyc new file mode 100644 index 0000000..f4a375b Binary files /dev/null and b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_file_finders/__pycache__/git.cpython-311.pyc differ diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_file_finders/git.py b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_file_finders/git.py new file mode 100644 index 0000000..873b4ba --- /dev/null +++ b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_file_finders/git.py @@ -0,0 +1,115 @@ +from __future__ import annotations + +import logging +import os +import subprocess +import tarfile +from typing import IO + +from . import is_toplevel_acceptable +from . import scm_find_files +from .. 
import _types as _t +from .._run_cmd import run as _run +from ..integration import data_from_mime + +log = logging.getLogger(__name__) + + +def _git_toplevel(path: str) -> str | None: + try: + cwd = os.path.abspath(path or ".") + res = _run(["git", "rev-parse", "HEAD"], cwd=cwd) + if res.returncode: + # BAIL if there is no commit + log.error("listing git files failed - pretending there aren't any") + return None + res = _run( + ["git", "rev-parse", "--show-prefix"], + cwd=cwd, + ) + if res.returncode: + return None + out = res.stdout[:-1] # remove the trailing pathsep + if not out: + out = cwd + else: + # Here, ``out`` is a relative path to root of git. + # ``cwd`` is absolute path to current working directory. + # the below method removes the length of ``out`` from + # ``cwd``, which gives the git toplevel + assert cwd.replace("\\", "/").endswith(out), f"cwd={cwd!r}\nout={out!r}" + # In windows cwd contains ``\`` which should be replaced by ``/`` + # for this assertion to work. Length of string isn't changed by replace + # ``\\`` is just and escape for `\` + out = cwd[: -len(out)] + log.debug("find files toplevel %s", out) + return os.path.normcase(os.path.realpath(out.strip())) + except subprocess.CalledProcessError: + # git returned error, we are not in a git repo + return None + except OSError: + # git command not found, probably + return None + + +def _git_interpret_archive(fd: IO[bytes], toplevel: str) -> tuple[set[str], set[str]]: + with tarfile.open(fileobj=fd, mode="r|*") as tf: + git_files = set() + git_dirs = {toplevel} + for member in tf.getmembers(): + name = os.path.normcase(member.name).replace("/", os.path.sep) + if member.type == tarfile.DIRTYPE: + git_dirs.add(name) + else: + git_files.add(name) + return git_files, git_dirs + + +def _git_ls_files_and_dirs(toplevel: str) -> tuple[set[str], set[str]]: + # use git archive instead of git ls-file to honor + # export-ignore git attribute + + cmd = ["git", "archive", "--prefix", toplevel + os.path.sep, 
"HEAD"] + proc = subprocess.Popen( + cmd, stdout=subprocess.PIPE, cwd=toplevel, stderr=subprocess.DEVNULL + ) + assert proc.stdout is not None + try: + try: + return _git_interpret_archive(proc.stdout, toplevel) + finally: + # ensure we avoid resource warnings by cleaning up the process + proc.stdout.close() + proc.terminate() + except Exception: + if proc.wait() != 0: + log.error("listing git files failed - pretending there aren't any") + return set(), set() + + +def git_find_files(path: _t.PathT = "") -> list[str]: + toplevel = _git_toplevel(os.fspath(path)) + if not is_toplevel_acceptable(toplevel): + return [] + fullpath = os.path.abspath(os.path.normpath(path)) + if not fullpath.startswith(toplevel): + log.warning("toplevel mismatch computed %s vs resolved %s ", toplevel, fullpath) + git_files, git_dirs = _git_ls_files_and_dirs(toplevel) + return scm_find_files(path, git_files, git_dirs) + + +def git_archive_find_files(path: _t.PathT = "") -> list[str]: + # This function assumes that ``path`` is obtained from a git archive + # and therefore all the files that should be ignored were already removed. + archival = os.path.join(path, ".git_archival.txt") + if not os.path.exists(archival): + return [] + + data = data_from_mime(archival) + + if "$Format" in data.get("node", ""): + # Substitutions have not been performed, so not a reliable archive + return [] + + log.warning("git archive detected - fallback to listing all files") + return scm_find_files(path, set(), set(), force_all_files=True) diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_file_finders/hg.py b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_file_finders/hg.py new file mode 100644 index 0000000..f87ba06 --- /dev/null +++ b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_file_finders/hg.py @@ -0,0 +1,72 @@ +from __future__ import annotations + +import logging +import os +import subprocess + +from .. 
import _types as _t +from .._file_finders import is_toplevel_acceptable +from .._file_finders import scm_find_files +from .._run_cmd import run as _run +from ..integration import data_from_mime + +log = logging.getLogger(__name__) + + +def _hg_toplevel(path: str) -> str | None: + try: + res = _run( + ["hg", "root"], + cwd=(path or "."), + ) + res.check_returncode() + return os.path.normcase(os.path.realpath(res.stdout)) + except subprocess.CalledProcessError: + # hg returned error, we are not in a mercurial repo + return None + except OSError: + # hg command not found, probably + return None + + +def _hg_ls_files_and_dirs(toplevel: str) -> tuple[set[str], set[str]]: + hg_files: set[str] = set() + hg_dirs = {toplevel} + res = _run(["hg", "files"], cwd=toplevel) + if res.returncode: + return set(), set() + for name in res.stdout.splitlines(): + name = os.path.normcase(name).replace("/", os.path.sep) + fullname = os.path.join(toplevel, name) + hg_files.add(fullname) + dirname = os.path.dirname(fullname) + while len(dirname) > len(toplevel) and dirname not in hg_dirs: + hg_dirs.add(dirname) + dirname = os.path.dirname(dirname) + return hg_files, hg_dirs + + +def hg_find_files(path: str = "") -> list[str]: + toplevel = _hg_toplevel(path) + if not is_toplevel_acceptable(toplevel): + return [] + assert toplevel is not None + hg_files, hg_dirs = _hg_ls_files_and_dirs(toplevel) + return scm_find_files(path, hg_files, hg_dirs) + + +def hg_archive_find_files(path: _t.PathT = "") -> list[str]: + # This function assumes that ``path`` is obtained from a mercurial archive + # and therefore all the files that should be ignored were already removed. 
+ archival = os.path.join(path, ".hg_archival.txt") + if not os.path.exists(archival): + return [] + + data = data_from_mime(archival) + + if "node" not in data: + # Ensure file is valid + return [] + + log.warning("hg archive detected - fallback to listing all files") + return scm_find_files(path, set(), set(), force_all_files=True) diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_get_version_impl.py b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_get_version_impl.py new file mode 100644 index 0000000..2d9d947 --- /dev/null +++ b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_get_version_impl.py @@ -0,0 +1,174 @@ +from __future__ import annotations + +import logging +import re +import warnings +from pathlib import Path +from typing import Any +from typing import NoReturn +from typing import Pattern + +from . import _config +from . import _entrypoints +from . import _run_cmd +from . import _types as _t +from ._config import Configuration +from ._overrides import _read_pretended_version_for +from ._version_cls import _validate_version_cls +from .version import format_version as _format_version +from .version import ScmVersion + +_log = logging.getLogger(__name__) + + +def parse_scm_version(config: Configuration) -> ScmVersion | None: + try: + if config.parse is not None: + parse_result = config.parse(config.absolute_root, config=config) + if parse_result is not None and not isinstance(parse_result, ScmVersion): + raise TypeError( + f"version parse result was {str!r}\n" + "please return a parsed version (ScmVersion)" + ) + return parse_result + else: + return _entrypoints.version_from_entrypoint( + config, + entrypoint="setuptools_scm.parse_scm", + root=config.absolute_root, + ) + except _run_cmd.CommandNotFoundError as e: + _log.exception("command %s not found while parsing the scm, using fallbacks", e) + return None + + +def parse_fallback_version(config: Configuration) -> ScmVersion | None: + return 
_entrypoints.version_from_entrypoint( + config, + entrypoint="setuptools_scm.parse_scm_fallback", + root=config.fallback_root, + ) + + +def parse_version(config: Configuration) -> ScmVersion | None: + return ( + _read_pretended_version_for(config) + or parse_scm_version(config) + or parse_fallback_version(config) + ) + + +def write_version_files( + config: Configuration, version: str, scm_version: ScmVersion +) -> None: + if config.write_to is not None: + from ._integration.dump_version import dump_version + + dump_version( + root=config.root, + version=version, + scm_version=scm_version, + write_to=config.write_to, + template=config.write_to_template, + ) + if config.version_file: + from ._integration.dump_version import write_version_to_path + + version_file = Path(config.version_file) + assert not version_file.is_absolute(), f"{version_file=}" + # todo: use a better name than fallback root + assert config.relative_to is not None + target = Path(config.relative_to).parent.joinpath(version_file) + write_version_to_path( + target, + template=config.version_file_template, + version=version, + scm_version=scm_version, + ) + + +def _get_version( + config: Configuration, force_write_version_files: bool | None = None +) -> str | None: + parsed_version = parse_version(config) + if parsed_version is None: + return None + version_string = _format_version(parsed_version) + if force_write_version_files is None: + force_write_version_files = True + warnings.warn( + "force_write_version_files ought to be set," + " presuming the legacy True value", + DeprecationWarning, + ) + + if force_write_version_files: + write_version_files(config, version=version_string, scm_version=parsed_version) + + return version_string + + +def _version_missing(config: Configuration) -> NoReturn: + raise LookupError( + f"setuptools-scm was unable to detect version for {config.absolute_root}.\n\n" + "Make sure you're either building from a fully intact git repository " + "or PyPI tarballs. 
Most other sources (such as GitHub's tarballs, a " + "git checkout without the .git folder) don't contain the necessary " + "metadata and will not work.\n\n" + "For example, if you're using pip, instead of " + "https://github.com/user/proj/archive/master.zip " + "use git+https://github.com/user/proj.git#egg=proj" + ) + + +def get_version( + root: _t.PathT = ".", + version_scheme: _t.VERSION_SCHEME = _config.DEFAULT_VERSION_SCHEME, + local_scheme: _t.VERSION_SCHEME = _config.DEFAULT_LOCAL_SCHEME, + write_to: _t.PathT | None = None, + write_to_template: str | None = None, + version_file: _t.PathT | None = None, + version_file_template: str | None = None, + relative_to: _t.PathT | None = None, + tag_regex: str | Pattern[str] = _config.DEFAULT_TAG_REGEX, + parentdir_prefix_version: str | None = None, + fallback_version: str | None = None, + fallback_root: _t.PathT = ".", + parse: Any | None = None, + git_describe_command: _t.CMD_TYPE | None = None, + dist_name: str | None = None, + version_cls: Any | None = None, + normalize: bool = True, + search_parent_directories: bool = False, +) -> str: + """ + If supplied, relative_to should be a file from which root may + be resolved. Typically called by a script or module that is not + in the root of the repository to direct setuptools_scm to the + root of the repository by supplying ``__file__``. 
+ """ + + version_cls = _validate_version_cls(version_cls, normalize) + del normalize + tag_regex = parse_tag_regex(tag_regex) + config = Configuration(**locals()) + maybe_version = _get_version(config, force_write_version_files=True) + + if maybe_version is None: + _version_missing(config) + return maybe_version + + +def parse_tag_regex(tag_regex: str | Pattern[str]) -> Pattern[str]: + if isinstance(tag_regex, str): + if tag_regex == "": + warnings.warn( + DeprecationWarning( + "empty regex for tag regex is invalid, using default" + ) + ) + return _config.DEFAULT_TAG_REGEX + else: + return re.compile(tag_regex) + else: + return tag_regex diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_integration/__init__.py b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_integration/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_integration/__pycache__/__init__.cpython-311.pyc b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_integration/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..7f47c3c Binary files /dev/null and b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_integration/__pycache__/__init__.cpython-311.pyc differ diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_integration/__pycache__/dump_version.cpython-311.pyc b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_integration/__pycache__/dump_version.cpython-311.pyc new file mode 100644 index 0000000..b0d79e5 Binary files /dev/null and b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_integration/__pycache__/dump_version.cpython-311.pyc differ diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_integration/__pycache__/pyproject_reading.cpython-311.pyc b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_integration/__pycache__/pyproject_reading.cpython-311.pyc new file mode 100644 index 0000000..42dfe3c Binary files /dev/null and 
b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_integration/__pycache__/pyproject_reading.cpython-311.pyc differ diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_integration/__pycache__/setuptools.cpython-311.pyc b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_integration/__pycache__/setuptools.cpython-311.pyc new file mode 100644 index 0000000..4ecb5ad Binary files /dev/null and b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_integration/__pycache__/setuptools.cpython-311.pyc differ diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_integration/__pycache__/toml.cpython-311.pyc b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_integration/__pycache__/toml.cpython-311.pyc new file mode 100644 index 0000000..0105570 Binary files /dev/null and b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_integration/__pycache__/toml.cpython-311.pyc differ diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_integration/dump_version.py b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_integration/dump_version.py new file mode 100644 index 0000000..d890243 --- /dev/null +++ b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_integration/dump_version.py @@ -0,0 +1,94 @@ +from __future__ import annotations + +import warnings +from pathlib import Path + +from .. import _types as _t +from .._log import log as parent_log +from .._version_cls import _version_as_tuple +from ..version import ScmVersion + + +log = parent_log.getChild("dump_version") + +TEMPLATES = { + ".py": """\ +# file generated by setuptools_scm +# don't change, don't track in version control +TYPE_CHECKING = False +if TYPE_CHECKING: + from typing import Tuple, Union + VERSION_TUPLE = Tuple[Union[int, str], ...] 
+else: + VERSION_TUPLE = object + +version: str +__version__: str +__version_tuple__: VERSION_TUPLE +version_tuple: VERSION_TUPLE + +__version__ = version = {version!r} +__version_tuple__ = version_tuple = {version_tuple!r} +""", + ".txt": "{version}", +} + + +def dump_version( + root: _t.PathT, + version: str, + write_to: _t.PathT, + template: str | None = None, + scm_version: ScmVersion | None = None, +) -> None: + assert isinstance(version, str) + root = Path(root) + write_to = Path(write_to) + if write_to.is_absolute(): + # trigger warning on escape + write_to.relative_to(root) + warnings.warn( + f"{write_to=!s} is a absolute path," + " please switch to using a relative version file", + DeprecationWarning, + ) + target = write_to + else: + target = Path(root).joinpath(write_to) + write_version_to_path( + target, template=template, version=version, scm_version=scm_version + ) + + +def _validate_template(target: Path, template: str | None) -> str: + if template == "": + warnings.warn(f"{template=} looks like a error, using default instead") + template = None + if template is None: + template = TEMPLATES.get(target.suffix) + + if template is None: + raise ValueError( + f"bad file format: {target.suffix!r} (of {target})\n" + "only *.txt and *.py have a default template" + ) + else: + return template + + +def write_version_to_path( + target: Path, template: str | None, version: str, scm_version: ScmVersion | None +) -> None: + final_template = _validate_template(target, template) + log.debug("dump %s into %s", version, target) + version_tuple = _version_as_tuple(version) + if scm_version is not None: + content = final_template.format( + version=version, + version_tuple=version_tuple, + scm_version=scm_version, + ) + else: + content = final_template.format(version=version, version_tuple=version_tuple) + + target.write_text(content, encoding="utf-8") diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_integration/pyproject_reading.py 
b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_integration/pyproject_reading.py new file mode 100644 index 0000000..c9818a2 --- /dev/null +++ b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_integration/pyproject_reading.py @@ -0,0 +1,85 @@ +from __future__ import annotations + +import warnings +from pathlib import Path +from typing import NamedTuple + +from .. import _log +from .setuptools import read_dist_name_from_setup_cfg +from .toml import read_toml_content +from .toml import TOML_RESULT + + +log = _log.log.getChild("pyproject_reading") + +_ROOT = "root" + + +class PyProjectData(NamedTuple): + path: Path + tool_name: str + project: TOML_RESULT + section: TOML_RESULT + + @property + def project_name(self) -> str | None: + return self.project.get("name") + + +def read_pyproject( + path: Path = Path("pyproject.toml"), + tool_name: str = "setuptools_scm", + require_section: bool = True, +) -> PyProjectData: + defn = read_toml_content(path, None if require_section else {}) + try: + section = defn.get("tool", {})[tool_name] + except LookupError as e: + error = f"{path} does not contain a tool.{tool_name} section" + if require_section: + raise LookupError(error) from e + else: + log.warning("toml section missing %r", error) + section = {} + + project = defn.get("project", {}) + return PyProjectData(path, tool_name, project, section) + + +def get_args_for_pyproject( + pyproject: PyProjectData, + dist_name: str | None, + kwargs: TOML_RESULT, +) -> TOML_RESULT: + """drops problematic details and figures the distribution name""" + section = pyproject.section.copy() + kwargs = kwargs.copy() + if "relative_to" in section: + relative = section.pop("relative_to") + warnings.warn( + f"{pyproject.path}: at [tool.{pyproject.tool_name}]\n" + f"ignoring value relative_to={relative!r}" + " as its always relative to the config file" + ) + if "dist_name" in section: + if dist_name is None: + dist_name = section.pop("dist_name") + else: + assert dist_name == 
section["dist_name"] + section.pop("dist_name") + if dist_name is None: + # minimal pep 621 support for figuring the pretend keys + dist_name = pyproject.project_name + if dist_name is None: + dist_name = read_dist_name_from_setup_cfg() + if _ROOT in kwargs: + if kwargs[_ROOT] is None: + kwargs.pop(_ROOT, None) + elif _ROOT in section: + if section[_ROOT] != kwargs[_ROOT]: + warnings.warn( + f"root {section[_ROOT]} is overridden" + f" by the cli arg {kwargs[_ROOT]}" + ) + section.pop(_ROOT, None) + return {"dist_name": dist_name, **section, **kwargs} diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_integration/setuptools.py b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_integration/setuptools.py new file mode 100644 index 0000000..f574d23 --- /dev/null +++ b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_integration/setuptools.py @@ -0,0 +1,121 @@ +from __future__ import annotations + +import logging +import os +import warnings +from typing import Any +from typing import Callable + +import setuptools + +from .. import _config + +log = logging.getLogger(__name__) + + +def read_dist_name_from_setup_cfg( + input: str | os.PathLike[str] = "setup.cfg", +) -> str | None: + # minimal effort to read dist_name off setup.cfg metadata + import configparser + + parser = configparser.ConfigParser() + parser.read([input], encoding="utf-8") + dist_name = parser.get("metadata", "name", fallback=None) + return dist_name + + +def _warn_on_old_setuptools(_version: str = setuptools.__version__) -> None: + if int(_version.split(".")[0]) < 61: + warnings.warn( + RuntimeWarning( + f""" +ERROR: setuptools=={_version} is used in combination with setuptools_scm>=8.x + +Your build configuration is incomplete and previously worked by accident! 
+setuptools_scm requires setuptools>=61 + +Suggested workaround if applicable: + - migrating from the deprecated setup_requires mechanism to pep517/518 + and using a pyproject.toml to declare build dependencies + which are reliably pre-installed before running the build tools +""" + ) + ) + + +def _assign_version( + dist: setuptools.Distribution, config: _config.Configuration +) -> None: + from .._get_version_impl import _get_version, _version_missing + + # todo: build time plugin + maybe_version = _get_version(config, force_write_version_files=True) + + if maybe_version is None: + _version_missing(config) + else: + assert dist.metadata.version is None + dist.metadata.version = maybe_version + + +_warn_on_old_setuptools() + + +def _log_hookstart(hook: str, dist: setuptools.Distribution) -> None: + log.debug("%s %r", hook, vars(dist.metadata)) + + +def version_keyword( + dist: setuptools.Distribution, + keyword: str, + value: bool | dict[str, Any] | Callable[[], dict[str, Any]], +) -> None: + overrides: dict[str, Any] + if value is True: + overrides = {} + elif callable(value): + overrides = value() + else: + assert isinstance(value, dict), "version_keyword expects a dict or True" + overrides = value + + assert ( + "dist_name" not in overrides + ), "dist_name may not be specified in the setup keyword " + dist_name: str | None = dist.metadata.name + _log_hookstart("version_keyword", dist) + + if dist.metadata.version is not None: + warnings.warn(f"version of {dist_name} already set") + return + + if dist_name is None: + dist_name = read_dist_name_from_setup_cfg() + + config = _config.Configuration.from_file( + dist_name=dist_name, + _require_section=False, + **overrides, + ) + _assign_version(dist, config) + + +def infer_version(dist: setuptools.Distribution) -> None: + _log_hookstart("infer_version", dist) + log.debug("dist %s %s", id(dist), id(dist.metadata)) + if dist.metadata.version is not None: + return # metadata already added by hook + dist_name = 
dist.metadata.name + if dist_name is None: + dist_name = read_dist_name_from_setup_cfg() + if not os.path.isfile("pyproject.toml"): + return + if dist_name == "setuptools_scm": + return + try: + config = _config.Configuration.from_file(dist_name=dist_name) + except LookupError as e: + log.warning(e) + else: + _assign_version(dist, config) diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_integration/toml.py b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_integration/toml.py new file mode 100644 index 0000000..a08b7b8 --- /dev/null +++ b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/_integration/toml.py @@ -0,0 +1,55 @@ +from __future__ import annotations + +import sys +from pathlib import Path +from typing import Any +from typing import Callable +from typing import cast +from typing import Dict +from typing import TYPE_CHECKING +from typing import TypedDict + +if sys.version_info >= (3, 11): + from tomllib import loads as load_toml +else: + from tomli import loads as load_toml + +if TYPE_CHECKING: + from typing_extensions import TypeAlias + +from .. 
def load_toml_or_inline_map(data: str | None) -> dict[str, Any]:
    """
    load toml data - with a special hack if only a inline map is given
    """
    if not data:
        # None or empty string: nothing configured
        return {}
    if data.startswith("{"):
        # An inline table is not a valid top-level TOML document, so wrap
        # it under a dummy key and unwrap the parsed result.
        wrapped = load_toml("cheat=" + data)
        return wrapped["cheat"]
    return load_toml(data)
def _default_log_level(_env: Mapping[str, str] = os.environ) -> int:
    """Return DEBUG when ``SETUPTOOLS_SCM_DEBUG`` is set (even empty), else WARN."""
    if _env.get("SETUPTOOLS_SCM_DEBUG") is None:
        return logging.WARN
    return logging.DEBUG
def strip_local(version_string: str) -> str:
    """Return the public part of *version_string*, dropping any ``+local`` segment."""
    public, _sep, _local = version_string.partition("+")
    return public
def read_named_env(
    *, tool: str = "SETUPTOOLS_SCM", name: str, dist_name: str | None
) -> str | None:
    """Read an override from the environment.

    Prefers the dist-specific ``{tool}_{name}_FOR_{DIST}`` variable (with
    *dist_name* normalized per PEP 503 and upper-cased) and falls back to
    the generic ``{tool}_{name}`` variable. Returns ``None`` if neither is set.
    """
    if dist_name is not None:
        # Normalize the dist name as per PEP 503, then make it env-var safe.
        normalized_dist_name = re.sub(r"[-_.]+", "-", dist_name)
        env_var_dist_name = normalized_dist_name.replace("-", "_").upper()
        specific = os.environ.get(f"{tool}_{name}_FOR_{env_var_dist_name}")
        if specific is not None:
            return specific
    return os.environ.get(f"{tool}_{name}")
class CompletedProcess(BaseCompletedProcess):
    """``subprocess.CompletedProcess`` variant with stripped output and a
    result-parsing helper used throughout the scm backends."""

    @classmethod
    def from_raw(
        cls, input: BaseCompletedProcess, strip: bool = True
    ) -> CompletedProcess:
        # Wrap a raw CompletedProcess; by default strip surrounding whitespace
        # from stdout/stderr (only when they are non-empty / not None).
        return cls(
            args=input.args,
            returncode=input.returncode,
            stdout=input.stdout.strip() if strip and input.stdout else input.stdout,
            stderr=input.stderr.strip() if strip and input.stderr else input.stderr,
        )

    @overload
    def parse_success(
        self,
        parse: Callable[[str], PARSE_RESULT],
        default: None = None,
        error_msg: str | None = None,
    ) -> PARSE_RESULT | None:
        ...

    @overload
    def parse_success(
        self,
        parse: Callable[[str], PARSE_RESULT],
        default: T,
        error_msg: str | None = None,
    ) -> PARSE_RESULT | T:
        ...

    def parse_success(
        self,
        parse: Callable[[str], PARSE_RESULT],
        default: T | None = None,
        error_msg: str | None = None,
    ) -> PARSE_RESULT | T | None:
        # On success (returncode == 0) apply ``parse`` to stdout and return it;
        # on failure return ``default``, logging ``error_msg`` when provided.
        if self.returncode:
            if error_msg:
                log.warning("%s %s", error_msg, self)
            return default
        else:
            return parse(self.stdout)
def ensure_stripped_str(str_or_bytes: str | bytes) -> str:
    """Coerce *str_or_bytes* to text and strip surrounding whitespace.

    Bytes are decoded as UTF-8 with ``surrogateescape`` so undecodable
    input round-trips instead of raising.
    """
    if isinstance(str_or_bytes, bytes):
        str_or_bytes = str_or_bytes.decode("utf-8", "surrogateescape")
    return str_or_bytes.strip()
def _unsafe_quote_for_display(item: _t.PathT) -> str:
    # give better results than shlex.join in our cases
    text = os.fspath(item)
    needs_quotes = any(c in text for c in " {[:")
    return f'"{text}"' if needs_quotes else text
class NonNormalizedVersion(Version):
    """A non-normalizing version handler.

    You can use this class to preserve version verification but skip normalization.
    For example you can use this to avoid git release candidate version tags
    ("1.0.0-rc1") to be normalized to "1.0.0rc1". Only use this if you fully
    trust the version tags.
    """

    def __init__(self, version: str) -> None:
        # parse and validate using parent
        super().__init__(version)

        # store raw for str
        self._raw_version = version

    def __str__(self) -> str:
        # return the non-normalized version (parent returns the normalized)
        return self._raw_version

    def __repr__(self) -> str:
        # same pattern as parent: bug fix — the repr previously returned the
        # empty string (f""), losing all debugging information
        return f"<NonNormalizedVersion({self._raw_version!r})>"
def walk_potential_roots(root: _t.PathT, search_parents: bool = True) -> Iterator[Path]:
    """
    Iterate through a path and each of its parents.
    :param root: File path.
    :param search_parents: If ``False`` the parents are not considered.
    """
    current = Path(root)
    yield current
    if not search_parents:
        return
    yield from current.parents
+ """ + + log.debug("looking for ep %s in %s", entrypoint, root) + from ._entrypoints import iter_entry_points + + for wd in walk_potential_roots(root, config.search_parent_directories): + for ep in iter_entry_points(entrypoint): + if ep.value in _BLOCKED_EP_TARGETS: + continue + if match_entrypoint(wd, ep.name): + log.debug("found ep %s in %s", ep, wd) + config.parent = wd + yield ep diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/fallbacks.py b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/fallbacks.py new file mode 100644 index 0000000..e1ea60c --- /dev/null +++ b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/fallbacks.py @@ -0,0 +1,44 @@ +from __future__ import annotations + +import logging +import os +from pathlib import Path +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from . import _types as _t +from . import Configuration +from .integration import data_from_mime +from .version import meta +from .version import ScmVersion +from .version import tag_to_version + +log = logging.getLogger(__name__) + +_UNKNOWN = "UNKNOWN" + + +def parse_pkginfo(root: _t.PathT, config: Configuration) -> ScmVersion | None: + pkginfo = Path(root) / "PKG-INFO" + log.debug("pkginfo %s", pkginfo) + data = data_from_mime(pkginfo) + version = data.get("Version", _UNKNOWN) + if version != _UNKNOWN: + return meta(version, preformatted=True, config=config) + else: + return None + + +def fallback_version(root: _t.PathT, config: Configuration) -> ScmVersion | None: + if config.parentdir_prefix_version is not None: + _, parent_name = os.path.split(os.path.abspath(root)) + if parent_name.startswith(config.parentdir_prefix_version): + version = tag_to_version( + parent_name[len(config.parentdir_prefix_version) :], config + ) + if version is not None: + return meta(str(version), preformatted=True, config=config) + if config.fallback_version is not None: + log.debug("FALLBACK %s", config.fallback_version) + return meta(config.fallback_version, 
preformatted=True, config=config) + return None diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/git.py b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/git.py new file mode 100644 index 0000000..d511961 --- /dev/null +++ b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/git.py @@ -0,0 +1,336 @@ +from __future__ import annotations + +import dataclasses +import logging +import os +import re +import shlex +import warnings +from datetime import date +from datetime import datetime +from datetime import timezone +from os.path import samefile +from pathlib import Path +from typing import Callable +from typing import Sequence +from typing import TYPE_CHECKING + +from . import _types as _t +from . import Configuration +from . import discover +from ._run_cmd import CompletedProcess as _CompletedProcess +from ._run_cmd import require_command as _require_command +from ._run_cmd import run as _run +from .integration import data_from_mime +from .scm_workdir import Workdir +from .version import meta +from .version import ScmVersion +from .version import tag_to_version + +if TYPE_CHECKING: + from . import hg_git +log = logging.getLogger(__name__) + +REF_TAG_RE = re.compile(r"(?<=\btag: )([^,]+)\b") +DESCRIBE_UNSUPPORTED = "%(describe" + +# If testing command in shell make sure to quote the match argument like +# '*[0-9]*' as it will expand before being sent to git if there are any matching +# files in current directory. 
def run_git(
    args: Sequence[str | os.PathLike[str]],
    repo: Path,
    *,
    check: bool = False,
    timeout: int = 20,
) -> _CompletedProcess:
    """Invoke git with an explicit ``--git-dir`` for *repo* and return the result."""
    command = ["git", "--git-dir", repo / ".git", *args]
    return _run(command, cwd=repo, check=check, timeout=timeout)
def fail_on_shallow(wd: GitWorkdir) -> None:
    """experimental, may change at any time"""
    if not wd.is_shallow():
        return
    raise ValueError(
        f'{wd.path} is shallow, please correct with "git fetch --unshallow"'
    )
+ """ + + if config.parent: # todo broken + return GitWorkdir.from_potential_worktree(config.parent) + + for potential_root in discover.walk_potential_roots( + root, search_parents=config.search_parent_directories + ): + potential_wd = GitWorkdir.from_potential_worktree(potential_root) + if potential_wd is not None: + return potential_wd + + return GitWorkdir.from_potential_worktree(root) + + +def parse( + root: _t.PathT, + config: Configuration, + describe_command: str | list[str] | None = None, + pre_parse: Callable[[GitWorkdir], None] = warn_on_shallow, +) -> ScmVersion | None: + """ + :param pre_parse: experimental pre_parse action, may change at any time + """ + _require_command("git") + wd = get_working_directory(config, root) + if wd: + return _git_parse_inner( + config, wd, describe_command=describe_command, pre_parse=pre_parse + ) + else: + return None + + +def version_from_describe( + wd: GitWorkdir | hg_git.GitWorkdirHgClient, + config: Configuration, + describe_command: _t.CMD_TYPE | None, +) -> ScmVersion | None: + pass + + if config.git_describe_command is not None: + describe_command = config.git_describe_command + + if describe_command is not None: + if isinstance(describe_command, str): + describe_command = shlex.split(describe_command) + # todo: figure how to ensure git with gitdir gets correctly invoked + if describe_command[0] == "git": + describe_res = run_git(describe_command[1:], wd.path) + else: + describe_res = _run(describe_command, wd.path) + else: + describe_res = wd.default_describe() + + def parse_describe(output: str) -> ScmVersion: + tag, distance, node, dirty = _git_parse_describe(output) + return meta(tag=tag, distance=distance, dirty=dirty, node=node, config=config) + + return describe_res.parse_success(parse=parse_describe) + + +def _git_parse_inner( + config: Configuration, + wd: GitWorkdir | hg_git.GitWorkdirHgClient, + pre_parse: None | (Callable[[GitWorkdir | hg_git.GitWorkdirHgClient], None]) = None, + describe_command: 
def _git_parse_describe(
    describe_output: str,
) -> tuple[str, int, str | None, bool]:
    """Split ``git describe`` output into ``(tag, distance, node, dirty)``.

    Input looks like ``v1.5.0-0-g4060507`` or ``v1.15.1rc1-37-g9bd1298-dirty``.
    It may also be a bare tag name when this is a tagged commit parsed from a
    ``.git_archival.txt`` file; then distance is 0 and node is ``None``.
    """
    dirty = describe_output.endswith("-dirty")
    if dirty:
        describe_output = describe_output[: -len("-dirty")]

    parts = describe_output.rsplit("-", 2)
    if len(parts) == 3:
        tag, distance_str, node = parts
        return tag, int(distance_str), node, dirty
    # probably a tagged commit: no distance/node suffix present
    return describe_output, 0, None, dirty
import Configuration +from ._version_cls import Version +from .integration import data_from_mime +from .scm_workdir import Workdir +from .version import meta +from .version import ScmVersion +from .version import tag_to_version + +if TYPE_CHECKING: + from . import _types as _t + +from ._run_cmd import run as _run, require_command as _require_command + +log = logging.getLogger(__name__) + + +class HgWorkdir(Workdir): + @classmethod + def from_potential_worktree(cls, wd: _t.PathT) -> HgWorkdir | None: + res = _run(["hg", "root"], wd) + if res.returncode: + return None + return cls(Path(res.stdout)) + + def get_meta(self, config: Configuration) -> ScmVersion | None: + node: str + tags_str: str + bookmark: str + node_date_str: str + node, tags_str, bookmark, node_date_str = self.hg_log( + ".", "{node}\n{tag}\n{bookmark}\n{date|shortdate}" + ).split("\n") + + # TODO: support bookmarks and topics (but nowadays bookmarks are + # mainly used to emulate Git branches, which is already supported with + # the dedicated class GitWorkdirHgClient) + + branch, dirty_str, dirty_date = _run( + ["hg", "id", "-T", "{branch}\n{if(dirty, 1, 0)}\n{date|shortdate}"], + cwd=self.path, + check=True, + ).stdout.split("\n") + dirty = bool(int(dirty_str)) + node_date = datetime.date.fromisoformat(dirty_date if dirty else node_date_str) + + if node == "0" * len(node): + log.debug("initial node %s", self.path) + return meta( + Version("0.0"), + config=config, + dirty=dirty, + branch=branch, + node_date=node_date, + ) + + node = "h" + node[:7] + + tags = tags_str.split() + if "tip" in tags: + # tip is not a real tag + tags.remove("tip") + + if tags: + tag = tag_to_version(tags[0], config) + if tag: + return meta(tag, dirty=dirty, branch=branch, config=config) + + try: + tag_str = self.get_latest_normalizable_tag() + if tag_str is None: + dist = self.get_distance_revs("") + else: + dist = self.get_distance_revs(tag_str) + + if tag_str == "null" or tag_str is None: + tag = Version("0.0") + dist += 
1 + else: + tag = tag_to_version(tag_str, config=config) + assert tag is not None + + if self.check_changes_since_tag(tag_str) or dirty: + return meta( + tag, + distance=dist, + node=node, + dirty=dirty, + branch=branch, + config=config, + node_date=node_date, + ) + else: + return meta(tag, config=config, node_date=node_date) + + except ValueError as e: + log.exception("error %s", e) + pass # unpacking failed, old hg + + return None + + def hg_log(self, revset: str, template: str) -> str: + cmd = ["hg", "log", "-r", revset, "-T", template] + + return _run(cmd, cwd=self.path, check=True).stdout + + def get_latest_normalizable_tag(self) -> str | None: + # Gets all tags containing a '.' (see #229) from oldest to newest + outlines = self.hg_log( + revset="ancestors(.) and tag('re:\\.')", + template="{tags}{if(tags, '\n', '')}", + ).split() + if not outlines: + return None + tag = outlines[-1].split()[-1] + return tag + + def get_distance_revs(self, rev1: str, rev2: str = ".") -> int: + revset = f"({rev1}::{rev2})" + out = self.hg_log(revset, ".") + return len(out) - 1 + + def check_changes_since_tag(self, tag: str | None) -> bool: + if tag == "0.0" or tag is None: + return True + + revset = ( + "(branch(.)" # look for revisions in this branch only + f" and tag({tag!r})::." 
# after the last tag + # ignore commits that only modify .hgtags and nothing else: + " and (merge() or file('re:^(?!\\.hgtags).*$'))" + f" and not tag({tag!r}))" # ignore the tagged commit itself + ) + + return bool(self.hg_log(revset, ".")) + + +def parse(root: _t.PathT, config: Configuration) -> ScmVersion | None: + _require_command("hg") + if os.path.exists(os.path.join(root, ".hg/git")): + res = _run(["hg", "path"], root) + if not res.returncode: + for line in res.stdout.split("\n"): + if line.startswith("default ="): + path = Path(line.split()[2]) + if path.name.endswith(".git") or (path / ".git").exists(): + from .git import _git_parse_inner + from .hg_git import GitWorkdirHgClient + + wd_hggit = GitWorkdirHgClient.from_potential_worktree(root) + if wd_hggit: + return _git_parse_inner(config, wd_hggit) + + wd = HgWorkdir.from_potential_worktree(config.absolute_root) + + if wd is None: + return None + + return wd.get_meta(config) + + +def archival_to_version(data: dict[str, str], config: Configuration) -> ScmVersion: + log.debug("data %s", data) + node = data.get("node", "")[:12] + if node: + node = "h" + node + if "tag" in data: + return meta(data["tag"], config=config) + elif "latesttag" in data: + return meta( + data["latesttag"], + distance=int(data["latesttagdistance"]), + node=node, + config=config, + ) + else: + return meta(config.version_cls("0.0"), node=node, config=config) + + +def parse_archival(root: _t.PathT, config: Configuration) -> ScmVersion: + archival = os.path.join(root, ".hg_archival.txt") + data = data_from_mime(archival) + return archival_to_version(data, config=config) diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/hg_git.py b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/hg_git.py new file mode 100644 index 0000000..b6c3036 --- /dev/null +++ b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/hg_git.py @@ -0,0 +1,155 @@ +from __future__ import annotations + +import logging +import os +from contextlib import 
suppress +from datetime import date +from pathlib import Path + +from . import _types as _t +from ._run_cmd import CompletedProcess as _CompletedProcess +from ._run_cmd import require_command +from ._run_cmd import run as _run +from .git import GitWorkdir +from .hg import HgWorkdir + +log = logging.getLogger(__name__) + +_FAKE_GIT_DESCRIBE_ERROR = _CompletedProcess( + "fake git describe output for hg", + 1, + "<>hg git failed to describe", +) + + +class GitWorkdirHgClient(GitWorkdir, HgWorkdir): + COMMAND = "hg" + + @classmethod + def from_potential_worktree(cls, wd: _t.PathT) -> GitWorkdirHgClient | None: + require_command("hg") + res = _run(["hg", "root"], cwd=wd).parse_success(parse=Path) + if res is None: + return None + return cls(res) + + def is_dirty(self) -> bool: + res = _run(["hg", "id", "-T", "{dirty}"], cwd=self.path, check=True) + return bool(res.stdout) + + def get_branch(self) -> str | None: + res = _run(["hg", "id", "-T", "{bookmarks}"], cwd=self.path) + if res.returncode: + log.info("branch err %s", res) + return None + return res.stdout + + def get_head_date(self) -> date | None: + return _run('hg log -r . -T "{shortdate(date)}"', cwd=self.path).parse_success( + parse=date.fromisoformat, error_msg="head date err" + ) + + def is_shallow(self) -> bool: + return False + + def fetch_shallow(self) -> None: + pass + + def get_hg_node(self) -> str | None: + res = _run('hg log -r . 
-T "{node}"', cwd=self.path) + if res.returncode: + return None + else: + return res.stdout + + def _hg2git(self, hg_node: str) -> str | None: + with suppress(FileNotFoundError): + with open(os.path.join(self.path, ".hg/git-mapfile")) as map_items: + for item in map_items: + if hg_node in item: + git_node, hg_node = item.split() + return git_node + return None + + def node(self) -> str | None: + hg_node = self.get_hg_node() + if hg_node is None: + return None + + git_node = self._hg2git(hg_node) + + if git_node is None: + # trying again after hg -> git + _run(["hg", "gexport"], cwd=self.path) + git_node = self._hg2git(hg_node) + + if git_node is None: + log.debug("Cannot get git node so we use hg node %s", hg_node) + + if hg_node == "0" * len(hg_node): + # mimic Git behavior + return None + + return hg_node + + return git_node[:7] + + def count_all_nodes(self) -> int: + res = _run(["hg", "log", "-r", "ancestors(.)", "-T", "."], cwd=self.path) + return len(res.stdout) + + def default_describe(self) -> _CompletedProcess: + """ + Tentative to reproduce the output of + + `git describe --dirty --tags --long --match *[0-9]*` + + """ + res = _run( + [ + "hg", + "log", + "-r", + "(reverse(ancestors(.)) and tag(r're:v?[0-9].*'))", + "-T", + "{tags}{if(tags, ' ', '')}", + ], + cwd=self.path, + ) + if res.returncode: + return _FAKE_GIT_DESCRIBE_ERROR + hg_tags: list[str] = res.stdout.split() + + if not hg_tags: + return _FAKE_GIT_DESCRIBE_ERROR + + with self.path.joinpath(".hg/git-tags").open() as fp: + git_tags: dict[str, str] = dict(line.split()[::-1] for line in fp) + + tag: str + for hg_tag in hg_tags: + if hg_tag in git_tags: + tag = hg_tag + break + else: + logging.warning("tag not found hg=%s git=%s", hg_tags, git_tags) + return _FAKE_GIT_DESCRIBE_ERROR + + res = _run(["hg", "log", "-r", f"'{tag}'::.", "-T", "."], cwd=self.path) + if res.returncode: + return _FAKE_GIT_DESCRIBE_ERROR + distance = len(res.stdout) - 1 + + node = self.node() + assert node is not None + 
desc = f"{tag}-{distance}-g{node}" + + if self.is_dirty(): + desc += "-dirty" + log.debug("faked describe %r", desc) + return _CompletedProcess( + ["setuptools-scm", "faked", "describe"], + returncode=0, + stdout=desc, + stderr="", + ) diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/integration.py b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/integration.py new file mode 100644 index 0000000..390b0a7 --- /dev/null +++ b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/integration.py @@ -0,0 +1,30 @@ +from __future__ import annotations + +import logging +import textwrap +from pathlib import Path + +from . import _types as _t + +log = logging.getLogger(__name__) + + +def data_from_mime(path: _t.PathT, content: None | str = None) -> dict[str, str]: + """return a mapping from mime/pseudo-mime content + :param path: path to the mime file + :param content: content of the mime file, if None, read from path + :rtype: dict[str, str] + + """ + + if content is None: + content = Path(path).read_text(encoding="utf-8") + log.debug("mime %s content:\n%s", path, textwrap.indent(content, " ")) + + from email.parser import HeaderParser + + parser = HeaderParser() + message = parser.parsestr(content) + data = dict(message.items()) + log.debug("mime %s data:\n%s", path, data) + return data diff --git a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/scm_workdir.py b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/scm_workdir.py new file mode 100644 index 0000000..9879549 --- /dev/null +++ b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/scm_workdir.py @@ -0,0 +1,15 @@ +from __future__ import annotations + +from dataclasses import dataclass +from pathlib import Path + +from ._config import Configuration +from .version import ScmVersion + + +@dataclass() +class Workdir: + path: Path + + def run_describe(self, config: Configuration) -> ScmVersion: + raise NotImplementedError(self.run_describe) diff --git 
a/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/version.py b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/version.py new file mode 100644 index 0000000..f43e14b --- /dev/null +++ b/.eggs/setuptools_scm-8.0.4-py3.11.egg/setuptools_scm/version.py @@ -0,0 +1,440 @@ +from __future__ import annotations + +import dataclasses +import logging +import os +import re +import warnings +from datetime import date +from datetime import datetime +from datetime import timezone +from typing import Any +from typing import Callable +from typing import Match +from typing import TYPE_CHECKING + +from . import _entrypoints +from . import _modify_version + +if TYPE_CHECKING: + from typing_extensions import Concatenate + from typing_extensions import ParamSpec + + _P = ParamSpec("_P") + +from typing import TypedDict + + +from ._version_cls import Version as PkgVersion, _VersionT +from . import _version_cls as _v +from . import _config + +log = logging.getLogger(__name__) + + +SEMVER_MINOR = 2 +SEMVER_PATCH = 3 +SEMVER_LEN = 3 + + +class _TagDict(TypedDict): + version: str + prefix: str + suffix: str + + +def _parse_version_tag( + tag: str | object, config: _config.Configuration +) -> _TagDict | None: + match = config.tag_regex.match(str(tag)) + + if match: + key: str | int = 1 if len(match.groups()) == 1 else "version" + full = match.group(0) + log.debug("%r %r %s", tag, config.tag_regex, match) + log.debug( + "key %s data %s, %s, %r", key, match.groupdict(), match.groups(), full + ) + result = _TagDict( + version=match.group(key), + prefix=full[: match.start(key)], + suffix=full[match.end(key) :], + ) + + log.debug("tag %r parsed to %r", tag, result) + assert result["version"] + return result + else: + log.debug("tag %r did not parse", tag) + + return None + + +def callable_or_entrypoint(group: str, callable_or_name: str | Any) -> Any: + log.debug("ep %r %r", group, callable_or_name) + + if callable(callable_or_name): + return callable_or_name + from ._entrypoints import 
iter_entry_points + + for ep in iter_entry_points(group, callable_or_name): + log.debug("ep found: %s", ep.name) + return ep.load() + + +def tag_to_version( + tag: _VersionT | str, config: _config.Configuration +) -> _VersionT | None: + """ + take a tag that might be prefixed with a keyword and return only the version part + """ + log.debug("tag %s", tag) + + tag_dict = _parse_version_tag(tag, config) + if tag_dict is None or not tag_dict.get("version", None): + warnings.warn(f"tag {tag!r} no version found") + return None + + version_str = tag_dict["version"] + log.debug("version pre parse %s", version_str) + + if suffix := tag_dict.get("suffix", ""): + warnings.warn(f"tag {tag!r} will be stripped of its suffix {suffix!r}") + + version: _VersionT = config.version_cls(version_str) + log.debug("version=%r", version) + + return version + + +def _source_epoch_or_utc_now() -> datetime: + if "SOURCE_DATE_EPOCH" in os.environ: + date_epoch = int(os.environ["SOURCE_DATE_EPOCH"]) + return datetime.fromtimestamp(date_epoch, timezone.utc) + else: + return datetime.now(timezone.utc) + + +@dataclasses.dataclass +class ScmVersion: + """represents a parsed version from scm""" + + tag: _v.Version | _v.NonNormalizedVersion | str + """the related tag or preformatted version string""" + config: _config.Configuration + """the configuration used to parse the version""" + distance: int = 0 + """the number of commits since the tag""" + node: str | None = None + """the shortened node id""" + dirty: bool = False + """whether the working copy had uncommitted changes""" + preformatted: bool = False + """whether the version string was preformatted""" + branch: str | None = None + """the branch name if any""" + node_date: date | None = None + """the date of the commit if available""" + time: datetime = dataclasses.field(default_factory=_source_epoch_or_utc_now) + """the current time or source epoch time + only set for unit-testing version schemes + for real usage it must be `now(utc)` or 
`SOURCE_EPOCH` + """ + + @property + def exact(self) -> bool: + """returns true checked out exactly on a tag and no local changes apply""" + return self.distance == 0 and not self.dirty + + def __repr__(self) -> str: + return ( + f"" + ) + + def format_with(self, fmt: str, **kw: object) -> str: + """format a given format string with attributes of this object""" + return fmt.format( + time=self.time, + tag=self.tag, + distance=self.distance, + node=self.node, + dirty=self.dirty, + branch=self.branch, + node_date=self.node_date, + **kw, + ) + + def format_choice(self, clean_format: str, dirty_format: str, **kw: object) -> str: + """given `clean_format` and `dirty_format` + + choose one based on `self.dirty` and format it using `self.format_with`""" + + return self.format_with(dirty_format if self.dirty else clean_format, **kw) + + def format_next_version( + self, + guess_next: Callable[Concatenate[ScmVersion, _P], str], + fmt: str = "{guessed}.dev{distance}", + *k: _P.args, + **kw: _P.kwargs, + ) -> str: + guessed = guess_next(self, *k, **kw) + return self.format_with(fmt, guessed=guessed) + + +def _parse_tag( + tag: _VersionT | str, preformatted: bool, config: _config.Configuration +) -> _VersionT | str: + if preformatted: + return tag + elif not isinstance(tag, config.version_cls): + version = tag_to_version(tag, config) + assert version is not None + return version + else: + return tag + + +def meta( + tag: str | _VersionT, + *, + distance: int = 0, + dirty: bool = False, + node: str | None = None, + preformatted: bool = False, + branch: str | None = None, + config: _config.Configuration, + node_date: date | None = None, +) -> ScmVersion: + parsed_version = _parse_tag(tag, preformatted, config) + log.info("version %s -> %s", tag, parsed_version) + assert parsed_version is not None, "Can't parse version %s" % tag + return ScmVersion( + parsed_version, + distance=distance, + node=node, + dirty=dirty, + preformatted=preformatted, + branch=branch, + config=config, + 
node_date=node_date, + ) + + +def guess_next_version(tag_version: ScmVersion) -> str: + version = _modify_version.strip_local(str(tag_version.tag)) + return _modify_version._bump_dev(version) or _modify_version._bump_regex(version) + + +def guess_next_dev_version(version: ScmVersion) -> str: + if version.exact: + return version.format_with("{tag}") + else: + return version.format_next_version(guess_next_version) + + +def guess_next_simple_semver( + version: ScmVersion, retain: int, increment: bool = True +) -> str: + try: + parts = [int(i) for i in str(version.tag).split(".")[:retain]] + except ValueError: + raise ValueError(f"{version} can't be parsed as numeric version") from None + while len(parts) < retain: + parts.append(0) + if increment: + parts[-1] += 1 + while len(parts) < SEMVER_LEN: + parts.append(0) + return ".".join(str(i) for i in parts) + + +def simplified_semver_version(version: ScmVersion) -> str: + if version.exact: + return guess_next_simple_semver(version, retain=SEMVER_LEN, increment=False) + else: + if version.branch is not None and "feature" in version.branch: + return version.format_next_version( + guess_next_simple_semver, retain=SEMVER_MINOR + ) + else: + return version.format_next_version( + guess_next_simple_semver, retain=SEMVER_PATCH + ) + + +def release_branch_semver_version(version: ScmVersion) -> str: + if version.exact: + return version.format_with("{tag}") + if version.branch is not None: + # Does the branch name (stripped of namespace) parse as a version? + branch_ver_data = _parse_version_tag( + version.branch.split("/")[-1], version.config + ) + if branch_ver_data is not None: + branch_ver = branch_ver_data["version"] + if branch_ver[0] == "v": + # Allow branches that start with 'v', similar to Version. + branch_ver = branch_ver[1:] + # Does the branch version up to the minor part match the tag? If not it + # might be like, an issue number or something and not a version number, so + # we only want to use it if it matches. 
+ tag_ver_up_to_minor = str(version.tag).split(".")[:SEMVER_MINOR] + branch_ver_up_to_minor = branch_ver.split(".")[:SEMVER_MINOR] + if branch_ver_up_to_minor == tag_ver_up_to_minor: + # We're in a release/maintenance branch, next is a patch/rc/beta bump: + return version.format_next_version(guess_next_version) + # We're in a development branch, next is a minor bump: + return version.format_next_version(guess_next_simple_semver, retain=SEMVER_MINOR) + + +def release_branch_semver(version: ScmVersion) -> str: + warnings.warn( + "release_branch_semver is deprecated and will be removed in the future. " + "Use release_branch_semver_version instead", + category=DeprecationWarning, + stacklevel=2, + ) + return release_branch_semver_version(version) + + +def no_guess_dev_version(version: ScmVersion) -> str: + if version.exact: + return version.format_with("{tag}") + else: + return version.format_next_version(_modify_version._dont_guess_next_version) + + +_DATE_REGEX = re.compile( + r""" + ^(?P + (?P[vV]?) 
+ (?P\d{2}|\d{4})(?:\.\d{1,2}){2}) + (?:\.(?P\d*))?$ + """, + re.VERBOSE, +) + + +def date_ver_match(ver: str) -> Match[str] | None: + return _DATE_REGEX.match(ver) + + +def guess_next_date_ver( + version: ScmVersion, + node_date: date | None = None, + date_fmt: str | None = None, + version_cls: type | None = None, +) -> str: + """ + same-day -> patch +1 + other-day -> today + + distance is always added as .devX + """ + match = date_ver_match(str(version.tag)) + if match is None: + warnings.warn( + f"{version} does not correspond to a valid versioning date, " + "assuming legacy version" + ) + if date_fmt is None: + date_fmt = "%y.%m.%d" + else: + # deduct date format if not provided + if date_fmt is None: + date_fmt = "%Y.%m.%d" if len(match.group("year")) == 4 else "%y.%m.%d" + if prefix := match.group("prefix"): + if not date_fmt.startswith(prefix): + date_fmt = prefix + date_fmt + + today = version.time.date() + head_date = node_date or today + # compute patch + if match is None: + tag_date = today + else: + tag_date = ( + datetime.strptime(match.group("date"), date_fmt) + .replace(tzinfo=timezone.utc) + .date() + ) + if tag_date == head_date: + patch = "0" if match is None else (match.group("patch") or "0") + patch = int(patch) + 1 + else: + if tag_date > head_date and match is not None: + # warn on future times + warnings.warn( + f"your previous tag ({tag_date})" + f" is ahead your node date ({head_date})" + ) + patch = 0 + next_version = "{node_date:{date_fmt}}.{patch}".format( + node_date=head_date, date_fmt=date_fmt, patch=patch + ) + # rely on the Version object to ensure consistency (e.g. 
remove leading 0s) + if version_cls is None: + version_cls = PkgVersion + next_version = str(version_cls(next_version)) + return next_version + + +def calver_by_date(version: ScmVersion) -> str: + if version.exact and not version.dirty: + return version.format_with("{tag}") + # TODO: move the release-X check to a new scheme + if version.branch is not None and version.branch.startswith("release-"): + branch_ver = _parse_version_tag(version.branch.split("-")[-1], version.config) + if branch_ver is not None: + ver = branch_ver["version"] + match = date_ver_match(ver) + if match: + return ver + return version.format_next_version( + guess_next_date_ver, + node_date=version.node_date, + version_cls=version.config.version_cls, + ) + + +def get_local_node_and_date(version: ScmVersion) -> str: + return _modify_version._format_local_with_time(version, time_format="%Y%m%d") + + +def get_local_node_and_timestamp(version: ScmVersion) -> str: + return _modify_version._format_local_with_time(version, time_format="%Y%m%d%H%M%S") + + +def get_local_dirty_tag(version: ScmVersion) -> str: + return version.format_choice("", "+dirty") + + +def get_no_local_node(version: ScmVersion) -> str: + return "" + + +def postrelease_version(version: ScmVersion) -> str: + if version.exact: + return version.format_with("{tag}") + else: + return version.format_with("{tag}.post{distance}") + + +def format_version(version: ScmVersion) -> str: + log.debug("scm version %s", version) + log.debug("config %s", version.config) + if version.preformatted: + assert isinstance(version.tag, str) + return version.tag + main_version = _entrypoints._call_version_scheme( + version, "setuptools_scm.version_scheme", version.config.version_scheme, None + ) + log.debug("version %s", main_version) + assert main_version is not None + local_version = _entrypoints._call_version_scheme( + version, "setuptools_scm.local_scheme", version.config.local_scheme, "+unknown" + ) + log.debug("local_version %s", local_version) + 
return main_version + local_version diff --git a/.eggs/typing_extensions-4.8.0-py3.11.egg/EGG-INFO/LICENSE b/.eggs/typing_extensions-4.8.0-py3.11.egg/EGG-INFO/LICENSE new file mode 100644 index 0000000..f26bcf4 --- /dev/null +++ b/.eggs/typing_extensions-4.8.0-py3.11.egg/EGG-INFO/LICENSE @@ -0,0 +1,279 @@ +A. HISTORY OF THE SOFTWARE +========================== + +Python was created in the early 1990s by Guido van Rossum at Stichting +Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands +as a successor of a language called ABC. Guido remains Python's +principal author, although it includes many contributions from others. + +In 1995, Guido continued his work on Python at the Corporation for +National Research Initiatives (CNRI, see https://www.cnri.reston.va.us) +in Reston, Virginia where he released several versions of the +software. + +In May 2000, Guido and the Python core development team moved to +BeOpen.com to form the BeOpen PythonLabs team. In October of the same +year, the PythonLabs team moved to Digital Creations, which became +Zope Corporation. In 2001, the Python Software Foundation (PSF, see +https://www.python.org/psf/) was formed, a non-profit organization +created specifically to own Python-related Intellectual Property. +Zope Corporation was a sponsoring member of the PSF. + +All Python releases are Open Source (see https://opensource.org for +the Open Source Definition). Historically, most, but not all, Python +releases have also been GPL-compatible; the table below summarizes +the various releases. + + Release Derived Year Owner GPL- + from compatible? 
(1) + + 0.9.0 thru 1.2 1991-1995 CWI yes + 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes + 1.6 1.5.2 2000 CNRI no + 2.0 1.6 2000 BeOpen.com no + 1.6.1 1.6 2001 CNRI yes (2) + 2.1 2.0+1.6.1 2001 PSF no + 2.0.1 2.0+1.6.1 2001 PSF yes + 2.1.1 2.1+2.0.1 2001 PSF yes + 2.1.2 2.1.1 2002 PSF yes + 2.1.3 2.1.2 2002 PSF yes + 2.2 and above 2.1.1 2001-now PSF yes + +Footnotes: + +(1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + +(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, + because its license has a choice of law clause. According to + CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 + is "not incompatible" with the GPL. + +Thanks to the many outside volunteers who have worked under Guido's +direction to make these releases possible. + + +B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON +=============================================================== + +Python software and documentation are licensed under the +Python Software Foundation License Version 2. + +Starting with Python 3.8.6, examples, recipes, and other code in +the documentation are dual licensed under the PSF License Version 2 +and the Zero-Clause BSD license. + +Some software incorporated into Python is under different licenses. +The licenses are listed with code falling under that license. + + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. 
Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation; +All Rights Reserved" are retained in Python alone or in any derivative version +prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. 
This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 +------------------------------------------- + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + +1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an +office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the +Individual or Organization ("Licensee") accessing and otherwise using +this software in source or binary form and its associated +documentation ("the Software"). + +2. Subject to the terms and conditions of this BeOpen Python License +Agreement, BeOpen hereby grants Licensee a non-exclusive, +royalty-free, world-wide license to reproduce, analyze, test, perform +and/or display publicly, prepare derivative works, distribute, and +otherwise use the Software alone or in any derivative version, +provided, however, that the BeOpen Python License is retained in the +Software, alone or in any derivative version prepared by Licensee. + +3. BeOpen is making the Software available to Licensee on an "AS IS" +basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE +SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS +AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY +DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +5. 
This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +6. This License Agreement shall be governed by and interpreted in all +respects by the law of the State of California, excluding conflict of +law provisions. Nothing in this License Agreement shall be deemed to +create any relationship of agency, partnership, or joint venture +between BeOpen and Licensee. This License Agreement does not grant +permission to use BeOpen trademarks or trade names in a trademark +sense to endorse or promote products or services of Licensee, or any +third party. As an exception, the "BeOpen Python" logos available at +http://www.pythonlabs.com/logos.html may be used according to the +permissions granted on that web page. + +7. By copying, installing or otherwise using the software, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 +--------------------------------------- + +1. This LICENSE AGREEMENT is between the Corporation for National +Research Initiatives, having an office at 1895 Preston White Drive, +Reston, VA 20191 ("CNRI"), and the Individual or Organization +("Licensee") accessing and otherwise using Python 1.6.1 software in +source or binary form and its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, CNRI +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python 1.6.1 +alone or in any derivative version, provided, however, that CNRI's +License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) +1995-2001 Corporation for National Research Initiatives; All Rights +Reserved" are retained in Python 1.6.1 alone or in any derivative +version prepared by Licensee. 
Alternately, in lieu of CNRI's License +Agreement, Licensee may substitute the following text (omitting the +quotes): "Python 1.6.1 is made available subject to the terms and +conditions in CNRI's License Agreement. This Agreement together with +Python 1.6.1 may be located on the internet using the following +unique, persistent identifier (known as a handle): 1895.22/1013. This +Agreement may also be obtained from a proxy server on the internet +using the following URL: http://hdl.handle.net/1895.22/1013". + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python 1.6.1 or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python 1.6.1. + +4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" +basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. This License Agreement shall be governed by the federal +intellectual property law of the United States, including without +limitation the federal copyright law, and, to the extent such +U.S. federal law does not apply, by the law of the Commonwealth of +Virginia, excluding Virginia's conflict of law provisions. 
+Notwithstanding the foregoing, with regard to derivative works based +on Python 1.6.1 that incorporate non-separable material that was +previously distributed under the GNU General Public License (GPL), the +law of the Commonwealth of Virginia shall govern this License +Agreement only as to issues arising under or with respect to +Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this +License Agreement shall be deemed to create any relationship of +agency, partnership, or joint venture between CNRI and Licensee. This +License Agreement does not grant permission to use CNRI trademarks or +trade name in a trademark sense to endorse or promote products or +services of Licensee, or any third party. + +8. By clicking on the "ACCEPT" button where indicated, or by copying, +installing or otherwise using Python 1.6.1, Licensee agrees to be +bound by the terms and conditions of this License Agreement. + + ACCEPT + + +CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 +-------------------------------------------------- + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, +The Netherlands. All rights reserved. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Stichting Mathematisch +Centrum or CWI not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. 
+ +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION +---------------------------------------------------------------------- + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
diff --git a/.eggs/typing_extensions-4.8.0-py3.11.egg/EGG-INFO/PKG-INFO b/.eggs/typing_extensions-4.8.0-py3.11.egg/EGG-INFO/PKG-INFO new file mode 100644 index 0000000..dc7c951 --- /dev/null +++ b/.eggs/typing_extensions-4.8.0-py3.11.egg/EGG-INFO/PKG-INFO @@ -0,0 +1,66 @@ +Metadata-Version: 2.1 +Name: typing_extensions +Version: 4.8.0 +Summary: Backported and Experimental Type Hints for Python 3.8+ +Keywords: annotations,backport,checker,checking,function,hinting,hints,type,typechecking,typehinting,typehints,typing +Author-email: "Guido van Rossum, Jukka Lehtosalo, Łukasz Langa, Michael Lee" +Requires-Python: >=3.8 +Description-Content-Type: text/markdown +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Python Software Foundation License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Software Development +Project-URL: Bug Tracker, https://github.com/python/typing_extensions/issues +Project-URL: Changes, https://github.com/python/typing_extensions/blob/main/CHANGELOG.md +Project-URL: Documentation, https://typing-extensions.readthedocs.io/ +Project-URL: Home, https://github.com/python/typing_extensions +Project-URL: Q & A, https://github.com/python/typing/discussions +Project-URL: Repository, https://github.com/python/typing_extensions + +# Typing Extensions + +[![Chat at https://gitter.im/python/typing](https://badges.gitter.im/python/typing.svg)](https://gitter.im/python/typing) + +[Documentation](https://typing-extensions.readthedocs.io/en/latest/#) – 
+[PyPI](https://pypi.org/project/typing-extensions/) + +## Overview + +The `typing_extensions` module serves two related purposes: + +- Enable use of new type system features on older Python versions. For example, + `typing.TypeGuard` is new in Python 3.10, but `typing_extensions` allows + users on previous Python versions to use it too. +- Enable experimentation with new type system PEPs before they are accepted and + added to the `typing` module. + +`typing_extensions` is treated specially by static type checkers such as +mypy and pyright. Objects defined in `typing_extensions` are treated the same +way as equivalent forms in `typing`. + +`typing_extensions` uses +[Semantic Versioning](https://semver.org/). The +major version will be incremented only for backwards-incompatible changes. +Therefore, it's safe to depend +on `typing_extensions` like this: `typing_extensions >=x.y, <(x+1)`, +where `x.y` is the first version that includes all features you need. + +## Included items + +See [the documentation](https://typing-extensions.readthedocs.io/en/latest/#) for a +complete listing of module contents. + +## Contributing + +See [CONTRIBUTING.md](https://github.com/python/typing_extensions/blob/main/CONTRIBUTING.md) +for how to contribute to `typing_extensions`. 
+ diff --git a/.eggs/typing_extensions-4.8.0-py3.11.egg/EGG-INFO/RECORD b/.eggs/typing_extensions-4.8.0-py3.11.egg/EGG-INFO/RECORD new file mode 100644 index 0000000..9fdd2a5 --- /dev/null +++ b/.eggs/typing_extensions-4.8.0-py3.11.egg/EGG-INFO/RECORD @@ -0,0 +1,5 @@ +typing_extensions.py,sha256=ktWhnF_DOJuyvxy6jW0ndns3KA4byTEB9DzTtEMl26M,103397 +typing_extensions-4.8.0.dist-info/LICENSE,sha256=Oy-B_iHRgcSZxZolbI4ZaEVdZonSaaqFNzv7avQdo78,13936 +typing_extensions-4.8.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 +typing_extensions-4.8.0.dist-info/METADATA,sha256=FwHgtsuPpRLjEsgt7mXT_fzt0jErWt5vEa2ia2kRMaY,2966 +typing_extensions-4.8.0.dist-info/RECORD,, diff --git a/.eggs/typing_extensions-4.8.0-py3.11.egg/EGG-INFO/WHEEL b/.eggs/typing_extensions-4.8.0-py3.11.egg/EGG-INFO/WHEEL new file mode 100644 index 0000000..3b5e64b --- /dev/null +++ b/.eggs/typing_extensions-4.8.0-py3.11.egg/EGG-INFO/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.9.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/.eggs/typing_extensions-4.8.0-py3.11.egg/typing_extensions.py b/.eggs/typing_extensions-4.8.0-py3.11.egg/typing_extensions.py new file mode 100644 index 0000000..c96bf90 --- /dev/null +++ b/.eggs/typing_extensions-4.8.0-py3.11.egg/typing_extensions.py @@ -0,0 +1,2892 @@ +import abc +import collections +import collections.abc +import functools +import inspect +import operator +import sys +import types as _types +import typing +import warnings + +__all__ = [ + # Super-special typing primitives. + 'Any', + 'ClassVar', + 'Concatenate', + 'Final', + 'LiteralString', + 'ParamSpec', + 'ParamSpecArgs', + 'ParamSpecKwargs', + 'Self', + 'Type', + 'TypeVar', + 'TypeVarTuple', + 'Unpack', + + # ABCs (from collections.abc). + 'Awaitable', + 'AsyncIterator', + 'AsyncIterable', + 'Coroutine', + 'AsyncGenerator', + 'AsyncContextManager', + 'Buffer', + 'ChainMap', + + # Concrete collection types. 
+ 'ContextManager', + 'Counter', + 'Deque', + 'DefaultDict', + 'NamedTuple', + 'OrderedDict', + 'TypedDict', + + # Structural checks, a.k.a. protocols. + 'SupportsAbs', + 'SupportsBytes', + 'SupportsComplex', + 'SupportsFloat', + 'SupportsIndex', + 'SupportsInt', + 'SupportsRound', + + # One-off things. + 'Annotated', + 'assert_never', + 'assert_type', + 'clear_overloads', + 'dataclass_transform', + 'deprecated', + 'Doc', + 'get_overloads', + 'final', + 'get_args', + 'get_origin', + 'get_original_bases', + 'get_protocol_members', + 'get_type_hints', + 'IntVar', + 'is_protocol', + 'is_typeddict', + 'Literal', + 'NewType', + 'overload', + 'override', + 'Protocol', + 'reveal_type', + 'runtime', + 'runtime_checkable', + 'Text', + 'TypeAlias', + 'TypeAliasType', + 'TypeGuard', + 'TYPE_CHECKING', + 'Never', + 'NoReturn', + 'Required', + 'NotRequired', + + # Pure aliases, have always been in typing + 'AbstractSet', + 'AnyStr', + 'BinaryIO', + 'Callable', + 'Collection', + 'Container', + 'Dict', + 'ForwardRef', + 'FrozenSet', + 'Generator', + 'Generic', + 'Hashable', + 'IO', + 'ItemsView', + 'Iterable', + 'Iterator', + 'KeysView', + 'List', + 'Mapping', + 'MappingView', + 'Match', + 'MutableMapping', + 'MutableSequence', + 'MutableSet', + 'Optional', + 'Pattern', + 'Reversible', + 'Sequence', + 'Set', + 'Sized', + 'TextIO', + 'Tuple', + 'Union', + 'ValuesView', + 'cast', + 'no_type_check', + 'no_type_check_decorator', +] + +# for backward compatibility +PEP_560 = True +GenericMeta = type + +# The functions below are modified copies of typing internal helpers. +# They are needed by _ProtocolMeta and they provide support for PEP 646. + + +class _Sentinel: + def __repr__(self): + return "" + + +_marker = _Sentinel() + + +def _check_generic(cls, parameters, elen=_marker): + """Check correct count for parameters of a generic cls (internal helper). + This gives a nice error message in case of count mismatch. 
+ """ + if not elen: + raise TypeError(f"{cls} is not a generic class") + if elen is _marker: + if not hasattr(cls, "__parameters__") or not cls.__parameters__: + raise TypeError(f"{cls} is not a generic class") + elen = len(cls.__parameters__) + alen = len(parameters) + if alen != elen: + if hasattr(cls, "__parameters__"): + parameters = [p for p in cls.__parameters__ if not _is_unpack(p)] + num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters) + if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples): + return + raise TypeError(f"Too {'many' if alen > elen else 'few'} parameters for {cls};" + f" actual {alen}, expected {elen}") + + +if sys.version_info >= (3, 10): + def _should_collect_from_parameters(t): + return isinstance( + t, (typing._GenericAlias, _types.GenericAlias, _types.UnionType) + ) +elif sys.version_info >= (3, 9): + def _should_collect_from_parameters(t): + return isinstance(t, (typing._GenericAlias, _types.GenericAlias)) +else: + def _should_collect_from_parameters(t): + return isinstance(t, typing._GenericAlias) and not t._special + + +def _collect_type_vars(types, typevar_types=None): + """Collect all type variable contained in types in order of + first appearance (lexicographic order). For example:: + + _collect_type_vars((T, List[S, T])) == (T, S) + """ + if typevar_types is None: + typevar_types = typing.TypeVar + tvars = [] + for t in types: + if ( + isinstance(t, typevar_types) and + t not in tvars and + not _is_unpack(t) + ): + tvars.append(t) + if _should_collect_from_parameters(t): + tvars.extend([t for t in t.__parameters__ if t not in tvars]) + return tuple(tvars) + + +NoReturn = typing.NoReturn + +# Some unconstrained type variables. These are used by the container types. +# (These are not for export.) +T = typing.TypeVar('T') # Any type. +KT = typing.TypeVar('KT') # Key type. +VT = typing.TypeVar('VT') # Value type. +T_co = typing.TypeVar('T_co', covariant=True) # Any type covariant containers. 
+T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant. + + +if sys.version_info >= (3, 11): + from typing import Any +else: + + class _AnyMeta(type): + def __instancecheck__(self, obj): + if self is Any: + raise TypeError("typing_extensions.Any cannot be used with isinstance()") + return super().__instancecheck__(obj) + + def __repr__(self): + if self is Any: + return "typing_extensions.Any" + return super().__repr__() + + class Any(metaclass=_AnyMeta): + """Special type indicating an unconstrained type. + - Any is compatible with every type. + - Any assumed to have all methods. + - All values assumed to be instances of Any. + Note that all the above statements are true from the point of view of + static type checkers. At runtime, Any should not be used with instance + checks. + """ + def __new__(cls, *args, **kwargs): + if cls is Any: + raise TypeError("Any cannot be instantiated") + return super().__new__(cls, *args, **kwargs) + + +ClassVar = typing.ClassVar + + +class _ExtensionsSpecialForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + +Final = typing.Final + +if sys.version_info >= (3, 11): + final = typing.final +else: + # @final exists in 3.8+, but we backport it for all versions + # before 3.11 to keep support for the __final__ attribute. + # See https://bugs.python.org/issue46342 + def final(f): + """This decorator can be used to indicate to type checkers that + the decorated method cannot be overridden, and decorated class + cannot be subclassed. For example: + + class Base: + @final + def done(self) -> None: + ... + class Sub(Base): + def done(self) -> None: # Error reported by type checker + ... + @final + class Leaf: + ... + class Other(Leaf): # Error reported by type checker + ... + + There is no runtime checking of these properties. The decorator + sets the ``__final__`` attribute to ``True`` on the decorated object + to allow runtime introspection. 
+ """ + try: + f.__final__ = True + except (AttributeError, TypeError): + # Skip the attribute silently if it is not writable. + # AttributeError happens if the object has __slots__ or a + # read-only property, TypeError if it's a builtin class. + pass + return f + + +def IntVar(name): + return typing.TypeVar(name) + + +# A Literal bug was fixed in 3.11.0, 3.10.1 and 3.9.8 +if sys.version_info >= (3, 10, 1): + Literal = typing.Literal +else: + def _flatten_literal_params(parameters): + """An internal helper for Literal creation: flatten Literals among parameters""" + params = [] + for p in parameters: + if isinstance(p, _LiteralGenericAlias): + params.extend(p.__args__) + else: + params.append(p) + return tuple(params) + + def _value_and_type_iter(params): + for p in params: + yield p, type(p) + + class _LiteralGenericAlias(typing._GenericAlias, _root=True): + def __eq__(self, other): + if not isinstance(other, _LiteralGenericAlias): + return NotImplemented + these_args_deduped = set(_value_and_type_iter(self.__args__)) + other_args_deduped = set(_value_and_type_iter(other.__args__)) + return these_args_deduped == other_args_deduped + + def __hash__(self): + return hash(frozenset(_value_and_type_iter(self.__args__))) + + class _LiteralForm(_ExtensionsSpecialForm, _root=True): + def __init__(self, doc: str): + self._name = 'Literal' + self._doc = self.__doc__ = doc + + def __getitem__(self, parameters): + if not isinstance(parameters, tuple): + parameters = (parameters,) + + parameters = _flatten_literal_params(parameters) + + val_type_pairs = list(_value_and_type_iter(parameters)) + try: + deduped_pairs = set(val_type_pairs) + except TypeError: + # unhashable parameters + pass + else: + # similar logic to typing._deduplicate on Python 3.9+ + if len(deduped_pairs) < len(val_type_pairs): + new_parameters = [] + for pair in val_type_pairs: + if pair in deduped_pairs: + new_parameters.append(pair[0]) + deduped_pairs.remove(pair) + assert not deduped_pairs, 
deduped_pairs + parameters = tuple(new_parameters) + + return _LiteralGenericAlias(self, parameters) + + Literal = _LiteralForm(doc="""\ + A type that can be used to indicate to type checkers + that the corresponding value has a value literally equivalent + to the provided parameter. For example: + + var: Literal[4] = 4 + + The type checker understands that 'var' is literally equal to + the value 4 and no other value. + + Literal[...] cannot be subclassed. There is no runtime + checking verifying that the parameter is actually a value + instead of a type.""") + + +_overload_dummy = typing._overload_dummy + + +if hasattr(typing, "get_overloads"): # 3.11+ + overload = typing.overload + get_overloads = typing.get_overloads + clear_overloads = typing.clear_overloads +else: + # {module: {qualname: {firstlineno: func}}} + _overload_registry = collections.defaultdict( + functools.partial(collections.defaultdict, dict) + ) + + def overload(func): + """Decorator for overloaded functions/methods. + + In a stub file, place two or more stub definitions for the same + function in a row, each decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + + In a non-stub file (i.e. a regular .py file), do the same but + follow it with an implementation. The implementation should *not* + be decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + def utf8(value): + # implementation goes here + + The overloads for a function can be retrieved at runtime using the + get_overloads() function. + """ + # classmethod and staticmethod + f = getattr(func, "__func__", func) + try: + _overload_registry[f.__module__][f.__qualname__][ + f.__code__.co_firstlineno + ] = func + except AttributeError: + # Not a normal function; ignore. 
+ pass + return _overload_dummy + + def get_overloads(func): + """Return all defined overloads for *func* as a sequence.""" + # classmethod and staticmethod + f = getattr(func, "__func__", func) + if f.__module__ not in _overload_registry: + return [] + mod_dict = _overload_registry[f.__module__] + if f.__qualname__ not in mod_dict: + return [] + return list(mod_dict[f.__qualname__].values()) + + def clear_overloads(): + """Clear all overloads in the registry.""" + _overload_registry.clear() + + +# This is not a real generic class. Don't use outside annotations. +Type = typing.Type + +# Various ABCs mimicking those in collections.abc. +# A few are simply re-exported for completeness. +Awaitable = typing.Awaitable +Coroutine = typing.Coroutine +AsyncIterable = typing.AsyncIterable +AsyncIterator = typing.AsyncIterator +Deque = typing.Deque +ContextManager = typing.ContextManager +AsyncContextManager = typing.AsyncContextManager +DefaultDict = typing.DefaultDict +OrderedDict = typing.OrderedDict +Counter = typing.Counter +ChainMap = typing.ChainMap +AsyncGenerator = typing.AsyncGenerator +Text = typing.Text +TYPE_CHECKING = typing.TYPE_CHECKING + + +_PROTO_ALLOWLIST = { + 'collections.abc': [ + 'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable', + 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', 'Buffer', + ], + 'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'], + 'typing_extensions': ['Buffer'], +} + + +_EXCLUDED_ATTRS = { + "__abstractmethods__", "__annotations__", "__weakref__", "_is_protocol", + "_is_runtime_protocol", "__dict__", "__slots__", "__parameters__", + "__orig_bases__", "__module__", "_MutableMapping__marker", "__doc__", + "__subclasshook__", "__orig_class__", "__init__", "__new__", + "__protocol_attrs__", "__callable_proto_members_only__", +} + +if sys.version_info >= (3, 9): + _EXCLUDED_ATTRS.add("__class_getitem__") + +if sys.version_info >= (3, 12): + _EXCLUDED_ATTRS.add("__type_params__") + 
+_EXCLUDED_ATTRS = frozenset(_EXCLUDED_ATTRS) + + +def _get_protocol_attrs(cls): + attrs = set() + for base in cls.__mro__[:-1]: # without object + if base.__name__ in {'Protocol', 'Generic'}: + continue + annotations = getattr(base, '__annotations__', {}) + for attr in (*base.__dict__, *annotations): + if (not attr.startswith('_abc_') and attr not in _EXCLUDED_ATTRS): + attrs.add(attr) + return attrs + + +def _caller(depth=2): + try: + return sys._getframe(depth).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): # For platforms without _getframe() + return None + + +# The performance of runtime-checkable protocols is significantly improved on Python 3.12, +# so we backport the 3.12 version of Protocol to Python <=3.11 +if sys.version_info >= (3, 12): + Protocol = typing.Protocol +else: + def _allow_reckless_class_checks(depth=3): + """Allow instance and class checks for special stdlib modules. + The abc and functools modules indiscriminately call isinstance() and + issubclass() on the whole MRO of a user class, which may contain protocols. + """ + return _caller(depth) in {'abc', 'functools', None} + + def _no_init(self, *args, **kwargs): + if type(self)._is_protocol: + raise TypeError('Protocols cannot be instantiated') + + # Inheriting from typing._ProtocolMeta isn't actually desirable, + # but is necessary to allow typing.Protocol and typing_extensions.Protocol + # to mix without getting TypeErrors about "metaclass conflict" + class _ProtocolMeta(type(typing.Protocol)): + # This metaclass is somewhat unfortunate, + # but is necessary for several reasons... 
+ # + # NOTE: DO NOT call super() in any methods in this class + # That would call the methods on typing._ProtocolMeta on Python 3.8-3.11 + # and those are slow + def __new__(mcls, name, bases, namespace, **kwargs): + if name == "Protocol" and len(bases) < 2: + pass + elif {Protocol, typing.Protocol} & set(bases): + for base in bases: + if not ( + base in {object, typing.Generic, Protocol, typing.Protocol} + or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__, []) + or is_protocol(base) + ): + raise TypeError( + f"Protocols can only inherit from other protocols, " + f"got {base!r}" + ) + return abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs) + + def __init__(cls, *args, **kwargs): + abc.ABCMeta.__init__(cls, *args, **kwargs) + if getattr(cls, "_is_protocol", False): + cls.__protocol_attrs__ = _get_protocol_attrs(cls) + # PEP 544 prohibits using issubclass() + # with protocols that have non-method members. + cls.__callable_proto_members_only__ = all( + callable(getattr(cls, attr, None)) for attr in cls.__protocol_attrs__ + ) + + def __subclasscheck__(cls, other): + if cls is Protocol: + return type.__subclasscheck__(cls, other) + if ( + getattr(cls, '_is_protocol', False) + and not _allow_reckless_class_checks() + ): + if not isinstance(other, type): + # Same error message as for issubclass(1, int). + raise TypeError('issubclass() arg 1 must be a class') + if ( + not cls.__callable_proto_members_only__ + and cls.__dict__.get("__subclasshook__") is _proto_hook + ): + raise TypeError( + "Protocols with non-method members don't support issubclass()" + ) + if not getattr(cls, '_is_runtime_protocol', False): + raise TypeError( + "Instance and class checks can only be used with " + "@runtime_checkable protocols" + ) + return abc.ABCMeta.__subclasscheck__(cls, other) + + def __instancecheck__(cls, instance): + # We need this method for situations where attributes are + # assigned in __init__. 
+ if cls is Protocol: + return type.__instancecheck__(cls, instance) + if not getattr(cls, "_is_protocol", False): + # i.e., it's a concrete subclass of a protocol + return abc.ABCMeta.__instancecheck__(cls, instance) + + if ( + not getattr(cls, '_is_runtime_protocol', False) and + not _allow_reckless_class_checks() + ): + raise TypeError("Instance and class checks can only be used with" + " @runtime_checkable protocols") + + if abc.ABCMeta.__instancecheck__(cls, instance): + return True + + for attr in cls.__protocol_attrs__: + try: + val = inspect.getattr_static(instance, attr) + except AttributeError: + break + if val is None and callable(getattr(cls, attr, None)): + break + else: + return True + + return False + + def __eq__(cls, other): + # Hack so that typing.Generic.__class_getitem__ + # treats typing_extensions.Protocol + # as equivalent to typing.Protocol + if abc.ABCMeta.__eq__(cls, other) is True: + return True + return cls is Protocol and other is typing.Protocol + + # This has to be defined, or the abc-module cache + # complains about classes with this metaclass being unhashable, + # if we define only __eq__! + def __hash__(cls) -> int: + return type.__hash__(cls) + + @classmethod + def _proto_hook(cls, other): + if not cls.__dict__.get('_is_protocol', False): + return NotImplemented + + for attr in cls.__protocol_attrs__: + for base in other.__mro__: + # Check if the members appears in the class dictionary... + if attr in base.__dict__: + if base.__dict__[attr] is None: + return NotImplemented + break + + # ...or in annotations, if it is a sub-protocol. 
+ annotations = getattr(base, '__annotations__', {}) + if ( + isinstance(annotations, collections.abc.Mapping) + and attr in annotations + and is_protocol(other) + ): + break + else: + return NotImplemented + return True + + class Protocol(typing.Generic, metaclass=_ProtocolMeta): + __doc__ = typing.Protocol.__doc__ + __slots__ = () + _is_protocol = True + _is_runtime_protocol = False + + def __init_subclass__(cls, *args, **kwargs): + super().__init_subclass__(*args, **kwargs) + + # Determine if this is a protocol or a concrete subclass. + if not cls.__dict__.get('_is_protocol', False): + cls._is_protocol = any(b is Protocol for b in cls.__bases__) + + # Set (or override) the protocol subclass hook. + if '__subclasshook__' not in cls.__dict__: + cls.__subclasshook__ = _proto_hook + + # Prohibit instantiation for protocol classes + if cls._is_protocol and cls.__init__ is Protocol.__init__: + cls.__init__ = _no_init + + +# The "runtime" alias exists for backwards compatibility. +runtime = runtime_checkable = typing.runtime_checkable + + +# Our version of runtime-checkable protocols is faster on Python 3.8-3.11 +if sys.version_info >= (3, 12): + SupportsInt = typing.SupportsInt + SupportsFloat = typing.SupportsFloat + SupportsComplex = typing.SupportsComplex + SupportsBytes = typing.SupportsBytes + SupportsIndex = typing.SupportsIndex + SupportsAbs = typing.SupportsAbs + SupportsRound = typing.SupportsRound +else: + @runtime_checkable + class SupportsInt(Protocol): + """An ABC with one abstract method __int__.""" + __slots__ = () + + @abc.abstractmethod + def __int__(self) -> int: + pass + + @runtime_checkable + class SupportsFloat(Protocol): + """An ABC with one abstract method __float__.""" + __slots__ = () + + @abc.abstractmethod + def __float__(self) -> float: + pass + + @runtime_checkable + class SupportsComplex(Protocol): + """An ABC with one abstract method __complex__.""" + __slots__ = () + + @abc.abstractmethod + def __complex__(self) -> complex: + pass + + 
@runtime_checkable + class SupportsBytes(Protocol): + """An ABC with one abstract method __bytes__.""" + __slots__ = () + + @abc.abstractmethod + def __bytes__(self) -> bytes: + pass + + @runtime_checkable + class SupportsIndex(Protocol): + __slots__ = () + + @abc.abstractmethod + def __index__(self) -> int: + pass + + @runtime_checkable + class SupportsAbs(Protocol[T_co]): + """ + An ABC with one abstract method __abs__ that is covariant in its return type. + """ + __slots__ = () + + @abc.abstractmethod + def __abs__(self) -> T_co: + pass + + @runtime_checkable + class SupportsRound(Protocol[T_co]): + """ + An ABC with one abstract method __round__ that is covariant in its return type. + """ + __slots__ = () + + @abc.abstractmethod + def __round__(self, ndigits: int = 0) -> T_co: + pass + + +def _ensure_subclassable(mro_entries): + def inner(func): + if sys.implementation.name == "pypy" and sys.version_info < (3, 9): + cls_dict = { + "__call__": staticmethod(func), + "__mro_entries__": staticmethod(mro_entries) + } + t = type(func.__name__, (), cls_dict) + return functools.update_wrapper(t(), func) + else: + func.__mro_entries__ = mro_entries + return func + return inner + + +if sys.version_info >= (3, 13): + # The standard library TypedDict in Python 3.8 does not store runtime information + # about which (if any) keys are optional. See https://bugs.python.org/issue38834 + # The standard library TypedDict in Python 3.9.0/1 does not honour the "total" + # keyword with old-style TypedDict(). See https://bugs.python.org/issue42059 + # The standard library TypedDict below Python 3.11 does not store runtime + # information about optional and required keys when using Required or NotRequired. + # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11. + # Aaaand on 3.12 we add __orig_bases__ to TypedDict + # to enable better runtime introspection. + # On 3.13 we deprecate some odd ways of creating TypedDicts. 
+ TypedDict = typing.TypedDict + _TypedDictMeta = typing._TypedDictMeta + is_typeddict = typing.is_typeddict +else: + # 3.10.0 and later + _TAKES_MODULE = "module" in inspect.signature(typing._type_check).parameters + + class _TypedDictMeta(type): + def __new__(cls, name, bases, ns, total=True): + """Create new typed dict class object. + + This method is called when TypedDict is subclassed, + or when TypedDict is instantiated. This way + TypedDict supports all three syntax forms described in its docstring. + Subclasses and instances of TypedDict return actual dictionaries. + """ + for base in bases: + if type(base) is not _TypedDictMeta and base is not typing.Generic: + raise TypeError('cannot inherit from both a TypedDict type ' + 'and a non-TypedDict base class') + + if any(issubclass(b, typing.Generic) for b in bases): + generic_base = (typing.Generic,) + else: + generic_base = () + + # typing.py generally doesn't let you inherit from plain Generic, unless + # the name of the class happens to be "Protocol" + tp_dict = type.__new__(_TypedDictMeta, "Protocol", (*generic_base, dict), ns) + tp_dict.__name__ = name + if tp_dict.__qualname__ == "Protocol": + tp_dict.__qualname__ = name + + if not hasattr(tp_dict, '__orig_bases__'): + tp_dict.__orig_bases__ = bases + + annotations = {} + own_annotations = ns.get('__annotations__', {}) + msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type" + if _TAKES_MODULE: + own_annotations = { + n: typing._type_check(tp, msg, module=tp_dict.__module__) + for n, tp in own_annotations.items() + } + else: + own_annotations = { + n: typing._type_check(tp, msg) + for n, tp in own_annotations.items() + } + required_keys = set() + optional_keys = set() + + for base in bases: + annotations.update(base.__dict__.get('__annotations__', {})) + required_keys.update(base.__dict__.get('__required_keys__', ())) + optional_keys.update(base.__dict__.get('__optional_keys__', ())) + + annotations.update(own_annotations) + for 
annotation_key, annotation_type in own_annotations.items(): + annotation_origin = get_origin(annotation_type) + if annotation_origin is Annotated: + annotation_args = get_args(annotation_type) + if annotation_args: + annotation_type = annotation_args[0] + annotation_origin = get_origin(annotation_type) + + if annotation_origin is Required: + required_keys.add(annotation_key) + elif annotation_origin is NotRequired: + optional_keys.add(annotation_key) + elif total: + required_keys.add(annotation_key) + else: + optional_keys.add(annotation_key) + + tp_dict.__annotations__ = annotations + tp_dict.__required_keys__ = frozenset(required_keys) + tp_dict.__optional_keys__ = frozenset(optional_keys) + if not hasattr(tp_dict, '__total__'): + tp_dict.__total__ = total + return tp_dict + + __call__ = dict # static method + + def __subclasscheck__(cls, other): + # Typed dicts are only for static structural subtyping. + raise TypeError('TypedDict does not support instance and class checks') + + __instancecheck__ = __subclasscheck__ + + _TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {}) + + @_ensure_subclassable(lambda bases: (_TypedDict,)) + def TypedDict(typename, fields=_marker, /, *, total=True, **kwargs): + """A simple typed namespace. At runtime it is equivalent to a plain dict. + + TypedDict creates a dictionary type such that a type checker will expect all + instances to have a certain set of keys, where each key is + associated with a value of a consistent type. This expectation + is not checked at runtime. + + Usage:: + + class Point2D(TypedDict): + x: int + y: int + label: str + + a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK + b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check + + assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first') + + The type info can be accessed via the Point2D.__annotations__ dict, and + the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets. 
+ TypedDict supports an additional equivalent form:: + + Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str}) + + By default, all keys must be present in a TypedDict. It is possible + to override this by specifying totality:: + + class Point2D(TypedDict, total=False): + x: int + y: int + + This means that a Point2D TypedDict can have any of the keys omitted. A type + checker is only expected to support a literal False or True as the value of + the total argument. True is the default, and makes all items defined in the + class body be required. + + The Required and NotRequired special forms can also be used to mark + individual keys as being required or not required:: + + class Point2D(TypedDict): + x: int # the "x" key must always be present (Required is the default) + y: NotRequired[int] # the "y" key can be omitted + + See PEP 655 for more details on Required and NotRequired. + """ + if fields is _marker or fields is None: + if fields is _marker: + deprecated_thing = "Failing to pass a value for the 'fields' parameter" + else: + deprecated_thing = "Passing `None` as the 'fields' parameter" + + example = f"`{typename} = TypedDict({typename!r}, {{}})`" + deprecation_msg = ( + f"{deprecated_thing} is deprecated and will be disallowed in " + "Python 3.15. To create a TypedDict class with 0 fields " + "using the functional syntax, pass an empty dictionary, e.g. " + ) + example + "." 
+ warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2) + fields = kwargs + elif kwargs: + raise TypeError("TypedDict takes either a dict or keyword arguments," + " but not both") + if kwargs: + warnings.warn( + "The kwargs-based syntax for TypedDict definitions is deprecated " + "in Python 3.11, will be removed in Python 3.13, and may not be " + "understood by third-party type checkers.", + DeprecationWarning, + stacklevel=2, + ) + + ns = {'__annotations__': dict(fields)} + module = _caller() + if module is not None: + # Setting correct module is necessary to make typed dict classes pickleable. + ns['__module__'] = module + + td = _TypedDictMeta(typename, (), ns, total=total) + td.__orig_bases__ = (TypedDict,) + return td + + if hasattr(typing, "_TypedDictMeta"): + _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta) + else: + _TYPEDDICT_TYPES = (_TypedDictMeta,) + + def is_typeddict(tp): + """Check if an annotation is a TypedDict class + + For example:: + class Film(TypedDict): + title: str + year: int + + is_typeddict(Film) # => True + is_typeddict(Union[list, str]) # => False + """ + # On 3.8, this would otherwise return True + if hasattr(typing, "TypedDict") and tp is typing.TypedDict: + return False + return isinstance(tp, _TYPEDDICT_TYPES) + + +if hasattr(typing, "assert_type"): + assert_type = typing.assert_type + +else: + def assert_type(val, typ, /): + """Assert (to the type checker) that the value is of the given type. + + When the type checker encounters a call to assert_type(), it + emits an error if the value is not of the specified type:: + + def greet(name: str) -> None: + assert_type(name, str) # ok + assert_type(name, int) # type checker error + + At runtime this returns the first argument unchanged and otherwise + does nothing. 
+ """ + return val + + +if hasattr(typing, "Required"): # 3.11+ + get_type_hints = typing.get_type_hints +else: # <=3.10 + # replaces _strip_annotations() + def _strip_extras(t): + """Strips Annotated, Required and NotRequired from a given type.""" + if isinstance(t, _AnnotatedAlias): + return _strip_extras(t.__origin__) + if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired): + return _strip_extras(t.__args__[0]) + if isinstance(t, typing._GenericAlias): + stripped_args = tuple(_strip_extras(a) for a in t.__args__) + if stripped_args == t.__args__: + return t + return t.copy_with(stripped_args) + if hasattr(_types, "GenericAlias") and isinstance(t, _types.GenericAlias): + stripped_args = tuple(_strip_extras(a) for a in t.__args__) + if stripped_args == t.__args__: + return t + return _types.GenericAlias(t.__origin__, stripped_args) + if hasattr(_types, "UnionType") and isinstance(t, _types.UnionType): + stripped_args = tuple(_strip_extras(a) for a in t.__args__) + if stripped_args == t.__args__: + return t + return functools.reduce(operator.or_, stripped_args) + + return t + + def get_type_hints(obj, globalns=None, localns=None, include_extras=False): + """Return type hints for an object. + + This is often the same as obj.__annotations__, but it handles + forward references encoded as string literals, adds Optional[t] if a + default value equal to None is set and recursively replaces all + 'Annotated[T, ...]', 'Required[T]' or 'NotRequired[T]' with 'T' + (unless 'include_extras=True'). + + The argument may be a module, class, method, or function. The annotations + are returned as a dictionary. For classes, annotations include also + inherited members. + + TypeError is raised if the argument is not of a type that can contain + annotations, and an empty dictionary is returned if no annotations are + present. + + BEWARE -- the behavior of globalns and localns is counterintuitive + (unless you are familiar with how eval() and exec() work). 
The + search order is locals first, then globals. + + - If no dict arguments are passed, an attempt is made to use the + globals from obj (or the respective module's globals for classes), + and these are also used as the locals. If the object does not appear + to have globals, an empty dictionary is used. + + - If one dict argument is passed, it is used for both globals and + locals. + + - If two dict arguments are passed, they specify globals and + locals, respectively. + """ + if hasattr(typing, "Annotated"): # 3.9+ + hint = typing.get_type_hints( + obj, globalns=globalns, localns=localns, include_extras=True + ) + else: # 3.8 + hint = typing.get_type_hints(obj, globalns=globalns, localns=localns) + if include_extras: + return hint + return {k: _strip_extras(t) for k, t in hint.items()} + + +# Python 3.9+ has PEP 593 (Annotated) +if hasattr(typing, 'Annotated'): + Annotated = typing.Annotated + # Not exported and not a public API, but needed for get_origin() and get_args() + # to work. + _AnnotatedAlias = typing._AnnotatedAlias +# 3.8 +else: + class _AnnotatedAlias(typing._GenericAlias, _root=True): + """Runtime representation of an annotated type. + + At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't' + with extra annotations. The alias behaves like a normal typing alias, + instantiating is the same as instantiating the underlying type, binding + it to types is also the same. 
+ """ + def __init__(self, origin, metadata): + if isinstance(origin, _AnnotatedAlias): + metadata = origin.__metadata__ + metadata + origin = origin.__origin__ + super().__init__(origin, origin) + self.__metadata__ = metadata + + def copy_with(self, params): + assert len(params) == 1 + new_type = params[0] + return _AnnotatedAlias(new_type, self.__metadata__) + + def __repr__(self): + return (f"typing_extensions.Annotated[{typing._type_repr(self.__origin__)}, " + f"{', '.join(repr(a) for a in self.__metadata__)}]") + + def __reduce__(self): + return operator.getitem, ( + Annotated, (self.__origin__,) + self.__metadata__ + ) + + def __eq__(self, other): + if not isinstance(other, _AnnotatedAlias): + return NotImplemented + if self.__origin__ != other.__origin__: + return False + return self.__metadata__ == other.__metadata__ + + def __hash__(self): + return hash((self.__origin__, self.__metadata__)) + + class Annotated: + """Add context specific metadata to a type. + + Example: Annotated[int, runtime_check.Unsigned] indicates to the + hypothetical runtime_check module that this type is an unsigned int. + Every other consumer of this type can ignore this metadata and treat + this type as int. + + The first argument to Annotated must be a valid type (and will be in + the __origin__ field), the remaining arguments are kept as a tuple in + the __extra__ field. + + Details: + + - It's an error to call `Annotated` with less than two arguments. 
+ - Nested Annotated are flattened:: + + Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3] + + - Instantiating an annotated type is equivalent to instantiating the + underlying type:: + + Annotated[C, Ann1](5) == C(5) + + - Annotated can be used as a generic type alias:: + + Optimized = Annotated[T, runtime.Optimize()] + Optimized[int] == Annotated[int, runtime.Optimize()] + + OptimizedList = Annotated[List[T], runtime.Optimize()] + OptimizedList[int] == Annotated[List[int], runtime.Optimize()] + """ + + __slots__ = () + + def __new__(cls, *args, **kwargs): + raise TypeError("Type Annotated cannot be instantiated.") + + @typing._tp_cache + def __class_getitem__(cls, params): + if not isinstance(params, tuple) or len(params) < 2: + raise TypeError("Annotated[...] should be used " + "with at least two arguments (a type and an " + "annotation).") + allowed_special_forms = (ClassVar, Final) + if get_origin(params[0]) in allowed_special_forms: + origin = params[0] + else: + msg = "Annotated[t, ...]: t must be a type." + origin = typing._type_check(params[0], msg) + metadata = tuple(params[1:]) + return _AnnotatedAlias(origin, metadata) + + def __init_subclass__(cls, *args, **kwargs): + raise TypeError( + f"Cannot subclass {cls.__module__}.Annotated" + ) + +# Python 3.8 has get_origin() and get_args() but those implementations aren't +# Annotated-aware, so we can't use those. Python 3.9's versions don't support +# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do. +if sys.version_info[:2] >= (3, 10): + get_origin = typing.get_origin + get_args = typing.get_args +# 3.8-3.9 +else: + try: + # 3.9+ + from typing import _BaseGenericAlias + except ImportError: + _BaseGenericAlias = typing._GenericAlias + try: + # 3.9+ + from typing import GenericAlias as _typing_GenericAlias + except ImportError: + _typing_GenericAlias = typing._GenericAlias + + def get_origin(tp): + """Get the unsubscripted version of a type. 
+ + This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar + and Annotated. Return None for unsupported types. Examples:: + + get_origin(Literal[42]) is Literal + get_origin(int) is None + get_origin(ClassVar[int]) is ClassVar + get_origin(Generic) is Generic + get_origin(Generic[T]) is Generic + get_origin(Union[T, int]) is Union + get_origin(List[Tuple[T, T]][int]) == list + get_origin(P.args) is P + """ + if isinstance(tp, _AnnotatedAlias): + return Annotated + if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias, _BaseGenericAlias, + ParamSpecArgs, ParamSpecKwargs)): + return tp.__origin__ + if tp is typing.Generic: + return typing.Generic + return None + + def get_args(tp): + """Get type arguments with all substitutions performed. + + For unions, basic simplifications used by Union constructor are performed. + Examples:: + get_args(Dict[str, int]) == (str, int) + get_args(int) == () + get_args(Union[int, Union[T, int], str][int]) == (int, str) + get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) + get_args(Callable[[], T][int]) == ([], int) + """ + if isinstance(tp, _AnnotatedAlias): + return (tp.__origin__,) + tp.__metadata__ + if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias)): + if getattr(tp, "_special", False): + return () + res = tp.__args__ + if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis: + res = (list(res[:-1]), res[-1]) + return res + return () + + +# 3.10+ +if hasattr(typing, 'TypeAlias'): + TypeAlias = typing.TypeAlias +# 3.9 +elif sys.version_info[:2] >= (3, 9): + @_ExtensionsSpecialForm + def TypeAlias(self, parameters): + """Special marker indicating that an assignment should + be recognized as a proper type alias definition by type + checkers. + + For example:: + + Predicate: TypeAlias = Callable[..., bool] + + It's invalid when used anywhere except as in the example above. 
+ """ + raise TypeError(f"{self} is not subscriptable") +# 3.8 +else: + TypeAlias = _ExtensionsSpecialForm( + 'TypeAlias', + doc="""Special marker indicating that an assignment should + be recognized as a proper type alias definition by type + checkers. + + For example:: + + Predicate: TypeAlias = Callable[..., bool] + + It's invalid when used anywhere except as in the example + above.""" + ) + + +def _set_default(type_param, default): + if isinstance(default, (tuple, list)): + type_param.__default__ = tuple((typing._type_check(d, "Default must be a type") + for d in default)) + elif default != _marker: + if isinstance(type_param, ParamSpec) and default is ...: # ... not valid <3.11 + type_param.__default__ = default + else: + type_param.__default__ = typing._type_check(default, "Default must be a type") + else: + type_param.__default__ = None + + +def _set_module(typevarlike): + # for pickling: + def_mod = _caller(depth=3) + if def_mod != 'typing_extensions': + typevarlike.__module__ = def_mod + + +class _DefaultMixin: + """Mixin for TypeVarLike defaults.""" + + __slots__ = () + __init__ = _set_default + + +# Classes using this metaclass must provide a _backported_typevarlike ClassVar +class _TypeVarLikeMeta(type): + def __instancecheck__(cls, __instance: Any) -> bool: + return isinstance(__instance, cls._backported_typevarlike) + + +# Add default and infer_variance parameters from PEP 696 and 695 +class TypeVar(metaclass=_TypeVarLikeMeta): + """Type variable.""" + + _backported_typevarlike = typing.TypeVar + + def __new__(cls, name, *constraints, bound=None, + covariant=False, contravariant=False, + default=_marker, infer_variance=False): + if hasattr(typing, "TypeAliasType"): + # PEP 695 implemented (3.12+), can pass infer_variance to typing.TypeVar + typevar = typing.TypeVar(name, *constraints, bound=bound, + covariant=covariant, contravariant=contravariant, + infer_variance=infer_variance) + else: + typevar = typing.TypeVar(name, *constraints, bound=bound, + 
covariant=covariant, contravariant=contravariant) + if infer_variance and (covariant or contravariant): + raise ValueError("Variance cannot be specified with infer_variance.") + typevar.__infer_variance__ = infer_variance + _set_default(typevar, default) + _set_module(typevar) + return typevar + + def __init_subclass__(cls) -> None: + raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type") + + +# Python 3.10+ has PEP 612 +if hasattr(typing, 'ParamSpecArgs'): + ParamSpecArgs = typing.ParamSpecArgs + ParamSpecKwargs = typing.ParamSpecKwargs +# 3.8-3.9 +else: + class _Immutable: + """Mixin to indicate that object should not be copied.""" + __slots__ = () + + def __copy__(self): + return self + + def __deepcopy__(self, memo): + return self + + class ParamSpecArgs(_Immutable): + """The args for a ParamSpec object. + + Given a ParamSpec object P, P.args is an instance of ParamSpecArgs. + + ParamSpecArgs objects have a reference back to their ParamSpec: + + P.args.__origin__ is P + + This type is meant for runtime introspection and has no special meaning to + static type checkers. + """ + def __init__(self, origin): + self.__origin__ = origin + + def __repr__(self): + return f"{self.__origin__.__name__}.args" + + def __eq__(self, other): + if not isinstance(other, ParamSpecArgs): + return NotImplemented + return self.__origin__ == other.__origin__ + + class ParamSpecKwargs(_Immutable): + """The kwargs for a ParamSpec object. + + Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs. + + ParamSpecKwargs objects have a reference back to their ParamSpec: + + P.kwargs.__origin__ is P + + This type is meant for runtime introspection and has no special meaning to + static type checkers. 
+ """ + def __init__(self, origin): + self.__origin__ = origin + + def __repr__(self): + return f"{self.__origin__.__name__}.kwargs" + + def __eq__(self, other): + if not isinstance(other, ParamSpecKwargs): + return NotImplemented + return self.__origin__ == other.__origin__ + +# 3.10+ +if hasattr(typing, 'ParamSpec'): + + # Add default parameter - PEP 696 + class ParamSpec(metaclass=_TypeVarLikeMeta): + """Parameter specification.""" + + _backported_typevarlike = typing.ParamSpec + + def __new__(cls, name, *, bound=None, + covariant=False, contravariant=False, + infer_variance=False, default=_marker): + if hasattr(typing, "TypeAliasType"): + # PEP 695 implemented, can pass infer_variance to typing.TypeVar + paramspec = typing.ParamSpec(name, bound=bound, + covariant=covariant, + contravariant=contravariant, + infer_variance=infer_variance) + else: + paramspec = typing.ParamSpec(name, bound=bound, + covariant=covariant, + contravariant=contravariant) + paramspec.__infer_variance__ = infer_variance + + _set_default(paramspec, default) + _set_module(paramspec) + return paramspec + + def __init_subclass__(cls) -> None: + raise TypeError(f"type '{__name__}.ParamSpec' is not an acceptable base type") + +# 3.8-3.9 +else: + + # Inherits from list as a workaround for Callable checks in Python < 3.9.2. + class ParamSpec(list, _DefaultMixin): + """Parameter specification variable. + + Usage:: + + P = ParamSpec('P') + + Parameter specification variables exist primarily for the benefit of static + type checkers. They are used to forward the parameter types of one + callable to another callable, a pattern commonly found in higher order + functions and decorators. They are only valid when used in ``Concatenate``, + or s the first argument to ``Callable``. In Python 3.10 and higher, + they are also supported in user-defined Generics at runtime. + See class Generic for more information on generic types. 
An + example for annotating a decorator:: + + T = TypeVar('T') + P = ParamSpec('P') + + def add_logging(f: Callable[P, T]) -> Callable[P, T]: + '''A type-safe decorator to add logging to a function.''' + def inner(*args: P.args, **kwargs: P.kwargs) -> T: + logging.info(f'{f.__name__} was called') + return f(*args, **kwargs) + return inner + + @add_logging + def add_two(x: float, y: float) -> float: + '''Add two numbers together.''' + return x + y + + Parameter specification variables defined with covariant=True or + contravariant=True can be used to declare covariant or contravariant + generic types. These keyword arguments are valid, but their actual semantics + are yet to be decided. See PEP 612 for details. + + Parameter specification variables can be introspected. e.g.: + + P.__name__ == 'T' + P.__bound__ == None + P.__covariant__ == False + P.__contravariant__ == False + + Note that only parameter specification variables defined in global scope can + be pickled. + """ + + # Trick Generic __parameters__. 
+ __class__ = typing.TypeVar + + @property + def args(self): + return ParamSpecArgs(self) + + @property + def kwargs(self): + return ParamSpecKwargs(self) + + def __init__(self, name, *, bound=None, covariant=False, contravariant=False, + infer_variance=False, default=_marker): + super().__init__([self]) + self.__name__ = name + self.__covariant__ = bool(covariant) + self.__contravariant__ = bool(contravariant) + self.__infer_variance__ = bool(infer_variance) + if bound: + self.__bound__ = typing._type_check(bound, 'Bound must be a type.') + else: + self.__bound__ = None + _DefaultMixin.__init__(self, default) + + # for pickling: + def_mod = _caller() + if def_mod != 'typing_extensions': + self.__module__ = def_mod + + def __repr__(self): + if self.__infer_variance__: + prefix = '' + elif self.__covariant__: + prefix = '+' + elif self.__contravariant__: + prefix = '-' + else: + prefix = '~' + return prefix + self.__name__ + + def __hash__(self): + return object.__hash__(self) + + def __eq__(self, other): + return self is other + + def __reduce__(self): + return self.__name__ + + # Hack to get typing._type_check to pass. + def __call__(self, *args, **kwargs): + pass + + +# 3.8-3.9 +if not hasattr(typing, 'Concatenate'): + # Inherits from list as a workaround for Callable checks in Python < 3.9.2. + class _ConcatenateGenericAlias(list): + + # Trick Generic into looking into this for __parameters__. + __class__ = typing._GenericAlias + + # Flag in 3.8. + _special = False + + def __init__(self, origin, args): + super().__init__(args) + self.__origin__ = origin + self.__args__ = args + + def __repr__(self): + _type_repr = typing._type_repr + return (f'{_type_repr(self.__origin__)}' + f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]') + + def __hash__(self): + return hash((self.__origin__, self.__args__)) + + # Hack to get typing._type_check to pass in Generic. 
+ def __call__(self, *args, **kwargs): + pass + + @property + def __parameters__(self): + return tuple( + tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec)) + ) + + +# 3.8-3.9 +@typing._tp_cache +def _concatenate_getitem(self, parameters): + if parameters == (): + raise TypeError("Cannot take a Concatenate of no types.") + if not isinstance(parameters, tuple): + parameters = (parameters,) + if not isinstance(parameters[-1], ParamSpec): + raise TypeError("The last parameter to Concatenate should be a " + "ParamSpec variable.") + msg = "Concatenate[arg, ...]: each arg must be a type." + parameters = tuple(typing._type_check(p, msg) for p in parameters) + return _ConcatenateGenericAlias(self, parameters) + + +# 3.10+ +if hasattr(typing, 'Concatenate'): + Concatenate = typing.Concatenate + _ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa: F811 +# 3.9 +elif sys.version_info[:2] >= (3, 9): + @_ExtensionsSpecialForm + def Concatenate(self, parameters): + """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a + higher order function which adds, removes or transforms parameters of a + callable. + + For example:: + + Callable[Concatenate[int, P], int] + + See PEP 612 for detailed information. + """ + return _concatenate_getitem(self, parameters) +# 3.8 +else: + class _ConcatenateForm(_ExtensionsSpecialForm, _root=True): + def __getitem__(self, parameters): + return _concatenate_getitem(self, parameters) + + Concatenate = _ConcatenateForm( + 'Concatenate', + doc="""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a + higher order function which adds, removes or transforms parameters of a + callable. + + For example:: + + Callable[Concatenate[int, P], int] + + See PEP 612 for detailed information. 
+ """) + +# 3.10+ +if hasattr(typing, 'TypeGuard'): + TypeGuard = typing.TypeGuard +# 3.9 +elif sys.version_info[:2] >= (3, 9): + @_ExtensionsSpecialForm + def TypeGuard(self, parameters): + """Special typing form used to annotate the return type of a user-defined + type guard function. ``TypeGuard`` only accepts a single type argument. + At runtime, functions marked this way should return a boolean. + + ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static + type checkers to determine a more precise type of an expression within a + program's code flow. Usually type narrowing is done by analyzing + conditional code flow and applying the narrowing to a block of code. The + conditional expression here is sometimes referred to as a "type guard". + + Sometimes it would be convenient to use a user-defined boolean function + as a type guard. Such a function should use ``TypeGuard[...]`` as its + return type to alert static type checkers to this intention. + + Using ``-> TypeGuard`` tells the static type checker that for a given + function: + + 1. The return value is a boolean. + 2. If the return value is ``True``, the type of its argument + is the type inside ``TypeGuard``. + + For example:: + + def is_str(val: Union[str, float]): + # "isinstance" type guard + if isinstance(val, str): + # Type of ``val`` is narrowed to ``str`` + ... + else: + # Else, type of ``val`` is narrowed to ``float``. + ... + + Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower + form of ``TypeA`` (it can even be a wider form) and this may lead to + type-unsafe results. The main reason is to allow for things like + narrowing ``List[object]`` to ``List[str]`` even though the latter is not + a subtype of the former, since ``List`` is invariant. The responsibility of + writing type-safe type guards is left to the user. + + ``TypeGuard`` also works with type variables. For more information, see + PEP 647 (User-Defined Type Guards). 
+ """ + item = typing._type_check(parameters, f'{self} accepts only a single type.') + return typing._GenericAlias(self, (item,)) +# 3.8 +else: + class _TypeGuardForm(_ExtensionsSpecialForm, _root=True): + def __getitem__(self, parameters): + item = typing._type_check(parameters, + f'{self._name} accepts only a single type') + return typing._GenericAlias(self, (item,)) + + TypeGuard = _TypeGuardForm( + 'TypeGuard', + doc="""Special typing form used to annotate the return type of a user-defined + type guard function. ``TypeGuard`` only accepts a single type argument. + At runtime, functions marked this way should return a boolean. + + ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static + type checkers to determine a more precise type of an expression within a + program's code flow. Usually type narrowing is done by analyzing + conditional code flow and applying the narrowing to a block of code. The + conditional expression here is sometimes referred to as a "type guard". + + Sometimes it would be convenient to use a user-defined boolean function + as a type guard. Such a function should use ``TypeGuard[...]`` as its + return type to alert static type checkers to this intention. + + Using ``-> TypeGuard`` tells the static type checker that for a given + function: + + 1. The return value is a boolean. + 2. If the return value is ``True``, the type of its argument + is the type inside ``TypeGuard``. + + For example:: + + def is_str(val: Union[str, float]): + # "isinstance" type guard + if isinstance(val, str): + # Type of ``val`` is narrowed to ``str`` + ... + else: + # Else, type of ``val`` is narrowed to ``float``. + ... + + Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower + form of ``TypeA`` (it can even be a wider form) and this may lead to + type-unsafe results. 
The main reason is to allow for things like + narrowing ``List[object]`` to ``List[str]`` even though the latter is not + a subtype of the former, since ``List`` is invariant. The responsibility of + writing type-safe type guards is left to the user. + + ``TypeGuard`` also works with type variables. For more information, see + PEP 647 (User-Defined Type Guards). + """) + + +# Vendored from cpython typing._SpecialFrom +class _SpecialForm(typing._Final, _root=True): + __slots__ = ('_name', '__doc__', '_getitem') + + def __init__(self, getitem): + self._getitem = getitem + self._name = getitem.__name__ + self.__doc__ = getitem.__doc__ + + def __getattr__(self, item): + if item in {'__name__', '__qualname__'}: + return self._name + + raise AttributeError(item) + + def __mro_entries__(self, bases): + raise TypeError(f"Cannot subclass {self!r}") + + def __repr__(self): + return f'typing_extensions.{self._name}' + + def __reduce__(self): + return self._name + + def __call__(self, *args, **kwds): + raise TypeError(f"Cannot instantiate {self!r}") + + def __or__(self, other): + return typing.Union[self, other] + + def __ror__(self, other): + return typing.Union[other, self] + + def __instancecheck__(self, obj): + raise TypeError(f"{self} cannot be used with isinstance()") + + def __subclasscheck__(self, cls): + raise TypeError(f"{self} cannot be used with issubclass()") + + @typing._tp_cache + def __getitem__(self, parameters): + return self._getitem(self, parameters) + + +if hasattr(typing, "LiteralString"): # 3.11+ + LiteralString = typing.LiteralString +else: + @_SpecialForm + def LiteralString(self, params): + """Represents an arbitrary literal string. + + Example:: + + from typing_extensions import LiteralString + + def query(sql: LiteralString) -> ...: + ... + + query("SELECT * FROM table") # ok + query(f"SELECT * FROM {input()}") # not ok + + See PEP 675 for details. 
+ + """ + raise TypeError(f"{self} is not subscriptable") + + +if hasattr(typing, "Self"): # 3.11+ + Self = typing.Self +else: + @_SpecialForm + def Self(self, params): + """Used to spell the type of "self" in classes. + + Example:: + + from typing import Self + + class ReturnsSelf: + def parse(self, data: bytes) -> Self: + ... + return self + + """ + + raise TypeError(f"{self} is not subscriptable") + + +if hasattr(typing, "Never"): # 3.11+ + Never = typing.Never +else: + @_SpecialForm + def Never(self, params): + """The bottom type, a type that has no members. + + This can be used to define a function that should never be + called, or a function that never returns:: + + from typing_extensions import Never + + def never_call_me(arg: Never) -> None: + pass + + def int_or_str(arg: int | str) -> None: + never_call_me(arg) # type checker error + match arg: + case int(): + print("It's an int") + case str(): + print("It's a str") + case _: + never_call_me(arg) # ok, arg is of type Never + + """ + + raise TypeError(f"{self} is not subscriptable") + + +if hasattr(typing, 'Required'): # 3.11+ + Required = typing.Required + NotRequired = typing.NotRequired +elif sys.version_info[:2] >= (3, 9): # 3.9-3.10 + @_ExtensionsSpecialForm + def Required(self, parameters): + """A special typing construct to mark a key of a total=False TypedDict + as required. For example: + + class Movie(TypedDict, total=False): + title: Required[str] + year: int + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + + There is no runtime checking that a required key is actually provided + when instantiating a related TypedDict. + """ + item = typing._type_check(parameters, f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + + @_ExtensionsSpecialForm + def NotRequired(self, parameters): + """A special typing construct to mark a key of a TypedDict as + potentially missing. 
For example: + + class Movie(TypedDict): + title: str + year: NotRequired[int] + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + """ + item = typing._type_check(parameters, f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + +else: # 3.8 + class _RequiredForm(_ExtensionsSpecialForm, _root=True): + def __getitem__(self, parameters): + item = typing._type_check(parameters, + f'{self._name} accepts only a single type.') + return typing._GenericAlias(self, (item,)) + + Required = _RequiredForm( + 'Required', + doc="""A special typing construct to mark a key of a total=False TypedDict + as required. For example: + + class Movie(TypedDict, total=False): + title: Required[str] + year: int + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + + There is no runtime checking that a required key is actually provided + when instantiating a related TypedDict. + """) + NotRequired = _RequiredForm( + 'NotRequired', + doc="""A special typing construct to mark a key of a TypedDict as + potentially missing. For example: + + class Movie(TypedDict): + title: str + year: NotRequired[int] + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + """) + + +_UNPACK_DOC = """\ +Type unpack operator. + +The type unpack operator takes the child types from some container type, +such as `tuple[int, str]` or a `TypeVarTuple`, and 'pulls them out'. For +example: + + # For some generic class `Foo`: + Foo[Unpack[tuple[int, str]]] # Equivalent to Foo[int, str] + + Ts = TypeVarTuple('Ts') + # Specifies that `Bar` is generic in an arbitrary number of types. + # (Think of `Ts` as a tuple of an arbitrary number of individual + # `TypeVar`s, which the `Unpack` is 'pulling out' directly into the + # `Generic[]`.) + class Bar(Generic[Unpack[Ts]]): ... 
+ Bar[int] # Valid + Bar[int, str] # Also valid + +From Python 3.11, this can also be done using the `*` operator: + + Foo[*tuple[int, str]] + class Bar(Generic[*Ts]): ... + +The operator can also be used along with a `TypedDict` to annotate +`**kwargs` in a function signature. For instance: + + class Movie(TypedDict): + name: str + year: int + + # This function expects two keyword arguments - *name* of type `str` and + # *year* of type `int`. + def foo(**kwargs: Unpack[Movie]): ... + +Note that there is only some runtime checking of this operator. Not +everything the runtime allows may be accepted by static type checkers. + +For more information, see PEP 646 and PEP 692. +""" + + +if sys.version_info >= (3, 12): # PEP 692 changed the repr of Unpack[] + Unpack = typing.Unpack + + def _is_unpack(obj): + return get_origin(obj) is Unpack + +elif sys.version_info[:2] >= (3, 9): # 3.9+ + class _UnpackSpecialForm(_ExtensionsSpecialForm, _root=True): + def __init__(self, getitem): + super().__init__(getitem) + self.__doc__ = _UNPACK_DOC + + class _UnpackAlias(typing._GenericAlias, _root=True): + __class__ = typing.TypeVar + + @_UnpackSpecialForm + def Unpack(self, parameters): + item = typing._type_check(parameters, f'{self._name} accepts only a single type.') + return _UnpackAlias(self, (item,)) + + def _is_unpack(obj): + return isinstance(obj, _UnpackAlias) + +else: # 3.8 + class _UnpackAlias(typing._GenericAlias, _root=True): + __class__ = typing.TypeVar + + class _UnpackForm(_ExtensionsSpecialForm, _root=True): + def __getitem__(self, parameters): + item = typing._type_check(parameters, + f'{self._name} accepts only a single type.') + return _UnpackAlias(self, (item,)) + + Unpack = _UnpackForm('Unpack', doc=_UNPACK_DOC) + + def _is_unpack(obj): + return isinstance(obj, _UnpackAlias) + + +if hasattr(typing, "TypeVarTuple"): # 3.11+ + + # Add default parameter - PEP 696 + class TypeVarTuple(metaclass=_TypeVarLikeMeta): + """Type variable tuple.""" + + 
_backported_typevarlike = typing.TypeVarTuple + + def __new__(cls, name, *, default=_marker): + tvt = typing.TypeVarTuple(name) + _set_default(tvt, default) + _set_module(tvt) + return tvt + + def __init_subclass__(self, *args, **kwds): + raise TypeError("Cannot subclass special typing classes") + +else: # <=3.10 + class TypeVarTuple(_DefaultMixin): + """Type variable tuple. + + Usage:: + + Ts = TypeVarTuple('Ts') + + In the same way that a normal type variable is a stand-in for a single + type such as ``int``, a type variable *tuple* is a stand-in for a *tuple* + type such as ``Tuple[int, str]``. + + Type variable tuples can be used in ``Generic`` declarations. + Consider the following example:: + + class Array(Generic[*Ts]): ... + + The ``Ts`` type variable tuple here behaves like ``tuple[T1, T2]``, + where ``T1`` and ``T2`` are type variables. To use these type variables + as type parameters of ``Array``, we must *unpack* the type variable tuple using + the star operator: ``*Ts``. The signature of ``Array`` then behaves + as if we had simply written ``class Array(Generic[T1, T2]): ...``. + In contrast to ``Generic[T1, T2]``, however, ``Generic[*Shape]`` allows + us to parameterise the class with an *arbitrary* number of type parameters. + + Type variable tuples can be used anywhere a normal ``TypeVar`` can. + This includes class definitions, as shown above, as well as function + signatures and variable annotations:: + + class Array(Generic[*Ts]): + + def __init__(self, shape: Tuple[*Ts]): + self._shape: Tuple[*Ts] = shape + + def get_shape(self) -> Tuple[*Ts]: + return self._shape + + shape = (Height(480), Width(640)) + x: Array[Height, Width] = Array(shape) + y = abs(x) # Inferred type is Array[Height, Width] + z = x + x # ... is Array[Height, Width] + x.get_shape() # ... is tuple[Height, Width] + + """ + + # Trick Generic __parameters__. 
+ __class__ = typing.TypeVar + + def __iter__(self): + yield self.__unpacked__ + + def __init__(self, name, *, default=_marker): + self.__name__ = name + _DefaultMixin.__init__(self, default) + + # for pickling: + def_mod = _caller() + if def_mod != 'typing_extensions': + self.__module__ = def_mod + + self.__unpacked__ = Unpack[self] + + def __repr__(self): + return self.__name__ + + def __hash__(self): + return object.__hash__(self) + + def __eq__(self, other): + return self is other + + def __reduce__(self): + return self.__name__ + + def __init_subclass__(self, *args, **kwds): + if '_root' not in kwds: + raise TypeError("Cannot subclass special typing classes") + + +if hasattr(typing, "reveal_type"): # 3.11+ + reveal_type = typing.reveal_type +else: # <=3.10 + def reveal_type(obj: T, /) -> T: + """Reveal the inferred type of a variable. + + When a static type checker encounters a call to ``reveal_type()``, + it will emit the inferred type of the argument:: + + x: int = 1 + reveal_type(x) + + Running a static type checker (e.g., ``mypy``) on this example + will produce output similar to 'Revealed type is "builtins.int"'. + + At runtime, the function prints the runtime type of the + argument and returns it unchanged. + + """ + print(f"Runtime type is {type(obj).__name__!r}", file=sys.stderr) + return obj + + +if hasattr(typing, "assert_never"): # 3.11+ + assert_never = typing.assert_never +else: # <=3.10 + def assert_never(arg: Never, /) -> Never: + """Assert to the type checker that a line of code is unreachable. + + Example:: + + def int_or_str(arg: int | str) -> None: + match arg: + case int(): + print("It's an int") + case str(): + print("It's a str") + case _: + assert_never(arg) + + If a type checker finds that a call to assert_never() is + reachable, it will emit an error. + + At runtime, this throws an exception when called. 
+ + """ + raise AssertionError("Expected code to be unreachable") + + +if sys.version_info >= (3, 12): # 3.12+ + # dataclass_transform exists in 3.11 but lacks the frozen_default parameter + dataclass_transform = typing.dataclass_transform +else: # <=3.11 + def dataclass_transform( + *, + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, + frozen_default: bool = False, + field_specifiers: typing.Tuple[ + typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]], + ... + ] = (), + **kwargs: typing.Any, + ) -> typing.Callable[[T], T]: + """Decorator that marks a function, class, or metaclass as providing + dataclass-like behavior. + + Example: + + from typing_extensions import dataclass_transform + + _T = TypeVar("_T") + + # Used on a decorator function + @dataclass_transform() + def create_model(cls: type[_T]) -> type[_T]: + ... + return cls + + @create_model + class CustomerModel: + id: int + name: str + + # Used on a base class + @dataclass_transform() + class ModelBase: ... + + class CustomerModel(ModelBase): + id: int + name: str + + # Used on a metaclass + @dataclass_transform() + class ModelMeta(type): ... + + class ModelBase(metaclass=ModelMeta): ... + + class CustomerModel(ModelBase): + id: int + name: str + + Each of the ``CustomerModel`` classes defined in this example will now + behave similarly to a dataclass created with the ``@dataclasses.dataclass`` + decorator. For example, the type checker will synthesize an ``__init__`` + method. + + The arguments to this decorator can be used to customize this behavior: + - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be + True or False if it is omitted by the caller. + - ``order_default`` indicates whether the ``order`` parameter is + assumed to be True or False if it is omitted by the caller. + - ``kw_only_default`` indicates whether the ``kw_only`` parameter is + assumed to be True or False if it is omitted by the caller. 
+ - ``frozen_default`` indicates whether the ``frozen`` parameter is + assumed to be True or False if it is omitted by the caller. + - ``field_specifiers`` specifies a static list of supported classes + or functions that describe fields, similar to ``dataclasses.field()``. + + At runtime, this decorator records its arguments in the + ``__dataclass_transform__`` attribute on the decorated object. + + See PEP 681 for details. + + """ + def decorator(cls_or_fn): + cls_or_fn.__dataclass_transform__ = { + "eq_default": eq_default, + "order_default": order_default, + "kw_only_default": kw_only_default, + "frozen_default": frozen_default, + "field_specifiers": field_specifiers, + "kwargs": kwargs, + } + return cls_or_fn + return decorator + + +if hasattr(typing, "override"): # 3.12+ + override = typing.override +else: # <=3.11 + _F = typing.TypeVar("_F", bound=typing.Callable[..., typing.Any]) + + def override(arg: _F, /) -> _F: + """Indicate that a method is intended to override a method in a base class. + + Usage: + + class Base: + def method(self) -> None: ... + pass + + class Child(Base): + @override + def method(self) -> None: + super().method() + + When this decorator is applied to a method, the type checker will + validate that it overrides a method with the same name on a base class. + This helps prevent bugs that may occur when a base class is changed + without an equivalent change to a child class. + + There is no runtime checking of these properties. The decorator + sets the ``__override__`` attribute to ``True`` on the decorated object + to allow runtime introspection. + + See PEP 698 for details. + + """ + try: + arg.__override__ = True + except (AttributeError, TypeError): + # Skip the attribute silently if it is not writable. + # AttributeError happens if the object has __slots__ or a + # read-only property, TypeError if it's a builtin class. 
+ pass + return arg + + +if hasattr(typing, "deprecated"): + deprecated = typing.deprecated +else: + _T = typing.TypeVar("_T") + + def deprecated( + msg: str, + /, + *, + category: typing.Optional[typing.Type[Warning]] = DeprecationWarning, + stacklevel: int = 1, + ) -> typing.Callable[[_T], _T]: + """Indicate that a class, function or overload is deprecated. + + Usage: + + @deprecated("Use B instead") + class A: + pass + + @deprecated("Use g instead") + def f(): + pass + + @overload + @deprecated("int support is deprecated") + def g(x: int) -> int: ... + @overload + def g(x: str) -> int: ... + + When this decorator is applied to an object, the type checker + will generate a diagnostic on usage of the deprecated object. + + The warning specified by ``category`` will be emitted on use + of deprecated objects. For functions, that happens on calls; + for classes, on instantiation. If the ``category`` is ``None``, + no warning is emitted. The ``stacklevel`` determines where the + warning is emitted. If it is ``1`` (the default), the warning + is emitted at the direct caller of the deprecated object; if it + is higher, it is emitted further up the stack. + + The decorator sets the ``__deprecated__`` + attribute on the decorated object to the deprecation message + passed to the decorator. If applied to an overload, the decorator + must be after the ``@overload`` decorator for the attribute to + exist on the overload as returned by ``get_overloads()``. + + See PEP 702 for details. + + """ + def decorator(arg: _T, /) -> _T: + if category is None: + arg.__deprecated__ = msg + return arg + elif isinstance(arg, type): + original_new = arg.__new__ + has_init = arg.__init__ is not object.__init__ + + @functools.wraps(original_new) + def __new__(cls, *args, **kwargs): + warnings.warn(msg, category=category, stacklevel=stacklevel + 1) + if original_new is not object.__new__: + return original_new(cls, *args, **kwargs) + # Mirrors a similar check in object.__new__. 
+ elif not has_init and (args or kwargs): + raise TypeError(f"{cls.__name__}() takes no arguments") + else: + return original_new(cls) + + arg.__new__ = staticmethod(__new__) + arg.__deprecated__ = __new__.__deprecated__ = msg + return arg + elif callable(arg): + @functools.wraps(arg) + def wrapper(*args, **kwargs): + warnings.warn(msg, category=category, stacklevel=stacklevel + 1) + return arg(*args, **kwargs) + + arg.__deprecated__ = wrapper.__deprecated__ = msg + return wrapper + else: + raise TypeError( + "@deprecated decorator with non-None category must be applied to " + f"a class or callable, not {arg!r}" + ) + + return decorator + + +# We have to do some monkey patching to deal with the dual nature of +# Unpack/TypeVarTuple: +# - We want Unpack to be a kind of TypeVar so it gets accepted in +# Generic[Unpack[Ts]] +# - We want it to *not* be treated as a TypeVar for the purposes of +# counting generic parameters, so that when we subscript a generic, +# the runtime doesn't try to substitute the Unpack with the subscripted type. +if not hasattr(typing, "TypeVarTuple"): + typing._collect_type_vars = _collect_type_vars + typing._check_generic = _check_generic + + +# Backport typing.NamedTuple as it exists in Python 3.13. +# In 3.11, the ability to define generic `NamedTuple`s was supported. +# This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8. 
+# On 3.12, we added __orig_bases__ to call-based NamedTuples +# On 3.13, we deprecated kwargs-based NamedTuples +if sys.version_info >= (3, 13): + NamedTuple = typing.NamedTuple +else: + def _make_nmtuple(name, types, module, defaults=()): + fields = [n for n, t in types] + annotations = {n: typing._type_check(t, f"field {n} annotation must be a type") + for n, t in types} + nm_tpl = collections.namedtuple(name, fields, + defaults=defaults, module=module) + nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = annotations + # The `_field_types` attribute was removed in 3.9; + # in earlier versions, it is the same as the `__annotations__` attribute + if sys.version_info < (3, 9): + nm_tpl._field_types = annotations + return nm_tpl + + _prohibited_namedtuple_fields = typing._prohibited + _special_namedtuple_fields = frozenset({'__module__', '__name__', '__annotations__'}) + + class _NamedTupleMeta(type): + def __new__(cls, typename, bases, ns): + assert _NamedTuple in bases + for base in bases: + if base is not _NamedTuple and base is not typing.Generic: + raise TypeError( + 'can only inherit from a NamedTuple type and Generic') + bases = tuple(tuple if base is _NamedTuple else base for base in bases) + types = ns.get('__annotations__', {}) + default_names = [] + for field_name in types: + if field_name in ns: + default_names.append(field_name) + elif default_names: + raise TypeError(f"Non-default namedtuple field {field_name} " + f"cannot follow default field" + f"{'s' if len(default_names) > 1 else ''} " + f"{', '.join(default_names)}") + nm_tpl = _make_nmtuple( + typename, types.items(), + defaults=[ns[n] for n in default_names], + module=ns['__module__'] + ) + nm_tpl.__bases__ = bases + if typing.Generic in bases: + if hasattr(typing, '_generic_class_getitem'): # 3.12+ + nm_tpl.__class_getitem__ = classmethod(typing._generic_class_getitem) + else: + class_getitem = typing.Generic.__class_getitem__.__func__ + nm_tpl.__class_getitem__ = 
classmethod(class_getitem) + # update from user namespace without overriding special namedtuple attributes + for key in ns: + if key in _prohibited_namedtuple_fields: + raise AttributeError("Cannot overwrite NamedTuple attribute " + key) + elif key not in _special_namedtuple_fields and key not in nm_tpl._fields: + setattr(nm_tpl, key, ns[key]) + if typing.Generic in bases: + nm_tpl.__init_subclass__() + return nm_tpl + + _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {}) + + def _namedtuple_mro_entries(bases): + assert NamedTuple in bases + return (_NamedTuple,) + + @_ensure_subclassable(_namedtuple_mro_entries) + def NamedTuple(typename, fields=_marker, /, **kwargs): + """Typed version of namedtuple. + + Usage:: + + class Employee(NamedTuple): + name: str + id: int + + This is equivalent to:: + + Employee = collections.namedtuple('Employee', ['name', 'id']) + + The resulting class has an extra __annotations__ attribute, giving a + dict that maps field names to types. (The field names are also in + the _fields attribute, which is part of the namedtuple API.) + An alternative equivalent functional syntax is also accepted:: + + Employee = NamedTuple('Employee', [('name', str), ('id', int)]) + """ + if fields is _marker: + if kwargs: + deprecated_thing = "Creating NamedTuple classes using keyword arguments" + deprecation_msg = ( + "{name} is deprecated and will be disallowed in Python {remove}. " + "Use the class-based or functional syntax instead." + ) + else: + deprecated_thing = "Failing to pass a value for the 'fields' parameter" + example = f"`{typename} = NamedTuple({typename!r}, [])`" + deprecation_msg = ( + "{name} is deprecated and will be disallowed in Python {remove}. " + "To create a NamedTuple class with 0 fields " + "using the functional syntax, " + "pass an empty list, e.g. " + ) + example + "." 
+ elif fields is None: + if kwargs: + raise TypeError( + "Cannot pass `None` as the 'fields' parameter " + "and also specify fields using keyword arguments" + ) + else: + deprecated_thing = "Passing `None` as the 'fields' parameter" + example = f"`{typename} = NamedTuple({typename!r}, [])`" + deprecation_msg = ( + "{name} is deprecated and will be disallowed in Python {remove}. " + "To create a NamedTuple class with 0 fields " + "using the functional syntax, " + "pass an empty list, e.g. " + ) + example + "." + elif kwargs: + raise TypeError("Either list of fields or keywords" + " can be provided to NamedTuple, not both") + if fields is _marker or fields is None: + warnings.warn( + deprecation_msg.format(name=deprecated_thing, remove="3.15"), + DeprecationWarning, + stacklevel=2, + ) + fields = kwargs.items() + nt = _make_nmtuple(typename, fields, module=_caller()) + nt.__orig_bases__ = (NamedTuple,) + return nt + + +if hasattr(collections.abc, "Buffer"): + Buffer = collections.abc.Buffer +else: + class Buffer(abc.ABC): + """Base class for classes that implement the buffer protocol. + + The buffer protocol allows Python objects to expose a low-level + memory buffer interface. Before Python 3.12, it is not possible + to implement the buffer protocol in pure Python code, or even + to check whether a class implements the buffer protocol. In + Python 3.12 and higher, the ``__buffer__`` method allows access + to the buffer protocol from Python code, and the + ``collections.abc.Buffer`` ABC allows checking whether a class + implements the buffer protocol. + + To indicate support for the buffer protocol in earlier versions, + inherit from this ABC, either in a stub file or at runtime, + or use ABC registration. This ABC provides no methods, because + there is no Python-accessible methods shared by pre-3.12 buffer + classes. It is useful primarily for static checks. + + """ + + # As a courtesy, register the most common stdlib buffer classes. 
+ Buffer.register(memoryview) + Buffer.register(bytearray) + Buffer.register(bytes) + + +# Backport of types.get_original_bases, available on 3.12+ in CPython +if hasattr(_types, "get_original_bases"): + get_original_bases = _types.get_original_bases +else: + def get_original_bases(cls, /): + """Return the class's "original" bases prior to modification by `__mro_entries__`. + + Examples:: + + from typing import TypeVar, Generic + from typing_extensions import NamedTuple, TypedDict + + T = TypeVar("T") + class Foo(Generic[T]): ... + class Bar(Foo[int], float): ... + class Baz(list[str]): ... + Eggs = NamedTuple("Eggs", [("a", int), ("b", str)]) + Spam = TypedDict("Spam", {"a": int, "b": str}) + + assert get_original_bases(Bar) == (Foo[int], float) + assert get_original_bases(Baz) == (list[str],) + assert get_original_bases(Eggs) == (NamedTuple,) + assert get_original_bases(Spam) == (TypedDict,) + assert get_original_bases(int) == (object,) + """ + try: + return cls.__dict__.get("__orig_bases__", cls.__bases__) + except AttributeError: + raise TypeError( + f'Expected an instance of type, not {type(cls).__name__!r}' + ) from None + + +# NewType is a class on Python 3.10+, making it pickleable +# The error message for subclassing instances of NewType was improved on 3.11+ +if sys.version_info >= (3, 11): + NewType = typing.NewType +else: + class NewType: + """NewType creates simple unique types with almost zero + runtime overhead. NewType(name, tp) is considered a subtype of tp + by static type checkers. At runtime, NewType(name, tp) returns + a dummy callable that simply returns its argument. Usage:: + UserId = NewType('UserId', int) + def name_by_id(user_id: UserId) -> str: + ... + UserId('user') # Fails type check + name_by_id(42) # Fails type check + name_by_id(UserId(42)) # OK + num = UserId(5) + 1 # type: int + """ + + def __call__(self, obj): + return obj + + def __init__(self, name, tp): + self.__qualname__ = name + if '.' 
in name: + name = name.rpartition('.')[-1] + self.__name__ = name + self.__supertype__ = tp + def_mod = _caller() + if def_mod != 'typing_extensions': + self.__module__ = def_mod + + def __mro_entries__(self, bases): + # We defined __mro_entries__ to get a better error message + # if a user attempts to subclass a NewType instance. bpo-46170 + supercls_name = self.__name__ + + class Dummy: + def __init_subclass__(cls): + subcls_name = cls.__name__ + raise TypeError( + f"Cannot subclass an instance of NewType. " + f"Perhaps you were looking for: " + f"`{subcls_name} = NewType({subcls_name!r}, {supercls_name})`" + ) + + return (Dummy,) + + def __repr__(self): + return f'{self.__module__}.{self.__qualname__}' + + def __reduce__(self): + return self.__qualname__ + + if sys.version_info >= (3, 10): + # PEP 604 methods + # It doesn't make sense to have these methods on Python <3.10 + + def __or__(self, other): + return typing.Union[self, other] + + def __ror__(self, other): + return typing.Union[other, self] + + +if hasattr(typing, "TypeAliasType"): + TypeAliasType = typing.TypeAliasType +else: + def _is_unionable(obj): + """Corresponds to is_unionable() in unionobject.c in CPython.""" + return obj is None or isinstance(obj, ( + type, + _types.GenericAlias, + _types.UnionType, + TypeAliasType, + )) + + class TypeAliasType: + """Create named, parameterized type aliases. + + This provides a backport of the new `type` statement in Python 3.12: + + type ListOrSet[T] = list[T] | set[T] + + is equivalent to: + + T = TypeVar("T") + ListOrSet = TypeAliasType("ListOrSet", list[T] | set[T], type_params=(T,)) + + The name ListOrSet can then be used as an alias for the type it refers to. + + The type_params argument should contain all the type parameters used + in the value of the type alias. If the alias is not generic, this + argument is omitted. 
+ + Static type checkers should only support type aliases declared using + TypeAliasType that follow these rules: + + - The first argument (the name) must be a string literal. + - The TypeAliasType instance must be immediately assigned to a variable + of the same name. (For example, 'X = TypeAliasType("Y", int)' is invalid, + as is 'X, Y = TypeAliasType("X", int), TypeAliasType("Y", int)'). + + """ + + def __init__(self, name: str, value, *, type_params=()): + if not isinstance(name, str): + raise TypeError("TypeAliasType name must be a string") + self.__value__ = value + self.__type_params__ = type_params + + parameters = [] + for type_param in type_params: + if isinstance(type_param, TypeVarTuple): + parameters.extend(type_param) + else: + parameters.append(type_param) + self.__parameters__ = tuple(parameters) + def_mod = _caller() + if def_mod != 'typing_extensions': + self.__module__ = def_mod + # Setting this attribute closes the TypeAliasType from further modification + self.__name__ = name + + def __setattr__(self, name: str, value: object, /) -> None: + if hasattr(self, "__name__"): + self._raise_attribute_error(name) + super().__setattr__(name, value) + + def __delattr__(self, name: str, /) -> Never: + self._raise_attribute_error(name) + + def _raise_attribute_error(self, name: str) -> Never: + # Match the Python 3.12 error messages exactly + if name == "__name__": + raise AttributeError("readonly attribute") + elif name in {"__value__", "__type_params__", "__parameters__", "__module__"}: + raise AttributeError( + f"attribute '{name}' of 'typing.TypeAliasType' objects " + "is not writable" + ) + else: + raise AttributeError( + f"'typing.TypeAliasType' object has no attribute '{name}'" + ) + + def __repr__(self) -> str: + return self.__name__ + + def __getitem__(self, parameters): + if not isinstance(parameters, tuple): + parameters = (parameters,) + parameters = [ + typing._type_check( + item, f'Subscripting {self.__name__} requires a type.' 
+ ) + for item in parameters + ] + return typing._GenericAlias(self, tuple(parameters)) + + def __reduce__(self): + return self.__name__ + + def __init_subclass__(cls, *args, **kwargs): + raise TypeError( + "type 'typing_extensions.TypeAliasType' is not an acceptable base type" + ) + + # The presence of this method convinces typing._type_check + # that TypeAliasTypes are types. + def __call__(self): + raise TypeError("Type alias is not callable") + + if sys.version_info >= (3, 10): + def __or__(self, right): + # For forward compatibility with 3.12, reject Unions + # that are not accepted by the built-in Union. + if not _is_unionable(right): + return NotImplemented + return typing.Union[self, right] + + def __ror__(self, left): + if not _is_unionable(left): + return NotImplemented + return typing.Union[left, self] + + +if hasattr(typing, "is_protocol"): + is_protocol = typing.is_protocol + get_protocol_members = typing.get_protocol_members +else: + def is_protocol(tp: type, /) -> bool: + """Return True if the given type is a Protocol. + + Example:: + + >>> from typing_extensions import Protocol, is_protocol + >>> class P(Protocol): + ... def a(self) -> str: ... + ... b: int + >>> is_protocol(P) + True + >>> is_protocol(int) + False + """ + return ( + isinstance(tp, type) + and getattr(tp, '_is_protocol', False) + and tp is not Protocol + and tp is not typing.Protocol + ) + + def get_protocol_members(tp: type, /) -> typing.FrozenSet[str]: + """Return the set of members defined in a Protocol. + + Example:: + + >>> from typing_extensions import Protocol, get_protocol_members + >>> class P(Protocol): + ... def a(self) -> str: ... + ... b: int + >>> get_protocol_members(P) + frozenset({'a', 'b'}) + + Raise a TypeError for arguments that are not Protocols. 
+ """ + if not is_protocol(tp): + raise TypeError(f'{tp!r} is not a Protocol') + if hasattr(tp, '__protocol_attrs__'): + return frozenset(tp.__protocol_attrs__) + return frozenset(_get_protocol_attrs(tp)) + + +if hasattr(typing, "Doc"): + Doc = typing.Doc +else: + class Doc: + """Define the documentation of a type annotation using ``Annotated``, to be + used in class attributes, function and method parameters, return values, + and variables. + + The value should be a positional-only string literal to allow static tools + like editors and documentation generators to use it. + + This complements docstrings. + + The string value passed is available in the attribute ``documentation``. + + Example:: + + >>> from typing_extensions import Annotated, Doc + >>> def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ... + """ + def __init__(self, documentation: str, /) -> None: + self.documentation = documentation + + def __repr__(self) -> str: + return f"Doc({self.documentation!r})" + + def __hash__(self) -> int: + return hash(self.documentation) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Doc): + return NotImplemented + return self.documentation == other.documentation + + +# Aliases for items that have always been in typing. 
+# Explicitly assign these (rather than using `from typing import *` at the top), +# so that we get a CI error if one of these is deleted from typing.py +# in a future version of Python +AbstractSet = typing.AbstractSet +AnyStr = typing.AnyStr +BinaryIO = typing.BinaryIO +Callable = typing.Callable +Collection = typing.Collection +Container = typing.Container +Dict = typing.Dict +ForwardRef = typing.ForwardRef +FrozenSet = typing.FrozenSet +Generator = typing.Generator +Generic = typing.Generic +Hashable = typing.Hashable +IO = typing.IO +ItemsView = typing.ItemsView +Iterable = typing.Iterable +Iterator = typing.Iterator +KeysView = typing.KeysView +List = typing.List +Mapping = typing.Mapping +MappingView = typing.MappingView +Match = typing.Match +MutableMapping = typing.MutableMapping +MutableSequence = typing.MutableSequence +MutableSet = typing.MutableSet +Optional = typing.Optional +Pattern = typing.Pattern +Reversible = typing.Reversible +Sequence = typing.Sequence +Set = typing.Set +Sized = typing.Sized +TextIO = typing.TextIO +Tuple = typing.Tuple +Union = typing.Union +ValuesView = typing.ValuesView +cast = typing.cast +no_type_check = typing.no_type_check +no_type_check_decorator = typing.no_type_check_decorator diff --git a/README.md b/README.md index e715af5..0f8a1ef 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,4 @@ # CI MPM Toy repo with some simple functions for the CI lecture + +test change! diff --git a/simple_functions.egg-info/PKG-INFO b/simple_functions.egg-info/PKG-INFO new file mode 100644 index 0000000..4612ff5 --- /dev/null +++ b/simple_functions.egg-info/PKG-INFO @@ -0,0 +1,7 @@ +Metadata-Version: 2.1 +Name: simple-functions +Version: 0.1.dev14+g336efd1 +Summary: Environment for playing with CI. 
+Home-page: https://github.com/rhodrin/ci_mpm +Author: Imperial College London +Author-email: rhodri.nelson@imperial.ac.uk diff --git a/simple_functions.egg-info/SOURCES.txt b/simple_functions.egg-info/SOURCES.txt new file mode 100644 index 0000000..70eeb0c --- /dev/null +++ b/simple_functions.egg-info/SOURCES.txt @@ -0,0 +1,13 @@ +README.md +environment.yml +requirements.txt +setup.py +.github/workflows/flake8.yml +.github/workflows/pytest-unit-tests.yml +simple_functions/__init__.py +simple_functions/functions1.py +simple_functions.egg-info/PKG-INFO +simple_functions.egg-info/SOURCES.txt +simple_functions.egg-info/dependency_links.txt +simple_functions.egg-info/top_level.txt +tests/test_simple_functions.py \ No newline at end of file diff --git a/simple_functions.egg-info/dependency_links.txt b/simple_functions.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/simple_functions.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/simple_functions.egg-info/top_level.txt b/simple_functions.egg-info/top_level.txt new file mode 100644 index 0000000..a4ba85f --- /dev/null +++ b/simple_functions.egg-info/top_level.txt @@ -0,0 +1 @@ +simple_functions