diff --git a/.clickup-docs-updates.md b/.clickup-docs-updates.md new file mode 100644 index 0000000..4d5b5a6 --- /dev/null +++ b/.clickup-docs-updates.md @@ -0,0 +1,465 @@ +# ClickUp Documentation Updates + +This file contains the content updates for the three ClickUp documentation pages as per the plan. + +--- + +## Page 1: Conventional Commits - Replace Section + +**Remove**: Any sections mentioning commitlint, npm, husky, or Node.js tools + +**Add**: The following "Enforcement with Lefthook" section + +### Enforcement with Lefthook + +#### Install Lefthook + +```bash +# Add to dev dependencies +uv add --dev lefthook + +# Install git hooks +uv run lefthook install +``` + +#### Configure Commit Message Validation + +Create or update `lefthook.yml`: + +```yaml +commit-msg: + commands: + check-commit-msg: + run: | + commit_regex='^(feat|fix|docs|style|refactor|test|chore|build|ci|perf|revert)(\(.+\))?: .+' + if ! grep -qE "$commit_regex" {1}; then + echo "❌ Commit message must follow conventional commits format!" 
+ echo "" + echo "Format: <type>: <description> OR <type>(<scope>): <description>" + echo "" + echo "Types:" + echo " feat: New feature" + echo " fix: Bug fix" + echo " docs: Documentation changes" + echo " style: Code style changes" + echo " refactor: Code refactoring" + echo " test: Test changes" + echo " chore: Maintenance tasks" + echo " build: Build system changes" + echo " ci: CI/CD changes" + echo " perf: Performance improvements" + echo " revert: Revert previous commit" + exit 1 + fi +``` + +#### Benefits + +✅ **Enforced at commit time** - Prevents non-conventional commits +✅ **Fast** - Pure shell script, no Node.js overhead +✅ **Python ecosystem** - No JavaScript dependencies +✅ **Customizable** - Easy to modify commit types + +#### GitHub PR Title Enforcement + +Use GitHub Actions to enforce conventional commit format for PR titles: + +```yaml +name: PR Title Check + +on: + pull_request: + types: [opened, edited, synchronize] + +jobs: + check-title: + runs-on: ubuntu-latest + steps: + - name: Check PR title + run: | + title="${{ github.event.pull_request.title }}" + if ! 
echo "$title" | grep -qE '^(feat|fix|docs|style|refactor|test|chore|build|ci|perf|revert)(\(.+\))?: .+'; then + echo "❌ PR title must follow conventional commits format" + exit 1 + fi +``` + +--- + +## Page 2: python-semantic-release Configuration - Add Section + +**Add**: The following "Enforcement and Pre-commit Hooks" section + +### Enforcement and Pre-commit Hooks + +#### Lefthook Integration + +Ensure commits follow conventional format before PSR processes them: + +**File**: `lefthook.yml` + +```yaml +pre-commit: + parallel: true + commands: + # Linting and formatting + ruff: + run: uv run ruff check --fix {staged_files} + glob: "*.py" + stage_fixed: true + + ruff-format: + run: uv run ruff format {staged_files} + glob: "*.py" + stage_fixed: true + + # Type checking + type-check: + run: uv run basedpyright + skip: + - merge + - rebase + +commit-msg: + commands: + check-commit-msg: + run: | + commit_regex='^(feat|fix|docs|style|refactor|test|chore|build|ci|perf|revert)(\(.+\))?: .+' + if ! grep -qE "$commit_regex" {1}; then + echo "❌ Commit message must follow conventional commits!" + exit 1 + fi + +pre-push: + commands: + # Ensure tests pass before pushing + test: + run: uv run pytest + + # Verify package builds + build: + run: uv build +``` + +This ensures: +- Code quality before commit +- Conventional commits enforced +- Tests pass before push +- Package builds successfully + +All requirements for successful PSR releases. + +--- + +## Page 3: NEW PAGE - "Enforcement & Project Setup" + +This is a complete new page to be created in ClickUp. + +# Enforcement & Project Setup Guide + +This guide provides a complete checklist for setting up automated versioning and enforcement in a new Python project. + +## Prerequisites + +- Python 3.12+ installed +- uv package manager installed +- Git repository initialized +- GitHub repository created + +## Step-by-Step Setup + +### 1. 
Install Dependencies + +```bash +# Install uv if not already installed +curl -LsSf https://astral.sh/uv/install.sh | sh + +# Add development dependencies +uv add --dev python-semantic-release +uv add --dev lefthook +uv add --dev ruff +uv add --dev basedpyright # or mypy +uv add --dev pytest +uv add --dev pytest-cov +``` + +### 2. Configure pyproject.toml + +Add semantic release configuration: + +```toml +[project] +name = "my-package" +dynamic = ["version"] # Version from VCS +description = "My awesome package" +requires-python = ">=3.12" + +[build-system] +requires = ["hatchling", "hatch-vcs"] +build-backend = "hatchling.build" + +[tool.hatch.version] +source = "vcs" + +[tool.semantic_release] +branch = "main" +tag_format = "v{version}" +build_command = "uv build" +upload_to_pypi = true +upload_to_release = true + +[tool.semantic_release.commit_parser_options] +allowed_tags = [ + "build", "chore", "ci", "docs", "feat", "fix", + "perf", "refactor", "style", "test" +] +minor_tags = ["feat"] +patch_tags = ["fix", "perf"] + +[tool.semantic_release.changelog] +changelog_file = "CHANGELOG.md" +exclude_commit_patterns = [] + +[tool.ruff] +line-length = 128 +target-version = "py312" + +[tool.basedpyright] +pythonVersion = "3.12" +typeCheckingMode = "standard" +``` + +### 3. Create lefthook.yml + +```yaml +skip_output: + - meta + - success + +pre-commit: + parallel: true + commands: + ruff: + run: uv run ruff check --fix {staged_files} + glob: "*.py" + stage_fixed: true + + ruff-format: + run: uv run ruff format {staged_files} + glob: "*.py" + stage_fixed: true + + type-check: + run: uv run basedpyright + skip: + - merge + - rebase + +commit-msg: + commands: + check-commit-msg: + run: | + commit_regex='^(feat|fix|docs|style|refactor|test|chore|build|ci|perf|revert)(\(.+\))?: .+' + if ! grep -qE "$commit_regex" {1}; then + echo "❌ Commit message must follow conventional commits!" 
+ echo "" + echo "Format: : OR (): " + exit 1 + fi + +pre-push: + commands: + test: + run: uv run pytest + + build: + run: uv build +``` + +### 4. Install Git Hooks + +```bash +# Install lefthook hooks +uv run lefthook install + +# Verify installation +uv run lefthook run pre-commit --all-files +``` + +### 5. Create GitHub Workflow + +**File**: `.github/workflows/release.yml` + +```yaml +name: Release + +on: + push: + branches: + - main + +jobs: + release: + runs-on: ubuntu-latest + concurrency: release + permissions: + id-token: write + contents: write + + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - uses: actions/setup-python@v5 + with: + python-version: '3.12' + + - name: Install uv + uses: astral-sh/setup-uv@v5 + + - name: Install dependencies + run: uv sync --all-extras + + - name: Configure Git + run: | + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + - name: Create release + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + NEXT_VERSION=$(uv run semantic-release version --print) + if [ -n "$NEXT_VERSION" ]; then + uv run semantic-release version + uv build + uv run semantic-release publish + fi + + - name: Publish to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 +``` + +### 6. Configure PyPI Trusted Publishing + +1. Go to https://pypi.org/manage/account/publishing/ +2. Add new publisher: + - PyPI Project Name: `my-package` + - Owner: `my-github-username` + - Repository: `my-repo-name` + - Workflow: `release.yml` + +### 7. First Commit and Release + +```bash +# Initialize with conventional commit +git add . 
+git commit -m "feat: initial project setup" + +# Create initial tag (optional, for version base) +git tag v0.1.0 +git push origin main --tags + +# First release will be automatic on push to main +``` + +## Verification Checklist + +### Local Development + +- [ ] Lefthook hooks installed (`uv run lefthook install`) +- [ ] Pre-commit runs on commit (`git commit` triggers hooks) +- [ ] Commit message validation works (try invalid message) +- [ ] Pre-push runs tests (`git push` triggers tests) + +### CI/CD + +- [ ] GitHub Actions workflow exists (`.github/workflows/release.yml`) +- [ ] Workflow has correct permissions (`id-token: write`, `contents: write`) +- [ ] PyPI Trusted Publishing configured + +### Release Process + +- [ ] Conventional commits create correct version bumps +- [ ] CHANGELOG.md is auto-generated +- [ ] Git tags are created automatically +- [ ] PyPI packages are published +- [ ] GitHub Releases are created with changelog + +## Troubleshooting + +### Lefthook not running + +```bash +# Reinstall hooks +uv run lefthook uninstall +uv run lefthook install +``` + +### Semantic release not finding commits + +```bash +# Check what PSR sees +uv run semantic-release version --print --verbosity DEBUG +``` + +### PyPI publishing fails + +- Verify trusted publisher is configured on PyPI +- Check workflow permissions include `id-token: write` +- Ensure workflow name matches PyPI configuration + +## Maintenance + +### Updating Lefthook Configuration + +After modifying `lefthook.yml`: + +```bash +# Reinstall hooks with new configuration +uv run lefthook install +``` + +### Testing Hooks Without Committing + +```bash +# Test pre-commit hooks +uv run lefthook run pre-commit --all-files + +# Test commit-msg validation +echo "feat: test message" | uv run lefthook run commit-msg +``` + +## Best Practices + +✅ **Always test locally first** - Run lefthook before pushing +✅ **Use conventional commits** - Every commit should follow format +✅ **Keep lefthook.yml in version 
control** - Team consistency +✅ **Pin tool versions** - Reproducible builds +✅ **Document custom commit types** - If you add new types beyond standard + +--- + +## Next Steps + +1. Read **Semantic Versioning** page for version number rules +2. Read **Conventional Commits** page for commit message format +3. Read **python-semantic-release Configuration** for advanced config +4. Read **GitHub Actions Workflows** for CI/CD customization + +--- + +## Implementation Notes + +**All content above is Python-exclusive:** +- ✅ Uses `uv` (Python package manager) +- ✅ Uses `lefthook` (language-agnostic, no Node.js) +- ✅ Uses `python-semantic-release` (Python tool) +- ✅ Uses `ruff` (Python linter) +- ✅ Uses `basedpyright` (Python type checker) +- ❌ NO `commitlint` (Node.js) +- ❌ NO `npm` (Node.js) +- ❌ NO `husky` (Node.js) + +This ensures the entire workflow stays within the Python ecosystem while maintaining best practices for automated versioning and release management. diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index 1d6ad76..b90e9e9 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -32,34 +32,24 @@ - **Merge PRs into `develop`.** Configure repository settings so that branches are deleted automatically after PRs are merged. - **Only merge to `main` if [fast-forwarding](https://www.git-scm.com/book/en/v2/Git-Branching-Basic-Branching-and-Merging) from `develop`.** - **Enable [branch protection](https://docs.github.com/en/free-pro-team@latest/github/administering-a-repository/about-protected-branches) on `develop` and `main`.** -- **Set up a release workflow.** Here's an example release workflow, controlled by Git tags: - - Bump the version number in `pyproject.toml` with `poetry version` and commit the changes to `develop`. - - Push to `develop` and verify all CI checks pass. - - Fast-forward merge to `main`, push, and verify all CI checks pass. 
- - Create an [annotated and signed Git tag](https://www.git-scm.com/book/en/v2/Git-Basics-Tagging) - - Follow [SemVer](https://semver.org/) guidelines when choosing a version number. - - List PRs and commits in the tag message: - ```sh - git log --pretty=format:"- %s (%h)" "$(git describe --abbrev=0 --tags)"..HEAD - ``` - - Omit the leading `v` (use `1.0.0` instead of `v1.0.0`) - - Example: `git tag -a -s 1.0.0` - - Push the tag. GitHub Actions will build and push the Python package and Docker images. -- **Create a changelog.** Here's an example changelog generation command, controlled by Git tags: - - ```sh - printf '# Changelog\n\n' >CHANGELOG.md - - GIT_LOG_FORMAT='## %(subject) - %(taggerdate:short) - - %(contents:body) - Tagger: %(taggername) %(taggeremail) - Date: %(taggerdate:iso) - - %(contents:signature)' - - git tag -l --sort=-taggerdate:iso --format="$GIT_LOG_FORMAT" >>CHANGELOG.md - ``` +- **Release workflow is fully automated.** This project uses [python-semantic-release](https://python-semantic-release.readthedocs.io/) for automated versioning: + - Releases happen automatically when commits are pushed to `main` + - Version is determined by analyzing [conventional commits](https://www.conventionalcommits.org/) + - CHANGELOG.md is automatically updated + - Git tags are created and GPG signed + - Package is published to PyPI via Trusted Publishing + - GitHub Release is created with changelog + - Documentation is deployed automatically + - **No manual tagging or version bumping required!** + - For detailed information, see [Release Automation Guide](../docs/releases.md) +- **Use conventional commits.** All commit messages must follow the [Conventional Commits](https://www.conventionalcommits.org/) specification: + - `feat:` for new features (minor version bump) + - `fix:` for bug fixes (patch version bump) + - `docs:`, `chore:`, `ci:`, etc. 
for non-releasing changes + - `feat!:` or `BREAKING CHANGE:` for breaking changes (major version bump) + - Commit messages are validated before commit via lefthook + - PR titles must also follow this format + - See [Release Automation Guide](../docs/releases.md) for examples and best practices ## Git @@ -68,7 +58,7 @@ - [Configure Git to connect to GitHub with SSH](https://docs.github.com/en/free-pro-team@latest/github/authenticating-to-github/connecting-to-github-with-ssh) - [Fork](https://docs.github.com/en/free-pro-team@latest/github/getting-started-with-github/fork-a-repo) this repo - Create a [branch](https://www.git-scm.com/book/en/v2/Git-Branching-Branches-in-a-Nutshell) in your fork. -- Commit your changes with a [properly-formatted Git commit message](https://chris.beams.io/posts/git-commit/). +- Commit your changes with a [conventional commit message](https://www.conventionalcommits.org/). See [Release Automation Guide](../docs/releases.md) for format and examples. - Create a [pull request (PR)](https://docs.github.com/en/free-pro-team@latest/github/collaborating-with-issues-and-pull-requests/about-pull-requests) to incorporate your changes into the upstream project you forked. 
## Python diff --git a/.github/actions/setup-python-uv/action.yml b/.github/actions/setup-python-uv/action.yml index a7f0680..3be2f37 100644 --- a/.github/actions/setup-python-uv/action.yml +++ b/.github/actions/setup-python-uv/action.yml @@ -4,7 +4,7 @@ inputs: python-version: description: 'Python version to use' required: false - default: '3.13' + default: '3.14' uv-version: description: 'uv version to use' required: false diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index e6e381f..5e4eb5c 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -11,8 +11,7 @@ on: - main release: types: - - released - - prereleased + - published jobs: prepare: @@ -36,11 +35,15 @@ jobs: - name: Extract major and minor version id: version + env: + LATEST_TAG: ${{ steps.latest_tag.outputs.tag }} run: | - echo "value=`echo ${{ steps.latest_tag.outputs.tag }} | sed -r 's|(v[0-9]+.[0-9]+).*|\1|g'`" >> $GITHUB_OUTPUT + echo "value=$(echo "$LATEST_TAG" | sed -r 's|(v[0-9]+.[0-9]+).*|\1|g')" >> $GITHUB_OUTPUT - name: Computed Doc Version - run: echo ${{ steps.version.outputs.value }} + env: + DOC_VERSION: ${{ steps.version.outputs.value }} + run: echo "$DOC_VERSION" publish-docs: name: Publish Docs @@ -65,9 +68,12 @@ jobs: git_commit_gpgsign: true - name: Configurating Git + env: + GIT_EMAIL: ${{ steps.import-gpg.outputs.email }} + GIT_NAME: ${{ steps.import-gpg.outputs.name }} run: | - git config user.email "${{ steps.import-gpg.outputs.email }}" - git config user.name "${{ steps.import-gpg.outputs.name }}" + git config user.email "$GIT_EMAIL" + git config user.name "$GIT_NAME" git config core.autocrlf false git config commit.gpgsign true @@ -77,6 +83,8 @@ jobs: dependency-groups: doc - name: Publish + env: + DOC_VERSION: ${{ needs.prepare.outputs.version }} run: | - uv run mike deploy --push --update-aliases "${{ needs.prepare.outputs.version }}" latest - uv run mike deploy --push --update-aliases "${{ needs.prepare.outputs.version }}" docs 
+ uv run mike deploy --push --update-aliases "$DOC_VERSION" latest + uv run mike deploy --push --update-aliases "$DOC_VERSION" docs diff --git a/.github/workflows/semantic-release.yml b/.github/workflows/semantic-release.yml new file mode 100644 index 0000000..c008f98 --- /dev/null +++ b/.github/workflows/semantic-release.yml @@ -0,0 +1,106 @@ +name: Semantic Release + +on: + push: + branches: + - main + workflow_dispatch: + +permissions: + id-token: write # PyPI Trusted Publishing + contents: write # Create tags and releases + issues: write # Comment on issues + pull-requests: write # Comment on PRs + +jobs: + release: + name: Semantic Release + runs-on: ubuntu-latest + outputs: + released: ${{ steps.release.outputs.released }} + version: ${{ steps.release.outputs.version }} + tag: ${{ steps.release.outputs.tag }} + + steps: + - name: Checkout code + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 + with: + fetch-depth: 0 + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Import GPG key + id: import-gpg + uses: crazy-max/ghaction-import-gpg@e89d40939c28e39f97cf32126055eeae86ba74ec # v6 + with: + gpg_private_key: ${{ secrets.HOTHER_BOT_GPG_KEY }} + passphrase: ${{ secrets.HOTHER_BOT_GPG_PASSPHRASE }} + git_user_signingkey: true + git_commit_gpgsign: true + git_tag_gpgsign: true + + - name: Configure Git + env: + GIT_EMAIL: ${{ steps.import-gpg.outputs.email }} + GIT_NAME: ${{ steps.import-gpg.outputs.name }} + run: | + git config user.email "$GIT_EMAIL" + git config user.name "$GIT_NAME" + git config core.autocrlf false + git config commit.gpgsign true + git config tag.gpgsign true + + - name: Setup Python and uv + uses: ./.github/actions/setup-python-uv + + - name: Python Semantic Release + id: release + uses: python-semantic-release/python-semantic-release@v9.15.3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + git_committer_name: ${{ steps.import-gpg.outputs.name }} + git_committer_email: ${{ steps.import-gpg.outputs.email }} + + - name: 
Publish to PyPI + if: steps.release.outputs.released == 'true' + uses: pypa/gh-action-pypi-publish@release/v1 + + - name: Publish GitHub Release + if: steps.release.outputs.released == 'true' + uses: python-semantic-release/publish-action@v9.15.3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + tag: ${{ steps.release.outputs.tag }} + + dispatch: + name: Dispatch to Package Registry + runs-on: ubuntu-latest + needs: release + if: needs.release.outputs.released == 'true' + + steps: + - name: Checkout code + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 + with: + ref: ${{ needs.release.outputs.tag }} + + - name: Get package description + id: description + run: | + python -c "import tomllib; print(tomllib.load(open('pyproject.toml', 'rb'))['project']['description'])" > desc.txt + echo "value=$(cat desc.txt)" >> $GITHUB_OUTPUT + + - name: Dispatch to package registry + env: + GH_TOKEN: ${{ secrets.GH_TOKEN }} + PKG_NAME: ${{ vars.PKG_NAME }} + PKG_REGISTRY: ${{ vars.PKG_REGISTRY }} + VERSION_TAG: ${{ needs.release.outputs.tag }} + REPO_OWNER: ${{ github.repository_owner }} + REPO_FULL: ${{ github.repository }} + DESCRIPTION: ${{ steps.description.outputs.value }} + run: | + curl -H "Accept: application/vnd.github.everest-preview+json" \ + -H "Authorization: token $GH_TOKEN" \ + --request POST \ + --data "{\"event_type\": \"new_release\", \"client_payload\": { \"package_name\": \"$PKG_NAME\", \"version\": \"$VERSION_TAG\", \"author\": \"$REPO_OWNER\", \"short_desc\": \"$DESCRIPTION\", \"long_desc\": \"$DESCRIPTION\", \"homepage\": \"https://github.com/$REPO_FULL/\", \"link\": \"https://github.com/$REPO_FULL/releases/tag/$VERSION_TAG\" }}" \ + "https://api.github.com/repos/$PKG_REGISTRY/dispatches" diff --git a/CLAUDE.md b/CLAUDE.md index 4f4e44f..c341fa3 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -395,14 +395,6 @@ Git hooks for: - **Plugins**: mkdocstrings, social cards, glightbox - **Watch paths**: src/, examples/, docs/, docs/.hooks/ -### 
`Makefile` -Convenience commands: -- `make install`: Setup dependencies -- `make test`: Run tests -- `make lint`: Run linting -- `make docs`: Serve documentation - - ## CI/CD The project uses GitHub Actions for CI/CD: @@ -416,12 +408,57 @@ The project uses GitHub Actions for CI/CD: ## Release Process -1. **Update version** in `pyproject.toml` -2. **Update CHANGELOG.md** with release notes -3. **Run full test suite**: `uv run pytest --cov` -4. **Build package**: `uv build` -5. **Test package locally**: `uv pip install dist/hother_cancelable-*.whl` -6. **Publish to PyPI**: `uv publish` -7. **Tag release**: `git tag v0.1.0 && git push --tags` -8. **Deploy docs**: `uv run mike deploy --update-aliases v0.1 latest` -9. **Create GitHub release** with changelog +This project uses **python-semantic-release** for fully automated versioning and releases. + +### How It Works + +Releases happen automatically when commits are pushed to `main`: +1. Commits are analyzed using conventional commit format +2. Version is automatically bumped based on commit types +3. CHANGELOG.md is updated +4. Git tag is created and pushed (GPG signed) +5. Package is built and published to PyPI (via Trusted Publishing) +6. GitHub Release is created +7. 
Documentation is deployed + +### Version Bumping Rules + +| Commit Type | Bump | Example | +|-------------|------|---------| +| `feat:` | Minor | 0.5.0 → 0.6.0 | +| `fix:`, `perf:`, `refactor:` | Patch | 0.5.0 → 0.5.1 | +| `feat!:`, `BREAKING CHANGE:` | Major | 0.5.0 → 1.0.0 | +| `docs:`, `chore:`, `ci:`, `style:`, `test:` | None | No release | + +### Local Version Preview + +```bash +# Check current version +grep 'version = ' pyproject.toml | cut -d'"' -f2 + +# Preview next version (on main branch) +uv run semantic-release --noop version --print +``` + +### Manual Release Trigger + +```bash +# Via GitHub UI: Actions → Semantic Release → Run workflow +# or via gh CLI: +gh workflow run semantic-release.yml +``` + +### Documentation Deployment + +Docs are automatically deployed on releases. Manual deployment: + +```bash +# Deploy specific version +uv run mike deploy --push --update-aliases v0.5 latest + +# Set default +uv run mike set-default latest + +# Serve locally +uv run mkdocs serve +``` diff --git a/Makefile b/Makefile index 54f7951..a8f48f4 100644 --- a/Makefile +++ b/Makefile @@ -94,11 +94,9 @@ docs-publish: # Publishes the documentation licenses: uvx --from pip-licenses==5.0.0 pip-licenses --from=mixed --order count -f $(FORMAT) --output-file licenses.$(FORMAT) -changelog: ### Generate full changelog - git-cliff -o CHANGELOG.md +version-check: ### Show current and next version + @echo "Current: $(shell grep 'version = ' pyproject.toml | cut -d'"' -f2)" + @echo "Next: $(shell uv run semantic-release --noop version --print 2>/dev/null || echo 'No release needed (must be on main branch)')" -changelog-unreleased: ### Preview unreleased changes - @git-cliff --unreleased - -changelog-tag: ### Get changelog for latest tag (for releases) - @git-cliff --latest --strip header +changelog-preview: ### Preview changelog for unreleased commits + @uv run semantic-release changelog diff --git a/README.md b/README.md index 2081f6d..773ac12 100644 --- a/README.md +++ 
b/README.md @@ -237,75 +237,112 @@ uv build ### Release process -This project uses Git tags for versioning with automatic semantic versioning based on conventional commits. Version numbers are automatically derived from Git tags using hatch-vcs. +This project uses [python-semantic-release](https://python-semantic-release.readthedocs.io/) for fully automated versioning and releases. Every commit to the `main` branch is analyzed using conventional commits, and releases are created automatically when needed. -#### Quick Release Commands +#### How It Works -```bash -# Check current version -hatch version +1. **Commit with conventional format** to the `main` branch +2. **GitHub Actions automatically** analyzes commits, determines version bump, creates tag, updates changelog, publishes to PyPI, and creates GitHub release +3. **Documentation** is automatically deployed when a release is published -# Create development release (v1.0.0 → v1.0.1-dev1) -hatch release dev +No manual intervention required! 
🎉 -# Create release candidate (v1.0.1-dev1 → v1.0.1rc1) -hatch release rc +#### Version Bumping Rules -# Create final release (v1.0.1rc1 → v1.0.1) -hatch release final -``` +| Commit Type | Version Bump | Example | +|-------------|--------------|---------| +| `feat:` | Minor | 0.5.0 → 0.6.0 | +| `fix:`, `perf:`, `refactor:` | Patch | 0.5.0 → 0.5.1 | +| `feat!:`, `BREAKING CHANGE:` | Major | 0.5.0 → 1.0.0 | +| `docs:`, `chore:`, `ci:`, `style:`, `test:` | No release | - | -#### Release from Specific Commit +#### Conventional Commit Examples -You can optionally specify a commit SHA to create a release from: ```bash -# Release from a specific commit -hatch release dev abc123 -hatch release rc def456 -hatch release final 789xyz -``` +# Minor version bump (new feature) +git commit -m "feat: add streaming cancellation support" -The SHA must be: -- Reachable from HEAD (on current branch history) -- Not already included in a previous release +# Patch version bump (bug fix) +git commit -m "fix: resolve race condition in token cancellation" -#### How it Works +# Major version bump (breaking change) +git commit -m "feat!: redesign cancellation API -- **Development releases** (`dev`): Increments patch version and adds `-dev` suffix -- **Release candidates** (`rc`): Removes `-dev` and adds `rc` suffix -- **Final releases** (`final`): Uses git-cliff to analyze commits and automatically bumps major/minor/patch based on conventional commits +BREAKING CHANGE: CancellationToken.cancel() is now async" +``` + +#### Manual Release Trigger -The release process: -1. Analyzes commit history (for final releases) -2. Calculates the next version number -3. Creates and pushes the git tag -4. 
GitHub Actions automatically builds and publishes the release +If needed, you can manually trigger a release via GitHub Actions: -#### Manual Tagging (Advanced) +```bash +# Go to: Actions → Semantic Release → Run workflow → Run on main branch +``` -If needed, you can still create tags manually: +Or use the `gh` CLI: ```bash -# Manual tag creation -git tag -a v1.2.3 -m "Release v1.2.3" -git push origin v1.2.3 +gh workflow run semantic-release.yml ``` -### Changelog Management +#### Local Version Preview + +Check what the next version would be without making changes: -This project uses [git-cliff](https://git-cliff.org/) to automatically generate changelogs from conventional commits. +```bash +# Check current version +grep 'version = ' pyproject.toml | cut -d'"' -f2 +# Preview next version (requires being on main branch) +uv run semantic-release --noop version --print ``` -# Generate/update CHANGELOG.md -make changelog -# Preview unreleased changes -make changelog-unreleased +#### PyPI Trusted Publishing + +This project uses PyPI's Trusted Publishing for secure, token-free releases. The GitHub Actions workflow is automatically authorized to publish to PyPI via OIDC. + +**No API tokens needed!** The workflow authenticates using: +- Publisher: GitHub Actions +- Repository: `hotherio/cancelable` +- Workflow: `semantic-release.yml` + +#### Release Checklist for Maintainers -# Get changelog for latest tag (used in releases) -make changelog-tag +When preparing for a release: + +- [ ] Ensure all PRs use conventional commit format in titles +- [ ] Verify CI passes on main branch +- [ ] Commit messages follow conventional commits specification +- [ ] Breaking changes are documented in commit body with `BREAKING CHANGE:` +- [ ] Push to main or merge PR - release happens automatically! + +#### Changelog + +The changelog is automatically generated from conventional commits and updated on every release. View it at [CHANGELOG.md](CHANGELOG.md). 
+ +### Documentation Deployment + +Documentation is automatically built and deployed when: +- A release is published (triggered by semantic-release) +- Changes are pushed to `docs/`, `mkdocs.yml`, or the workflow file on `main` + +Manual deployment commands: +```bash +# Deploy a specific version +uv run mike deploy --push --update-aliases v0.5 latest + +# Set default version +uv run mike set-default latest + +# List deployed versions +uv run mike list ``` -The changelog is automatically updated and included in GitHub releases when you push a version tag. +Check documentation locally: +```bash +uv run mkdocs serve +# or with mike +uv run mike serve +``` Generate the licenses: ``` diff --git a/docs/.hooks/main.py b/docs/.hooks/main.py index 7d5193e..ed8e603 100644 --- a/docs/.hooks/main.py +++ b/docs/.hooks/main.py @@ -72,7 +72,7 @@ def render_example_files(markdown: str) -> str: Markdown with #! directives replaced by code blocks """ # Pattern: #! followed by file path (at start of line) - pattern = r'^#!\s*(.+\.py)\s*$' + pattern = r"^#!\s*(.+\.py)\s*$" def replace_directive(match: re.Match) -> str: """Replace a single #! 
directive with rendered code.""" @@ -128,29 +128,29 @@ def create_package_manager_tabs(markdown: str) -> str: Markdown with tabbed package manager commands """ # Pattern: bash code blocks containing pip install or uv add - pattern = r'```bash\n((?:pip install|uv (?:add|pip install))[^\n]+)\n```' + pattern = r"```bash\n((?:pip install|uv (?:add|pip install))[^\n]+)\n```" def create_tabs(match: re.Match) -> str: """Create tabbed alternatives for a package manager command.""" command = match.group(1).strip() # Extract package name - if 'pip install' in command: - package = command.replace('pip install', '').strip() - elif 'uv add' in command: - package = command.replace('uv add', '').strip() - elif 'uv pip install' in command: - package = command.replace('uv pip install', '').strip() + if "pip install" in command: + package = command.replace("pip install", "").strip() + elif "uv add" in command: + package = command.replace("uv add", "").strip() + elif "uv pip install" in command: + package = command.replace("uv pip install", "").strip() else: # Don't modify if we can't parse it return match.group(0) # Skip if it's a complex command (contains && or other operators) - if any(op in command for op in ['&&', '||', ';', '|']): + if any(op in command for op in ["&&", "||", ";", "|"]): return match.group(0) # Generate tabbed interface (uv first as default) - return f'''=== "uv" + return f"""=== "uv" ```bash uv add {package} ``` @@ -158,7 +158,7 @@ def create_tabs(match: re.Match) -> str: === "pip" ```bash pip install {package} - ```''' + ```""" # Replace all package manager commands return re.sub(pattern, create_tabs, markdown, flags=re.MULTILINE) diff --git a/docs/.hooks/snippets.py b/docs/.hooks/snippets.py index 5ad72fe..60c8bf0 100644 --- a/docs/.hooks/snippets.py +++ b/docs/.hooks/snippets.py @@ -54,10 +54,10 @@ async def main(): """ # Pattern for section start: ### [section_name] - START_PATTERN = re.compile(r'^\s*###\s*\[(\w+)\]\s*$') + START_PATTERN = 
re.compile(r"^\s*###\s*\[(\w+)\]\s*$") # Pattern for section end: ### [/section_name] - END_PATTERN = re.compile(r'^\s*###\s*\[/(\w+)\]\s*$') + END_PATTERN = re.compile(r"^\s*###\s*\[/(\w+)\]\s*$") def __init__(self, file_path: Path): """Initialize extractor for a specific file. @@ -290,7 +290,7 @@ def replace_snippet(match: re.Match) -> str: # Parse sections list if provided sections_list = None if sections_str: - sections_list = [s.strip() for s in sections_str.split(',')] + sections_list = [s.strip() for s in sections_str.split(",")] return process_snippet_directive(file_path, section, sections_list) diff --git a/docs/.hooks/utils.py b/docs/.hooks/utils.py index 39fcec0..9bd7bee 100644 --- a/docs/.hooks/utils.py +++ b/docs/.hooks/utils.py @@ -41,7 +41,7 @@ def strip_leading_docstring(code: str) -> str: """ # Match triple-quoted strings at the start (with optional shebang/encoding) pattern = r'^(#!.*?\n)?(# -\*- coding:.*?\n)?(\s*"""[\s\S]*?"""\s*\n|\s*\'\'\'[\s\S]*?\'\'\'\s*\n)?' - return re.sub(pattern, r'\1\2', code, count=1) + return re.sub(pattern, r"\1\2", code, count=1) def format_code_block( @@ -73,9 +73,9 @@ def format_code_block( # Add source link if start_line and end_line: - result += f'_[View source on GitHub (lines {start_line}-{end_line})]({github_link})_' + result += f"_[View source on GitHub (lines {start_line}-{end_line})]({github_link})_" else: - result += f'_[View source on GitHub]({github_link})_' + result += f"_[View source on GitHub]({github_link})_" return result diff --git a/docs/releases.md b/docs/releases.md new file mode 100644 index 0000000..4debf86 --- /dev/null +++ b/docs/releases.md @@ -0,0 +1,513 @@ +# Release Automation Guide + +This guide documents the automated release process for the Cancelable project using python-semantic-release. + +## Overview + +The Cancelable project uses **fully automated semantic versioning**. Every commit to the `main` branch is analyzed, and when appropriate, a new version is automatically released. 
+ +### Workflow Diagram + +``` +┌─────────────────┐ +│ Push to main │ +└────────┬────────┘ + │ + v +┌─────────────────┐ +│ Analyze commits │ ← python-semantic-release +│ (conventional) │ +└────────┬────────┘ + │ + v + ┌────┴────┐ + │ Release │ + │ needed? │ + └────┬────┘ + │ + ┌────┴────┐ + │ No │ ──→ [Stop] + │ │ + └─────────┘ + │ + ┌────┴────┐ + │ Yes │ + │ │ + └────┬────┘ + │ + v +┌─────────────────┐ +│ 1. Bump version │ ── Update pyproject.toml +│ 2. Update │ ── Update CHANGELOG.md +│ 3. Create tag │ ── Git tag (GPG signed) +│ 4. Build dist │ ── uv build +│ 5. Publish PyPI │ ── Trusted Publishing +│ 6. Create GH │ ── GitHub Release +│ Release │ +│ 7. Deploy docs │ ── mike deploy +└─────────────────┘ +``` + +## Conventional Commits + +### Format + +``` +[optional scope]: + +[optional body] + +[optional footer(s)] +``` + +### Commit Types + +| Type | Description | Version Bump | Example | +|------|-------------|--------------|---------| +| `feat` | New feature | Minor (0.5.0 → 0.6.0) | `feat: add signal-based cancellation` | +| `fix` | Bug fix | Patch (0.5.0 → 0.5.1) | `fix: resolve race condition in token` | +| `perf` | Performance improvement | Patch | `perf: optimize registry lookup` | +| `refactor` | Code refactoring | Patch | `refactor: simplify token linking` | +| `docs` | Documentation only | None | `docs: update README examples` | +| `chore` | Build/tooling changes | None | `chore: update dependencies` | +| `ci` | CI configuration | None | `ci: add coverage reporting` | +| `style` | Code style/formatting | None | `style: format with ruff` | +| `test` | Add/update tests | None | `test: add timeout source tests` | +| `revert` | Revert previous commit | None | `revert: "feat: add feature X"` | + +### Breaking Changes + +Breaking changes trigger a **major version bump** (0.5.0 → 1.0.0): + +**Option 1: Use `!` after type:** +```bash +git commit -m "feat!: redesign cancellation API + +The CancellationToken.cancel() method is now async. 
+Synchronous code should use cancel_sync() instead." +``` + +**Option 2: Use `BREAKING CHANGE:` footer:** +```bash +git commit -m "feat: redesign cancellation API + +BREAKING CHANGE: The CancellationToken.cancel() method is now +async. Synchronous code should use cancel_sync() instead." +``` + +### Examples + +**Feature (minor bump):** +```bash +git commit -m "feat: add condition-based cancellation source + +Adds ConditionSource that polls a predicate function and +cancels when it returns True. Useful for resource monitoring." +``` + +**Bug fix (patch bump):** +```bash +git commit -m "fix: prevent deadlock in cross-thread cancellation + +Ensures proper lock ordering when cancelling from different threads." +``` + +**Performance (patch bump):** +```bash +git commit -m "perf: cache compiled regex patterns in commit parser" +``` + +**Documentation (no release):** +```bash +git commit -m "docs: add FastAPI integration examples" +``` + +**Multiple changes (use highest priority):** +```bash +# This will trigger a minor bump (feat takes precedence) +git commit -m "feat: add new source type + +Also fixes minor bug in existing timeout source." +``` + +## Enforcement + +### Pre-commit Validation + +Commit messages are validated **before** commit using lefthook: + +```yaml +# lefthook.yml +commit-msg: + commands: + conventional: + run: | + if ! head -1 {1} | grep -qE '^(feat|fix|docs|style|refactor|perf|test|build|ci|chore|revert)(\(.+?\))?: .{1,}$'; then + echo "❌ Commit message must follow Conventional Commits format" + exit 1 + fi +``` + +**Bypass if needed (not recommended):** +```bash +git commit --no-verify -m "wip: temporary commit" +``` + +### GitHub PR Title Check + +PR titles are also validated in CI to ensure they follow conventional commits. 
+ +## Local Development + +### Preview Next Version + +Check what version would be released without making changes: + +```bash +# Must be on main branch +git checkout main +git pull + +# Preview next version +uv run semantic-release --noop version --print + +# With debug output +uv run semantic-release --noop --verbose version --print +``` + +**Example output:** +``` +0.6.0 # Next version would be 0.6.0 +``` + +### Preview Changelog + +Generate changelog for unreleased commits: + +```bash +uv run semantic-release changelog +``` + +### Test Configuration + +Validate semantic-release configuration: + +```bash +# Show configuration +uv run semantic-release generate-config + +# Check if release would happen +uv run semantic-release --noop version --print +``` + +## Manual Release Trigger + +While releases are automatic, you can manually trigger the workflow: + +### Via GitHub UI + +1. Go to **Actions** → **Semantic Release** +2. Click **Run workflow** +3. Select branch: `main` +4. Click **Run workflow** + +### Via GitHub CLI + +```bash +gh workflow run semantic-release.yml +``` + +## PyPI Trusted Publishing + +The project uses PyPI's **Trusted Publishing** for secure, token-free releases. + +### How It Works + +1. GitHub Actions workflow runs with `id-token: write` permission +2. GitHub provides OIDC token proving workflow identity +3. PyPI verifies token and authorizes publish +4. No API tokens stored or managed! + +### Configuration + +**On PyPI:** +- Publisher: GitHub Actions +- Owner: `hotherio` +- Repository: `cancelable` +- Workflow: `semantic-release.yml` +- Environment: (none) + +**In workflow:** +```yaml +permissions: + id-token: write # Required for Trusted Publishing + contents: write # Create tags and releases + +- name: Publish to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 + # No password/token needed! +``` + +### Setup for New Projects + +1. Create package on PyPI (one-time manual publish) +2. Go to PyPI → Project → Publishing +3. 
Add GitHub publisher: + - Owner: organization/user + - Repository: repo-name + - Workflow: semantic-release.yml +4. Save configuration +5. Future releases publish automatically! + +## Troubleshooting + +### No Release Created + +**Problem:** Pushed to main, but no release was created. + +**Possible causes:** + +1. **No releasable commits since last release:** + ```bash + # Check commits since last tag + git log $(git describe --tags --abbrev=0)..HEAD --oneline + + # Look for feat:, fix:, perf:, or refactor: commits + ``` + + **Solution:** Only `feat`, `fix`, `perf`, and `refactor` trigger releases. + +2. **Branch not configured for releases:** + ```bash + # Verify you're on main + git branch --show-current + ``` + + **Solution:** Releases only happen from `main` branch. + +3. **Invalid commit format:** + ```bash + # Check recent commits + git log --oneline -5 + ``` + + **Solution:** Ensure commits follow conventional commits format. + +### Version Conflict + +**Problem:** `Version x.y.z already exists on PyPI` + +**Cause:** Tag exists but PyPI publish failed previously. + +**Solution:** +```bash +# Option 1: Delete local and remote tag, create new commit +git tag -d vX.Y.Z +git push origin :refs/tags/vX.Y.Z + +# Make a small change +git commit --allow-empty -m "chore: trigger new release" +git push + +# Option 2: Manually publish to PyPI +uv build +uv publish +``` + +### PyPI Upload Fails + +**Problem:** `Error: Trusted publishing exchange failure` + +**Possible causes:** + +1. **Publisher not configured on PyPI:** + - Go to PyPI project settings → Publishing + - Add GitHub publisher with correct details + +2. **Workflow permissions incorrect:** + ```yaml + # Verify in .github/workflows/semantic-release.yml + permissions: + id-token: write # Must be present + ``` + +3. 
**Wrong workflow name:** + - PyPI publisher must match exact workflow filename + - Default: `semantic-release.yml` + +### Tags Not Pushed + +**Problem:** Release created but tag not visible on GitHub. + +**Cause:** GPG signing configuration issue. + +**Solution:** +```bash +# Check workflow logs for GPG errors +gh run list --workflow=semantic-release.yml --limit 1 +gh run view --log + +# Verify secrets are set +gh secret list + +# Required secrets: +# - HOTHER_BOT_GPG_KEY +# - HOTHER_BOT_GPG_PASSPHRASE +``` + +### Documentation Not Deployed + +**Problem:** Release succeeded but docs not updated. + +**Cause:** Docs workflow depends on `release.published` event. + +**Check:** +```bash +# Verify docs.yaml uses correct event +grep -A 3 "release:" .github/workflows/docs.yaml + +# Should show: +# release: +# types: +# - published +``` + +**Manual deploy:** +```bash +uv sync --group doc +uv run mike deploy --push --update-aliases v0.5 latest +``` + +## Migration from git-cliff + +### What Changed + +| Before (git-cliff) | After (PSR) | +|-------------------|-------------| +| Manual trigger via GitHub Actions | Automatic on push to main | +| Three-stage releases (dev/rc/final) | Single semantic release | +| `tools/release.py` custom script | python-semantic-release | +| `cliff.toml` configuration | `pyproject.toml` configuration | +| Manual version bumping | Automatic from commits | +| Separate changelog generation | Integrated changelog | + +### Removed Files + +- `tools/release.py` - Custom release script +- `cliff.toml` - git-cliff configuration +- `.github/workflows/bump.yml` - Manual bump workflow +- `.github/workflows/release.yml` - Tag-triggered release + +### Preserved + +- **Conventional commits** - Still required (now enforced) +- **GPG signing** - Tags and commits still signed +- **CHANGELOG.md** - Still auto-generated +- **PyPI publishing** - Now via Trusted Publishing +- **Documentation deployment** - Still automatic + +## Best Practices + +### Commit Messages 
+ +1. **Write clear, descriptive messages:** + ```bash + # Good + feat: add timeout parameter to Cancelable context + + # Bad + feat: add timeout + ``` + +2. **Include context in body:** + ```bash + feat: add condition-based cancellation + + Allows cancellation based on custom predicates that are + polled periodically. Useful for resource monitoring and + external stop signals. + ``` + +3. **Document breaking changes:** + ```bash + feat!: make CancellationToken.cancel() async + + BREAKING CHANGE: The cancel() method is now async and must + be awaited. Synchronous callers should use cancel_sync() + instead. + + Migration guide: + - Old: token.cancel() + - New: await token.cancel() # async context + - New: token.cancel_sync() # sync context + ``` + +### Pull Requests + +1. **Use conventional format in PR title:** + ``` + feat: add feature X + fix: resolve bug Y + docs: update guide Z + ``` + +2. **PR title becomes the commit message** when squash merging + +3. **Include breaking changes in PR description** if applicable + +### Versioning Strategy + +- **0.x.y**: Pre-1.0 development (current) + - Breaking changes allowed on minor bumps + - Set `major_on_zero = false` in config + +- **1.x.y**: Stable releases (future) + - Breaking changes require major bump + - Set `major_on_zero = true` when ready + +## Advanced Configuration + +### Custom Commit Types + +To add custom commit types, edit `pyproject.toml`: + +```toml +[tool.semantic_release.commit_parser_options] +allowed_tags = [ + "build", "chore", "ci", "docs", "feat", "fix", + "perf", "refactor", "style", "test", "revert", + "custom", # Add custom type +] +minor_tags = ["feat", "custom"] # Types that trigger minor bump +patch_tags = ["fix", "perf", "refactor"] # Types that trigger patch bump +``` + +### Changelog Template + +Custom changelog templates can be added in `templates/` directory: + +```bash +# Project structure +templates/ + └── CHANGELOG.md.j2 +``` + +### Version Variables + +Access version in other 
files using template variables: + +```toml +[tool.semantic_release] +version_toml = [ + "pyproject.toml:project.version", + "src/hother/cancelable/__init__.py:__version__", +] +``` + +## Reference + +- [python-semantic-release Documentation](https://python-semantic-release.readthedocs.io/) +- [Conventional Commits Specification](https://www.conventionalcommits.org/) +- [Semantic Versioning](https://semver.org/) +- [PyPI Trusted Publishing](https://docs.pypi.org/trusted-publishers/) +- [Keep a Changelog](https://keepachangelog.com/) diff --git a/examples/01_basics/02_timeout_cancelation.py b/examples/01_basics/02_timeout_cancelation.py index 497a2d6..fa5ab3e 100644 --- a/examples/01_basics/02_timeout_cancelation.py +++ b/examples/01_basics/02_timeout_cancelation.py @@ -29,11 +29,11 @@ async def main() -> None: except anyio.get_cancelled_exc_class(): if cancel: - print(f" Operation timed out after {(cancel.context.duration.total_seconds() if cancel.context.duration else 0.0):.2f}s") - print(f" Final status: {cancel.context.status.value}") print( - f" Cancel reason: {cancel.context.cancel_reason.value if cancel.context.cancel_reason else 'unknown'}" + f" Operation timed out after {(cancel.context.duration.total_seconds() if cancel.context.duration else 0.0):.2f}s" ) + print(f" Final status: {cancel.context.status.value}") + print(f" Cancel reason: {cancel.context.cancel_reason.value if cancel.context.cancel_reason else 'unknown'}") # --8<-- [end:example] diff --git a/examples/02_advanced/01_combined_cancelation.py b/examples/02_advanced/01_combined_cancelation.py index b0c954d..31ce802 100644 --- a/examples/02_advanced/01_combined_cancelation.py +++ b/examples/02_advanced/01_combined_cancelation.py @@ -69,7 +69,9 @@ async def main(): print("Token cancel call completed") except asyncio.CancelledError: print(" Operation was cancelled") - print(f" Reason: {final_cancellable.context.cancel_reason.value if final_cancellable.context.cancel_reason else 'unknown'}") + print( 
+ f" Reason: {final_cancellable.context.cancel_reason.value if final_cancellable.context.cancel_reason else 'unknown'}" + ) print(f" Message: {final_cancellable.context.cancel_message or 'no message'}") # --8<-- [end:example] diff --git a/examples/02_advanced/07_condition_cancelation.py b/examples/02_advanced/07_condition_cancelation.py index b5ba10a..c7c26a0 100644 --- a/examples/02_advanced/07_condition_cancelation.py +++ b/examples/02_advanced/07_condition_cancelation.py @@ -42,9 +42,7 @@ async def create_stop_file_after_delay(): async with asyncio.TaskGroup() as tg: tg.create_task(create_stop_file_after_delay()) - async with Cancelable.with_condition( - check_stop_file, condition_name="file_exists", name="file_watcher" - ) as cancel: + async with Cancelable.with_condition(check_stop_file, condition_name="file_exists", name="file_watcher") as cancel: print(f" Started file watcher: {cancel.context.id}") print(" Waiting for stop file to appear...") @@ -83,9 +81,7 @@ def check_db_error(): return bool(db_state.get("error", False)) # Create condition-based cancelables - complete_cancel = Cancelable.with_condition( - check_db_complete, condition_name="db_complete", name="complete_monitor" - ) + complete_cancel = Cancelable.with_condition(check_db_complete, condition_name="db_complete", name="complete_monitor") error_cancel = Cancelable.with_condition(check_db_error, condition_name="db_error", name="error_monitor") # Combine conditions (cancel if either complete or error) @@ -140,9 +136,7 @@ def check_quiet_period(): return system_state["active_connections"] < 10 and system_state["cpu_usage"] < 20.0 # Create condition-based cancelables - health_cancel = Cancelable.with_condition( - check_system_health, condition_name="system_health", name="health_monitor" - ) + health_cancel = Cancelable.with_condition(check_system_health, condition_name="system_health", name="health_monitor") quiet_cancel = Cancelable.with_condition(check_quiet_period, condition_name="quiet_period", 
name="quiet_monitor") # Cancel if system becomes unhealthy OR enters quiet period @@ -193,9 +187,7 @@ def check_target_reached(): return bool(progress_state["target_reached"]) # Create sources - condition_cancel = Cancelable.with_condition( - check_target_reached, condition_name="target_check", name="target_monitor" - ) + condition_cancel = Cancelable.with_condition(check_target_reached, condition_name="target_check", name="target_monitor") timeout_cancel = Cancelable.with_timeout(5.0, name="processing_timeout") # Combine condition and timeout diff --git a/examples/02_advanced/08_signal_handling.py b/examples/02_advanced/08_signal_handling.py index cd7af6f..0ab0c08 100644 --- a/examples/02_advanced/08_signal_handling.py +++ b/examples/02_advanced/08_signal_handling.py @@ -204,4 +204,4 @@ def custom_sigusr1_handler(signum: int, frame: Any) -> None: print("Run with: python examples/02_advanced/08_signal_handling.py") print() - asyncio.run(main()) \ No newline at end of file + asyncio.run(main()) diff --git a/examples/02_advanced/09_all_of_combining.py b/examples/02_advanced/09_all_of_combining.py index 7431e36..66cd402 100644 --- a/examples/02_advanced/09_all_of_combining.py +++ b/examples/02_advanced/09_all_of_combining.py @@ -78,9 +78,7 @@ async def batch_processor_with_requirements(): elapsed = time.time() - start_time print("\n✅ Batch processing completed!") print(f"Final: {items_processed} items processed in {elapsed:.1f}s") - print( - f"Requirements met: Time={elapsed >= 60}, Items={items_processed >= 100}" - ) + print(f"Requirements met: Time={elapsed >= 60}, Items={items_processed >= 100}") async def demonstration_fast_items_slow_time(): @@ -100,9 +98,7 @@ async def demonstration_fast_items_slow_time(): print("Should continue for full 30 seconds\n") min_time = TimeoutSource(timeout=30.0) - min_items = ConditionSource( - condition=lambda: items_processed >= 100, check_interval=0.5 - ) + min_items = ConditionSource(condition=lambda: items_processed >= 100, 
check_interval=0.5) all_of = AllOfSource([min_time, min_items]) cancelable = Cancelable(name="fast_items") @@ -124,10 +120,7 @@ async def demonstration_fast_items_slow_time(): except anyio.get_cancelled_exc_class(): elapsed = time.time() - start_time - print( - f"\n✅ Completed: {items_processed} items in {elapsed:.1f}s " - f"(waited for time requirement)" - ) + print(f"\n✅ Completed: {items_processed} items in {elapsed:.1f}s " f"(waited for time requirement)") async def demonstration_slow_items_fast_time(): @@ -147,9 +140,7 @@ async def demonstration_slow_items_fast_time(): print("Should continue until 50 items processed\n") min_time = TimeoutSource(timeout=10.0) - min_items = ConditionSource( - condition=lambda: items_processed >= 50, check_interval=0.5 - ) + min_items = ConditionSource(condition=lambda: items_processed >= 50, check_interval=0.5) all_of = AllOfSource([min_time, min_items]) cancelable = Cancelable(name="slow_items") @@ -171,10 +162,7 @@ async def demonstration_slow_items_fast_time(): except anyio.get_cancelled_exc_class(): elapsed = time.time() - start_time - print( - f"\n✅ Completed: {items_processed} items in {elapsed:.1f}s " - f"(waited for item requirement)" - ) + print(f"\n✅ Completed: {items_processed} items in {elapsed:.1f}s " f"(waited for item requirement)") async def main(): diff --git a/examples/02_advanced/09_resource_monitoring.py b/examples/02_advanced/09_resource_monitoring.py index e431035..cb0c95d 100644 --- a/examples/02_advanced/09_resource_monitoring.py +++ b/examples/02_advanced/09_resource_monitoring.py @@ -28,6 +28,7 @@ # Check if psutil is available try: import psutil + _psutil = psutil _has_psutil = True except ImportError: @@ -264,9 +265,7 @@ async def main(): print("Starting data export with conservative resource monitoring") print("Thresholds: Memory 75%, CPU 85%, Disk 90%") resources = get_current_resources() - print( - f"Current: Memory {resources['memory']:.1f}%, CPU {resources['cpu']:.1f}%, Disk 
{resources['disk']:.1f}%\n" - ) + print(f"Current: Memory {resources['memory']:.1f}%, CPU {resources['cpu']:.1f}%, Disk {resources['disk']:.1f}%\n") try: async with cancelable: diff --git a/examples/03_integrations/06_retry_tenacity.py b/examples/03_integrations/06_retry_tenacity.py index 71f743f..4d34c6d 100644 --- a/examples/03_integrations/06_retry_tenacity.py +++ b/examples/03_integrations/06_retry_tenacity.py @@ -81,9 +81,7 @@ async def example_wrap_with_tenacity(): wrapped_op = cancel.wrap(unreliable_operation) # Use Tenacity for retry logic - async for attempt in AsyncRetrying( - stop=stop_after_attempt(5), wait=wait_exponential(multiplier=1, min=1, max=10) - ): + async for attempt in AsyncRetrying(stop=stop_after_attempt(5), wait=wait_exponential(multiplier=1, min=1, max=10)): with attempt: # Report progress attempt_num = attempt.retry_state.attempt_number @@ -275,9 +273,7 @@ async def example_progress_tracking(): async for attempt in AsyncRetrying(stop=stop_after_attempt(5), wait=wait_exponential(multiplier=1)): with attempt: attempt_num = attempt.retry_state.attempt_number - await cancel.report_progress( - f"📊 Attempt {attempt_num}", {"attempt": attempt_num, "stage": "before_execution"} - ) + await cancel.report_progress(f"📊 Attempt {attempt_num}", {"attempt": attempt_num, "stage": "before_execution"}) result = await wrapped_op(400) diff --git a/examples/04_streams/01_stream_processing.py b/examples/04_streams/01_stream_processing.py index ac42d93..1d2061e 100644 --- a/examples/04_streams/01_stream_processing.py +++ b/examples/04_streams/01_stream_processing.py @@ -172,9 +172,7 @@ async def transform_pipeline(stream: AsyncIterator[dict]) -> AsyncIterator[dict] yield enriched # Process with pipeline - cancelable = Cancelable.with_timeout(5.0, name="transform_pipeline").on_progress( - lambda op_id, msg, meta: print(f" {msg}") - ) + cancelable = Cancelable.with_timeout(5.0, name="transform_pipeline").on_progress(lambda op_id, msg, meta: print(f" {msg}")) 
async with cancelable: comfort_stats = {"optimal": 0, "good": 0, "poor": 0} diff --git a/examples/05_monitoring/01_monitoring_dashboard.py b/examples/05_monitoring/01_monitoring_dashboard.py index e61c7fa..46609b1 100644 --- a/examples/05_monitoring/01_monitoring_dashboard.py +++ b/examples/05_monitoring/01_monitoring_dashboard.py @@ -237,7 +237,9 @@ async def simulated_data_processing( processed += batch if cancelable: - await cancelable.report_progress(f"Processed {processed}/{record_count} records", {"progress_percent": (processed / record_count) * 100}) + await cancelable.report_progress( + f"Processed {processed}/{record_count} records", {"progress_percent": (processed / record_count) * 100} + ) return {"dataset_id": dataset_id, "records_processed": processed} @@ -261,7 +263,9 @@ async def simulated_file_download( progress = (downloaded / size_mb) * 100 if cancelable: - await cancelable.report_progress(f"Downloading: {progress:.1f}%", {"downloaded_mb": downloaded, "total_mb": size_mb}) + await cancelable.report_progress( + f"Downloading: {progress:.1f}%", {"downloaded_mb": downloaded, "total_mb": size_mb} + ) return {"file_id": file_id, "size_mb": size_mb} diff --git a/examples/07_llm/01_llm_streaming.py b/examples/07_llm/01_llm_streaming.py index 286815c..39dd6a6 100644 --- a/examples/07_llm/01_llm_streaming.py +++ b/examples/07_llm/01_llm_streaming.py @@ -112,9 +112,7 @@ async def stream_with_cancelation(prompt: str, token: CancelationToken, conversa print("STREAMING OUTPUT:") print(f"{'='*70}\n") - async for chunk in await client.aio.models.generate_content_stream( - model="gemini-2.0-flash-exp", contents=contents - ): + async for chunk in await client.aio.models.generate_content_stream(model="gemini-2.0-flash-exp", contents=contents): # Get chunk text if not chunk.text: continue diff --git a/lefthook.yml b/lefthook.yml index 9300532..8abc37e 100644 --- a/lefthook.yml +++ b/lefthook.yml @@ -111,11 +111,12 @@ commit-msg: # Validate commit message format 
check-commit-msg: run: | - commit_regex='^(feat|fix|docs|style|refactor|test|chore|build|ci|perf|revert)(\(.+\))?: .+' + commit_regex='^(feat|fix|docs|style|refactor|test|chore|build|ci|perf|revert)(\(.+\))?!?: .+' if ! grep -qE "$commit_regex" {1}; then echo "❌ Commit message must follow conventional commits format!" echo "" echo "Format: : OR (): " + echo " !: for breaking changes" echo "" echo "Types:" echo " feat: New feature" @@ -133,5 +134,6 @@ commit-msg: echo "Examples:" echo " feat: add user authentication" echo " fix(auth): resolve login timeout issue" + echo " feat!: breaking API change" exit 1 fi diff --git a/pyproject.toml b/pyproject.toml index b36f3ee..e1d4859 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "hother-cancelable" -dynamic = ["version"] +version = "0.5.1" description = "A comprehensive async cancellation system for Python streams" readme = "README.md" authors = [{ name = "Alexandre Quemy", email="alexandre@hother.io" }] @@ -62,7 +62,6 @@ include = [ "CHANGELOG.md", "LICENSE", "README.md", - "cliff.toml", "lefthook.yml", "pyproject.toml", ] @@ -79,12 +78,13 @@ dev = [ "coverage[toml]>=7.10.7", "pretty_errors>=1.2.25", "ruff>=0.8.6", - "basedpyright>=1.23.0", + "basedpyright>=1.36.0", "twine>=6.0.0", "lefthook>=1.13.0", "pyyaml>=6.0.2", "git-cliff>=2.7.0", "detect-secrets>=1.5.0", + "python-semantic-release>=9.15.3", ] doc = [ "mkdocs==1.6.1", @@ -140,6 +140,7 @@ ignore = [ "D104", # Missing docstring in public package "D105", # Missing docstring in magic method "D107", # Missing docstring in __init__ + "ISC001", # Conflicts with formatter ] mccabe = { max-complexity = 15 } @@ -153,8 +154,18 @@ combine-as-imports = true convention = "google" [tool.ruff.lint.per-file-ignores] -"tests/**/*.py" = ["D", "PLR2004"] # Ignore docstrings and magic values in tests -"examples/**/*.py" = ["D101", "D103"] +"tests/**/*.py" = ["D", "PLR2004", "S101", "S110", "E722", "F401", "F811", "B023", "SIM117", "PLR0124"] # 
Comprehensive test ignores +"examples/**/*.py" = ["ALL"] # Examples are demonstration code, ignore all linting rules +"docs/.hooks/**/*.py" = ["ALL"] # Ignore all rules in docs hooks +"src/**/*.py" = ["D200", "D212", "D415", "D301"] # Ignore minor docstring formatting in src +"src/hother/cancelable/sources/condition.py" = ["S101"] # Allow assert for defensive programming +"src/hother/cancelable/streaming/**/*.py" = ["S311"] # Allow random in streaming simulators +"src/hother/cancelable/utils/decorators.py" = ["S101"] # Allow assert False for unreachable code markers +"src/hother/cancelable/core/exceptions.py" = ["N818"] # Exception naming convention (Cancelation vs CancelationError) is intentional +"src/hother/cancelable/core/cancelable.py" = ["PLR0912"] # Allow _determine_final_status to have multiple branches for exception handling +"src/hother/cancelable/integrations/fastapi.py" = ["B904"] # HTTPException raising patterns are intentional +"src/hother/cancelable/utils/testing.py" = ["B904"] # AssertionError raising in test utilities is intentional +"tools/**/*.py" = ["S602", "S110", "D"] # Allow subprocess shell=True, try-except-pass, and skip docstrings in internal tools # Format settings [tool.ruff.format] @@ -175,6 +186,7 @@ include = [ exclude = [ "tests", "examples", + "src/hother/cancelable/integrations/fastapi.py", # Optional dependency ] venvPath = "." 
venv = ".venv" @@ -197,7 +209,12 @@ filterwarnings = [ [tool.coverage.run] source = ["src/hother/cancelable"] -omit = ["*/tests/*", "*/__init__.py", "*/_version.py"] +omit = [ + "*/tests/*", + "*/__init__.py", + "*/_version.py", + "*/integrations/fastapi.py", # Optional dependency +] branch = true concurrency = ["multiprocessing", "thread"] # Use subdirectory for coverage data to keep root clean @@ -244,7 +261,29 @@ exclude_lines = [ [tool.hatch.envs.default] installer = "uv" -[tool.hatch.envs.default.scripts] -release = "python tools/release.py {args}" +[tool.semantic_release] +version_toml = ["pyproject.toml:project.version"] +branch = "main" +upload_to_vcs_release = true +upload_to_pypi = true +build_command = "uv build" +tag_format = "v{version}" +major_on_zero = false +allow_zero_version = true +changelog_file = "CHANGELOG.md" + +[tool.semantic_release.commit_parser_options] +allowed_tags = [ + "build", "chore", "ci", "docs", "feat", "fix", + "perf", "refactor", "style", "test", "revert" +] +minor_tags = ["feat"] +patch_tags = ["fix", "perf", "refactor"] + +[tool.semantic_release.changelog] +exclude_commit_patterns = [ + "^chore\\(release\\):", + "^Merge pull request", +] diff --git a/src/hother/cancelable/__init__.py b/src/hother/cancelable/__init__.py index f1c5c01..10d0c08 100644 --- a/src/hother/cancelable/__init__.py +++ b/src/hother/cancelable/__init__.py @@ -1,10 +1,11 @@ -""" -Cancelable - Async Cancelation System for Python Streams +"""Cancelable - Async Cancelation System for Python Streams A comprehensive, production-ready cancelation system for async operations with support for timeouts, signals, conditions, and manual cancelation. 
""" +import importlib.metadata + from .core.cancelable import Cancelable, current_operation from .core.exceptions import ( CancelationError, @@ -17,6 +18,7 @@ from .core.models import CancelationReason, OperationContext, OperationStatus from .core.registry import OperationRegistry from .core.token import CancelationToken +from .sources.composite import AllOfSource, AnyOfSource, CompositeSource from .types import ( ErrorCallback, ErrorCallbackType, @@ -27,9 +29,6 @@ ensure_cancelable, ) from .utils.anyio_bridge import AnyioBridge, call_soon_threadsafe -from .utils.threading_bridge import ThreadSafeRegistry -import importlib.metadata - from .utils.decorators import ( cancelable, cancelable_combine, @@ -40,7 +39,7 @@ with_timeout, ) from .utils.streams import cancelable_stream -from .sources.composite import AllOfSource, AnyOfSource, CompositeSource +from .utils.threading_bridge import ThreadSafeRegistry try: __version__ = importlib.metadata.version("hother-cancelable") diff --git a/src/hother/cancelable/core/__init__.py b/src/hother/cancelable/core/__init__.py index 651dc0d..fdfe662 100644 --- a/src/hother/cancelable/core/__init__.py +++ b/src/hother/cancelable/core/__init__.py @@ -1,5 +1,4 @@ -""" -Core components of the async cancellation system. +"""Core components of the async cancellation system. This module provides the fundamental building blocks for cancellable async operations: diff --git a/src/hother/cancelable/core/cancelable.py b/src/hother/cancelable/core/cancelable.py index 67345fb..5bb9362 100644 --- a/src/hother/cancelable/core/cancelable.py +++ b/src/hother/cancelable/core/cancelable.py @@ -1,6 +1,4 @@ -""" -Main Cancelable class implementation. 
-""" +"""Main Cancelable class implementation.""" from __future__ import annotations @@ -30,9 +28,10 @@ R = TypeVar("R") # Context variable for current operation -_current_operation: contextvars.ContextVar[Cancelable | None] = contextvars.ContextVar( - "current_operation", default=None -) +_current_operation: contextvars.ContextVar[Cancelable | None] = contextvars.ContextVar("current_operation", default=None) + +# Maximum items to keep in buffer to prevent unbounded memory growth +_MAX_BUFFER_SIZE = 1000 class LinkState(StrEnum): @@ -45,8 +44,7 @@ class LinkState(StrEnum): class Cancelable: - """ - Main cancelation helper with composable cancelation sources. + """Main cancelation helper with composable cancelation sources. Provides a unified interface for handling cancelation from multiple sources including timeouts, tokens, signals, and conditions. @@ -60,8 +58,7 @@ def __init__( metadata: dict[str, Any] | None = None, register_globally: bool = False, ): - """ - Initialize a new cancelable operation. + """Initialize a new cancelable operation. Args: operation_id: Unique operation identifier (auto-generated if not provided) @@ -123,8 +120,7 @@ def __init__( @property def token(self) -> LinkedCancelationToken: - """ - Get the cancellation token for this operation. + """Get the cancellation token for this operation. Returns: The LinkedCancelationToken managing this operation's cancellation state. @@ -132,8 +128,7 @@ def token(self) -> LinkedCancelationToken: return self._token def add_source(self, source: CancelationSource) -> Cancelable: - """ - Add a cancelation source to this operation. + """Add a cancelation source to this operation. This allows adding custom or composite sources (like AllOfSource) to an existing Cancelable instance. 
@@ -161,8 +156,7 @@ def add_source(self, source: CancelationSource) -> Cancelable: def with_timeout( cls, timeout: float | timedelta, operation_id: str | None = None, name: str | None = None, **kwargs: Any ) -> Cancelable: - """ - Create cancelable with timeout. + """Create cancelable with timeout. Args: timeout: Timeout duration in seconds or timedelta @@ -173,7 +167,7 @@ def with_timeout( Returns: Configured Cancelable instance """ - from ..sources.timeout import TimeoutSource + from hother.cancelable.sources.timeout import TimeoutSource if isinstance(timeout, timedelta): timeout = timeout.total_seconds() @@ -186,8 +180,7 @@ def with_timeout( def with_token( cls, token: CancelationToken, operation_id: str | None = None, name: str | None = None, **kwargs: Any ) -> Cancelable: - """ - Create a Cancelable operation using an existing cancellation token. + """Create a Cancelable operation using an existing cancellation token. This factory method allows you to create a cancellable operation that shares a cancellation token with other operations, enabling coordinated cancellation. @@ -224,11 +217,8 @@ def with_token( return instance @classmethod - def with_signal( - cls, *signals: int, operation_id: str | None = None, name: str | None = None, **kwargs: Any - ) -> Cancelable: - """ - Create cancelable with signal handling. + def with_signal(cls, *signals: int, operation_id: str | None = None, name: str | None = None, **kwargs: Any) -> Cancelable: + """Create cancelable with signal handling. 
Args: *signals: Signal numbers to handle @@ -239,7 +229,7 @@ def with_signal( Returns: Configured Cancelable instance """ - from ..sources.signal import SignalSource + from hother.cancelable.sources.signal import SignalSource instance = cls(operation_id=operation_id, name=name or "signal_based", **kwargs) instance._sources.append(SignalSource(*signals)) @@ -255,8 +245,7 @@ def with_condition( name: str | None = None, **kwargs: Any, ) -> Cancelable: - """ - Create cancelable with condition checking. + """Create cancelable with condition checking. Args: condition: Callable that returns True when cancelation should occur @@ -269,7 +258,7 @@ def with_condition( Returns: Configured Cancelable instance """ - from ..sources.condition import ConditionSource + from hother.cancelable.sources.condition import ConditionSource instance = cls(operation_id=operation_id, name=name or "condition_based", **kwargs) instance._sources.append(ConditionSource(condition, check_interval, condition_name)) @@ -277,8 +266,7 @@ def with_condition( # Composition def combine(self, *others: Cancelable) -> Cancelable: - """ - Combine multiple Cancelable operations into a single coordinated operation. + """Combine multiple Cancelable operations into a single coordinated operation. Creates a new Cancelable that will be cancelled if ANY of the combined operations is cancelled. All cancellation sources from the combined @@ -348,8 +336,7 @@ def on_progress( self, callback: ProgressCallbackType, ) -> Cancelable: - """ - Register a callback to be invoked when progress is reported. + """Register a callback to be invoked when progress is reported. The callback will be called whenever `report_progress()` is invoked on this operation. Both sync and async callbacks are supported. @@ -377,8 +364,7 @@ def on_progress( return self def on_start(self, callback: StatusCallbackType) -> Cancelable: - """ - Register a callback to be invoked when the operation starts. 
+ """Register a callback to be invoked when the operation starts. The callback is triggered when entering the async context (on `__aenter__`). @@ -392,8 +378,7 @@ def on_start(self, callback: StatusCallbackType) -> Cancelable: return self def on_complete(self, callback: StatusCallbackType) -> Cancelable: - """ - Register a callback to be invoked when the operation completes successfully. + """Register a callback to be invoked when the operation completes successfully. The callback is triggered when exiting the context without cancellation or error. @@ -407,8 +392,7 @@ def on_complete(self, callback: StatusCallbackType) -> Cancelable: return self def on_cancel(self, callback: StatusCallbackType) -> Cancelable: - """ - Register a callback to be invoked when the operation is cancelled. + """Register a callback to be invoked when the operation is cancelled. The callback is triggered when the operation is cancelled by any source (timeout, signal, token, condition, or parent cancellation). @@ -426,8 +410,7 @@ def on_error( self, callback: ErrorCallbackType, ) -> Cancelable: - """ - Register a callback to be invoked when the operation encounters an error. + """Register a callback to be invoked when the operation encounters an error. The callback is triggered when an exception (other than CancelledError) is raised within the operation context. @@ -444,8 +427,7 @@ def on_error( # Progress reporting async def report_progress(self, message: Any, metadata: dict[str, Any] | None = None) -> None: - """ - Report progress to all registered callbacks. + """Report progress to all registered callbacks. Args: message: Progress message @@ -465,8 +447,7 @@ async def report_progress(self, message: Any, metadata: dict[str, Any] | None = ) async def check_cancelation(self) -> None: - """ - Check if operation is cancelled and raise if so. + """Check if operation is cancelled and raise if so. This is a public API for checking cancellation state. 
Use this instead of accessing `_token` directly. @@ -474,7 +455,7 @@ async def check_cancelation(self) -> None: Raises: anyio.CancelledError: If operation is cancelled """ - await self._token.check_async() + await self._token.check_async() # pragma: no cover # Context manager async def __aenter__(self) -> Cancelable: @@ -503,16 +484,13 @@ async def __aenter__(self) -> Cancelable: # Set up simple token monitoring via callback async def on_token_cancel(token: CancelationToken) -> None: """Callback when token is cancelled.""" - logger.error( - f"🚨 TOKEN CALLBACK TRIGGERED! Token {token.id} cancelled, cancelling scope for {self.context.id}" - ) + logger.error(f"🚨 TOKEN CALLBACK TRIGGERED! Token {token.id} cancelled, cancelling scope for {self.context.id}") if self._scope and not self._scope.cancel_called: logger.error(f"🚨 CANCELLING SCOPE for {self.context.id}") self._scope.cancel() else: - logger.error( - f"🚨 SCOPE ALREADY CANCELLED OR NONE for {self.context.id} (scope={self._scope}, cancel_called={self._scope.cancel_called if self._scope else 'N/A'})" - ) + scope_info = f"scope={self._scope}, cancel_called={self._scope.cancel_called if self._scope else 'N/A'}" + logger.error(f"🚨 SCOPE ALREADY CANCELLED OR NONE for {self.context.id} ({scope_info})") logger.debug(f"Registering token callback for token {self._token.id}") await self._token.register_callback(on_token_cancel) @@ -536,8 +514,7 @@ def parent(self) -> Cancelable | None: return self._parent_ref() if self._parent_ref else None async def run_in_thread(self, func: Callable[..., T], *args: Any, **kwargs: Any) -> T: - """ - Run function in thread with proper context propagation. + """Run function in thread with proper context propagation. 
This method solves the context variable thread safety issue by ensuring that context variables (including _current_operation) are properly @@ -583,120 +560,145 @@ def __del__(self): self._parent_ref = None self._children.clear() - async def __aexit__( + def _handle_scope_exit( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: Any | None, ) -> bool: - """Exit cancelation context.""" - logger.debug(f"=== ENTERING __aexit__ for {self.context.id} ===") - logger.debug(f"exc_type: {exc_type}, exc_val: {exc_val}") - logger.debug(f"Current status: {self.context.status}") - logger.debug(f"Current cancel_reason: {self.context.cancel_reason}") + """Handle anyio scope exit. - try: - # Exit the scope first - sync operation - _scope_handled = False - if self._scope: - try: - # scope.__exit__ returns True if it handled the exception - _scope_handled = self._scope.__exit__(exc_type, exc_val, exc_tb) - except Exception as e: - logger.debug(f"Scope exit raised: {e}") - # Re-raise the exception from scope exit - raise - - # Determine final status based on the exception - # We need to update status even if scope handled it, because the exception might still propagate - if exc_type is not None: - logger.debug(f"Exception type: {exc_type}") - if issubclass(exc_type, anyio.get_cancelled_exc_class()): - logger.debug("Handling CancelledError") - # Handle cancelation - # First check if we already have a cancel reason set by a source - if self.context.cancel_reason: - # A source already set the reason (like condition, timeout, etc.) 
- logger.debug(f"Cancel reason already set: {self.context.cancel_reason}") - elif self._token.is_cancelled: - # Token was cancelled - self.context.cancel_reason = self._token.reason - self.context.cancel_message = self._token.message - logger.debug(f"Cancel reason from token: {self._token.reason}") - elif self._scope and self._scope.cancel_called: - # Scope was cancelled - check why - # Check if deadline was exceeded (timeout) - # Note: anyio CancelScope always has deadline attribute (defaults to inf) - if anyio.current_time() >= self._scope.deadline: - self.context.cancel_reason = CancelationReason.TIMEOUT - self.context.cancel_message = "Operation timed out" - logger.debug("Detected timeout from deadline") - else: - # Check sources - for source in self._sources: - if hasattr(source, "triggered") and source.triggered: - self.context.cancel_reason = source.reason - break - - if not self.context.cancel_reason: - self.context.cancel_reason = CancelationReason.MANUAL + Returns: + True if scope handled the exception, False otherwise. 
+ """ + _scope_handled = False + if self._scope: + try: + # scope.__exit__ returns True if it handled the exception + _scope_handled = self._scope.__exit__(exc_type, exc_val, exc_tb) + except Exception as e: + logger.debug(f"Scope exit raised: {e}") + # Re-raise the exception from scope exit + raise + return _scope_handled + + async def _determine_final_status( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + ) -> None: + """Determine final operation status based on exception.""" + # Determine final status based on the exception + # We need to update status even if scope handled it, because the exception might still propagate + if exc_type is not None: + logger.debug(f"Exception type: {exc_type}") + if issubclass(exc_type, anyio.get_cancelled_exc_class()): + logger.debug("Handling CancelledError") + # Handle cancelation + # First check if we already have a cancel reason set by a source + if self.context.cancel_reason: + # A source already set the reason (like condition, timeout, etc.) 
+ logger.debug(f"Cancel reason already set: {self.context.cancel_reason}") + elif self._token.is_cancelled: + # Token was cancelled + self.context.cancel_reason = self._token.reason + self.context.cancel_message = self._token.message + logger.debug(f"Cancel reason from token: {self._token.reason}") + elif self._scope and self._scope.cancel_called: + # Scope was cancelled - check why + # Check if deadline was exceeded (timeout) + # Note: anyio CancelScope always has deadline attribute (defaults to inf) + if anyio.current_time() >= self._scope.deadline: + self.context.cancel_reason = CancelationReason.TIMEOUT + self.context.cancel_message = "Operation timed out" + logger.debug("Detected timeout from deadline") else: - self.context.cancel_reason = CancelationReason.MANUAL + # Check sources + for source in self._sources: + if hasattr(source, "triggered") and source.triggered: + self.context.cancel_reason = source.reason + break - # Always update status to CANCELLED for any CancelledError - logger.debug(f"Updating status to CANCELLED (was {self.context.status})") - self.context.update_status(OperationStatus.CANCELLED) - logger.debug(f"Status after update: {self.context.status}") - await self._trigger_callbacks("cancel") - - elif issubclass(exc_type, CancelationError) and isinstance(exc_val, CancelationError): - # Our custom cancelation errors - self.context.cancel_reason = exc_val.reason - self.context.cancel_message = exc_val.message - self.context.update_status(OperationStatus.CANCELLED) - await self._trigger_callbacks("cancel") + if not self.context.cancel_reason: + self.context.cancel_reason = CancelationReason.MANUAL else: - # Other errors - self.context.error = str(exc_val) - self.context.update_status(OperationStatus.FAILED) - - # Only trigger error callbacks for Exception instances, not BaseException - # (e.g., skip KeyboardInterrupt, SystemExit, GeneratorExit) - if isinstance(exc_val, Exception): - await self._trigger_error_callbacks(exc_val) + 
self.context.cancel_reason = CancelationReason.MANUAL + + # Always update status to CANCELLED for any CancelledError + logger.debug(f"Updating status to CANCELLED (was {self.context.status})") + self.context.update_status(OperationStatus.CANCELLED) + logger.debug(f"Status after update: {self.context.status}") + await self._trigger_callbacks("cancel") + + elif issubclass(exc_type, CancelationError) and isinstance(exc_val, CancelationError): + # Our custom cancelation errors + self.context.cancel_reason = exc_val.reason + self.context.cancel_message = exc_val.message + self.context.update_status(OperationStatus.CANCELLED) + await self._trigger_callbacks("cancel") else: - # Successful completion - self.context.update_status(OperationStatus.COMPLETED) - await self._trigger_callbacks("complete") + # Other errors + self.context.error = str(exc_val) + self.context.update_status(OperationStatus.FAILED) + + # Only trigger error callbacks for Exception instances, not BaseException + # (e.g., skip KeyboardInterrupt, SystemExit, GeneratorExit) + if isinstance(exc_val, Exception): + await self._trigger_error_callbacks(exc_val) + else: + # Successful completion + self.context.update_status(OperationStatus.COMPLETED) + await self._trigger_callbacks("complete") - except Exception as e: - logger.error(f"Error in __aexit__ status handling: {e}", exc_info=True) + async def _cleanup_context(self) -> None: + """Cleanup monitoring, shields, registry, and context vars.""" + logger.debug(f"=== __aexit__ finally block for {self.context.id} ===") - finally: - logger.debug(f"=== __aexit__ finally block for {self.context.id} ===") + # Stop monitoring + await self._stop_monitoring() - # Stop monitoring - await self._stop_monitoring() + # Cleanup shields + for shield in self._shields: + shield.cancel() - # Cleanup shields - for shield in self._shields: - shield.cancel() + # Unregister from global registry + if self._register_globally: + from .registry import OperationRegistry - # Unregister 
from global registry - if self._register_globally: - from .registry import OperationRegistry + registry = OperationRegistry.get_instance() + await registry.unregister(self.context.id) - registry = OperationRegistry.get_instance() - await registry.unregister(self.context.id) + # Reset context variable + if hasattr(self, "_context_token"): + _current_operation.reset(self._context_token) - # Reset context variable - if hasattr(self, "_context_token"): - _current_operation.reset(self._context_token) + logger.debug( + f"Exited cancelation context - final status: {self.context.status}", + extra=self.context.log_context(), + ) - logger.debug( - f"Exited cancelation context - final status: {self.context.status}", - extra=self.context.log_context(), - ) + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: Any | None, + ) -> bool: + """Exit cancelation context.""" + logger.debug(f"=== ENTERING __aexit__ for {self.context.id} ===") + logger.debug(f"exc_type: {exc_type}, exc_val: {exc_val}") + logger.debug(f"Current status: {self.context.status}") + logger.debug(f"Current cancel_reason: {self.context.cancel_reason}") + + try: + # Handle scope exit + _scope_handled = self._handle_scope_exit(exc_type, exc_val, exc_tb) + # Determine final status based on exception + await self._determine_final_status(exc_type, exc_val) + except Exception as e: + logger.error(f"Error in __aexit__ status handling: {e}", exc_info=True) + finally: + # Cleanup context resources + await self._cleanup_context() # Always propagate exceptions - cancelation context should not suppress them # The anyio.CancelScope handles cancelation propagation appropriately @@ -801,8 +803,7 @@ async def stream( report_interval: int | None = None, buffer_partial: bool = True, ) -> AsyncIterator[T]: - """ - Wrap async iterator with cancelation support. + """Wrap async iterator with cancelation support. 
Args: async_iter: Async iterator to wrap @@ -826,8 +827,8 @@ async def stream( if buffer_partial: buffer.append(item) # Limit buffer size - if len(buffer) > 1000: - buffer = buffer[-1000:] + if len(buffer) > _MAX_BUFFER_SIZE: + buffer = buffer[-_MAX_BUFFER_SIZE:] if report_interval and count % report_interval == 0: await self.report_progress(f"Processed {count} items", {"count": count, "latest_item": item}) @@ -864,8 +865,7 @@ async def stream( # Function wrapper def wrap(self, operation: Callable[..., Awaitable[R]]) -> Callable[..., Awaitable[R]]: - """ - Wrap an async operation to automatically check for cancelation before execution. + """Wrap an async operation to automatically check for cancelation before execution. This is useful for retry loops and other patterns where you want automatic cancelation checking without manually accessing the token. @@ -904,8 +904,7 @@ async def wrapped(*args: Any, **kwargs: Any) -> R: @asynccontextmanager async def wrapping(self) -> AsyncIterator[Callable[..., Awaitable[R]]]: - """ - Async context manager that yields a wrap function for scoped operation wrapping. + """Async context manager that yields a wrap function for scoped operation wrapping. The yielded wrap function checks cancelation before executing any operation. This is useful for retry loops where you want all operations in a scope to @@ -935,8 +934,7 @@ async def wrap_fn(fn: Callable[..., Awaitable[R]], *args: Any, **kwargs: Any) -> # Shielding @asynccontextmanager async def shield(self) -> AsyncIterator[Cancelable]: - """ - Shield a section from cancelation. + """Shield a section from cancelation. Creates a child operation that is protected from cancelation but still participates in the operation hierarchy for monitoring and tracking. @@ -979,8 +977,7 @@ async def cancel( message: str | None = None, propagate_to_children: bool = True, ) -> None: - """ - Cancel the operation. + """Cancel the operation. 
Args: reason: Reason for cancelation diff --git a/src/hother/cancelable/core/exceptions.py b/src/hother/cancelable/core/exceptions.py index 47b2aab..593c00b 100644 --- a/src/hother/cancelable/core/exceptions.py +++ b/src/hother/cancelable/core/exceptions.py @@ -1,14 +1,10 @@ -""" -Custom exceptions for the async cancelation system. -""" - +"""Custom exceptions for the async cancelation system.""" from hother.cancelable.core.models import CancelationReason, OperationContext class CancelationError(Exception): - """ - Base exception for cancelation-related errors. + """Base exception for cancelation-related errors. Attributes: reason: The reason for cancelation diff --git a/src/hother/cancelable/core/models.py b/src/hother/cancelable/core/models.py index d755312..9792dfd 100644 --- a/src/hother/cancelable/core/models.py +++ b/src/hother/cancelable/core/models.py @@ -1,6 +1,4 @@ -""" -Pydantic models for operation context and status tracking. -""" +"""Pydantic models for operation context and status tracking.""" import uuid from datetime import UTC, datetime, timedelta @@ -38,8 +36,7 @@ class CancelationReason(str, Enum): class OperationContext(BaseModel): - """ - Complete operation context with metadata and status tracking. + """Complete operation context with metadata and status tracking. Attributes: id: Unique operation identifier @@ -123,8 +120,7 @@ def log_context(self) -> dict[str, Any]: } def update_status(self, status: OperationStatus) -> None: - """ - Update operation status with appropriate logging. + """Update operation status with appropriate logging. Args: status: New operation status diff --git a/src/hother/cancelable/core/registry.py b/src/hother/cancelable/core/registry.py index f6a09d7..3207c18 100644 --- a/src/hother/cancelable/core/registry.py +++ b/src/hother/cancelable/core/registry.py @@ -1,6 +1,4 @@ -""" -Global operation registry for tracking and managing operations. 
-""" +"""Global operation registry for tracking and managing operations.""" import threading from datetime import UTC, datetime, timedelta @@ -18,8 +16,7 @@ class OperationRegistry: - """ - Singleton registry for tracking all cancelable operations. + """Singleton registry for tracking all cancelable operations. Provides centralized management and monitoring of operations across the application. @@ -50,8 +47,7 @@ def __init__(self): @classmethod def get_instance(cls) -> "OperationRegistry": - """ - Get singleton instance of the registry. + """Get singleton instance of the registry. Returns: The global OperationRegistry instance @@ -61,8 +57,7 @@ def get_instance(cls) -> "OperationRegistry": return cls._instance async def register(self, operation: "Cancelable") -> None: - """ - Register an operation with the registry. + """Register an operation with the registry. Args: operation: Cancelable operation to register @@ -82,8 +77,7 @@ async def register(self, operation: "Cancelable") -> None: ) async def unregister(self, operation_id: str) -> None: - """ - Unregister an operation and add to history. + """Unregister an operation and add to history. Args: operation_id: ID of operation to unregister @@ -110,8 +104,7 @@ async def unregister(self, operation_id: str) -> None: ) async def get_operation(self, operation_id: str) -> Optional["Cancelable"]: - """ - Get operation by ID. + """Get operation by ID. Args: operation_id: Operation ID to look up @@ -129,8 +122,7 @@ async def list_operations( parent_id: str | None = None, name_pattern: str | None = None, ) -> list[OperationContext]: - """ - List operations with optional filtering. + """List operations with optional filtering. Args: status: Filter by operation status @@ -162,8 +154,7 @@ async def cancel_operation( reason: CancelationReason = CancelationReason.MANUAL, message: str | None = None, ) -> bool: - """ - Cancel a specific operation. + """Cancel a specific operation. 
Args: operation_id: ID of operation to cancel @@ -189,8 +180,7 @@ async def cancel_all( reason: CancelationReason = CancelationReason.MANUAL, message: str | None = None, ) -> int: - """ - Cancel all operations with optional status filter. + """Cancel all operations with optional status filter. Args: status: Only cancel operations with this status @@ -239,8 +229,7 @@ async def get_history( status: OperationStatus | None = None, since: datetime | None = None, ) -> list[OperationContext]: - """ - Get operation history. + """Get operation history. Args: limit: Maximum number of operations to return @@ -272,8 +261,7 @@ async def cleanup_completed( older_than: timedelta | None = None, keep_failed: bool = True, ) -> int: - """ - Clean up completed operations from active tracking. + """Clean up completed operations from active tracking. Args: older_than: Only cleanup operations older than this @@ -326,8 +314,7 @@ async def cleanup_completed( return len(to_remove) async def get_statistics(self) -> dict[str, Any]: - """ - Get registry statistics. + """Get registry statistics. Returns: Dictionary with operation statistics @@ -373,8 +360,7 @@ async def clear_all(self) -> None: # Thread-safe synchronous methods def get_operation_sync(self, operation_id: str) -> Optional["Cancelable"]: - """ - Get operation by ID (thread-safe, synchronous). + """Get operation by ID (thread-safe, synchronous). This method can be called from any thread. @@ -393,8 +379,7 @@ def list_operations_sync( parent_id: str | None = None, name_pattern: str | None = None, ) -> list[OperationContext]: - """ - List operations with optional filtering (thread-safe, synchronous). + """List operations with optional filtering (thread-safe, synchronous). This method can be called from any thread. @@ -423,8 +408,7 @@ def list_operations_sync( return operations def get_statistics_sync(self) -> dict[str, Any]: - """ - Get registry statistics (thread-safe, synchronous). 
+ """Get registry statistics (thread-safe, synchronous). This method can be called from any thread. @@ -466,8 +450,7 @@ def get_history_sync( status: OperationStatus | None = None, since: datetime | None = None, ) -> list[OperationContext]: - """ - Get operation history (thread-safe, synchronous). + """Get operation history (thread-safe, synchronous). This method can be called from any thread. @@ -501,8 +484,7 @@ def cancel_operation_sync( reason: CancelationReason = CancelationReason.MANUAL, message: str | None = None, ) -> None: - """ - Cancel a specific operation (thread-safe, asynchronous execution). + """Cancel a specific operation (thread-safe, asynchronous execution). This method can be called from any thread. It schedules the cancelation to be executed asynchronously and returns immediately. @@ -529,8 +511,7 @@ def cancel_all_sync( reason: CancelationReason = CancelationReason.MANUAL, message: str | None = None, ) -> None: - """ - Cancel all operations (thread-safe, asynchronous execution). + """Cancel all operations (thread-safe, asynchronous execution). This method can be called from any thread. It schedules the cancelation to be executed asynchronously and returns immediately. diff --git a/src/hother/cancelable/core/token.py b/src/hother/cancelable/core/token.py index 6a88005..42e9434 100644 --- a/src/hother/cancelable/core/token.py +++ b/src/hother/cancelable/core/token.py @@ -1,6 +1,4 @@ -""" -Thread-safe cancelation token implementation. -""" +"""Thread-safe cancelation token implementation.""" from __future__ import annotations @@ -22,8 +20,7 @@ class CancelationToken(BaseModel): - """ - Thread-safe cancelation token that can be shared across tasks. + """Thread-safe cancelation token that can be shared across tasks. Attributes: id: Unique token identifier @@ -71,8 +68,7 @@ async def cancel( reason: CancelationReason = CancelationReason.MANUAL, message: str | None = None, ) -> bool: - """ - Cancel the token. + """Cancel the token. 
Args: reason: Reason for cancelation @@ -134,8 +130,7 @@ def cancel_sync( reason: CancelationReason = CancelationReason.MANUAL, message: str | None = None, ) -> bool: - """ - Thread-safe synchronous cancelation from any thread. + """Thread-safe synchronous cancelation from any thread. This method can be called from regular Python threads (pynput, signal handlers, etc.) and will safely cancel the token and notify async waiters via the anyio bridge. @@ -192,8 +187,7 @@ def on_signal(signum): return True def _notify_async_waiters(self) -> None: - """ - Set the anyio event from a thread. + """Set the anyio event from a thread. Uses the anyio bridge to safely set the event in the anyio context. """ @@ -204,8 +198,7 @@ def set_event() -> None: call_soon_threadsafe(set_event) def _schedule_callbacks(self) -> None: - """ - Schedule callbacks to run in the anyio context. + """Schedule callbacks to run in the anyio context. Uses the anyio bridge to safely execute callbacks from a thread. """ @@ -243,8 +236,7 @@ async def wait_for_cancel(self) -> None: await self._event.wait() def check(self) -> None: - """ - Check if cancelled and raise exception if so. + """Check if cancelled and raise exception if so. Raises: ManualCancelation: If token is cancelled @@ -256,8 +248,7 @@ def check(self) -> None: ) async def check_async(self) -> None: - """ - Async version of check that allows for proper async cancelation. + """Async version of check that allows for proper async cancelation. Raises: anyio.CancelledError: If token is cancelled @@ -267,8 +258,7 @@ async def check_async(self) -> None: raise anyio.get_cancelled_exc_class()(self.message or "Operation cancelled via token") def is_cancelation_requested(self) -> bool: - """ - Non-throwing check for cancelation. + """Non-throwing check for cancelation. 
Returns: True if cancelation has been requested @@ -276,8 +266,7 @@ def is_cancelation_requested(self) -> bool: return self.is_cancelled async def register_callback(self, callback: Callable[[CancelationToken], Awaitable[None]]) -> None: - """ - Register a callback to be called on cancelation. + """Register a callback to be called on cancelation. The callback should accept the token as its only argument. @@ -312,12 +301,14 @@ def __str__(self) -> str: def __repr__(self) -> str: """Detailed representation of token.""" - return f"CancelationToken(id='{self.id}', is_cancelled={self.is_cancelled}, reason={self.reason}, message='{self.message}')" + return ( + f"CancelationToken(id='{self.id}', is_cancelled={self.is_cancelled}, " + f"reason={self.reason}, message='{self.message}')" + ) class LinkedCancelationToken(CancelationToken): - """ - Cancelation token that can be linked to other tokens. + """Cancelation token that can be linked to other tokens. When any linked token is cancelled, this token is also cancelled. """ @@ -327,8 +318,7 @@ def __init__(self, **data: Any) -> None: self._linked_tokens: list[CancelationToken] = [] # Use regular list instead of WeakSet for now async def link(self, token: CancelationToken, preserve_reason: bool = False) -> None: - """ - Link this token to another token. + """Link this token to another token. When the linked token is cancelled, this token will also be cancelled. diff --git a/src/hother/cancelable/integrations/__init__.py b/src/hother/cancelable/integrations/__init__.py index c8a4a7a..fef9de4 100644 --- a/src/hother/cancelable/integrations/__init__.py +++ b/src/hother/cancelable/integrations/__init__.py @@ -1,5 +1,4 @@ -""" -Integration modules for popular async libraries and frameworks. +"""Integration modules for popular async libraries and frameworks. 
This module provides seamless integration with popular Python async frameworks: diff --git a/src/hother/cancelable/integrations/fastapi.py b/src/hother/cancelable/integrations/fastapi.py index 512fe24..052e9c7 100644 --- a/src/hother/cancelable/integrations/fastapi.py +++ b/src/hother/cancelable/integrations/fastapi.py @@ -1,6 +1,4 @@ -""" -FastAPI integration for request-scoped cancelation. -""" +"""FastAPI integration for request-scoped cancelation.""" from collections.abc import AsyncIterator, Callable from typing import Any @@ -19,13 +17,10 @@ class RequestCancelationMiddleware: - """ - FastAPI middleware that provides request-scoped cancelation. - """ + """FastAPI middleware that provides request-scoped cancelation.""" def __init__(self, app: ASGIApp, default_timeout: float | None = None): - """ - Initialize middleware. + """Initialize middleware. Args: app: ASGI application @@ -58,8 +53,7 @@ async def monitor_disconnect(): def get_request_token(request: Request) -> CancelationToken: - """ - Get cancelation token from request. + """Get cancelation token from request. Args: request: FastAPI request @@ -80,8 +74,7 @@ async def cancelable_dependency( request: Request, timeout: float | None = None, ) -> Cancelable: - """ - FastAPI dependency that provides a cancelable for the request. + """FastAPI dependency that provides a cancelable for the request. Args: request: FastAPI request @@ -126,8 +119,7 @@ def with_cancelation( timeout: float | None = None, raise_on_cancel: bool = True, ) -> Callable[[Callable[..., Any]], Callable[..., Any]]: - """ - Decorator for FastAPI endpoints with automatic cancelation. + """Decorator for FastAPI endpoints with automatic cancelation. 
Args: timeout: Optional timeout for the endpoint @@ -159,9 +151,7 @@ async def wrapper(request: Request, *args: Any, **kwargs: Any): raise HTTPException(status_code=504, detail="Request timeout") if cancelable.context.cancel_reason == CancelationReason.SIGNAL: raise HTTPException(status_code=499, detail="Client closed connection") - raise HTTPException( - status_code=503, detail=f"Request cancelled: {cancelable.context.cancel_message}" - ) + raise HTTPException(status_code=503, detail=f"Request cancelled: {cancelable.context.cancel_message}") raise return wrapper @@ -175,8 +165,7 @@ async def cancelable_streaming_response( media_type: str = "text/plain", chunk_size: int | None = None, ) -> StreamingResponse: - """ - Create a streaming response with cancelation support. + """Create a streaming response with cancelation support. Args: generator: Async generator producing response chunks @@ -227,9 +216,7 @@ async def wrapped_generator(): # WebSocket support class CancelableWebSocket: - """ - WebSocket wrapper with cancelation support. - """ + """WebSocket wrapper with cancelation support.""" def __init__(self, websocket: Any, cancelable: Cancelable): self.websocket = websocket diff --git a/src/hother/cancelable/sources/__init__.py b/src/hother/cancelable/sources/__init__.py index 3853d9a..ee3934a 100644 --- a/src/hother/cancelable/sources/__init__.py +++ b/src/hother/cancelable/sources/__init__.py @@ -1,5 +1,4 @@ -""" -Cancellation source implementations. +"""Cancellation source implementations. This module provides various ways to trigger cancellation of async operations: diff --git a/src/hother/cancelable/sources/base.py b/src/hother/cancelable/sources/base.py index 480e189..0410bb4 100644 --- a/src/hother/cancelable/sources/base.py +++ b/src/hother/cancelable/sources/base.py @@ -1,6 +1,4 @@ -""" -Base class for cancelation sources. 
-""" +"""Base class for cancelation sources.""" from abc import ABC, abstractmethod from collections.abc import Awaitable, Callable @@ -14,16 +12,14 @@ class CancelationSource(ABC): - """ - Abstract base class for cancelation sources. + """Abstract base class for cancelation sources. A cancelation source monitors for a specific condition and triggers cancelation when that condition is met. """ def __init__(self, reason: CancelationReason, name: str | None = None): - """ - Initialize cancelation source. + """Initialize cancelation source. Args: reason: The cancelation reason this source will use @@ -38,8 +34,7 @@ def __init__(self, reason: CancelationReason, name: str | None = None): @abstractmethod async def start_monitoring(self, scope: anyio.CancelScope) -> None: - """ - Start monitoring for cancelation condition. + """Start monitoring for cancelation condition. Args: scope: The cancel scope to trigger when condition is met @@ -54,8 +49,7 @@ async def stop_monitoring(self) -> None: self._monitoring_task = None def set_cancel_callback(self, callback: Callable[[CancelationReason, str], None | Awaitable[None]]) -> None: - """ - Set callback to be called when cancelation is triggered. + """Set callback to be called when cancelation is triggered. Args: callback: Callback function that accepts reason and message (can be sync or async) @@ -63,8 +57,7 @@ def set_cancel_callback(self, callback: Callable[[CancelationReason, str], None self._cancel_callback = callback async def trigger_cancelation(self, message: str | None = None) -> None: - """ - Trigger cancelation with the configured reason. + """Trigger cancelation with the configured reason. 
Args: message: Optional cancelation message diff --git a/src/hother/cancelable/sources/composite.py b/src/hother/cancelable/sources/composite.py index 476933b..63aa8ce 100644 --- a/src/hother/cancelable/sources/composite.py +++ b/src/hother/cancelable/sources/composite.py @@ -1,7 +1,4 @@ -""" -Composite cancelation source for combining multiple sources. -""" - +"""Composite cancelation source for combining multiple sources.""" import anyio @@ -13,8 +10,7 @@ class CompositeSource(CancelationSource): - """ - Cancelation source that combines multiple other sources. + """Cancelation source that combines multiple other sources. Triggers when any of the component sources trigger. """ @@ -24,8 +20,7 @@ def __init__( sources: list[CancelationSource], name: str | None = None, ): - """ - Initialize composite source. + """Initialize composite source. Args: sources: List of cancelation sources to combine @@ -41,8 +36,7 @@ def __init__( self.triggered_source: CancelationSource | None = None async def start_monitoring(self, scope: anyio.CancelScope) -> None: - """ - Start monitoring all component sources. + """Start monitoring all component sources. Args: scope: Cancel scope to trigger when any source triggers @@ -101,8 +95,7 @@ async def stop_monitoring(self) -> None: ) async def _monitor_source(self, source: CancelationSource) -> None: - """ - Monitor a single source and propagate its cancelation. + """Monitor a single source and propagate its cancelation. Args: source: Source to monitor @@ -139,8 +132,7 @@ class AnyOfSource(CompositeSource): class AllOfSource(CancelationSource): - """ - Cancelation source that requires ALL component sources to trigger. + """Cancelation source that requires ALL component sources to trigger. Only cancels when all component sources have triggered. """ @@ -150,8 +142,7 @@ def __init__( sources: list[CancelationSource], name: str | None = None, ): - """ - Initialize all-of source. + """Initialize all-of source. 
Args: sources: List of cancelation sources that must all trigger diff --git a/src/hother/cancelable/sources/condition.py b/src/hother/cancelable/sources/condition.py index 9bc99da..52b2c5b 100644 --- a/src/hother/cancelable/sources/condition.py +++ b/src/hother/cancelable/sources/condition.py @@ -1,6 +1,4 @@ -""" -Condition-based cancelation source implementation. -""" +"""Condition-based cancelation source implementation.""" import inspect from collections.abc import Awaitable, Callable @@ -16,8 +14,7 @@ class ConditionSource(CancelationSource): - """ - Cancelation source that monitors a condition function. + """Cancelation source that monitors a condition function. Cancels when the condition function returns True. """ @@ -29,8 +26,7 @@ def __init__( condition_name: str | None = None, name: str | None = None, ): - """ - Initialize condition source. + """Initialize condition source. Args: condition: Function that returns True when cancelation should occur @@ -55,8 +51,7 @@ def __init__( self._is_async = inspect.iscoroutinefunction(condition) async def start_monitoring(self, scope: anyio.CancelScope) -> None: - """ - Start monitoring the condition. + """Start monitoring the condition. Args: scope: Cancel scope to trigger when condition is met @@ -134,9 +129,7 @@ async def _monitor_condition(self) -> None: logger.debug(f"Condition '{self.condition_name}' met after {check_count} checks") # Trigger cancelation through the base class method - await self.trigger_cancelation( - f"Condition '{self.condition_name}' met after {check_count} checks" - ) + await self.trigger_cancelation(f"Condition '{self.condition_name}' met after {check_count} checks") break except Exception as e: @@ -171,8 +164,7 @@ async def _monitor_condition(self) -> None: class ResourceConditionSource(ConditionSource): - """ - Specialized condition source for monitoring system resources. + """Specialized condition source for monitoring system resources. 
Useful for cancelling operations when resources are constrained. """ @@ -185,8 +177,7 @@ def __init__( check_interval: float = 5.0, name: str | None = None, ): - """ - Initialize resource condition source. + """Initialize resource condition source. Args: memory_threshold: Cancel if memory usage exceeds this percentage diff --git a/src/hother/cancelable/sources/signal.py b/src/hother/cancelable/sources/signal.py index 7ffd73f..0b9c43b 100644 --- a/src/hother/cancelable/sources/signal.py +++ b/src/hother/cancelable/sources/signal.py @@ -1,6 +1,4 @@ -""" -Signal-based cancelation source implementation. -""" +"""Signal-based cancelation source implementation.""" from __future__ import annotations @@ -17,8 +15,7 @@ class SignalSource(CancelationSource): - """ - Cancelation source that monitors OS signals. + """Cancelation source that monitors OS signals. Uses anyio's native signal handling for clean integration. Supports graceful shutdown via SIGINT, SIGTERM, etc. @@ -27,8 +24,7 @@ class SignalSource(CancelationSource): """ def __init__(self, *signals: int, name: str | None = None) -> None: - """ - Initialize signal source. + """Initialize signal source. Args: *signals: Signal numbers to monitor (e.g., signal.SIGINT) @@ -52,8 +48,7 @@ def __init__(self, *signals: int, name: str | None = None) -> None: self._task_group: anyio.abc.TaskGroup | None = None async def start_monitoring(self, scope: anyio.CancelScope) -> None: - """ - Start monitoring for signals. + """Start monitoring for signals. Args: scope: Cancel scope to trigger when signal is received @@ -98,8 +93,7 @@ async def stop_monitoring(self) -> None: ) async def _monitor_signals(self) -> None: - """ - Monitor for signals using anyio's native signal handling. + """Monitor for signals using anyio's native signal handling. This runs in a background task and waits for any of the configured signals. When a signal is received, it triggers cancelation and exits. 
@@ -111,9 +105,7 @@ async def _monitor_signals(self) -> None: "Signal source monitoring started", extra={ "source": self.name, - "signals": [ - signal.Signals(s).name for s in self.signals if s in signal.Signals._value2member_map_ - ], + "signals": [signal.Signals(s).name for s in self.signals if s in signal.Signals._value2member_map_], }, ) diff --git a/src/hother/cancelable/sources/timeout.py b/src/hother/cancelable/sources/timeout.py index bdf898e..f8b071d 100644 --- a/src/hother/cancelable/sources/timeout.py +++ b/src/hother/cancelable/sources/timeout.py @@ -1,6 +1,4 @@ -""" -Timeout-based cancelation source implementation. -""" +"""Timeout-based cancelation source implementation.""" from datetime import timedelta @@ -14,13 +12,10 @@ class TimeoutSource(CancelationSource): - """ - Cancelation source that triggers after a specified timeout. - """ + """Cancelation source that triggers after a specified timeout.""" def __init__(self, timeout: float | timedelta, name: str | None = None): - """ - Initialize timeout source. + """Initialize timeout source. Args: timeout: Timeout duration in seconds or as timedelta @@ -39,8 +34,7 @@ def __init__(self, timeout: float | timedelta, name: str | None = None): self._deadline_time: float | None = None async def start_monitoring(self, scope: anyio.CancelScope) -> None: - """ - Set scope deadline for timeout. + """Set scope deadline for timeout. Args: scope: Cancel scope to configure diff --git a/src/hother/cancelable/streaming/__init__.py b/src/hother/cancelable/streaming/__init__.py index 0137099..9c8243c 100644 --- a/src/hother/cancelable/streaming/__init__.py +++ b/src/hother/cancelable/streaming/__init__.py @@ -1,5 +1,4 @@ -""" -Stream processing utilities for async operations. +"""Stream processing utilities for async operations. 
This module provides tools for simulating and testing cancellable stream processing: diff --git a/src/hother/cancelable/streaming/simulator/__init__.py b/src/hother/cancelable/streaming/simulator/__init__.py index 74242e4..edcf21f 100644 --- a/src/hother/cancelable/streaming/simulator/__init__.py +++ b/src/hother/cancelable/streaming/simulator/__init__.py @@ -4,8 +4,4 @@ from .simulator import simulate_stream from .utils import get_random_chunk_size -__all__ = [ - "StreamConfig", - "simulate_stream", - "get_random_chunk_size" -] +__all__ = ["StreamConfig", "simulate_stream", "get_random_chunk_size"] diff --git a/src/hother/cancelable/streaming/simulator/config.py b/src/hother/cancelable/streaming/simulator/config.py index 9718ba4..e9fcd2e 100644 --- a/src/hother/cancelable/streaming/simulator/config.py +++ b/src/hother/cancelable/streaming/simulator/config.py @@ -1,6 +1,5 @@ """Configuration models for stream simulation.""" - from pydantic import BaseModel, Field, model_validator diff --git a/src/hother/cancelable/streaming/simulator/simulator.py b/src/hother/cancelable/streaming/simulator/simulator.py index db2853d..84a3fdd 100644 --- a/src/hother/cancelable/streaming/simulator/simulator.py +++ b/src/hother/cancelable/streaming/simulator/simulator.py @@ -18,9 +18,8 @@ async def simulate_stream( text: str, config: StreamConfig | None = None, cancelable: Cancelable | None = None -) -> AsyncGenerator[dict[str, Any], None]: - """ - Simulate a realistic network stream with variable timing and cancellation support. +) -> AsyncGenerator[dict[str, Any]]: + """Simulate a realistic network stream with variable timing and cancellation support. This function simulates network streaming behavior including bursts, stalls, jitter, and variable chunk sizes. 
It's useful for testing cancellable stream diff --git a/src/hother/cancelable/types.py b/src/hother/cancelable/types.py index f47abb8..237bec0 100644 --- a/src/hother/cancelable/types.py +++ b/src/hother/cancelable/types.py @@ -1,5 +1,4 @@ -""" -Type definitions and protocols for hother.cancelable. +"""Type definitions and protocols for hother.cancelable. This module provides Protocol classes and type definitions to enable proper static type checking without suppressions. @@ -21,8 +20,7 @@ class ProgressCallback(Protocol): - """ - Protocol for progress callback functions. + """Protocol for progress callback functions. Accepts both sync and async callbacks for progress reporting. """ @@ -33,8 +31,7 @@ def __call__( message: Any, metadata: dict[str, Any] | None = None, ) -> None | Awaitable[None]: - """ - Called when operation reports progress. + """Called when operation reports progress. Args: operation_id: ID of the operation reporting progress @@ -45,15 +42,13 @@ def __call__( class StatusCallback(Protocol): - """ - Protocol for status change callback functions. + """Protocol for status change callback functions. Called when operation status changes (started, completed, cancelled). """ def __call__(self, context: OperationContext) -> None | Awaitable[None]: - """ - Called when operation status changes. + """Called when operation status changes. Args: context: The operation context with updated status @@ -62,8 +57,7 @@ def __call__(self, context: OperationContext) -> None | Awaitable[None]: class ErrorCallback(Protocol): - """ - Protocol for error callback functions. + """Protocol for error callback functions. Called when operation encounters an error. """ @@ -73,8 +67,7 @@ def __call__( context: OperationContext, error: Exception, ) -> None | Awaitable[None]: - """ - Called when operation encounters an error. + """Called when operation encounters an error. 
Args: context: The operation context @@ -85,24 +78,16 @@ def __call__( # Union types for callbacks that can be sync or async ProgressCallbackType = ( - Callable[[str, Any, dict[str, Any] | None], None] - | Callable[[str, Any, dict[str, Any] | None], Awaitable[None]] + Callable[[str, Any, dict[str, Any] | None], None] | Callable[[str, Any, dict[str, Any] | None], Awaitable[None]] ) -StatusCallbackType = ( - Callable[["OperationContext"], None] - | Callable[["OperationContext"], Awaitable[None]] -) +StatusCallbackType = Callable[["OperationContext"], None] | Callable[["OperationContext"], Awaitable[None]] -ErrorCallbackType = ( - Callable[["OperationContext", Exception], None] - | Callable[["OperationContext", Exception], Awaitable[None]] -) +ErrorCallbackType = Callable[["OperationContext", Exception], None] | Callable[["OperationContext", Exception], Awaitable[None]] -def ensure_cancelable(cancelable: "Cancelable | None") -> "Cancelable": - """ - Type guard utility for injected cancelable parameters. +def ensure_cancelable(cancelable: Cancelable | None) -> Cancelable: + """Type guard utility for injected cancelable parameters. Use this when decorated with @cancelable to narrow the type from `Cancelable | None` to `Cancelable`. diff --git a/src/hother/cancelable/utils/__init__.py b/src/hother/cancelable/utils/__init__.py index ec08ee5..54542c0 100644 --- a/src/hother/cancelable/utils/__init__.py +++ b/src/hother/cancelable/utils/__init__.py @@ -1,5 +1,4 @@ -""" -Utility modules for the async cancellation system. +"""Utility modules for the async cancellation system. This module provides helper functions, decorators, bridges, and testing tools: diff --git a/src/hother/cancelable/utils/anyio_bridge.py b/src/hother/cancelable/utils/anyio_bridge.py index 3ca4a4a..bc1c2c3 100644 --- a/src/hother/cancelable/utils/anyio_bridge.py +++ b/src/hother/cancelable/utils/anyio_bridge.py @@ -1,5 +1,4 @@ -""" -Global bridge for thread-safe anyio operations. 
+"""Global bridge for thread-safe anyio operations. Allows regular Python threads to schedule callbacks in anyio context, providing an equivalent to asyncio's loop.call_soon_threadsafe(). @@ -21,8 +20,7 @@ class AnyioBridge: - """ - Singleton bridge for thread-to-anyio communication. + """Singleton bridge for thread-to-anyio communication. Provides call_soon_threadsafe equivalent for anyio by using memory object streams and a background worker task. @@ -52,8 +50,7 @@ def from_thread(): _lock = threading.Lock() def __init__(self, buffer_size: int = 1000) -> None: - """ - Initialize the AnyioBridge. + """Initialize the AnyioBridge. Args: buffer_size: Maximum number of queued callbacks before blocking (default: 1000) @@ -69,8 +66,7 @@ def __init__(self, buffer_size: int = 1000) -> None: @classmethod def get_instance(cls) -> Self: - """ - Get singleton instance of the bridge. + """Get singleton instance of the bridge. Thread-safe lazy initialization. @@ -84,8 +80,7 @@ def get_instance(cls) -> Self: return cls._instance # type: ignore[return-value] async def start(self) -> None: - """ - Start the bridge worker task. + """Start the bridge worker task. Should be called once at application startup from async context. Must be run in a task group as it blocks forever. @@ -126,8 +121,7 @@ async def start(self) -> None: await self._worker() async def _worker(self) -> None: - """ - Worker task that processes callbacks from threads. + """Worker task that processes callbacks from threads. Runs forever until the receive stream is closed. """ @@ -166,8 +160,7 @@ async def _worker(self) -> None: logger.warning("Bridge worker loop ended") def call_soon_threadsafe(self, callback: Callable[[], Any]) -> None: - """ - Schedule callback to run in anyio context from any thread. + """Schedule callback to run in anyio context from any thread. This is the anyio equivalent of asyncio's loop.call_soon_threadsafe(). The callback will be executed in the anyio event loop context. 
@@ -183,7 +176,7 @@ def call_soon_threadsafe(self, callback: Callable[[], Any]) -> None: # Queue for later processing with self._pending_lock: self._pending_callbacks.append(callback) - logger.debug(f"Bridge not started, queuing callback " f"(queue size: {len(self._pending_callbacks)})") + logger.debug(f"Bridge not started, queuing callback (queue size: {len(self._pending_callbacks)})") return logger.debug(f"Queueing callback to bridge: {callback}") @@ -192,15 +185,13 @@ def call_soon_threadsafe(self, callback: Callable[[], Any]) -> None: logger.debug("Callback successfully queued to bridge stream") except anyio.WouldBlock: logger.warning( - f"Bridge queue full ({self._buffer_size} callbacks), " - "callback dropped - consider increasing buffer size" + f"Bridge queue full ({self._buffer_size} callbacks), " "callback dropped - consider increasing buffer size" ) except Exception as e: logger.error(f"Failed to schedule callback: {e}", exc_info=True) async def stop(self) -> None: - """ - Stop the bridge and clean up resources. + """Stop the bridge and clean up resources. Properly closes the send and receive streams to avoid resource leak warnings during garbage collection. @@ -246,8 +237,7 @@ def is_started(self) -> bool: # Global convenience function def call_soon_threadsafe(callback: Callable[[], Any]) -> None: - """ - Convenience function for thread-safe anyio scheduling. + """Convenience function for thread-safe anyio scheduling. Equivalent to bridge.get_instance().call_soon_threadsafe(callback). diff --git a/src/hother/cancelable/utils/context_bridge.py b/src/hother/cancelable/utils/context_bridge.py index 9f71404..0a57230 100644 --- a/src/hother/cancelable/utils/context_bridge.py +++ b/src/hother/cancelable/utils/context_bridge.py @@ -1,5 +1,4 @@ -""" -Context bridge utilities for thread-safe context variable propagation. +"""Context bridge utilities for thread-safe context variable propagation. 
This module provides utilities to safely propagate context variables between async tasks and OS threads, solving the context variable thread safety issue. @@ -15,8 +14,7 @@ class ContextBridge: - """ - Thread-safe context variable bridge for async-to-thread communication. + """Thread-safe context variable bridge for async-to-thread communication. This class solves the issue where context variables don't propagate to OS threads created by ThreadPoolExecutor, breaking operation tracking @@ -25,8 +23,7 @@ class ContextBridge: @staticmethod def copy_context() -> dict[contextvars.ContextVar[Any], Any]: - """ - Copy current context variables to a dict for thread transport. + """Copy current context variables to a dict for thread transport. Returns: Dictionary mapping context variables to their current values @@ -36,8 +33,7 @@ def copy_context() -> dict[contextvars.ContextVar[Any], Any]: @staticmethod def restore_context(context_dict: dict[contextvars.ContextVar[Any], Any]) -> None: - """ - Restore context variables from a dictionary. + """Restore context variables from a dictionary. Args: context_dict: Dictionary mapping context variables to values @@ -49,8 +45,7 @@ def restore_context(context_dict: dict[contextvars.ContextVar[Any], Any]) -> Non async def run_in_thread_with_context( func: Callable[..., T], *args: Any, executor: ThreadPoolExecutor | None = None, **kwargs: Any ) -> T: - """ - Run function in thread with context variables propagated. + """Run function in thread with context variables propagated. This method safely copies context variables to the thread, runs the function, and returns the result. diff --git a/src/hother/cancelable/utils/decorators.py b/src/hother/cancelable/utils/decorators.py index 3f80095..585c10e 100644 --- a/src/hother/cancelable/utils/decorators.py +++ b/src/hother/cancelable/utils/decorators.py @@ -1,6 +1,4 @@ -""" -Decorators and convenience functions for async cancelation. 
-""" +"""Decorators and convenience functions for async cancelation.""" import inspect from collections.abc import Awaitable, Callable @@ -28,8 +26,7 @@ def cancelable( register_globally: bool = False, inject_param: str | None = "cancelable", ) -> Callable[[Callable[P, Awaitable[R]]], Callable[P, Awaitable[R]]]: - """ - Decorator to make async function cancelable. + """Decorator to make async function cancelable. The decorator automatically creates a Cancelable context and injects it via the specified parameter name (default: 'cancelable'). The decorated @@ -75,10 +72,7 @@ async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R: "register_globally": register_globally, } - if timeout: - cancel = Cancelable.with_timeout(timeout, **cancel_kwargs) - else: - cancel = Cancelable(**cancel_kwargs) + cancel = Cancelable.with_timeout(timeout, **cancel_kwargs) if timeout else Cancelable(**cancel_kwargs) async with cancel: # Inject cancelable if requested @@ -91,7 +85,7 @@ async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R: return await func(*args, **kwargs) # Unreachable - async with block always completes above - assert False, "Unreachable" # pragma: no cover + raise AssertionError("Unreachable") # pragma: no cover # Add attribute to access decorator parameters (dynamic attribute, no type annotation needed) wrapper._cancelable_params = { # type: ignore[attr-defined] @@ -112,8 +106,7 @@ async def with_timeout( operation_id: str | None = None, name: str | None = None, ) -> T: - """ - Run coroutine with timeout. + """Run coroutine with timeout. Args: timeout: Timeout duration @@ -140,12 +133,11 @@ async def with_timeout( return await coro # Unreachable - async with block always completes above - assert False, "Unreachable" # pragma: no cover + raise AssertionError("Unreachable") # pragma: no cover def with_current_operation() -> Callable[[Callable[P, Awaitable[R]]], Callable[P, Awaitable[R]]]: - """ - Decorator that injects current operation into function. 
+ """Decorator that injects current operation into function. The function must have a parameter named 'operation'. The decorator will inject the current operation context if available (may be None @@ -186,8 +178,7 @@ def cancelable_method( name: str | None = None, register_globally: bool = False, ) -> Callable[[Callable[..., Awaitable[R]]], Callable[..., Awaitable[R]]]: - """ - Decorator for async methods that should be cancelable. + """Decorator for async methods that should be cancelable. Similar to @cancelable but designed for class methods. The decorator automatically creates a Cancelable context and injects it as a @@ -228,10 +219,7 @@ async def wrapper(self: Any, *args: Any, **kwargs: Any) -> R: "register_globally": register_globally, } - if timeout: - cancel = Cancelable.with_timeout(timeout, **cancel_kwargs) - else: - cancel = Cancelable(**cancel_kwargs) + cancel = Cancelable.with_timeout(timeout, **cancel_kwargs) if timeout else Cancelable(**cancel_kwargs) async with cancel: # Inject cancelable @@ -242,7 +230,7 @@ async def wrapper(self: Any, *args: Any, **kwargs: Any) -> R: return await func(self, *args, **kwargs) # Unreachable - async with block always completes above - assert False, "Unreachable" # pragma: no cover + raise AssertionError("Unreachable") # pragma: no cover # Add attribute to access decorator parameters wrapper._cancelable_params = { # type: ignore[attr-defined] @@ -263,8 +251,7 @@ def cancelable_with_token( register_globally: bool = False, inject_param: str | None = "cancelable", ) -> Callable[[Callable[P, Awaitable[R]]], Callable[P, Awaitable[R]]]: - """ - Decorator for token-based cancelation. + """Decorator for token-based cancelation. Creates a cancelable operation that can be cancelled via the provided token. Useful for operations that need to be cancelled from other tasks or threads. 
@@ -313,7 +300,7 @@ async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R: return await func(*args, **kwargs) # Unreachable - async with block always completes above - assert False, "Unreachable" # pragma: no cover + raise AssertionError("Unreachable") # pragma: no cover # Add attribute to access decorator parameters wrapper._cancelable_params = { # type: ignore[attr-defined] @@ -336,8 +323,7 @@ def cancelable_with_signal( register_globally: bool = False, inject_param: str | None = "cancelable", ) -> Callable[[Callable[P, Awaitable[R]]], Callable[P, Awaitable[R]]]: - """ - Decorator for signal-based cancelation. + """Decorator for signal-based cancelation. Creates a cancelable operation that responds to OS signals (Unix only). Useful for graceful shutdown of long-running services. @@ -369,7 +355,7 @@ async def long_running_service(cancelable: Cancelable = None): def decorator(func: Callable[P, Awaitable[R]]) -> Callable[P, Awaitable[R]]: @wraps(func) - async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R: + async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R: # pyright: ignore[reportReturnType] cancel = Cancelable.with_signal( *signals, operation_id=operation_id, name=name or func.__name__, register_globally=register_globally ) @@ -381,8 +367,7 @@ async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R: if inject_param in sig.parameters: kwargs[inject_param] = cancel - result = await func(*args, **kwargs) - return result # type: ignore[possibly-unbound] + return await func(*args, **kwargs) # Add attribute to access decorator parameters wrapper._cancelable_params = { # type: ignore[attr-defined] @@ -407,8 +392,7 @@ def cancelable_with_condition( register_globally: bool = False, inject_param: str | None = "cancelable", ) -> Callable[[Callable[P, Awaitable[R]]], Callable[P, Awaitable[R]]]: - """ - Decorator for condition-based cancelation. + """Decorator for condition-based cancelation. 
Creates a cancelable operation that cancels when a condition becomes True. Useful for resource-based cancelation (disk full, memory limit, etc.). @@ -442,7 +426,7 @@ async def data_processing(cancelable: Cancelable = None): def decorator(func: Callable[P, Awaitable[R]]) -> Callable[P, Awaitable[R]]: @wraps(func) - async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R: + async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R: # pyright: ignore[reportReturnType] cancel = Cancelable.with_condition( condition, check_interval=check_interval, @@ -459,8 +443,7 @@ async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R: if inject_param in sig.parameters: kwargs[inject_param] = cancel - result = await func(*args, **kwargs) - return result # type: ignore[possibly-unbound] + return await func(*args, **kwargs) # Add attribute to access decorator parameters wrapper._cancelable_params = { # type: ignore[attr-defined] @@ -485,8 +468,7 @@ def cancelable_combine( register_globally: bool = False, inject_param: str | None = "cancelable", ) -> Callable[[Callable[P, Awaitable[R]]], Callable[P, Awaitable[R]]]: - """ - Decorator for combining multiple cancelation sources. + """Decorator for combining multiple cancelation sources. Creates a cancelable operation that cancels when ANY of the provided cancelables trigger. Useful for operations with multiple cancelation conditions. 
@@ -521,7 +503,7 @@ async def resilient_operation(cancelable: Cancelable = None): def decorator(func: Callable[P, Awaitable[R]]) -> Callable[P, Awaitable[R]]: @wraps(func) - async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R: + async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R: # pyright: ignore[reportReturnType] # Combine all cancelables if not cancelables: raise ValueError("At least one cancelable must be provided to cancelable_combine") @@ -530,10 +512,7 @@ async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R: # Note: We use the provided cancelables as-is since they may have # internal state and sources already configured first = cancelables[0] - if len(cancelables) > 1: - cancel = first.combine(*cancelables[1:]) - else: - cancel = first + cancel = first.combine(*cancelables[1:]) if len(cancelables) > 1 else first # Determine the effective name # Always prefer explicit name, then function name (for decorator consistency) @@ -557,8 +536,7 @@ async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R: if inject_param in sig.parameters: kwargs[inject_param] = final_cancel - result = await func(*args, **kwargs) - return result # type: ignore[possibly-unbound] + return await func(*args, **kwargs) return wrapper @@ -570,8 +548,7 @@ def with_cancelable( inject: bool = False, inject_param: str = "cancelable", ) -> Callable[[Callable[P, Awaitable[R]]], Callable[P, Awaitable[R]]]: - """ - Decorator that wraps a function with an existing Cancelable instance. + """Decorator that wraps a function with an existing Cancelable instance. This decorator allows you to use a pre-configured Cancelable context with your async function. Unlike @cancelable which creates a new context, diff --git a/src/hother/cancelable/utils/logging.py b/src/hother/cancelable/utils/logging.py index 75a6fa5..a09e470 100644 --- a/src/hother/cancelable/utils/logging.py +++ b/src/hother/cancelable/utils/logging.py @@ -1,5 +1,4 @@ -""" -Logging utilities for the cancelable library. 
+"""Logging utilities for the cancelable library. Following Python library best practices, this module provides logger access but does not configure logging. Applications using cancelable should configure @@ -7,16 +6,13 @@ """ import logging -from typing import Optional - # Add a NullHandler to prevent "No handler found" warnings logging.getLogger("hother.cancelable").addHandler(logging.NullHandler()) -def get_logger(name: Optional[str] = None) -> logging.Logger: - """ - Get a standard library logger instance. +def get_logger(name: str | None = None) -> logging.Logger: + """Get a standard library logger instance. Args: name: Logger name. If None, uses the calling module's name @@ -47,9 +43,6 @@ def get_logger(name: Optional[str] = None) -> logging.Logger: import inspect frame = inspect.currentframe() - if frame and frame.f_back: - name = frame.f_back.f_globals.get("__name__", "cancelable") - else: - name = "cancelable" + name = frame.f_back.f_globals.get("__name__", "cancelable") if frame and frame.f_back else "cancelable" return logging.getLogger(name) diff --git a/src/hother/cancelable/utils/streams.py b/src/hother/cancelable/utils/streams.py index 1fc8984..4cde13f 100644 --- a/src/hother/cancelable/utils/streams.py +++ b/src/hother/cancelable/utils/streams.py @@ -1,6 +1,4 @@ -""" -Stream utilities for async cancelation. 
-""" +"""Stream utilities for async cancelation.""" from collections.abc import AsyncIterator, Callable from datetime import timedelta @@ -12,12 +10,15 @@ from hother.cancelable.utils.logging import get_logger if TYPE_CHECKING: - from ..core.token import CancelationToken + from hother.cancelable.core.token import CancelationToken logger = get_logger(__name__) T = TypeVar("T") +# Maximum items to keep in buffer to prevent unbounded memory growth +_MAX_BUFFER_SIZE = 1000 + async def cancelable_stream( stream: AsyncIterator[T], @@ -29,8 +30,7 @@ async def cancelable_stream( operation_id: str | None = None, name: str | None = None, ) -> AsyncIterator[T]: - """ - Make any async iterator cancelable with various options. + """Make any async iterator cancelable with various options. Args: stream: Async iterator to wrap @@ -99,8 +99,7 @@ async def report_wrapper(op_id: str, msg: Any, meta: dict[str, Any] | None): class CancelableAsyncIterator(AsyncIterator[T]): - """ - Wrapper class that makes any async iterator cancelable. + """Wrapper class that makes any async iterator cancelable. This provides a class-based alternative to the cancelable_stream function. """ @@ -112,8 +111,7 @@ def __init__( report_interval: int | None = None, buffer_partial: bool = False, ): - """ - Initialize cancelable iterator. + """Initialize cancelable iterator. Args: iterator: Async iterator to wrap @@ -147,8 +145,8 @@ async def __anext__(self) -> T: self._count += 1 if self._buffer is not None: self._buffer.append(item) - if len(self._buffer) > 1000: - self._buffer = self._buffer[-1000:] + if len(self._buffer) > _MAX_BUFFER_SIZE: + self._buffer = self._buffer[-_MAX_BUFFER_SIZE:] # Report progress if needed if self._report_interval and self._count % self._report_interval == 0: @@ -200,8 +198,7 @@ async def chunked_cancelable_stream( chunk_size: int, cancelable: Cancelable, ) -> AsyncIterator[list[T]]: - """ - Process stream in chunks with cancelation support. 
+ """Process stream in chunks with cancelation support. Args: stream: Source async iterator diff --git a/src/hother/cancelable/utils/testing.py b/src/hother/cancelable/utils/testing.py index 204c2b3..d779624 100644 --- a/src/hother/cancelable/utils/testing.py +++ b/src/hother/cancelable/utils/testing.py @@ -1,6 +1,4 @@ -""" -Testing utilities for async cancelation. -""" +"""Testing utilities for async cancelation.""" from collections.abc import AsyncIterator, Callable from contextlib import asynccontextmanager @@ -21,8 +19,7 @@ class MockCancelationToken(CancelationToken): - """ - Mock cancelation token for testing. + """Mock cancelation token for testing. Provides additional testing capabilities like scheduled cancelation. """ @@ -52,8 +49,7 @@ async def schedule_cancel( reason: CancelationReason = CancelationReason.MANUAL, message: str | None = None, ) -> None: - """ - Schedule cancelation after a delay. + """Schedule cancelation after a delay. Args: delay: Delay in seconds before cancelation @@ -75,9 +71,7 @@ def get_cancel_count(self) -> int: class OperationRecorder: - """ - Records operation events for testing assertions. - """ + """Records operation events for testing assertions.""" def __init__(self): self.events: list[dict[str, Any]] = [] @@ -102,8 +96,7 @@ async def record_event( ) def attach_to_cancellable(self, cancelable: Cancelable) -> Cancelable: - """ - Attach recorder to a cancelable to track its events. + """Attach recorder to a cancelable to track its events. Args: cancelable: Cancelable to track @@ -146,8 +139,7 @@ def assert_event_occurred( event_type: str, timeout: float = 1.0, ) -> dict[str, Any]: - """ - Assert that an event occurred (synchronous check). + """Assert that an event occurred (synchronous check). Args: operation_id: Operation ID to check @@ -172,8 +164,7 @@ def assert_final_status( operation_id: str, expected_status: OperationStatus, ) -> None: - """ - Assert the final status of an operation. 
+ """Assert the final status of an operation. Args: operation_id: Operation ID to check @@ -195,8 +186,7 @@ async def create_slow_stream( delay: float = 0.1, cancelable: Cancelable | None = None, ) -> AsyncIterator[T]: - """ - Create a slow async stream for testing cancelation. + """Create a slow async stream for testing cancelation. Args: items: Items to yield @@ -221,8 +211,7 @@ async def run_with_timeout_test( expected_timeout: float, tolerance: float = 0.1, ) -> None: - """ - Test that a coroutine times out within expected duration. + """Test that a coroutine times out within expected duration. Args: coro: Coroutine to run @@ -242,9 +231,7 @@ async def run_with_timeout_test( duration = anyio.current_time() - start_time if abs(duration - expected_timeout) > tolerance: - raise AssertionError( - f"Timeout occurred after {duration:.2f}s, expected {expected_timeout:.2f}s ± {tolerance:.2f}s" - ) + raise AssertionError(f"Timeout occurred after {duration:.2f}s, expected {expected_timeout:.2f}s ± {tolerance:.2f}s") @asynccontextmanager @@ -252,8 +239,7 @@ async def assert_cancelation_within( min_time: float, max_time: float, ) -> AsyncIterator[MockCancelationToken]: - """ - Context manager that asserts cancelation occurs within a time range. + """Context manager that asserts cancelation occurs within a time range. Args: min_time: Minimum time before cancelation @@ -282,9 +268,7 @@ async def assert_cancelation_within( class CancelationScenario: - """ - Test scenario builder for cancelation testing. - """ + """Test scenario builder for cancelation testing.""" def __init__(self, name: str): self.name = name @@ -345,8 +329,7 @@ async def run( *args: Any, **kwargs: Any, ) -> OperationRecorder: - """ - Run the scenario. + """Run the scenario. 
Args: operation: Async callable to test diff --git a/src/hother/cancelable/utils/threading_bridge.py b/src/hother/cancelable/utils/threading_bridge.py index 346aee8..7ccee2e 100644 --- a/src/hother/cancelable/utils/threading_bridge.py +++ b/src/hother/cancelable/utils/threading_bridge.py @@ -1,5 +1,4 @@ -""" -Thread-safe wrapper for OperationRegistry. +"""Thread-safe wrapper for OperationRegistry. Provides synchronous API for accessing the registry from threads. """ @@ -8,7 +7,7 @@ import threading from datetime import datetime -from typing import TYPE_CHECKING, Any, Optional +from typing import TYPE_CHECKING, Any from hother.cancelable.core.models import CancelationReason, OperationContext, OperationStatus from hother.cancelable.core.registry import OperationRegistry @@ -18,8 +17,7 @@ class ThreadSafeRegistry: - """ - Thread-safe wrapper for OperationRegistry. + """Thread-safe wrapper for OperationRegistry. Provides synchronous API for accessing the registry from threads. All methods are thread-safe and can be called from any thread. @@ -55,9 +53,8 @@ def __init__(self): """Initialize thread-safe registry wrapper.""" self._registry = OperationRegistry.get_instance() - def get_operation(self, operation_id: str) -> Optional["Cancelable"]: - """ - Get operation by ID. + def get_operation(self, operation_id: str) -> Cancelable | None: + """Get operation by ID. Args: operation_id: Operation ID to look up @@ -73,8 +70,7 @@ def list_operations( parent_id: str | None = None, name_pattern: str | None = None, ) -> list[OperationContext]: - """ - List operations with optional filtering. + """List operations with optional filtering. Args: status: Filter by operation status @@ -87,8 +83,7 @@ def list_operations( return self._registry.list_operations_sync(status, parent_id, name_pattern) def get_statistics(self) -> dict[str, Any]: - """ - Get registry statistics. + """Get registry statistics. 
Returns: Dictionary with operation statistics containing: @@ -107,8 +102,7 @@ def get_history( status: OperationStatus | None = None, since: datetime | None = None, ) -> list[OperationContext]: - """ - Get operation history. + """Get operation history. Args: limit: Maximum number of operations to return @@ -126,8 +120,7 @@ def cancel_operation( reason: CancelationReason = CancelationReason.MANUAL, message: str | None = None, ) -> None: - """ - Cancel a specific operation. + """Cancel a specific operation. Schedules cancelation to be executed asynchronously and returns immediately. @@ -148,8 +141,7 @@ def cancel_all( reason: CancelationReason = CancelationReason.MANUAL, message: str | None = None, ) -> None: - """ - Cancel all operations with optional status filter. + """Cancel all operations with optional status filter. Schedules cancelation to be executed asynchronously and returns immediately. @@ -166,13 +158,12 @@ def cancel_all( # Singleton pattern (optional - users can create instances directly or use singleton) - _instance: "ThreadSafeRegistry | None" = None + _instance: ThreadSafeRegistry | None = None _lock = threading.Lock() @classmethod - def get_instance(cls) -> "ThreadSafeRegistry": - """ - Get singleton instance of thread-safe registry. + def get_instance(cls) -> ThreadSafeRegistry: + """Get singleton instance of thread-safe registry. Thread-safe lazy initialization. diff --git a/tests/conftest.py b/tests/conftest.py index c3f82a7..03aeb08 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,7 +5,6 @@ import asyncio import time from contextlib import asynccontextmanager -from typing import Optional import pytest @@ -21,13 +20,13 @@ def anyio_backend(request): async def assert_cancelled_within(timeout: float, tolerance: float = 0.5): """ Context manager to assert that code is cancelled within a specific timeframe. 
- + Args: timeout: Expected timeout in seconds tolerance: Acceptable deviation from expected timeout (default 0.5s) """ start_time = time.time() - + try: yield except asyncio.CancelledError: @@ -44,15 +43,15 @@ async def assert_cancelled_within(timeout: float, tolerance: float = 0.5): async def clean_registry(): """Fixture that provides a clean OperationRegistry and cleans up after test.""" from hother.cancelable.core.registry import OperationRegistry - + # Clear the singleton instance before test OperationRegistry._instance = None - + # Get a fresh registry registry = OperationRegistry.get_instance() - + yield registry - + # Cleanup after test await registry.clear_all() - OperationRegistry._instance = None \ No newline at end of file + OperationRegistry._instance = None diff --git a/tests/integration/test_base_integration.py b/tests/integration/test_base_integration.py index 0bb9eac..a649b69 100644 --- a/tests/integration/test_base_integration.py +++ b/tests/integration/test_base_integration.py @@ -12,6 +12,7 @@ # Check for optional dependencies at module level try: import fastapi + _has_fastapi = True except ImportError: _has_fastapi = False @@ -121,4 +122,3 @@ async def test_cancellable_websocket(self): assert text == "test message" assert json_data == {"key": "value"} - diff --git a/tests/integration/test_fastapi.py b/tests/integration/test_fastapi.py index b54c611..1606cf5 100644 --- a/tests/integration/test_fastapi.py +++ b/tests/integration/test_fastapi.py @@ -2,22 +2,32 @@ Integration tests for FastAPI integration. 
""" -from typing import Any from unittest.mock import AsyncMock, MagicMock, Mock import anyio import pytest -from fastapi import HTTPException, Request + +# Check for optional fastapi dependency +try: + from fastapi import HTTPException, Request + + from hother.cancelable.integrations.fastapi import ( + CancelableWebSocket, + RequestCancelationMiddleware, + cancelable_dependency, + cancelable_streaming_response, + get_request_token, + with_cancelation, + ) + + _has_fastapi = True +except ImportError: + _has_fastapi = False from hother.cancelable import Cancelable, CancelationReason -from hother.cancelable.integrations.fastapi import ( - CancelableWebSocket, - RequestCancelationMiddleware, - cancelable_dependency, - cancelable_streaming_response, - get_request_token, - with_cancelation, -) + +# Skip all tests in this module if fastapi is not available +pytestmark = pytest.mark.skipif(not _has_fastapi, reason="fastapi not installed") class TestRequestCancelationMiddleware: @@ -77,7 +87,7 @@ async def test_middleware_non_http_request(self): @pytest.mark.anyio async def test_middleware_client_disconnect(self): """Test middleware handles client disconnect.""" - app = AsyncMock() + AsyncMock() # Make app wait so disconnect can be detected async def slow_app(scope, receive, send): @@ -179,6 +189,7 @@ class TestWithCancelation: @pytest.mark.anyio async def test_decorator_success(self): """Test decorator with successful execution.""" + @with_cancelation(timeout=1.0) async def test_endpoint(request: Request): return {"status": "ok"} @@ -197,6 +208,7 @@ async def test_endpoint(request: Request): @pytest.mark.anyio async def test_decorator_timeout(self): """Test decorator with timeout cancelation.""" + @with_cancelation(timeout=0.05, raise_on_cancel=True) async def test_endpoint(request: Request): await anyio.sleep(1.0) # Will timeout @@ -279,6 +291,7 @@ async def test_endpoint(request: Request): @pytest.mark.anyio async def test_decorator_no_raise(self): """Test decorator 
with raise_on_cancel=False.""" + @with_cancelation(timeout=0.05, raise_on_cancel=False) async def test_endpoint(request: Request): await anyio.sleep(1.0) @@ -302,6 +315,7 @@ class TestCancelableStreamingResponse: @pytest.mark.anyio async def test_streaming_response_success(self): """Test successful streaming response.""" + async def generate(): for i in range(3): await anyio.sleep(0.01) @@ -309,9 +323,7 @@ async def generate(): cancelable = Cancelable(name="test_stream") - response = await cancelable_streaming_response( - generate(), cancelable, media_type="text/plain" - ) + response = await cancelable_streaming_response(generate(), cancelable, media_type="text/plain") # Collect streamed data chunks = [] @@ -325,6 +337,7 @@ async def generate(): @pytest.mark.anyio async def test_streaming_response_cancelled(self): """Test streaming response with cancelation.""" + async def generate(): for i in range(100): await anyio.sleep(0.01) @@ -332,9 +345,7 @@ async def generate(): cancelable = Cancelable.with_timeout(0.05, name="test_stream") - response = await cancelable_streaming_response( - generate(), cancelable, media_type="text/plain" - ) + response = await cancelable_streaming_response(generate(), cancelable, media_type="text/plain") chunks = [] try: @@ -350,6 +361,7 @@ async def generate(): @pytest.mark.anyio async def test_streaming_response_sse_cancelled(self): """Test SSE streaming with cancelation message.""" + async def generate(): for i in range(100): await anyio.sleep(0.01) @@ -357,9 +369,7 @@ async def generate(): cancelable = Cancelable.with_timeout(0.05, name="test_sse") - response = await cancelable_streaming_response( - generate(), cancelable, media_type="text/event-stream" - ) + response = await cancelable_streaming_response(generate(), cancelable, media_type="text/event-stream") chunks = [] try: @@ -371,7 +381,7 @@ async def generate(): # Last chunk should be cancelation message for SSE if chunks: - last_chunk = chunks[-1].decode() if 
isinstance(chunks[-1], bytes) else chunks[-1] + chunks[-1].decode() if isinstance(chunks[-1], bytes) else chunks[-1] # May or may not have cancelation message depending on timing assert len(chunks) > 0 diff --git a/tests/integration/test_real_world.py b/tests/integration/test_real_world.py index 34ed432..72b6172 100644 --- a/tests/integration/test_real_world.py +++ b/tests/integration/test_real_world.py @@ -26,7 +26,9 @@ async def simulate_download(url: str, cancelable: Cancelable): downloaded_chunks.append(chunk) if (i + 1) % 5 == 0: - await cancelable.report_progress(f"Downloaded {i + 1}/{total_chunks} chunks", {"progress": (i + 1) / total_chunks * 100}) + await cancelable.report_progress( + f"Downloaded {i + 1}/{total_chunks} chunks", {"progress": (i + 1) / total_chunks * 100} + ) # Check for cancelation await cancelable._token.check_async() @@ -128,8 +130,8 @@ async def cancel_after_delay(): assert not db.in_transaction @pytest.mark.anyio - async def test_multi_stage_pipeline(self): - """Test multi-stage data processing pipeline.""" + async def test_multi_stage_pipeline_success(self): + """Test multi-stage data processing pipeline without cancelation.""" async def stage1_fetch(cancelable: Cancelable) -> list: """Fetch data from source.""" @@ -162,7 +164,7 @@ async def stage3_save(data: list, cancelable: Cancelable) -> int: await cancelable.report_progress("Saving data") saved_count = 0 - for item in data: + for _item in data: await anyio.sleep(0.01) # Simulate save saved_count += 1 @@ -186,13 +188,58 @@ async def stage3_save(data: list, cancelable: Cancelable) -> int: saved = await stage3_save(processed_data, cancel) assert saved == 10 + @pytest.mark.anyio + async def test_multi_stage_pipeline_cancelation(self): + """Test pipeline cancelation at different stages.""" + + async def stage1_fetch(cancelable: Cancelable) -> list: + """Fetch data from source.""" + await cancelable.report_progress("Fetching data") + data = [] + + for i in range(10): + await 
anyio.sleep(0.05) + data.append({"id": i, "value": i * 10}) + await cancelable._token.check_async() + + await cancelable.report_progress(f"Fetched {len(data)} items") + return data + + async def stage2_process(data: list, cancelable: Cancelable) -> list: + """Process the data.""" + await cancelable.report_progress("Processing data") + processed = [] + + for item in data: + await anyio.sleep(0.02) + processed.append({**item, "processed": True, "score": item["value"] * 2}) + await cancelable._token.check_async() + + await cancelable.report_progress(f"Processed {len(processed)} items") + return processed + + async def stage3_save(data: list, cancelable: Cancelable) -> int: + """Save the processed data.""" + await cancelable.report_progress("Saving data") + saved_count = 0 + + for _item in data: + await anyio.sleep(0.01) + # Simulate save + saved_count += 1 + await cancelable._token.check_async() + + await cancelable.report_progress(f"Saved {saved_count} items") + return saved_count + # Test pipeline cancelation at different stages for cancel_after in [0.3, 0.8, 1.2]: token = CancelationToken() - async def cancel_pipeline(): - await anyio.sleep(cancel_after) - await token.cancel() + # Bind loop variables to avoid B023 closure issue + async def cancel_pipeline(delay=cancel_after, tok=token): + await anyio.sleep(delay) + await tok.cancel() stage_reached = 0 @@ -207,7 +254,7 @@ async def cancel_pipeline(): processed_data = await stage2_process(raw_data, cancel) stage_reached = 2 - saved = await stage3_save(processed_data, cancel) + await stage3_save(processed_data, cancel) stage_reached = 3 except anyio.get_cancelled_exc_class(): diff --git a/tests/integration/test_shielding.py b/tests/integration/test_shielding.py index 7a2b59c..d7f8cf5 100644 --- a/tests/integration/test_shielding.py +++ b/tests/integration/test_shielding.py @@ -64,11 +64,10 @@ async def test_nested_shields(self): @pytest.mark.anyio async def test_shield_status(self): """Test shield status tracking.""" 
- async with Cancelable() as parent: - async with parent.shield() as shielded: - assert shielded.context.status == OperationStatus.SHIELDED - assert shielded.context.metadata.get("shielded") is True - assert shielded.context.parent_id == parent.context.id + async with Cancelable() as parent, parent.shield() as shielded: + assert shielded.context.status == OperationStatus.SHIELDED + assert shielded.context.metadata.get("shielded") is True + assert shielded.context.parent_id == parent.context.id @pytest.mark.anyio async def test_shield_with_manual_cancelation(self): diff --git a/tests/performance/test_overhead.py b/tests/performance/test_overhead.py index 497f15d..abf5955 100644 --- a/tests/performance/test_overhead.py +++ b/tests/performance/test_overhead.py @@ -175,7 +175,9 @@ async def async_callback(ctx): with_callback_times = [] for _ in range(iterations): start = time.perf_counter() - async with Cancelable().on_progress(progress_callback).on_start(async_callback).on_complete(async_callback) as cancel: + async with ( + Cancelable().on_progress(progress_callback).on_start(async_callback).on_complete(async_callback) as cancel + ): await cancel.report_progress("test") with_callback_times.append(time.perf_counter() - start) diff --git a/tests/performance/test_performance.py b/tests/performance/test_performance.py index 70af645..f71bc75 100644 --- a/tests/performance/test_performance.py +++ b/tests/performance/test_performance.py @@ -300,7 +300,9 @@ async def async_callback(ctx): with_callback_times = [] for _ in range(iterations): start = time.perf_counter() - async with Cancelable().on_progress(progress_callback).on_start(async_callback).on_complete(async_callback) as cancel: + async with ( + Cancelable().on_progress(progress_callback).on_start(async_callback).on_complete(async_callback) as cancel + ): await cancel.report_progress("test") with_callback_times.append(time.perf_counter() - start) @@ -365,7 +367,7 @@ async def large_stream(): async def 
process_until_count(): nonlocal processed async with Cancelable() as cancel: - async for item in cancel.stream(large_stream()): + async for _item in cancel.stream(large_stream()): processed += 1 if processed >= cancel_at: await cancel.cancel() diff --git a/tests/performance/test_scalability.py b/tests/performance/test_scalability.py index c0075bb..17048c8 100644 --- a/tests/performance/test_scalability.py +++ b/tests/performance/test_scalability.py @@ -94,7 +94,7 @@ async def large_stream(): async def process_until_count(): nonlocal processed async with Cancelable() as cancel: - async for item in cancel.stream(large_stream()): + async for _item in cancel.stream(large_stream()): processed += 1 if processed >= cancel_at: await cancel.cancel() diff --git a/tests/streaming/test_simulator.py b/tests/streaming/test_simulator.py index 2c42b17..ec40916 100644 --- a/tests/streaming/test_simulator.py +++ b/tests/streaming/test_simulator.py @@ -23,13 +23,7 @@ def test_default_config(self): def test_custom_config(self): """Test custom configuration values.""" - config = StreamConfig( - chunk_size=50, - stall_probability=0.2, - burst_probability=0.3, - stall_duration=1.0, - burst_size=5 - ) + config = StreamConfig(chunk_size=50, stall_probability=0.2, burst_probability=0.3, stall_duration=1.0, burst_size=5) assert config.chunk_size == 50 assert config.stall_probability == 0.2 assert config.burst_probability == 0.3 @@ -67,11 +61,7 @@ def test_random_chunk_size_fixed(self): def test_random_chunk_size_variable(self): """Test variable chunk size.""" - config = StreamConfig( - chunk_size=100, - variable_chunk_size=True, - chunk_size_range=(50, 150) - ) + config = StreamConfig(chunk_size=100, variable_chunk_size=True, chunk_size_range=(50, 150)) chunk_size = get_random_chunk_size(config) # Should be between 50 and 150 @@ -79,11 +69,7 @@ def test_random_chunk_size_variable(self): def test_random_chunk_size_variation(self): """Test that random chunk size varies.""" - config = 
StreamConfig( - chunk_size=100, - variable_chunk_size=True, - chunk_size_range=(80, 120) - ) + config = StreamConfig(chunk_size=100, variable_chunk_size=True, chunk_size_range=(80, 120)) sizes = [get_random_chunk_size(config) for _ in range(10)] # Should have some variation (not all the same) @@ -104,7 +90,7 @@ def test_random_chunk_size_with_weights(self): chunk_size=10, variable_chunk_size=True, chunk_size_range=(5, 10), - chunk_size_weights=[10, 5, 3, 1, 0, 0] # Heavy bias toward smaller sizes + chunk_size_weights=[10, 5, 3, 1, 0, 0], # Heavy bias toward smaller sizes ) # Generate many samples to test the weighted selection @@ -121,7 +107,7 @@ def test_random_chunk_size_with_partial_weights(self): chunk_size=10, variable_chunk_size=True, chunk_size_range=(5, 15), # 11 possible values (5-15) - chunk_size_weights=[10, 5] # Only 2 weights, rest should be filled with 1 + chunk_size_weights=[10, 5], # Only 2 weights, rest should be filled with 1 ) sizes = [get_random_chunk_size(config) for _ in range(50)] @@ -165,7 +151,7 @@ async def test_simulation_with_stalls(self): config = StreamConfig( chunk_size=4, stall_probability=1.0, # Always stall - stall_duration=0.1 + stall_duration=0.1, ) events = [] @@ -188,7 +174,7 @@ async def test_simulation_with_bursts(self): config = StreamConfig( chunk_size=3, burst_probability=1.0, # Always burst - burst_size=2 + burst_size=2, ) events = [] @@ -321,7 +307,7 @@ async def test_simulation_with_stall_progress_reporting(self): config = StreamConfig( chunk_size=5, stall_probability=1.0, # Force stalls - stall_duration=0.01 # Very short for testing + stall_duration=0.01, # Very short for testing ) progress_reports = [] @@ -352,7 +338,7 @@ async def test_burst_ends_mid_text_boundary(self): chunk_size=2, burst_probability=1.0, # Force burst burst_size=10, # Large enough to exceed text length - stall_probability=0.0 # No stalls for predictable test + stall_probability=0.0, # No stalls for predictable test ) events = [] @@ -371,4 +357,4 
@@ async def test_burst_ends_mid_text_boundary(self): # Should have a complete event complete_events = [e for e in events if e.get("type") == "complete"] - assert len(complete_events) == 1 \ No newline at end of file + assert len(complete_events) == 1 diff --git a/tests/test_sources.py b/tests/test_sources.py index 024db9a..ea947cd 100644 --- a/tests/test_sources.py +++ b/tests/test_sources.py @@ -2,7 +2,6 @@ Tests for cancelation sources. """ -import signal from datetime import timedelta import anyio @@ -13,7 +12,6 @@ from hother.cancelable.sources.base import CancelationSource from hother.cancelable.sources.composite import AllOfSource, AnyOfSource, CompositeSource from hother.cancelable.sources.condition import ConditionSource -from hother.cancelable.sources.signal import SignalSource from hother.cancelable.sources.timeout import TimeoutSource @@ -29,7 +27,6 @@ async def test_timeout_basic(self): assert not source.triggered # Test with actual cancelable - from hother.cancelable import Cancelable start = anyio.current_time() with pytest.raises(anyio.get_cancelled_exc_class()): @@ -60,7 +57,6 @@ async def test_timeout_with_scope(self): source = TimeoutSource(0.1) # Create a cancelable that uses this source - from hother.cancelable import Cancelable cancelable = Cancelable() cancelable._sources.append(source) @@ -86,7 +82,6 @@ def condition(): return check_count >= 3 # Test with actual cancelable - from hother.cancelable import Cancelable start = anyio.current_time() with pytest.raises(anyio.get_cancelled_exc_class()): @@ -109,7 +104,6 @@ async def async_condition(): return check_count >= 2 # Test with actual cancelable - from hother.cancelable import Cancelable with pytest.raises(anyio.get_cancelled_exc_class()): async with Cancelable.with_condition(async_condition, check_interval=0.1): @@ -130,7 +124,6 @@ def faulty_condition(): return call_count >= 4 # Test with actual cancelable - from hother.cancelable import Cancelable with 
pytest.raises(anyio.get_cancelled_exc_class()): async with Cancelable.with_condition(faulty_condition, check_interval=0.05): @@ -170,7 +163,6 @@ class TestCompositeSource: @pytest.mark.anyio async def test_composite_any_of(self): """Test composite source with ANY logic.""" - from hother.cancelable import Cancelable # Create two cancelables with different timeouts cancel1 = Cancelable.with_timeout(0.2) @@ -202,7 +194,6 @@ async def test_any_of_alias(self): @pytest.mark.anyio async def test_composite_multiple_types(self): """Test combining different source types.""" - from hother.cancelable import Cancelable check_count = 0 @@ -267,6 +258,7 @@ async def test_composite_reason_propagation(self): """Test that composite propagates reason from triggered source.""" # Create sources with different reasons check_count = 0 + def condition(): nonlocal check_count check_count += 1 @@ -298,6 +290,7 @@ async def test_composite_custom_name(self): @pytest.mark.anyio async def test_composite_stop_monitoring_with_errors(self): """Test composite handles errors when stopping sources.""" + # Create a mock source that raises error on stop class FailingSource(CancelationSource): def __init__(self): @@ -323,6 +316,7 @@ async def stop_monitoring(self): @pytest.mark.anyio async def test_composite_monitor_source_exception(self): """Test composite handles exception in source monitoring.""" + # Create a source that raises during start_monitoring class ExceptionSource(CancelationSource): def __init__(self): @@ -388,6 +382,7 @@ def make_condition(idx, threshold): def condition(): counts[idx] += 1 return counts[idx] >= threshold + return condition source1 = ConditionSource(make_condition(0, 2), check_interval=0.05) @@ -519,6 +514,7 @@ def make_condition(idx, threshold): def condition(): counts[idx] += 1 return counts[idx] >= threshold + return condition # Different thresholds mean they trigger at different times @@ -569,6 +565,7 @@ async def test_all_of_stop_monitoring(self): @pytest.mark.anyio 
async def test_all_of_monitor_source_exception(self): """Test AllOfSource handles exception in source monitoring.""" + # Create a source that raises during start_monitoring class ExceptionSource(CancelationSource): def __init__(self): @@ -593,7 +590,3 @@ async def stop_monitoring(self): await anyio.sleep(0.1) await all_of.stop_monitoring() - - - - diff --git a/tests/unit/test_anyio_bridge.py b/tests/unit/test_anyio_bridge.py index f70536e..69c0800 100644 --- a/tests/unit/test_anyio_bridge.py +++ b/tests/unit/test_anyio_bridge.py @@ -3,6 +3,7 @@ """ import asyncio +import contextlib import threading from unittest.mock import AsyncMock, MagicMock, patch @@ -67,9 +68,10 @@ async def test_bridge_start_processes_pending(self): bridge.call_soon_threadsafe(lambda: called.append("pending2")) # Mock the streams and worker to avoid infinite loop - with patch('anyio.create_memory_object_stream') as mock_stream, \ - patch.object(bridge, '_worker', new_callable=AsyncMock) as mock_worker: - + with ( + patch("anyio.create_memory_object_stream") as mock_stream, + patch.object(bridge, "_worker", new_callable=AsyncMock), + ): mock_send, mock_receive = MagicMock(), MagicMock() mock_stream.return_value = (mock_send, mock_receive) @@ -89,10 +91,8 @@ async def test_bridge_start_processes_pending(self): # Cancel the start task to avoid hanging start_task.cancel() - try: + with contextlib.suppress(asyncio.CancelledError): await start_task - except asyncio.CancelledError: - pass @pytest.mark.anyio async def test_call_soon_threadsafe_after_start(self): @@ -100,9 +100,7 @@ async def test_call_soon_threadsafe_after_start(self): bridge = AnyioBridge() # Mock the streams - with patch('anyio.create_memory_object_stream') as mock_stream, \ - patch.object(bridge, '_worker', new_callable=AsyncMock): - + with patch("anyio.create_memory_object_stream") as mock_stream, patch.object(bridge, "_worker", new_callable=AsyncMock): mock_send, mock_receive = MagicMock(), MagicMock() mock_stream.return_value = 
(mock_send, mock_receive) @@ -120,19 +118,15 @@ def callback(): mock_send.send_nowait.assert_called_with(callback) start_task.cancel() - try: + with contextlib.suppress(asyncio.CancelledError): await start_task - except asyncio.CancelledError: - pass @pytest.mark.anyio async def test_bridge_queue_full_handling(self): """Test behavior when bridge queue is full.""" bridge = AnyioBridge() - with patch('anyio.create_memory_object_stream') as mock_stream, \ - patch.object(bridge, '_worker', new_callable=AsyncMock): - + with patch("anyio.create_memory_object_stream") as mock_stream, patch.object(bridge, "_worker", new_callable=AsyncMock): mock_send, mock_receive = MagicMock(), MagicMock() mock_send.send_nowait.side_effect = anyio.WouldBlock() mock_stream.return_value = (mock_send, mock_receive) @@ -149,10 +143,8 @@ def callback(): bridge.call_soon_threadsafe(callback) start_task.cancel() - try: + with contextlib.suppress(asyncio.CancelledError): await start_task - except asyncio.CancelledError: - pass @pytest.mark.anyio async def test_convenience_function(self): @@ -161,7 +153,7 @@ async def test_convenience_function(self): AnyioBridge._instance = None # Mock the instance - with patch.object(AnyioBridge, 'get_instance') as mock_get_instance: + with patch.object(AnyioBridge, "get_instance") as mock_get_instance: mock_bridge = MagicMock() mock_get_instance.return_value = mock_bridge @@ -192,9 +184,7 @@ async def test_bridge_start_already_started_warning(self): """Test warning when calling start() on already started bridge.""" bridge = AnyioBridge() - with patch('anyio.create_memory_object_stream') as mock_stream, \ - patch.object(bridge, '_worker', new_callable=AsyncMock): - + with patch("anyio.create_memory_object_stream") as mock_stream, patch.object(bridge, "_worker", new_callable=AsyncMock): mock_send, mock_receive = MagicMock(), MagicMock() mock_stream.return_value = (mock_send, mock_receive) @@ -214,14 +204,10 @@ async def 
test_bridge_start_already_started_warning(self): # Cleanup start_task1.cancel() start_task2.cancel() - try: + with contextlib.suppress(asyncio.CancelledError): await start_task1 - except asyncio.CancelledError: - pass - try: + with contextlib.suppress(asyncio.CancelledError): await start_task2 - except asyncio.CancelledError: - pass @pytest.mark.anyio async def test_bridge_pending_callback_queue_full_on_startup(self): @@ -229,15 +215,19 @@ async def test_bridge_pending_callback_queue_full_on_startup(self): bridge = AnyioBridge(buffer_size=1) # Queue multiple callbacks before starting - for i in range(5): + for _i in range(5): bridge.call_soon_threadsafe(lambda: None) - with patch('anyio.create_memory_object_stream') as mock_stream, \ - patch.object(bridge, '_worker', new_callable=AsyncMock): - + with patch("anyio.create_memory_object_stream") as mock_stream, patch.object(bridge, "_worker", new_callable=AsyncMock): mock_send, mock_receive = MagicMock(), MagicMock() # Make send_nowait raise WouldBlock after first callback - mock_send.send_nowait.side_effect = [None, anyio.WouldBlock(), anyio.WouldBlock(), anyio.WouldBlock(), anyio.WouldBlock()] + mock_send.send_nowait.side_effect = [ + None, + anyio.WouldBlock(), + anyio.WouldBlock(), + anyio.WouldBlock(), + anyio.WouldBlock(), + ] mock_stream.return_value = (mock_send, mock_receive) # Start bridge - should handle WouldBlock gracefully @@ -249,19 +239,15 @@ async def test_bridge_pending_callback_queue_full_on_startup(self): assert mock_send.send_nowait.call_count >= 2 start_task.cancel() - try: + with contextlib.suppress(asyncio.CancelledError): await start_task - except asyncio.CancelledError: - pass @pytest.mark.anyio async def test_call_soon_threadsafe_send_exception(self): """Test exception handling in call_soon_threadsafe.""" bridge = AnyioBridge() - with patch('anyio.create_memory_object_stream') as mock_stream, \ - patch.object(bridge, '_worker', new_callable=AsyncMock): - + with 
patch("anyio.create_memory_object_stream") as mock_stream, patch.object(bridge, "_worker", new_callable=AsyncMock): mock_send, mock_receive = MagicMock(), MagicMock() # Make send_nowait raise a different exception (not WouldBlock) mock_send.send_nowait.side_effect = RuntimeError("Simulated send error") @@ -279,10 +265,8 @@ def callback(): bridge.call_soon_threadsafe(callback) start_task.cancel() - try: + with contextlib.suppress(asyncio.CancelledError): await start_task - except asyncio.CancelledError: - pass @pytest.mark.anyio async def test_worker_end_of_stream_handling(self): @@ -356,7 +340,7 @@ def slow_init(self, buffer_size=1000): def thread1_func(): """First thread - creates the instance with delay.""" - with patch.object(AnyioBridge, '__init__', slow_init): + with patch.object(AnyioBridge, "__init__", slow_init): instance = AnyioBridge.get_instance() instances.append(instance) @@ -394,9 +378,7 @@ async def test_stop_method_closes_streams(self): bridge = AnyioBridge() # Create and start bridge - with patch('anyio.create_memory_object_stream') as mock_stream, \ - patch.object(bridge, '_worker', new_callable=AsyncMock): - + with patch("anyio.create_memory_object_stream") as mock_stream, patch.object(bridge, "_worker", new_callable=AsyncMock): mock_send, mock_receive = AsyncMock(), AsyncMock() mock_stream.return_value = (mock_send, mock_receive) @@ -421,19 +403,15 @@ async def test_stop_method_closes_streams(self): assert bridge._receive_stream is None start_task.cancel() - try: + with contextlib.suppress(asyncio.CancelledError): await start_task - except asyncio.CancelledError: - pass @pytest.mark.anyio async def test_stop_method_handles_stream_close_errors(self): """Test that stop() handles exceptions during stream closing.""" bridge = AnyioBridge() - with patch('anyio.create_memory_object_stream') as mock_stream, \ - patch.object(bridge, '_worker', new_callable=AsyncMock): - + with patch("anyio.create_memory_object_stream") as mock_stream, 
patch.object(bridge, "_worker", new_callable=AsyncMock): mock_send, mock_receive = AsyncMock(), AsyncMock() # Make streams raise exceptions on aclose mock_send.aclose.side_effect = RuntimeError("Send stream close error") @@ -453,10 +431,8 @@ async def test_stop_method_handles_stream_close_errors(self): assert bridge._receive_stream is None start_task.cancel() - try: + with contextlib.suppress(asyncio.CancelledError): await start_task - except asyncio.CancelledError: - pass @pytest.mark.anyio async def test_stop_method_when_streams_dont_exist(self): @@ -469,4 +445,4 @@ async def test_stop_method_when_streams_dont_exist(self): # Should handle gracefully assert not bridge.is_started assert bridge._send_stream is None - assert bridge._receive_stream is None \ No newline at end of file + assert bridge._receive_stream is None diff --git a/tests/unit/test_cancelable.py b/tests/unit/test_cancelable.py index 2b57bd6..3c0da74 100644 --- a/tests/unit/test_cancelable.py +++ b/tests/unit/test_cancelable.py @@ -8,7 +8,14 @@ import anyio import pytest -from hother.cancelable import Cancelable, CancelationReason, CancelationToken, OperationContext, OperationStatus, current_operation +from hother.cancelable import ( + Cancelable, + CancelationReason, + CancelationToken, + OperationContext, + OperationStatus, + current_operation, +) from tests.conftest import assert_cancelled_within @@ -149,7 +156,7 @@ async def cancel_after_delay(): tg.start_soon(cancel_after_delay) async with assert_cancelled_within(0.2), Cancelable.with_token(token) as cancel: - await anyio.sleep(1.0) + await anyio.sleep(1.0) assert cancel.context.cancel_reason == CancelationReason.MANUAL assert cancel.context.cancel_message == "Test cancel" @@ -194,7 +201,7 @@ async def async_condition() -> bool: # Wait for condition to trigger (should take ~0.15s) await anyio.sleep(1.0) # Should not reach here - assert False, "Should have been cancelled" + raise AssertionError("Should have been cancelled") except 
anyio.get_cancelled_exc_class(): # Expected - condition triggered duration = anyio.current_time() - start_time @@ -227,7 +234,7 @@ async def cancel_soon(): tg.start_soon(cancel_soon) async with assert_cancelled_within(0.2), combined: - await anyio.sleep(2.0) + await anyio.sleep(2.0) # The combined cancelable might show PARENT because it's linked # But we should check the original token @@ -251,7 +258,7 @@ async def cancel_token2(): tg.start_soon(cancel_token2) async with assert_cancelled_within(0.2), combined: - await anyio.sleep(1.0) + await anyio.sleep(1.0) assert combined.is_cancelled @@ -476,11 +483,10 @@ async def test_shield_basic(self): @pytest.mark.anyio async def test_shield_status(self): """Test shield status tracking.""" - async with Cancelable() as parent: - async with parent.shield() as shielded: - assert shielded.context.status == OperationStatus.SHIELDED - assert shielded.context.metadata.get("shielded") is True - assert shielded.context.parent_id == parent.context.id + async with Cancelable() as parent, parent.shield() as shielded: + assert shielded.context.status == OperationStatus.SHIELDED + assert shielded.context.metadata.get("shielded") is True + assert shielded.context.parent_id == parent.context.id class TestCancelableWrapping: @@ -545,11 +551,10 @@ async def async_function(value: int) -> int: cancelable = Cancelable.with_timeout(1.0) - async with cancelable: - async with cancelable.wrapping() as wrap: - result = await wrap(async_function, 21) - assert result == 42 - assert call_count == 1 + async with cancelable, cancelable.wrapping() as wrap: + result = await wrap(async_function, 21) + assert result == 42 + assert call_count == 1 assert cancelable.is_completed @@ -648,6 +653,7 @@ async def test_multiple_signals(self): @pytest.mark.anyio async def test_progress_callback_error_handling(self): """Test that progress callback errors are handled gracefully.""" + # Test that failing callbacks don't crash the operation def failing_callback(op_id: 
str, msg: Any, meta: dict[str, Any] | None) -> None: raise ValueError("Callback failed") @@ -665,6 +671,7 @@ def failing_callback(op_id: str, msg: Any, meta: dict[str, Any] | None) -> None: @pytest.mark.anyio async def test_async_progress_callback_error_handling(self): """Test that async progress callback errors are handled gracefully.""" + # Test that failing async callbacks don't crash the operation async def failing_async_callback(op_id: str, msg: Any, meta: dict[str, Any] | None) -> None: raise ValueError("Async callback failed") @@ -871,6 +878,7 @@ def thread_func(): @pytest.mark.anyio async def test_run_in_thread_with_args(self): """Test run_in_thread with arguments.""" + def thread_func(a, b, c=None): return f"{a}-{b}-{c}" @@ -997,6 +1005,7 @@ class TestCancelableStreamFeatures: @pytest.mark.anyio async def test_stream_buffer_limiting(self): """Test that stream buffer is limited to 1000 items.""" + async def large_stream(): for i in range(2000): yield i @@ -1042,6 +1051,7 @@ async def counted_stream(): @pytest.mark.anyio async def test_stream_cancelation_with_buffer(self): """Test stream cancelation preserves partial results in buffer.""" + async def slow_stream(): for i in range(100): await anyio.sleep(0.01) @@ -1106,9 +1116,8 @@ async def test_shield_without_active_cancelation(self): cancel = Cancelable(name="shield_no_cancel") result = None - async with cancel: - async with cancel.shield(): - result = "completed" + async with cancel, cancel.shield(): + result = "completed" assert result == "completed" @@ -1175,9 +1184,8 @@ async def test_shield_cleanup_on_exception(self): cancel = Cancelable(name="shield_exception") try: - async with cancel: - async with cancel.shield(): - raise ValueError("Test error") + async with cancel, cancel.shield(): + raise ValueError("Test error") except ValueError: pass @@ -1205,6 +1213,7 @@ class TestCancelableCallbackErrors: @pytest.mark.anyio async def test_error_callback_exception(self): """Test that error callback 
exceptions are caught.""" + def failing_callback(ctx: OperationContext, error: Exception) -> None: raise RuntimeError("Error callback failed") @@ -1221,6 +1230,7 @@ def failing_callback(ctx: OperationContext, error: Exception) -> None: @pytest.mark.anyio async def test_async_complete_callback_exception(self): """Test that async complete callback exceptions are caught.""" + async def failing_async_callback(ctx: OperationContext) -> None: raise RuntimeError("Async complete callback failed") @@ -1267,6 +1277,7 @@ async def test_parent_child_cancelation(self): @pytest.mark.anyio async def test_stream_with_non_cancelation_exception(self): """Test stream handling of non-cancelation exceptions.""" + async def failing_stream(): yield 1 yield 2 @@ -1290,6 +1301,7 @@ async def failing_stream(): @pytest.mark.anyio async def test_stream_complete_with_buffer(self): """Test stream completion with buffered results.""" + async def complete_stream(): for i in range(5): yield i @@ -1308,6 +1320,7 @@ async def complete_stream(): @pytest.mark.anyio async def test_stream_buffer_exceeds_1000_items(self): """Test stream buffer limiting at exactly 1000 items.""" + async def large_stream(): for i in range(1500): yield i @@ -1345,9 +1358,8 @@ async def test_scope_already_cancelled_on_token_callback(self): @pytest.mark.anyio async def test_token_linking_exception(self): """Test exception during token linking.""" - from unittest.mock import AsyncMock, patch - cancel = Cancelable(name="link_fail") + Cancelable(name="link_fail") # Create a scenario where linking might fail # This is hard to trigger naturally, so we test recovery @@ -1528,11 +1540,7 @@ async def test_parent_cancelation_propagates_to_children(self): parent._children.add(child2) # Cancel parent with propagation - await parent.cancel( - CancelationReason.MANUAL, - "Parent cancel", - propagate_to_children=True - ) + await parent.cancel(CancelationReason.MANUAL, "Parent cancel", propagate_to_children=True) # Children should be 
cancelled assert child1.is_cancelled or child1._token.is_cancelled @@ -1567,10 +1575,7 @@ async def test_custom_cancelation_error(self): try: async with cancel: - raise CustomCancelError( - CancelationReason.SIGNAL, - "Custom cancelation" - ) + raise CustomCancelError(CancelationReason.SIGNAL, "Custom cancelation") except CustomCancelError: pass @@ -1665,7 +1670,7 @@ async def test_destructor_with_parent_cleanup(self): assert child in parent._children # Delete child and trigger garbage collection - child_id = id(child) + id(child) del child gc.collect() await anyio.sleep(0.01) @@ -1774,10 +1779,9 @@ async def test_shields_cleanup_in_finally(self): shield_entered = False try: - async with cancel: - async with cancel.shield(): - shield_entered = True - await anyio.sleep(0.2) # Will timeout + async with cancel, cancel.shield(): + shield_entered = True + await anyio.sleep(0.2) # Will timeout except anyio.get_cancelled_exc_class(): pass @@ -1845,11 +1849,10 @@ async def test_parent_token_linking(self): child._parent_ref = weakref.ref(parent) try: - async with parent: - async with child: - # Trigger parent cancelation - await parent_token.cancel(CancelationReason.MANUAL, "Parent cancelled") - await anyio.sleep(0.05) + async with parent, child: + # Trigger parent cancelation + await parent_token.cancel(CancelationReason.MANUAL, "Parent cancelled") + await anyio.sleep(0.05) except anyio.get_cancelled_exc_class(): pass @@ -1882,6 +1885,7 @@ async def test_stream_buffer_exactly_1001_items(self): Targets lines 650-656: buffer limiting at >1000 items. """ + async def large_stream(): for i in range(1001): yield i @@ -1903,6 +1907,7 @@ async def test_stream_completion_with_buffer_no_items(self): Targets lines 676-683: stream completion with buffer. 
""" + async def empty_stream(): # Stream that completes immediately if False: @@ -1927,11 +1932,8 @@ async def test_shield_in_shields_list(self): """ cancel = Cancelable(name="shield_list") - shield_scope = None - async with cancel: - async with cancel.shield() as shielded: - shield_scope = shielded + async with cancel.shield(): # Verify shield is in list assert len(cancel._shields) == 1 @@ -1976,7 +1978,7 @@ async def test_aexit_scope_exit_exception(self): Targets lines 438-441: exception handling in scope exit. """ - from unittest.mock import Mock, patch + from unittest.mock import Mock cancel = Cancelable(name="scope_exit_error") @@ -1987,7 +1989,6 @@ async def test_aexit_scope_exit_exception(self): async with cancel: # Replace the scope with our mock - original_scope = cancel._scope cancel._scope = mock_scope # The error from scope exit should propagate @@ -2033,15 +2034,13 @@ async def test_shield_cleanup_on_cancelation(self): Targets line 519: shield.cancel() in cleanup. """ cancel = Cancelable.with_timeout(0.1, name="shield_cleanup") - shield_cleaned = False try: - async with cancel: - async with cancel.shield() as shielded: - # Verify shield is tracked - assert len(cancel._shields) > 0 - # Wait for timeout - await anyio.sleep(1.0) + async with cancel, cancel.shield(): + # Verify shield is tracked + assert len(cancel._shields) > 0 + # Wait for timeout + await anyio.sleep(1.0) except anyio.get_cancelled_exc_class(): pass @@ -2082,7 +2081,7 @@ async def failing_link(self, *args, **kwargs): raise RuntimeError("Link failed") # Patch the LinkedCancelationToken.link method on the class - with patch.object(LinkedCancelationToken, 'link', failing_link): + with patch.object(LinkedCancelationToken, "link", failing_link): # Exception should propagate from __aenter__ with pytest.raises(RuntimeError, match="Link failed"): async with parent: @@ -2096,6 +2095,7 @@ async def test_stream_complete_without_buffer(self): Targets branch 676->683: else clause with count > 0. 
""" + async def counter_stream(): for i in range(5): yield i @@ -2131,10 +2131,7 @@ async def test_cancel_with_precancelled_children(self): # Now cancel parent with propagate_to_children=True # This should: - await parent.cancel( - reason=CancelationReason.MANUAL, - propagate_to_children=True - ) + await parent.cancel(reason=CancelationReason.MANUAL, propagate_to_children=True) # Both tokens should be cancelled assert child1._token.is_cancelled @@ -2163,7 +2160,7 @@ def failing_update_status(self, status): original_update(status) # Patch update_status method on the OperationContext class - with patch.object(type(cancel.context), 'update_status', failing_update_status): + with patch.object(type(cancel.context), "update_status", failing_update_status): # Should complete without raising despite the error in status update async with cancel: pass @@ -2206,6 +2203,7 @@ async def test_stream_with_buffer_partial_true(self): Targets branch 676->683: save buffer when completed normally with buffering. """ + async def counter(): for i in range(5): yield i @@ -2234,7 +2232,7 @@ async def test_shield_normal_exit(self): async with cancel: # Enter shield context - async with cancel.shield() as shielded: + async with cancel.shield(): # Shield should be in the shields list assert len(cancel._shields) > 0 # Do some work @@ -2244,7 +2242,6 @@ async def test_shield_normal_exit(self): # Lines 788-789 should execute assert len(cancel._shields) == 0 - @pytest.mark.anyio async def test_source_check_without_deadline(self): """Test source checking when no deadline exists. 
@@ -2328,10 +2325,7 @@ async def test_child_cancelation_mix_states(self): await child2._token.cancel(CancelationReason.MANUAL) # Cancel parent with propagation - await parent.cancel( - reason=CancelationReason.MANUAL, - propagate_to_children=True - ) + await parent.cancel(reason=CancelationReason.MANUAL, propagate_to_children=True) # All should be cancelled assert child1._token.is_cancelled @@ -2359,7 +2353,6 @@ async def test_shield_cleanup_multiple_shields(self): except anyio.get_cancelled_exc_class(): pass - @pytest.mark.anyio async def test_duplicate_token_in_combine_tree(self): """Test that duplicate tokens are not added when same Cancelable used in multiple combines. @@ -2397,6 +2390,7 @@ async def test_stream_empty_no_buffer(self): Targets branch 676->683: skip setting partial_result when count=0 and no buffer. """ + async def empty_stream(): # Yield nothing - empty generator return @@ -2430,7 +2424,7 @@ async def test_cancel_without_propagating_to_children(self): # Cancel parent WITHOUT propagating await parent.cancel( reason=CancelationReason.MANUAL, - propagate_to_children=False # Branch 818->829 + propagate_to_children=False, # Branch 818->829 ) # Parent should be cancelled @@ -2465,10 +2459,7 @@ async def test_child_already_cancelled_skip(self): # Cancel parent with propagation # Should cancel child1 (821->822) # Should skip child2 since already cancelled (821->820) - await parent.cancel( - reason=CancelationReason.MANUAL, - propagate_to_children=True - ) + await parent.cancel(reason=CancelationReason.MANUAL, propagate_to_children=True) # Both tokens should be cancelled assert child1._token.is_cancelled @@ -2548,9 +2539,8 @@ async def test_parent_token_not_linkable_warning(self, caplog): regular_token = CancelationToken() child = Cancelable.with_token(regular_token, name="child", parent=parent) - async with parent: - async with child: - pass # Line 810 should log warning + async with parent, child: + pass # Line 810 should log warning # Verify warning 
was logged assert any("Cannot link to parent" in record.message for record in caplog.records) @@ -2600,5 +2590,3 @@ def on_error(ctx: OperationContext, error: Exception) -> None: # Verify error callback was NOT called (line 723 condition False) assert not error_callback_called assert cancel.context.status == OperationStatus.FAILED - - diff --git a/tests/unit/test_condition.py b/tests/unit/test_condition.py index de10970..affb9c1 100644 --- a/tests/unit/test_condition.py +++ b/tests/unit/test_condition.py @@ -114,9 +114,7 @@ async def condition_with_unexpected_error(): from hother.cancelable import Cancelable cancelable = Cancelable.with_condition( - condition_with_unexpected_error, - check_interval=0.05, - condition_name="test_unexpected" + condition_with_unexpected_error, check_interval=0.05, condition_name="test_unexpected" ) # The monitoring task should handle the exception gracefully @@ -179,23 +177,23 @@ def __init__(self, percent): self.percent = percent mock_values = { - 'memory_percent': 50.0, - 'cpu_percent': 50.0, - 'disk_percent': 50.0, + "memory_percent": 50.0, + "cpu_percent": 50.0, + "disk_percent": 50.0, } def mock_virtual_memory(): - return MockMemory(mock_values['memory_percent']) + return MockMemory(mock_values["memory_percent"]) def mock_cpu_percent(interval=None): - return mock_values['cpu_percent'] + return mock_values["cpu_percent"] def mock_disk_usage(path): - return MockDiskUsage(mock_values['disk_percent']) + return MockDiskUsage(mock_values["disk_percent"]) - monkeypatch.setattr(psutil, 'virtual_memory', mock_virtual_memory) - monkeypatch.setattr(psutil, 'cpu_percent', mock_cpu_percent) - monkeypatch.setattr(psutil, 'disk_usage', mock_disk_usage) + monkeypatch.setattr(psutil, "virtual_memory", mock_virtual_memory) + monkeypatch.setattr(psutil, "cpu_percent", mock_cpu_percent) + monkeypatch.setattr(psutil, "disk_usage", mock_disk_usage) return mock_values @@ -205,17 +203,13 @@ async def test_memory_threshold_exceeded(self, mock_psutil): 
from hother.cancelable import Cancelable # Set memory above threshold - mock_psutil['memory_percent'] = 85.0 + mock_psutil["memory_percent"] = 85.0 # Create source that monitors memory at 80% threshold source = ResourceConditionSource(memory_threshold=80.0, check_interval=0.1) # Create cancelable with the source - cancelable = Cancelable.with_condition( - source._check_resources, - check_interval=0.1, - condition_name="memory_check" - ) + cancelable = Cancelable.with_condition(source._check_resources, check_interval=0.1, condition_name="memory_check") # Should cancel due to high memory with pytest.raises(anyio.get_cancelled_exc_class()): @@ -230,16 +224,12 @@ async def test_cpu_threshold_exceeded(self, mock_psutil): from hother.cancelable import Cancelable # Set CPU above threshold - mock_psutil['cpu_percent'] = 95.0 + mock_psutil["cpu_percent"] = 95.0 # Create source that monitors CPU at 90% threshold source = ResourceConditionSource(cpu_threshold=90.0, check_interval=0.1) - cancelable = Cancelable.with_condition( - source._check_resources, - check_interval=0.1, - condition_name="cpu_check" - ) + cancelable = Cancelable.with_condition(source._check_resources, check_interval=0.1, condition_name="cpu_check") # Should cancel due to high CPU with pytest.raises(anyio.get_cancelled_exc_class()): @@ -254,16 +244,12 @@ async def test_disk_threshold_exceeded(self, mock_psutil): from hother.cancelable import Cancelable # Set disk usage above threshold - mock_psutil['disk_percent'] = 97.0 + mock_psutil["disk_percent"] = 97.0 # Create source that monitors disk at 95% threshold source = ResourceConditionSource(disk_threshold=95.0, check_interval=0.1) - cancelable = Cancelable.with_condition( - source._check_resources, - check_interval=0.1, - condition_name="disk_check" - ) + cancelable = Cancelable.with_condition(source._check_resources, check_interval=0.1, condition_name="disk_check") # Should cancel due to high disk usage with pytest.raises(anyio.get_cancelled_exc_class()): 
@@ -278,23 +264,14 @@ async def test_combined_thresholds(self, mock_psutil): from hother.cancelable import Cancelable # Start with one resource above threshold - mock_psutil['memory_percent'] = 85.0 # Above 80% threshold - mock_psutil['cpu_percent'] = 75.0 # Below 85% threshold - mock_psutil['disk_percent'] = 80.0 # Below 90% threshold + mock_psutil["memory_percent"] = 85.0 # Above 80% threshold + mock_psutil["cpu_percent"] = 75.0 # Below 85% threshold + mock_psutil["disk_percent"] = 80.0 # Below 90% threshold # Create source monitoring all three resources - source = ResourceConditionSource( - memory_threshold=80.0, - cpu_threshold=85.0, - disk_threshold=90.0, - check_interval=0.05 - ) + source = ResourceConditionSource(memory_threshold=80.0, cpu_threshold=85.0, disk_threshold=90.0, check_interval=0.05) - cancelable = Cancelable.with_condition( - source._check_resources, - check_interval=0.05, - condition_name="combined_check" - ) + cancelable = Cancelable.with_condition(source._check_resources, check_interval=0.05, condition_name="combined_check") # Should cancel due to high memory (already above threshold) with pytest.raises(anyio.get_cancelled_exc_class()): @@ -309,23 +286,14 @@ async def test_thresholds_not_exceeded(self, mock_psutil): from hother.cancelable import Cancelable # All resources well below thresholds - mock_psutil['memory_percent'] = 50.0 - mock_psutil['cpu_percent'] = 40.0 - mock_psutil['disk_percent'] = 60.0 + mock_psutil["memory_percent"] = 50.0 + mock_psutil["cpu_percent"] = 40.0 + mock_psutil["disk_percent"] = 60.0 # Create source with high thresholds - source = ResourceConditionSource( - memory_threshold=80.0, - cpu_threshold=85.0, - disk_threshold=90.0, - check_interval=0.05 - ) + source = ResourceConditionSource(memory_threshold=80.0, cpu_threshold=85.0, disk_threshold=90.0, check_interval=0.05) - cancelable = Cancelable.with_condition( - source._check_resources, - check_interval=0.05, - condition_name="normal_operation" - ) + cancelable = 
Cancelable.with_condition(source._check_resources, check_interval=0.05, condition_name="normal_operation") # Should complete without cancelation completed = False @@ -342,19 +310,13 @@ async def test_resource_monitoring_with_work(self, mock_psutil): from hother.cancelable import Cancelable # Start with memory already above threshold - mock_psutil['memory_percent'] = 80.0 # Above 75% threshold - mock_psutil['cpu_percent'] = 50.0 + mock_psutil["memory_percent"] = 80.0 # Above 75% threshold + mock_psutil["cpu_percent"] = 50.0 - source = ResourceConditionSource( - memory_threshold=75.0, - cpu_threshold=85.0, - check_interval=0.05 - ) + source = ResourceConditionSource(memory_threshold=75.0, cpu_threshold=85.0, check_interval=0.05) cancelable = Cancelable.with_condition( - source._check_resources, - check_interval=0.05, - condition_name="work_with_monitoring" + source._check_resources, check_interval=0.05, condition_name="work_with_monitoring" ) # Should cancel due to high memory before completing work @@ -369,15 +331,11 @@ async def test_resource_monitoring_with_work(self, mock_psutil): @pytest.mark.anyio async def test_resource_check_returns_false_initially(self, mock_psutil): """Test that _check_resources returns False when thresholds not exceeded.""" - mock_psutil['memory_percent'] = 50.0 - mock_psutil['cpu_percent'] = 40.0 - mock_psutil['disk_percent'] = 60.0 + mock_psutil["memory_percent"] = 50.0 + mock_psutil["cpu_percent"] = 40.0 + mock_psutil["disk_percent"] = 60.0 - source = ResourceConditionSource( - memory_threshold=80.0, - cpu_threshold=85.0, - disk_threshold=90.0 - ) + source = ResourceConditionSource(memory_threshold=80.0, cpu_threshold=85.0, disk_threshold=90.0) result = await source._check_resources() assert result is False @@ -385,15 +343,11 @@ async def test_resource_check_returns_false_initially(self, mock_psutil): @pytest.mark.anyio async def test_resource_check_returns_true_on_threshold(self, mock_psutil): """Test that _check_resources returns True 
when any threshold exceeded.""" - mock_psutil['memory_percent'] = 85.0 # Above 80% threshold - mock_psutil['cpu_percent'] = 40.0 - mock_psutil['disk_percent'] = 60.0 + mock_psutil["memory_percent"] = 85.0 # Above 80% threshold + mock_psutil["cpu_percent"] = 40.0 + mock_psutil["disk_percent"] = 60.0 - source = ResourceConditionSource( - memory_threshold=80.0, - cpu_threshold=90.0, - disk_threshold=95.0 - ) + source = ResourceConditionSource(memory_threshold=80.0, cpu_threshold=90.0, disk_threshold=95.0) result = await source._check_resources() assert result is True @@ -401,9 +355,9 @@ async def test_resource_check_returns_true_on_threshold(self, mock_psutil): @pytest.mark.anyio async def test_resource_disk_threshold_only(self, mock_psutil): """Test disk threshold check when disk is exceeded.""" - mock_psutil['memory_percent'] = 50.0 - mock_psutil['cpu_percent'] = 40.0 - mock_psutil['disk_percent'] = 96.0 # Above 95% threshold + mock_psutil["memory_percent"] = 50.0 + mock_psutil["cpu_percent"] = 40.0 + mock_psutil["disk_percent"] = 96.0 # Above 95% threshold source = ResourceConditionSource( disk_threshold=95.0 # Only disk threshold set @@ -415,9 +369,9 @@ async def test_resource_disk_threshold_only(self, mock_psutil): @pytest.mark.anyio async def test_resource_disk_threshold_not_exceeded(self, mock_psutil): """Test disk threshold check when disk is NOT exceeded.""" - mock_psutil['memory_percent'] = 50.0 - mock_psutil['cpu_percent'] = 40.0 - mock_psutil['disk_percent'] = 80.0 # Below 95% threshold + mock_psutil["memory_percent"] = 50.0 + mock_psutil["cpu_percent"] = 40.0 + mock_psutil["disk_percent"] = 80.0 # Below 95% threshold source = ResourceConditionSource( disk_threshold=95.0 # Only disk threshold set @@ -433,16 +387,16 @@ async def test_no_disk_threshold_set(self, mock_psutil): Targets branch 248->260: disk_threshold is None, skip disk check, return False. 
""" # Set mock values for other resources - mock_psutil['memory_percent'] = 50.0 - mock_psutil['cpu_percent'] = 40.0 - mock_psutil['disk_percent'] = 90.0 # High, but won't be checked + mock_psutil["memory_percent"] = 50.0 + mock_psutil["cpu_percent"] = 40.0 + mock_psutil["disk_percent"] = 90.0 # High, but won't be checked # Create source WITHOUT disk_threshold (None) source = ResourceConditionSource( memory_threshold=80.0, # Set memory threshold - cpu_threshold=85.0, # Set CPU threshold + cpu_threshold=85.0, # Set CPU threshold # disk_threshold NOT SET (None) - this is the key! - check_interval=0.1 + check_interval=0.1, ) # Should return False - memory and CPU OK, disk check skipped @@ -456,6 +410,7 @@ class TestConditionSourceEdgeCases: @pytest.mark.anyio async def test_stop_monitoring_without_task_group(self): """Test stop_monitoring when _task_group is None.""" + def simple_condition(): return False @@ -469,7 +424,6 @@ def simple_condition(): @pytest.mark.anyio async def test_stop_monitoring_with_task_group_error(self): """Test stop_monitoring handles task group exit errors.""" - from unittest.mock import AsyncMock, MagicMock def simple_condition(): return False @@ -480,7 +434,6 @@ def simple_condition(): await source.start_monitoring(scope) # Mock the task group to raise an error on exit - original_exit = source._task_group.__aexit__ async def failing_exit(*args): raise RuntimeError("Task group exit failed") @@ -525,6 +478,7 @@ async def test_monitor_condition_cancelled_error(self): Targets lines 151-154: except CancelledError with debug log and re-raise. """ + def never_true_condition(): return False @@ -549,7 +503,6 @@ def never_true_condition(): # The monitoring task should have been stopped assert source._task_group is None - @pytest.mark.anyio async def test_monitor_infrastructure_error(self): """Test that outer exception handler catches monitoring infrastructure errors. 
@@ -557,7 +510,6 @@ async def test_monitor_infrastructure_error(self): This test targets lines 162-163 in condition.py by triggering an exception in the monitoring loop infrastructure (outside condition checking). """ - from unittest.mock import AsyncMock condition_called = False @@ -607,7 +559,7 @@ def __init__(self, percent): def mock_disk_usage(path): return MockDiskUsage(80.0) # Below 95% threshold - monkeypatch.setattr(psutil, 'disk_usage', mock_disk_usage) + monkeypatch.setattr(psutil, "disk_usage", mock_disk_usage) return psutil @pytest.mark.anyio @@ -616,12 +568,11 @@ async def test_disk_threshold_not_exceeded(self, mock_psutil_disk_ok): Targets branch 248->260: disk check performed, threshold not exceeded, returns False. """ - from hother.cancelable import Cancelable # Create source that ONLY monitors disk (no memory or CPU thresholds) source = ResourceConditionSource( disk_threshold=95.0, # Set high threshold - check_interval=0.1 + check_interval=0.1, ) # Test the internal _check_resources method directly diff --git a/tests/unit/test_context_bridge.py b/tests/unit/test_context_bridge.py index 8eaf55a..2415448 100644 --- a/tests/unit/test_context_bridge.py +++ b/tests/unit/test_context_bridge.py @@ -2,9 +2,7 @@ Unit tests for context_bridge.py utilities. 
""" -import asyncio import contextvars -import threading from concurrent.futures import ThreadPoolExecutor import pytest @@ -18,49 +16,49 @@ class TestContextBridge: def test_copy_context(self): """Test copying current context variables.""" # Create a context variable - var = contextvars.ContextVar('test_var', default='default') + var = contextvars.ContextVar("test_var", default="default") # Set a value - var.set('test_value') + var.set("test_value") # Copy context context_dict = ContextBridge.copy_context() # Should contain our variable assert var in context_dict - assert context_dict[var] == 'test_value' + assert context_dict[var] == "test_value" def test_restore_context(self): """Test restoring context variables.""" - var1 = contextvars.ContextVar('var1', default='default1') - var2 = contextvars.ContextVar('var2', default='default2') + var1 = contextvars.ContextVar("var1", default="default1") + var2 = contextvars.ContextVar("var2", default="default2") # Set values - var1.set('value1') - var2.set('value2') + var1.set("value1") + var2.set("value2") # Copy context context_dict = ContextBridge.copy_context() # Reset variables - var1.set('changed1') - var2.set('changed2') + var1.set("changed1") + var2.set("changed2") # Restore context ContextBridge.restore_context(context_dict) # Should have restored values - assert var1.get() == 'value1' - assert var2.get() == 'value2' + assert var1.get() == "value1" + assert var2.get() == "value2" @pytest.mark.anyio async def test_run_in_thread_with_context(self): """Test running function in thread with context propagation.""" # Create context variable - var = contextvars.ContextVar('thread_var', default='default') + var = contextvars.ContextVar("thread_var", default="default") # Set value in async context - var.set('async_value') + var.set("async_value") # Function to run in thread def thread_function(): @@ -70,96 +68,90 @@ def thread_function(): # Run in thread with context result = await 
ContextBridge.run_in_thread_with_context(thread_function) - assert result == 'async_value' + assert result == "async_value" @pytest.mark.anyio async def test_run_in_thread_with_context_args(self): """Test running function in thread with arguments.""" - var = contextvars.ContextVar('arg_var', default='default') - var.set('context_value') + var = contextvars.ContextVar("arg_var", default="default") + var.set("context_value") def thread_function(x, y, z=None): return f"{var.get()}-{x}-{y}-{z}" - result = await ContextBridge.run_in_thread_with_context( - thread_function, - 1, 2, z=3 - ) + result = await ContextBridge.run_in_thread_with_context(thread_function, 1, 2, z=3) assert result == "context_value-1-2-3" @pytest.mark.anyio async def test_run_in_thread_with_context_executor(self): """Test using custom thread pool executor.""" - var = contextvars.ContextVar('executor_var', default='default') - var.set('executor_value') + var = contextvars.ContextVar("executor_var", default="default") + var.set("executor_value") def thread_function(): return var.get() # Use custom executor with ThreadPoolExecutor(max_workers=1) as executor: - result = await ContextBridge.run_in_thread_with_context( - thread_function, - executor=executor - ) + result = await ContextBridge.run_in_thread_with_context(thread_function, executor=executor) - assert result == 'executor_value' + assert result == "executor_value" @pytest.mark.anyio async def test_context_isolation(self): """Test that context changes in thread don't affect main context.""" - var = contextvars.ContextVar('isolation_var', default='original') + var = contextvars.ContextVar("isolation_var", default="original") - var.set('main_value') + var.set("main_value") def thread_function(): # Change value in thread - var.set('thread_value') + var.set("thread_value") return var.get() # Run in thread thread_result = await ContextBridge.run_in_thread_with_context(thread_function) # Thread should see its own change - assert thread_result == 
'thread_value' + assert thread_result == "thread_value" # Main context should still have original value - assert var.get() == 'main_value' + assert var.get() == "main_value" @pytest.mark.anyio async def test_multiple_context_vars(self): """Test propagation of multiple context variables.""" - var1 = contextvars.ContextVar('multi1', default='def1') - var2 = contextvars.ContextVar('multi2', default='def2') - var3 = contextvars.ContextVar('multi3', default='def3') + var1 = contextvars.ContextVar("multi1", default="def1") + var2 = contextvars.ContextVar("multi2", default="def2") + var3 = contextvars.ContextVar("multi3", default="def3") - var1.set('val1') - var2.set('val2') + var1.set("val1") + var2.set("val2") # var3 keeps default def thread_function(): return { - 'var1': var1.get(), - 'var2': var2.get(), - 'var3': var3.get(), + "var1": var1.get(), + "var2": var2.get(), + "var3": var3.get(), } result = await ContextBridge.run_in_thread_with_context(thread_function) assert result == { - 'var1': 'val1', - 'var2': 'val2', - 'var3': 'def3', + "var1": "val1", + "var2": "val2", + "var3": "def3", } @pytest.mark.anyio async def test_context_inheritance(self): """Test that thread inherits the context at call time.""" - var = contextvars.ContextVar('inherit_var', default='default') + var = contextvars.ContextVar("inherit_var", default="default") async def async_function(): - var.set('async_set') + var.set("async_set") # Start thread from within async function def thread_function(): @@ -168,4 +160,4 @@ def thread_function(): return await ContextBridge.run_in_thread_with_context(thread_function) result = await async_function() - assert result == 'async_set' \ No newline at end of file + assert result == "async_set" diff --git a/tests/unit/test_decorators.py b/tests/unit/test_decorators.py index 1b93560..c74a869 100644 --- a/tests/unit/test_decorators.py +++ b/tests/unit/test_decorators.py @@ -2,15 +2,16 @@ Unit tests for decorator utilities. 
""" -import anyio -import pytest import signal import sys +import anyio +import pytest + from hother.cancelable import ( Cancelable, - CancelationToken, CancelationReason, + CancelationToken, ) from hother.cancelable.utils.decorators import ( cancelable, @@ -31,6 +32,7 @@ class TestCancelableDecorator: @pytest.mark.anyio async def test_basic_decorator(self): """Test basic usage of @cancelable decorator.""" + @cancelable() async def simple_task(): await anyio.sleep(0.01) @@ -42,6 +44,7 @@ async def simple_task(): @pytest.mark.anyio async def test_decorator_with_timeout(self): """Test @cancelable with timeout parameter.""" + @cancelable(timeout=1.0) async def timed_task(): await anyio.sleep(0.01) @@ -53,6 +56,7 @@ async def timed_task(): @pytest.mark.anyio async def test_decorator_without_timeout(self): """Test @cancelable without timeout.""" + @cancelable() async def no_timeout_task(): return "done" @@ -79,6 +83,7 @@ async def task_with_injection(data: str, cancelable: Cancelable): @pytest.mark.anyio async def test_decorator_without_inject_param(self): """Test @cancelable with injection disabled.""" + @cancelable(inject_param=None) async def task_no_injection(): return "done" @@ -89,6 +94,7 @@ async def task_no_injection(): @pytest.mark.anyio async def test_decorator_inject_param_not_in_signature(self): """Test that decorator doesn't inject if param not in signature.""" + @cancelable(inject_param="cancelable") async def task_without_param(data: str): return data.upper() @@ -99,6 +105,7 @@ async def task_without_param(data: str): @pytest.mark.anyio async def test_decorator_with_custom_name(self): """Test @cancelable with custom operation name.""" + @cancelable(name="custom_operation") async def task(): return "done" @@ -109,6 +116,7 @@ async def task(): @pytest.mark.anyio async def test_decorator_with_operation_id(self): """Test @cancelable with custom operation ID.""" + @cancelable(operation_id="op-12345") async def task(): return "done" @@ -119,6 +127,7 @@ async 
def task(): @pytest.mark.anyio async def test_decorator_with_register_globally(self): """Test @cancelable with global registration.""" + @cancelable(register_globally=True) async def task(): return "done" @@ -129,6 +138,7 @@ async def task(): @pytest.mark.anyio async def test_decorator_preserves_metadata(self): """Test that decorator preserves function metadata.""" + @cancelable(timeout=5.0, name="test_op") async def documented_task(): """This is a documented function.""" @@ -144,6 +154,7 @@ async def documented_task(): @pytest.mark.anyio async def test_decorator_with_args_and_kwargs(self): """Test @cancelable with function arguments.""" + @cancelable() async def task_with_args(x: int, y: int, z: int = 0, cancelable: Cancelable = None): return x + y + z @@ -158,6 +169,7 @@ class TestWithTimeout: @pytest.mark.anyio async def test_with_timeout_basic(self): """Test basic timeout functionality.""" + async def quick_task(): await anyio.sleep(0.01) return "completed" @@ -168,6 +180,7 @@ async def quick_task(): @pytest.mark.anyio async def test_with_timeout_with_operation_id(self): """Test with_timeout with custom operation ID.""" + async def task(): return "done" @@ -177,6 +190,7 @@ async def task(): @pytest.mark.anyio async def test_with_timeout_with_name(self): """Test with_timeout with custom name.""" + async def task(): return "done" @@ -208,6 +222,7 @@ async def task_with_operation(operation: Cancelable = None): @pytest.mark.anyio async def test_current_operation_no_operation_param(self): """Test decorator with function that doesn't have operation parameter.""" + @with_current_operation() async def task_without_param(data: str): return data.upper() @@ -237,6 +252,7 @@ class TestCancelableMethod: @pytest.mark.anyio async def test_method_decorator_basic(self): """Test basic @cancelable_method usage.""" + class Worker: @cancelable_method() async def process(self): @@ -249,6 +265,7 @@ async def process(self): @pytest.mark.anyio async def 
test_method_decorator_with_timeout(self): """Test @cancelable_method with timeout.""" + class Worker: @cancelable_method(timeout=1.0) async def process(self): @@ -262,6 +279,7 @@ async def process(self): @pytest.mark.anyio async def test_method_decorator_without_timeout(self): """Test @cancelable_method without timeout.""" + class Worker: @cancelable_method() async def process(self): @@ -291,6 +309,7 @@ async def process(self, data: str, cancelable: Cancelable): @pytest.mark.anyio async def test_method_decorator_without_cancelable_param(self): """Test @cancelable_method when method doesn't have cancelable param.""" + class Worker: @cancelable_method() async def process(self, data: str): @@ -303,6 +322,7 @@ async def process(self, data: str): @pytest.mark.anyio async def test_method_decorator_uses_class_name(self): """Test that method decorator includes class name.""" + class DataProcessor: @cancelable_method() async def transform(self, cancelable: Cancelable): @@ -316,6 +336,7 @@ async def transform(self, cancelable: Cancelable): @pytest.mark.anyio async def test_method_decorator_with_custom_name(self): """Test @cancelable_method with custom name.""" + class Worker: @cancelable_method(name="custom_method") async def process(self, cancelable: Cancelable): @@ -328,6 +349,7 @@ async def process(self, cancelable: Cancelable): @pytest.mark.anyio async def test_method_decorator_with_register_globally(self): """Test @cancelable_method with global registration.""" + class Worker: @cancelable_method(register_globally=True) async def process(self): @@ -340,6 +362,7 @@ async def process(self): @pytest.mark.anyio async def test_method_decorator_with_args(self): """Test @cancelable_method with method arguments.""" + class Calculator: @cancelable_method() async def add(self, x: int, y: int, z: int = 0): @@ -503,9 +526,7 @@ async def test_condition_decorator_basic(self): """Test basic condition-based cancelation.""" should_cancel = False - @cancelable_with_condition( - lambda: 
should_cancel, check_interval=0.01, condition_name="test_condition" - ) + @cancelable_with_condition(lambda: should_cancel, check_interval=0.01, condition_name="test_condition") async def task(cancelable: Cancelable): await anyio.sleep(0.01) return "completed" @@ -518,9 +539,7 @@ async def test_condition_decorator_cancelation(self): """Test that condition triggers cancelation.""" should_cancel = False - @cancelable_with_condition( - lambda: should_cancel, check_interval=0.01, condition_name="cancel_check" - ) + @cancelable_with_condition(lambda: should_cancel, check_interval=0.01, condition_name="cancel_check") async def task(cancelable: Cancelable): # Loop for a bit to allow condition check for _ in range(20): @@ -621,9 +640,7 @@ async def test_combine_decorator_with_token_cancel(self): """Test that combined sources work correctly.""" token = CancelationToken() - @cancelable_combine( - Cancelable.with_timeout(10.0), Cancelable.with_token(token), name="multi_cancel" - ) + @cancelable_combine(Cancelable.with_timeout(10.0), Cancelable.with_token(token), name="multi_cancel") async def task(cancelable: Cancelable): await anyio.sleep(0.01) return "completed" @@ -798,6 +815,7 @@ async def task(): nonlocal result_value # Access via current_operation from hother.cancelable import current_operation + ctx = current_operation() assert ctx is not None assert ctx.context.name == "test_operation" @@ -899,6 +917,7 @@ async def test_shared_instance_multiple_functions(self): @with_cancelable(cancel) async def task1(): from hother.cancelable import current_operation + ctx = current_operation() assert ctx.context.name == "shared_context" results.append("task1") @@ -907,6 +926,7 @@ async def task1(): @with_cancelable(cancel) async def task2(): from hother.cancelable import current_operation + ctx = current_operation() assert ctx.context.name == "shared_context" results.append("task2") diff --git a/tests/unit/test_edge_cases.py b/tests/unit/test_edge_cases.py index 2bff5fc..b67c324 
100644 --- a/tests/unit/test_edge_cases.py +++ b/tests/unit/test_edge_cases.py @@ -7,9 +7,8 @@ import asyncio import gc import threading -import pytest -import anyio +import pytest from hother.cancelable import Cancelable from hother.cancelable.core.cancelable import _current_operation @@ -42,9 +41,7 @@ def check_context_in_thread(iteration: int): assert main_context.context.name == "context_test" # Run multiple thread checks - thread_results = await asyncio.gather(*[ - cancel.run_in_thread(check_context_in_thread, i) for i in range(5) - ]) + thread_results = await asyncio.gather(*[cancel.run_in_thread(check_context_in_thread, i) for i in range(5)]) # All thread operations should succeed assert all(thread_results) @@ -108,7 +105,7 @@ def test_no_circular_references_in_hierarchy(self): del current # Force garbage collection - collected = gc.collect() + gc.collect() # Check if objects were properly collected final_objects = len(gc.get_objects()) @@ -136,7 +133,7 @@ def test_parent_child_relationship_cleanup(self): del child # Force garbage collection - collected = gc.collect() + gc.collect() # Check for significant object retention final_objects = len(gc.get_objects()) @@ -193,7 +190,7 @@ def count_relevant_objects(): # Create and use multiple operations for i in range(10): - async with Cancelable(name=f"operation_{i}") as cancel: + async with Cancelable(name=f"operation_{i}"): # Do some work await asyncio.sleep(0.001) @@ -235,9 +232,7 @@ def dummy_thread_work(): # Create operations that use threads async with Cancelable(name="thread_test") as cancel: - results = await asyncio.gather(*[ - cancel.run_in_thread(dummy_thread_work) for _ in range(10) - ]) + results = await asyncio.gather(*[cancel.run_in_thread(dummy_thread_work) for _ in range(10)]) assert len(results) == 10 assert all(r == "completed" for r in results) diff --git a/tests/unit/test_exceptions.py b/tests/unit/test_exceptions.py index 636aff5..c07fdf8 100644 --- a/tests/unit/test_exceptions.py +++ 
b/tests/unit/test_exceptions.py @@ -2,8 +2,6 @@ Unit tests for custom exception classes in hother.cancelable.core.exceptions. """ -import pytest - from hother.cancelable.core.exceptions import ( CancelationError, ConditionCancelation, @@ -256,4 +254,4 @@ def test_exception_attributes_preserved(self): assert hasattr(parent_exc, "parent_id") assert hasattr(parent_exc, "parent_reason") assert parent_exc.parent_id == "parent_123" - assert parent_exc.parent_reason == CancelationReason.MANUAL \ No newline at end of file + assert parent_exc.parent_reason == CancelationReason.MANUAL diff --git a/tests/unit/test_logging.py b/tests/unit/test_logging.py index 8cd0f43..e87e4fb 100644 --- a/tests/unit/test_logging.py +++ b/tests/unit/test_logging.py @@ -20,7 +20,7 @@ def test_get_logger_with_name(self): def test_get_logger_without_name(self): """Test getting logger without name (uses caller module).""" # Mock inspect to control the frame - with patch('inspect.currentframe') as mock_frame: + with patch("inspect.currentframe") as mock_frame: mock_frame.return_value = None logger = get_logger() assert isinstance(logger, logging.Logger) @@ -29,13 +29,9 @@ def test_get_logger_without_name(self): def test_get_logger_with_frame(self): """Test getting logger with frame inspection.""" # Mock inspect to return a frame with __name__ - mock_frame = type('Frame', (), { - 'f_back': type('Frame', (), { - 'f_globals': {'__name__': 'test_module'} - })() - })() + mock_frame = type("Frame", (), {"f_back": type("Frame", (), {"f_globals": {"__name__": "test_module"}})()})() - with patch('inspect.currentframe', return_value=mock_frame): + with patch("inspect.currentframe", return_value=mock_frame): logger = get_logger() assert logger.name == "test_module" @@ -50,7 +46,7 @@ def test_get_logger_hierarchy(self): parent_logger = get_logger("hother.cancelable") child_logger = get_logger("hother.cancelable.core") - assert child_logger.parent == parent_logger or child_logger.parent == parent_logger.parent 
+ assert child_logger.parent in (parent_logger, parent_logger.parent) def test_null_handler_present(self): """Test that NullHandler is added to prevent 'No handler found' warnings.""" @@ -58,4 +54,4 @@ def test_null_handler_present(self): # Check that at least one NullHandler exists null_handlers = [h for h in logger.handlers if isinstance(h, logging.NullHandler)] - assert len(null_handlers) > 0 \ No newline at end of file + assert len(null_handlers) > 0 diff --git a/tests/unit/test_registry.py b/tests/unit/test_registry.py index 0658cea..532d2c5 100644 --- a/tests/unit/test_registry.py +++ b/tests/unit/test_registry.py @@ -394,11 +394,11 @@ async def test_sync_list_operations_with_filters(self, clean_registry): def thread_func(): try: - results['status_filter'] = registry.list_operations_sync(status=OperationStatus.RUNNING) + results["status_filter"] = registry.list_operations_sync(status=OperationStatus.RUNNING) - results['parent_filter'] = registry.list_operations_sync(parent_id=parent.context.id) + results["parent_filter"] = registry.list_operations_sync(parent_id=parent.context.id) - results['name_filter'] = registry.list_operations_sync(name_pattern="child") + results["name_filter"] = registry.list_operations_sync(name_pattern="child") except Exception as e: error[0] = e @@ -409,16 +409,16 @@ def thread_func(): assert error[0] is None, f"Thread raised error: {error[0]}" # Verify status filter worked - assert len(results['status_filter']) == 1 - assert results['status_filter'][0].status == OperationStatus.RUNNING + assert len(results["status_filter"]) == 1 + assert results["status_filter"][0].status == OperationStatus.RUNNING # Verify parent_id filter worked - assert len(results['parent_filter']) == 2 - assert all(ctx.parent_id == parent.context.id for ctx in results['parent_filter']) + assert len(results["parent_filter"]) == 2 + assert all(ctx.parent_id == parent.context.id for ctx in results["parent_filter"]) # Verify name_pattern filter worked - assert 
len(results['name_filter']) == 2 - assert all('child' in ctx.name.lower() for ctx in results['name_filter']) + assert len(results["name_filter"]) == 2 + assert all("child" in ctx.name.lower() for ctx in results["name_filter"]) @pytest.mark.anyio async def test_sync_statistics_with_successful_operations(self, clean_registry): @@ -462,8 +462,8 @@ def thread_func(): stats = result[0] # Lines 449-450 should be executed, calculating average duration - assert stats['total_completed'] == 3 - assert stats['average_duration_seconds'] == 2.0 # (1+2+3)/3 + assert stats["total_completed"] == 3 + assert stats["average_duration_seconds"] == 2.0 # (1+2+3)/3 @pytest.mark.anyio async def test_sync_get_statistics(self, clean_registry): @@ -866,9 +866,10 @@ async def test_cancel_operation_sync(self, clean_registry): Targets line 522 in cancel_operation_sync() - the actual async cancelation call via bridge. """ - from hother.cancelable.utils.anyio_bridge import AnyioBridge import threading + from hother.cancelable.utils.anyio_bridge import AnyioBridge + registry = clean_registry # Start the bridge @@ -882,11 +883,7 @@ async def run_test(): cancelled = [False] def thread_func(): - registry.cancel_operation_sync( - op.context.id, - reason=CancelationReason.MANUAL, - message="Cancelled from thread" - ) + registry.cancel_operation_sync(op.context.id, reason=CancelationReason.MANUAL, message="Cancelled from thread") cancelled[0] = True thread = threading.Thread(target=thread_func) @@ -912,9 +909,10 @@ async def test_cancel_all_sync(self, clean_registry): Targets line 550 in cancel_all_sync() - the actual async cancelation call via bridge. 
""" - from hother.cancelable.utils.anyio_bridge import AnyioBridge import threading + from hother.cancelable.utils.anyio_bridge import AnyioBridge + registry = clean_registry # Start the bridge @@ -931,9 +929,7 @@ async def run_test(): def thread_func(): registry.cancel_all_sync( - status=OperationStatus.RUNNING, - reason=CancelationReason.MANUAL, - message="Bulk cancel from thread" + status=OperationStatus.RUNNING, reason=CancelationReason.MANUAL, message="Bulk cancel from thread" ) cancelled[0] = True diff --git a/tests/unit/test_sources/test_composite.py b/tests/unit/test_sources/test_composite.py index 4e923ab..05a47c3 100644 --- a/tests/unit/test_sources/test_composite.py +++ b/tests/unit/test_sources/test_composite.py @@ -5,7 +5,6 @@ import anyio import pytest -from hother.cancelable import Cancelable from hother.cancelable.core.models import CancelationReason from hother.cancelable.sources.base import CancelationSource from hother.cancelable.sources.composite import AllOfSource, AnyOfSource, CompositeSource @@ -67,6 +66,7 @@ async def stop_monitoring(self): @pytest.mark.anyio async def test_composite_tracks_triggered_source(self): """Test that composite tracks which source triggered.""" + class ManualSource(CancelationSource): def __init__(self, name): super().__init__(CancelationReason.MANUAL, name) @@ -97,6 +97,7 @@ async def stop_monitoring(self): @pytest.mark.anyio async def test_composite_uses_source_reason(self): """Test that composite uses the triggering source's reason.""" + class CustomSource(CancelationSource): async def start_monitoring(self, scope): self.scope = scope @@ -122,6 +123,7 @@ async def stop_monitoring(self): @pytest.mark.anyio async def test_monitor_source_error_handling(self): """Test error handling in _monitor_source.""" + class FailingSource(CancelationSource): async def start_monitoring(self, scope): raise RuntimeError("Source failed to start") @@ -144,6 +146,7 @@ async def stop_monitoring(self): @pytest.mark.anyio async def 
test_stop_monitoring_with_exception(self): """Test that stop_monitoring handles exceptions from sources gracefully.""" + class FaultySource(TimeoutSource): async def stop_monitoring(self): raise RuntimeError("Simulated stop error") @@ -254,6 +257,7 @@ async def test_all_of_initialization(self): @pytest.mark.anyio async def test_all_of_start_monitoring(self): """Test AllOfSource start_monitoring.""" + class ManualSource(CancelationSource): def __init__(self, name, delay=0.01): super().__init__(CancelationReason.MANUAL, name) @@ -287,6 +291,7 @@ async def stop_monitoring(self): @pytest.mark.anyio async def test_all_of_waits_for_all_sources(self): """Test that AllOfSource waits for all sources before triggering.""" + class ManualSource(CancelationSource): def __init__(self, name, should_trigger): super().__init__(CancelationReason.MANUAL, name) @@ -336,6 +341,7 @@ async def test_all_of_stop_monitoring(self): @pytest.mark.anyio async def test_all_of_monitor_source_error(self): """Test AllOfSource _monitor_source error handling.""" + class FailingSource(CancelationSource): async def start_monitoring(self, scope): raise RuntimeError("Source failed") @@ -368,6 +374,7 @@ async def test_all_of_stop_monitoring_without_task_group(self): @pytest.mark.anyio async def test_all_of_stop_monitoring_source_error(self): """Test AllOfSource handles errors during source.stop_monitoring.""" + class FailingStopSource(TimeoutSource): async def stop_monitoring(self): raise RuntimeError("Stop failed") @@ -382,4 +389,3 @@ async def stop_monitoring(self): # Stop should not raise, even though one source fails await all_of.stop_monitoring() - diff --git a/tests/unit/test_sources/test_signal.py b/tests/unit/test_sources/test_signal.py index 815f25a..deb8346 100644 --- a/tests/unit/test_sources/test_signal.py +++ b/tests/unit/test_sources/test_signal.py @@ -1,7 +1,6 @@ """Unit tests for signal cancelation source.""" import signal -import os import anyio import pytest @@ -81,7 +80,6 @@ async def 
test_stop_monitoring_with_exception(self): await source.start_monitoring(scope) # Mock the task group's __aexit__ to raise an exception - original_aexit = source._task_group.__aexit__ async def mock_aexit(*args): raise RuntimeError("Simulated task group cleanup error") diff --git a/tests/unit/test_streams.py b/tests/unit/test_streams.py index a93a5e6..8e45b6d 100644 --- a/tests/unit/test_streams.py +++ b/tests/unit/test_streams.py @@ -2,9 +2,10 @@ Unit tests for stream utilities. """ +from typing import Any + import anyio import pytest -from typing import Any from hother.cancelable import Cancelable, CancelationToken from hother.cancelable.utils.streams import ( @@ -74,9 +75,7 @@ def on_progress(count, item): progress_calls.append((count, item)) items = [] - async for item in cancelable_stream( - async_range(10), report_interval=3, on_progress=on_progress - ): + async for item in cancelable_stream(async_range(10), report_interval=3, on_progress=on_progress): items.append(item) assert len(items) == 10 @@ -93,9 +92,7 @@ async def on_progress(count, item): progress_calls.append((count, item)) items = [] - async for item in cancelable_stream( - async_range(10), report_interval=3, on_progress=on_progress - ): + async for item in cancelable_stream(async_range(10), report_interval=3, on_progress=on_progress): items.append(item) assert len(items) == 10 @@ -109,7 +106,6 @@ async def test_progress_callback_with_invalid_metadata(self): def on_progress(count, item): callback_calls.append((count, item)) - return None cancel = Cancelable(name="test_progress") @@ -164,9 +160,7 @@ async def test_iterator_with_buffer(self): """Test iterator with buffer_partial enabled.""" cancelable = Cancelable() async with cancelable: - iterator = CancelableAsyncIterator( - async_range(5), cancelable, buffer_partial=True - ) + iterator = CancelableAsyncIterator(async_range(5), cancelable, buffer_partial=True) items = [] async for item in iterator: @@ -189,9 +183,7 @@ def on_progress(op_id, msg, 
meta): cancelable.on_progress(on_progress) async with cancelable: - iterator = CancelableAsyncIterator( - async_range(10), cancelable, report_interval=3 - ) + iterator = CancelableAsyncIterator(async_range(10), cancelable, report_interval=3) items = [] async for item in iterator: @@ -206,9 +198,7 @@ async def test_iterator_normal_completion_with_buffer(self): """Test iterator normal completion saves partial results.""" cancelable = Cancelable() async with cancelable: - iterator = CancelableAsyncIterator( - async_range(5), cancelable, buffer_partial=True - ) + iterator = CancelableAsyncIterator(async_range(5), cancelable, buffer_partial=True) items = [] async for item in iterator: @@ -222,6 +212,7 @@ async def test_iterator_normal_completion_with_buffer(self): @pytest.mark.anyio async def test_iterator_cancelation_saves_partial(self): """Test iterator saves partial results on cancelation.""" + async def cancelling_iterator(): """Iterator that raises CancelledError after a few items.""" for i in range(100): @@ -234,9 +225,7 @@ async def cancelling_iterator(): try: async with cancelable: - iterator = CancelableAsyncIterator( - cancelling_iterator(), cancelable, buffer_partial=True - ) + iterator = CancelableAsyncIterator(cancelling_iterator(), cancelable, buffer_partial=True) items = [] async for item in iterator: @@ -252,6 +241,7 @@ async def cancelling_iterator(): @pytest.mark.anyio async def test_iterator_exception_saves_partial(self): """Test iterator saves partial results on exception.""" + async def failing_iterator(): for i in range(10): yield i @@ -261,9 +251,7 @@ async def failing_iterator(): cancelable = Cancelable() async with cancelable: - iterator = CancelableAsyncIterator( - failing_iterator(), cancelable, buffer_partial=True - ) + iterator = CancelableAsyncIterator(failing_iterator(), cancelable, buffer_partial=True) items = [] try: @@ -280,6 +268,7 @@ async def failing_iterator(): @pytest.mark.anyio async def test_iterator_aclose(self): """Test 
iterator aclose method.""" + class CloseableIterator: def __init__(self): self.closed = False @@ -310,9 +299,7 @@ async def test_iterator_buffer_size_limit(self): """Test that buffer is limited to 1000 items.""" cancelable = Cancelable() async with cancelable: - iterator = CancelableAsyncIterator( - async_range(2000), cancelable, buffer_partial=True - ) + iterator = CancelableAsyncIterator(async_range(2000), cancelable, buffer_partial=True) items = [] async for item in iterator: @@ -333,9 +320,7 @@ async def test_chunked_stream_basic(self): async with cancelable: chunks = [] - async for chunk in chunked_cancelable_stream( - async_range(10), chunk_size=3, cancelable=cancelable - ): + async for chunk in chunked_cancelable_stream(async_range(10), chunk_size=3, cancelable=cancelable): chunks.append(chunk) # Should have 4 chunks: [0,1,2], [3,4,5], [6,7,8], [9] @@ -352,9 +337,7 @@ async def test_chunked_stream_final_chunk(self): async with cancelable: chunks = [] - async for chunk in chunked_cancelable_stream( - async_range(7), chunk_size=3, cancelable=cancelable - ): + async for chunk in chunked_cancelable_stream(async_range(7), chunk_size=3, cancelable=cancelable): chunks.append(chunk) # Should have 3 chunks: [0,1,2], [3,4,5], [6] @@ -372,9 +355,7 @@ async def empty_iterator(): async with cancelable: chunks = [] - async for chunk in chunked_cancelable_stream( - empty_iterator(), chunk_size=3, cancelable=cancelable - ): + async for chunk in chunked_cancelable_stream(empty_iterator(), chunk_size=3, cancelable=cancelable): chunks.append(chunk) assert len(chunks) == 0 @@ -397,11 +378,7 @@ def on_progress(count, item): # Using cancelable_stream normally should always have proper meta collected = [] - async for item in cancelable_stream( - async_range(5), - on_progress=on_progress, - report_interval=2 - ): + async for item in cancelable_stream(async_range(5), on_progress=on_progress, report_interval=2): collected.append(item) # Stream should work normally @@ -412,6 +389,7 @@ 
def on_progress(count, item): @pytest.mark.anyio async def test_iterator_exception_without_buffer(self): """Test iterator exception when buffer_partial is False.""" + async def failing_iterator(): for i in range(10): yield i @@ -422,9 +400,7 @@ async def failing_iterator(): async with cancelable: # buffer_partial=False means _buffer stays None - iterator = CancelableAsyncIterator( - failing_iterator(), cancelable, buffer_partial=False - ) + iterator = CancelableAsyncIterator(failing_iterator(), cancelable, buffer_partial=False) items = [] try: @@ -440,6 +416,7 @@ async def failing_iterator(): @pytest.mark.anyio async def test_iterator_aclose_without_method(self): """Test aclose when iterator doesn't have aclose method.""" + class SimpleIterator: def __init__(self): self.index = 0 @@ -452,6 +429,7 @@ async def __anext__(self): raise StopAsyncIteration self.index += 1 return self.index + # Note: No aclose method simple = SimpleIterator() @@ -475,6 +453,7 @@ async def __anext__(self): @pytest.mark.anyio async def test_iterator_cancelation_without_buffer(self): """Test iterator cancelation when buffer_partial is False.""" + async def cancelling_iterator(): """Iterator that raises CancelledError.""" for i in range(100): @@ -487,9 +466,7 @@ async def cancelling_iterator(): try: async with cancelable: # buffer_partial=False means _buffer stays None - iterator = CancelableAsyncIterator( - cancelling_iterator(), cancelable, buffer_partial=False - ) + iterator = CancelableAsyncIterator(cancelling_iterator(), cancelable, buffer_partial=False) items = [] async for item in iterator: @@ -507,8 +484,8 @@ async def test_progress_callback_with_incomplete_meta_via_monkeypatch(self, monk This covers the defensive branch 82->exit where the condition (meta and "count" in meta and "latest_item" in meta) fails. 
""" + from hother.cancelable.utils.streams import cancelable_stream - from unittest.mock import AsyncMock callback_calls = [] @@ -540,7 +517,7 @@ async def patched_report_progress(self, msg, metadata=None): async for item in cancelable_stream( async_range(5), on_progress=on_progress, - report_interval=1 # Report every item + report_interval=1, # Report every item ): items.append(item) diff --git a/tests/unit/test_testing_utils.py b/tests/unit/test_testing_utils.py index 854c0a9..7c54785 100644 --- a/tests/unit/test_testing_utils.py +++ b/tests/unit/test_testing_utils.py @@ -4,13 +4,10 @@ These tests ensure the testing utilities themselves work correctly. """ -import asyncio -from datetime import UTC, datetime -from unittest.mock import AsyncMock, MagicMock - -import pytest +from datetime import datetime import anyio +import pytest from hother.cancelable import Cancelable from hother.cancelable.core.models import CancelationReason, OperationStatus @@ -235,8 +232,6 @@ async def test_create_slow_stream_with_cancelable(self): assert len(collected) > 0 assert len(collected) < len(items) - - @pytest.mark.anyio async def test_run_with_timeout_test_timeout(self): """Test run_with_timeout_test with timeout.""" @@ -298,7 +293,7 @@ async def test_cancelation_too_late(self): @pytest.mark.anyio async def test_no_cancelation(self): """Test when no cancelation occurs.""" - token = MockCancelationToken() + MockCancelationToken() with pytest.raises(AssertionError, match="Expected cancelation"): async with assert_cancelation_within(min_time=0.01, max_time=0.05): @@ -332,10 +327,7 @@ async def test_scenario_run(self): scenario = CancelationScenario("test_run") # Build scenario: delay then cancel - scenario.add_delay(0.05).add_cancelation( - reason=CancelationReason.MANUAL, - message="Test cancel" - ) + scenario.add_delay(0.05).add_cancelation(reason=CancelationReason.MANUAL, message="Test cancel") # NOTE: The scenario catches CancelledError, so the operation completes normally # even 
though it was cancelled. The test is checking that the scenario runs. @@ -408,10 +400,7 @@ async def test_scenario_immediate_cancelation(self): scenario = CancelationScenario("immediate_cancel") # Add cancelation with no delay first - scenario.add_cancelation( - reason=CancelationReason.MANUAL, - message="Immediate cancel" - ) + scenario.add_cancelation(reason=CancelationReason.MANUAL, message="Immediate cancel") async def long_operation(): await anyio.sleep(1.0) @@ -467,4 +456,4 @@ async def simple_operation(): # Should complete without error, unknown assertion is ignored recorder = await scenario.run(simple_operation) - assert len(recorder.operations) == 1 \ No newline at end of file + assert len(recorder.operations) == 1 diff --git a/tests/unit/test_thread_cancelation.py b/tests/unit/test_thread_cancelation.py index edc8ccb..b73fa33 100644 --- a/tests/unit/test_thread_cancelation.py +++ b/tests/unit/test_thread_cancelation.py @@ -12,7 +12,6 @@ import threading import time -from typing import Any import anyio import pytest @@ -20,7 +19,6 @@ from hother.cancelable import AnyioBridge, CancelationToken, call_soon_threadsafe from hother.cancelable.core.models import CancelationReason - pytestmark = pytest.mark.anyio @@ -125,9 +123,7 @@ async def test_cancel_sync_from_thread(self, bridge: AnyioBridge) -> None: # Cancel from thread def cancel_in_thread(): - result = token.cancel_sync( - reason=CancelationReason.MANUAL, message="Cancelled from thread" - ) + result = token.cancel_sync(reason=CancelationReason.MANUAL, message="Cancelled from thread") assert result is True thread = threading.Thread(target=cancel_in_thread) @@ -257,9 +253,7 @@ def on_key_press(self, key: str): """Simulates key press callback from pynput thread.""" if key == "SPACE": # Cancel token from keyboard thread - self.token.cancel_sync( - reason=CancelationReason.MANUAL, message="User pressed SPACE" - ) + self.token.cancel_sync(reason=CancelationReason.MANUAL, message="User pressed SPACE") def 
start(self): """Start simulated listener thread.""" diff --git a/tests/unit/test_threading_bridge.py b/tests/unit/test_threading_bridge.py index e0d8ed1..e335afa 100644 --- a/tests/unit/test_threading_bridge.py +++ b/tests/unit/test_threading_bridge.py @@ -15,7 +15,6 @@ def reset_singleton(): """Reset ThreadSafeRegistry singleton before test.""" # Save original instance - original = ThreadSafeRegistry._instance # Reset for test ThreadSafeRegistry._instance = None yield @@ -42,7 +41,7 @@ async def test_singleton_thread_safety(self, reset_singleton: None) -> None: # pass the outer check but only one creates the instance # Run multiple iterations to increase chance of hitting the race condition - for iteration in range(10): + for _iteration in range(10): # Reset singleton for each iteration ThreadSafeRegistry._instance = None @@ -88,7 +87,7 @@ async def test_singleton_double_check_locking(self, reset_singleton: None) -> No instances = [] thread1_in_lock = threading.Event() - thread2_can_enter = threading.Event() + threading.Event() # Save the original __init__ original_init = ThreadSafeRegistry.__init__ @@ -102,7 +101,7 @@ def slow_init(self): def thread1_func(): """First thread - creates the instance with delay.""" - with patch.object(ThreadSafeRegistry, '__init__', slow_init): + with patch.object(ThreadSafeRegistry, "__init__", slow_init): instance = ThreadSafeRegistry.get_instance() instances.append(instance) @@ -283,7 +282,7 @@ def thread_func(thread_id): try: for _ in range(50): ops = thread_registry.list_operations() - stats = thread_registry.get_statistics() + thread_registry.get_statistics() history = thread_registry.get_history(limit=5) results.append((thread_id, len(ops), len(history))) except Exception as e: @@ -319,9 +318,7 @@ async def run_test(): def thread_func(): thread_registry.cancel_operation( - cancelable.context.id, - reason=CancelationReason.MANUAL, - message="Cancelled from thread" + cancelable.context.id, reason=CancelationReason.MANUAL, 
message="Cancelled from thread" ) cancelled[0] = True @@ -392,9 +389,7 @@ async def run_test(): def thread_func(): thread_registry.cancel_all( - status=OperationStatus.RUNNING, - reason=CancelationReason.MANUAL, - message="Bulk cancel from thread" + status=OperationStatus.RUNNING, reason=CancelationReason.MANUAL, message="Bulk cancel from thread" ) cancelled[0] = True diff --git a/tests/unit/test_token.py b/tests/unit/test_token.py index 8cbe5e9..bd7f706 100644 --- a/tests/unit/test_token.py +++ b/tests/unit/test_token.py @@ -221,7 +221,7 @@ def test_token_equality(self): assert token1 != "not a token" assert token1 != 123 - assert token1 != None + assert token1 is not None def test_token_hashable(self): """Test that tokens are hashable and can be used in sets/dicts.""" diff --git a/tools/release.py b/tools/release.py index d2e5c0a..56ed8b8 100755 --- a/tools/release.py +++ b/tools/release.py @@ -1,14 +1,12 @@ #!/usr/bin/env python3 -""" -Compute next semantic version from Conventional Commits and create the tag. -Usage: hatch release {dev|rc|final} [SHA] +"""Compute next semantic version from Conventional Commits and create the tag. +Usage: hatch release {dev|rc|final} [SHA]. 
""" import subprocess import sys -from packaging.version import Version -from packaging.version import parse as version_parse +from packaging.version import Version, parse as version_parse def cmd(c: str) -> str: diff --git a/uv.lock b/uv.lock index f234740..d5f1585 100644 --- a/uv.lock +++ b/uv.lock @@ -44,14 +44,14 @@ wheels = [ [[package]] name = "basedpyright" -version = "1.31.7" +version = "1.36.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "nodejs-wheel-binaries" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c6/ba/ed69e8df732a09c8ca469f592c8e08707fe29149735b834c276d94d4a3da/basedpyright-1.31.7.tar.gz", hash = "sha256:394f334c742a19bcc5905b2455c9f5858182866b7679a6f057a70b44b049bceb", size = 22710948, upload-time = "2025-10-11T05:12:48.3Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c8/8a/4c5d74314fe085f8f9b1a92b7c96e2a116651b6c7596e4def872d5d7abf0/basedpyright-1.36.2.tar.gz", hash = "sha256:b596b1a6e6006c7dfd483efc1d602574f238321e28f70bc66e87255784b70630", size = 22835798, upload-time = "2025-12-23T02:31:27.357Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/90/ce01ad2d0afdc1b82b8b5aaba27e60d2e138e39d887e71c35c55d8f1bfcd/basedpyright-1.31.7-py3-none-any.whl", hash = "sha256:7c54beb7828c9ed0028630aaa6904f395c27e5a9f5a313aa9e91fc1d11170831", size = 11817571, upload-time = "2025-10-11T05:12:45.432Z" }, + { url = "https://files.pythonhosted.org/packages/69/88/0aaac8e5062cd83434ce41fac844646d0f285b574cda0eeb732e916db22b/basedpyright-1.36.2-py3-none-any.whl", hash = "sha256:8dfd74fad77fcccc066ea0af5fd07e920b6f88cb1b403936aa78ab5aaef51526", size = 11882631, upload-time = "2025-12-23T02:31:24.537Z" }, ] [[package]] @@ -537,7 +537,7 @@ provides-extras = ["examples", "fastapi"] [package.metadata.requires-dev] dev = [ - { name = "basedpyright", specifier = ">=1.23.0" }, + { name = "basedpyright", specifier = ">=1.36.0" }, { name = "coverage", extras = ["toml"], specifier = ">=7.10.7" 
}, { name = "detect-secrets", specifier = ">=1.5.0" }, { name = "dirty-equals", specifier = ">=0.9.0" },