diff --git a/.github/actions/setup-environment/action.yml b/.github/actions/setup-environment/action.yml
index 299e831e9..b0225eadc 100644
--- a/.github/actions/setup-environment/action.yml
+++ b/.github/actions/setup-environment/action.yml
@@ -9,7 +9,7 @@ runs:
using: "composite"
steps:
- name: Install UV
- uses: astral-sh/setup-uv@v5.3
+ uses: astral-sh/setup-uv@v5.4
id: setup-uv
with:
enable-cache: true
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 7876de525..c850ad3c1 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -55,7 +55,7 @@ jobs:
repository: ${{ github.event.pull_request.head.repo.full_name || github.event.repository.full_name }}
- name: Install UV
- uses: astral-sh/setup-uv@v5.3
+ uses: astral-sh/setup-uv@v5.4
id: setup-uv
with:
enable-cache: false
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 9b923bf53..864b70ca8 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -13,15 +13,24 @@ on:
jobs:
access-check:
runs-on: ubuntu-latest
+ outputs:
+ is-authorized: ${{ steps.check-auth.outputs.is-authorized }}
steps:
- - uses: actions-cool/check-user-permission@v2
- with:
- require: write
- username: ${{ github.triggering_actor }}
- error-if-missing: true
+      # Permission gate: GitHub App/bot actors bypass the check; human actors
+      # must hold write or admin permission on the repository.
+      - name: Check user permissions
+        id: check-auth
+        env:
+          GH_TOKEN: ${{ github.token }}
+        run: |
+          if [[ "${{ github.triggering_actor }}" == *"[bot]" ]]; then
+            echo "is-authorized=true" >> "$GITHUB_OUTPUT"
+          elif PERM="$(gh api "repos/${{ github.repository }}/collaborators/${{ github.triggering_actor }}/permission" --jq .permission 2>/dev/null)" && { [[ "$PERM" == "write" ]] || [[ "$PERM" == "admin" ]]; }; then
+            echo "is-authorized=true" >> "$GITHUB_OUTPUT"; else echo "is-authorized=false" >> "$GITHUB_OUTPUT"; fi
unit-tests:
needs: access-check
+ if: needs.access-check.outputs.is-authorized == 'true'
runs-on: ubuntu-latest-8
steps:
- uses: actions/checkout@v4
@@ -32,20 +41,25 @@ jobs:
- name: Setup environment
uses: ./.github/actions/setup-environment
- - name: Run ATS and Tests
- uses: ./.github/actions/run-ats
- timeout-minutes: 15
+ - name: Test with pytest
+ timeout-minutes: 5
+ run: |
+ uv run pytest \
+ -n auto \
+ --cov src \
+ --timeout 15 \
+ -o junit_suite_name="${{github.job}}" \
+ tests/unit
+
+ - uses: ./.github/actions/report
with:
- default_tests: "tests/unit"
- codecov_static_token: ${{ secrets.CODECOV_STATIC_TOKEN }}
+ flag: unit-tests
codecov_token: ${{ secrets.CODECOV_TOKEN }}
- collect_args: "--timeout 15"
- codecov_flags: unit-tests
codemod-tests:
needs: access-check
# TODO: re-enable when this check is a develop required check
- if: false
+ if: needs.access-check.outputs.is-authorized == 'true' && false
runs-on: ubuntu-latest-32
strategy:
matrix:
@@ -86,7 +100,7 @@ jobs:
parse-tests:
needs: access-check
- if: contains(github.event.pull_request.labels.*.name, 'parse-tests') || github.event_name == 'push' || github.event_name == 'workflow_dispatch'
+ if: needs.access-check.outputs.is-authorized == 'true' && (contains(github.event.pull_request.labels.*.name, 'parse-tests') || github.event_name == 'push' || github.event_name == 'workflow_dispatch')
runs-on: ubuntu-latest-32
steps:
- uses: actions/checkout@v4
@@ -157,6 +171,7 @@ jobs:
integration-tests:
needs: access-check
+ if: needs.access-check.outputs.is-authorized == 'true'
runs-on: ubuntu-latest-16
steps:
- uses: actions/checkout@v4
diff --git a/PLAN/PLAN.md b/PLAN/PLAN.md
new file mode 100644
index 000000000..da365d991
--- /dev/null
+++ b/PLAN/PLAN.md
@@ -0,0 +1,40 @@
+# Project Plan for codegen
+
+## Overview
+This project plan outlines the development roadmap for codegen.
+
+## Objectives
+- Define project scope and requirements
+- Create implementation plan
+- Set milestones and deliverables
+- Assign responsibilities
+
+## Requirements
+- [Requirement 1]
+- [Requirement 2]
+- [Requirement 3]
+
+## Implementation Plan
+- [Step 1]
+- [Step 2]
+- [Step 3]
+
+## Timeline
+- [Milestone 1]: [Date]
+- [Milestone 2]: [Date]
+- [Milestone 3]: [Date]
+
+## Resources
+- [Resource 1]
+- [Resource 2]
+- [Resource 3]
+
+## Risks and Mitigations
+- [Risk 1]: [Mitigation Strategy]
+- [Risk 2]: [Mitigation Strategy]
+- [Risk 3]: [Mitigation Strategy]
+
+## Team
+- [Team Member 1]: [Role]
+- [Team Member 2]: [Role]
+- [Team Member 3]: [Role]
diff --git a/README.md b/README.md
index f9e94756b..3c4d76a17 100644
--- a/README.md
+++ b/README.md
@@ -1,117 +1,122 @@
-
+# Comprehensive Codebase Analyzer
-
-
-
-
-
+A powerful static code analysis system that provides extensive information about your codebase using the Codegen SDK.
-
- Scriptable interface to a powerful, multi-lingual language server.
-
+## Features
-
+This analyzer provides comprehensive analysis of your codebase, including:
-[](https://pypi.org/project/codegen/)
-[](https://docs.codegen.com)
-[](https://community.codegen.com)
-[](https://github.com/codegen-sh/codegen-sdk/tree/develop?tab=Apache-2.0-1-ov-file)
-[](https://x.com/codegen)
+### 1. Codebase Structure Analysis
-
+- File Statistics (count, language, size)
+- Symbol Tree Analysis
+- Import/Export Analysis
+- Module Organization
-
+### 2. Symbol-Level Analysis
-[Codegen](https://docs.codegen.com) is a python library for manipulating codebases.
+- Function Analysis (parameters, return types, complexity)
+- Class Analysis (methods, attributes, inheritance)
+- Variable Analysis
+- Type Analysis
-```python
-from codegen import Codebase
+### 3. Dependency and Flow Analysis
-# Codegen builds a complete graph connecting
-# functions, classes, imports and their relationships
-codebase = Codebase("./")
+- Call Graph Generation
+- Data Flow Analysis
+- Control Flow Analysis
+- Symbol Usage Analysis
-# Work with code without dealing with syntax trees or parsing
-for function in codebase.functions:
- # Comprehensive static analysis for references, dependencies, etc.
- if not function.usages:
- # Auto-handles references and imports to maintain correctness
- function.move_to_file("deprecated.py")
-```
+### 4. Code Quality Analysis
-Write code that transforms code. Codegen combines the parsing power of [Tree-sitter](https://tree-sitter.github.io/tree-sitter/) with the graph algorithms of [rustworkx](https://github.com/Qiskit/rustworkx) to enable scriptable, multi-language code manipulation at scale.
+- Unused Code Detection
+- Code Duplication Analysis
+- Complexity Metrics
+- Style and Convention Analysis
-## Installation and Usage
+### 5. Visualization Capabilities
-We support
+- Dependency Graphs
+- Call Graphs
+- Symbol Trees
+- Heat Maps
-- Running Codegen in Python 3.12 - 3.13 (recommended: Python 3.13+)
-- macOS and Linux
- - macOS is supported
- - Linux is supported on x86_64 and aarch64 with glibc 2.34+
- - Windows is supported via WSL. See [here](https://docs.codegen.com/building-with-codegen/codegen-with-wsl) for more details.
-- Python, Typescript, Javascript and React codebases
+### 6. Language-Specific Analysis
-```
-# Install inside existing project
-uv pip install codegen
+- Python-Specific Analysis
+- TypeScript-Specific Analysis
-# Install global CLI
-uv tool install codegen --python 3.13
+### 7. Code Metrics
-# Create a codemod for a given repo
-cd path/to/repo
-codegen init
-codegen create test-function
+- Monthly Commits
+- Cyclomatic Complexity
+- Halstead Volume
+- Maintainability Index
-# Run the codemod
-codegen run test-function
+## Installation
-# Create an isolated venv with codegen => open jupyter
-codegen notebook
-```
+1. Clone the repository:
-## Usage
+```bash
+git clone https://github.com/yourusername/codebase-analyzer.git
+cd codebase-analyzer
+```
-See [Getting Started](https://docs.codegen.com/introduction/getting-started) for a full tutorial.
+2. Install dependencies:
-```
-from codegen import Codebase
+```bash
+pip install -r requirements.txt
```
-## Troubleshooting
+## Usage
-Having issues? Here are some common problems and their solutions:
+### Analyzing a Repository
-- **I'm hitting an UV error related to `[[ packages ]]`**: This means you're likely using an outdated version of UV. Try updating to the latest version with: `uv self update`.
-- **I'm hitting an error about `No module named 'codegen.sdk.extensions.utils'`**: The compiled cython extensions are out of sync. Update them with `uv sync --reinstall-package codegen`.
-- **I'm hitting a `RecursionError: maximum recursion depth exceeded` error while parsing my codebase**: If you are using python 3.12, try upgrading to 3.13. If you are already on 3.13, try upping the recursion limit with `sys.setrecursionlimit(10000)`.
+```bash
+# Analyze from URL
+python codebase_analyzer.py --repo-url https://github.com/username/repo
-If you run into additional issues not listed here, please [join our slack community](https://community.codegen.com) and we'll help you out!
+# Analyze local repository
+python codebase_analyzer.py --repo-path /path/to/repo
-## Resources
+# Specify language
+python codebase_analyzer.py --repo-url https://github.com/username/repo --language python
-- [Docs](https://docs.codegen.com)
-- [Getting Started](https://docs.codegen.com/introduction/getting-started)
-- [Contributing](CONTRIBUTING.md)
-- [Contact Us](https://codegen.com/contact)
+# Analyze specific categories
+python codebase_analyzer.py --repo-url https://github.com/username/repo --categories codebase_structure code_quality
+```
-## Why Codegen?
+### Output Formats
-Software development is fundamentally programmatic. Refactoring a codebase, enforcing patterns, or analyzing control flow - these are all operations that can (and should) be expressed as programs themselves.
+```bash
+# Output as JSON
+python codebase_analyzer.py --repo-url https://github.com/username/repo --output-format json --output-file analysis.json
-We built Codegen backwards from real-world refactors performed on enterprise codebases. Instead of starting with theoretical abstractions, we focused on creating APIs that match how developers actually think about code changes:
+# Generate HTML report
+python codebase_analyzer.py --repo-url https://github.com/username/repo --output-format html --output-file report.html
-- **Natural mental model**: Write transforms that read like your thought process - "move this function", "rename this variable", "add this parameter". No more wrestling with ASTs or manual import management.
+# Print to console (default)
+python codebase_analyzer.py --repo-url https://github.com/username/repo --output-format console
+```
-- **Battle-tested on complex codebases**: Handle Python, TypeScript, and React codebases with millions of lines of code.
+## Available Analysis Categories
-- **Built for advanced intelligences**: As AI developers become more sophisticated, they need expressive yet precise tools to manipulate code. Codegen provides a programmatic interface that both humans and AI can use to express complex transformations through code itself.
+- `codebase_structure`: File statistics, symbol tree, import/export analysis, module organization
+- `symbol_level`: Function, class, variable, and type analysis
+- `dependency_flow`: Call graphs, data flow, control flow, symbol usage
+- `code_quality`: Unused code, duplication, complexity, style
+- `visualization`: Dependency graphs, call graphs, symbol trees, heat maps
+- `language_specific`: Language-specific analysis features
+- `code_metrics`: Commits, complexity, volume, maintainability
-## Contributing
+## Requirements
-Please see our [Contributing Guide](CONTRIBUTING.md) for instructions on how to set up the development environment and submit contributions.
+- Python 3.8+
+- Codegen SDK
+- NetworkX
+- Matplotlib
+- Rich
-## Enterprise
+## License
-For more information on enterprise engagements, please [contact us](https://codegen.com/contact) or [request a demo](https://codegen.com/request-demo).
+MIT
diff --git a/codegen-examples/examples/deep_code_research/run.py b/codegen-examples/examples/deep_code_research/run.py
index 1172590bb..314d4f0cf 100644
--- a/codegen-examples/examples/deep_code_research/run.py
+++ b/codegen-examples/examples/deep_code_research/run.py
@@ -11,7 +11,7 @@
from codegen.extensions.langchain.tools import (
ListDirectoryTool,
RevealSymbolTool,
- SearchTool,
+ RipGrepTool,
SemanticSearchTool,
ViewFileTool,
)
@@ -100,7 +100,7 @@ def research(repo_name: Optional[str] = None, query: Optional[str] = None, threa
tools = [
ViewFileTool(codebase),
ListDirectoryTool(codebase),
- SearchTool(codebase),
+ RipGrepTool(codebase),
SemanticSearchTool(codebase),
RevealSymbolTool(codebase),
]
diff --git a/codegen-examples/examples/langchain_agent/README.md b/codegen-examples/examples/langchain_agent/README.md
index 113610302..4744cdec3 100644
--- a/codegen-examples/examples/langchain_agent/README.md
+++ b/codegen-examples/examples/langchain_agent/README.md
@@ -57,7 +57,7 @@ The agent comes with several built-in tools for code operations:
- `ViewFileTool`: View file contents and metadata
- `ListDirectoryTool`: List directory contents
-- `SearchTool`: Search code using regex
+- `RipGrepTool`: Search code using ripgrep
- `EditFileTool`: Edit file contents
- `CreateFileTool`: Create new files
- `DeleteFileTool`: Delete files
diff --git a/codegen-examples/examples/langchain_agent/run.py b/codegen-examples/examples/langchain_agent/run.py
index 5c6891889..30de9ed49 100644
--- a/codegen-examples/examples/langchain_agent/run.py
+++ b/codegen-examples/examples/langchain_agent/run.py
@@ -1,6 +1,9 @@
"""Demo implementation of an agent with Codegen tools."""
from codegen import Codebase
+from codegen.extensions.langchain.graph import create_react_agent
+from codegen.extensions.langchain.llm import LLM
+from codegen.extensions.langchain.prompts import REASONER_SYSTEM_MESSAGE
from codegen.extensions.langchain.tools import (
CommitTool,
CreateFileTool,
@@ -10,18 +13,13 @@
MoveSymbolTool,
RenameFileTool,
RevealSymbolTool,
- SearchTool,
+ RipGrepTool,
SemanticEditTool,
ViewFileTool,
)
-
-from codegen.extensions.langchain.llm import LLM
-from codegen.extensions.langchain.prompts import REASONER_SYSTEM_MESSAGE
-
+from langchain_core.messages import SystemMessage
from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph.graph import CompiledGraph
-from codegen.extensions.langchain.graph import create_react_agent
-from langchain_core.messages import SystemMessage
def create_codebase_agent(
@@ -57,7 +55,7 @@ def create_codebase_agent(
tools = [
ViewFileTool(codebase),
ListDirectoryTool(codebase),
- SearchTool(codebase),
+ RipGrepTool(codebase),
EditFileTool(codebase),
CreateFileTool(codebase),
DeleteFileTool(codebase),
diff --git a/codegen-examples/examples/modules_dependencies/enhanced_module_viz.py b/codegen-examples/examples/modules_dependencies/enhanced_module_viz.py
new file mode 100644
index 000000000..0d6844f79
--- /dev/null
+++ b/codegen-examples/examples/modules_dependencies/enhanced_module_viz.py
@@ -0,0 +1,142 @@
+"""Enhanced module dependency visualization example.
+
+This example demonstrates the enhanced module dependency visualization features,
+including detailed relationship visualization, interactive navigation, filtering options,
+and handling of complex dependency graphs.
+"""
+
+import codegen
+from codegen import Codebase
+from codegen.visualizations.module_dependency_viz import build_module_dependency_graph
+
+
+@codegen.function("visualize-enhanced-modules-dependencies")
+def run(codebase: Codebase, path_filter: str = None, max_depth: int = None, include_external: bool = False):
+ """Visualize module dependencies with enhanced features.
+
+ Args:
+ codebase: The codebase to analyze
+ path_filter: Optional path prefix to filter modules by
+ max_depth: Optional maximum dependency depth to include
+ include_external: Whether to include external module dependencies
+ """
+ # Build the module dependency graph
+ module_graph = build_module_dependency_graph(
+ codebase.files,
+ include_external=include_external,
+ path_filter=path_filter
+ )
+
+ # Detect circular dependencies
+ circular_deps = module_graph.detect_circular_dependencies()
+ if circular_deps:
+ print(f"Found {len(circular_deps)} circular dependencies:")
+ for i, cycle in enumerate(circular_deps[:5]): # Show first 5 cycles
+ print(f" Cycle {i+1}: {' -> '.join(cycle)} -> {cycle[0]}")
+ if len(circular_deps) > 5:
+ print(f" ... and {len(circular_deps) - 5} more")
+ else:
+ print("No circular dependencies found.")
+
+ # Calculate module metrics
+ metrics = module_graph.get_module_metrics()
+
+ # Find most imported modules
+ most_imported = sorted(
+ [(module_id, data["imported_by_count"]) for module_id, data in metrics.items()],
+ key=lambda x: x[1],
+ reverse=True
+ )
+
+ print("\nMost imported modules:")
+ for module_id, count in most_imported[:5]: # Show top 5
+ print(f" {module_id}: imported by {count} modules")
+
+ # Find modules with most imports
+ most_imports = sorted(
+ [(module_id, data["imports_count"]) for module_id, data in metrics.items()],
+ key=lambda x: x[1],
+ reverse=True
+ )
+
+ print("\nModules with most imports:")
+ for module_id, count in most_imports[:5]: # Show top 5
+ print(f" {module_id}: imports {count} modules")
+
+ # Apply depth filtering if specified
+ if max_depth is not None and most_imported:
+ # Use the most imported module as the root for depth filtering
+ root_module = most_imported[0][0]
+ print(f"\nFiltering to depth {max_depth} from {root_module}")
+ module_graph = module_graph.filter_by_depth(root_module, max_depth)
+
+ # Visualize the graph
+ print("\nVisualizing module dependencies...")
+ codebase.visualize(module_graph)
+ print("Use codegen.sh to view the visualization!")
+
+
+@codegen.function("visualize-module-dependencies-by-path")
+def visualize_by_path(codebase: Codebase, path_prefix: str):
+ """Visualize module dependencies for a specific path prefix.
+
+ Args:
+ codebase: The codebase to analyze
+ path_prefix: The path prefix to filter by
+ """
+ # Build the module dependency graph
+ module_graph = build_module_dependency_graph(codebase.files)
+
+ # Filter by path
+ filtered_graph = module_graph.filter_by_module_path(path_prefix)
+
+ # Visualize the filtered graph
+ print(f"Visualizing module dependencies for path: {path_prefix}")
+ codebase.visualize(filtered_graph)
+ print("Use codegen.sh to view the visualization!")
+
+
+@codegen.function("analyze-circular-dependencies")
+def analyze_circular_dependencies(codebase: Codebase):
+ """Analyze and visualize circular dependencies in the codebase.
+
+ Args:
+ codebase: The codebase to analyze
+ """
+ # Build the module dependency graph
+ module_graph = build_module_dependency_graph(codebase.files)
+
+ # Detect circular dependencies
+ circular_deps = module_graph.detect_circular_dependencies()
+
+ if not circular_deps:
+ print("No circular dependencies found.")
+ return
+
+ print(f"Found {len(circular_deps)} circular dependencies:")
+ for i, cycle in enumerate(circular_deps):
+ print(f" Cycle {i+1}: {' -> '.join(cycle)} -> {cycle[0]}")
+
+ # Visualize the graph with circular dependencies highlighted
+ print("\nVisualizing circular dependencies...")
+ codebase.visualize(module_graph)
+ print("Use codegen.sh to view the visualization! Circular dependencies are highlighted in red.")
+
+
+if __name__ == "__main__":
+ # Example usage with the Sentry codebase
+ codebase = Codebase.from_repo(
+ "getsentry/sentry",
+ commit="fb0d53b2210cc896fc3e2cf32dae149ea8a8a45a",
+ language="python"
+ )
+
+ # Run the enhanced module dependency visualization
+ run(codebase, path_filter="src/sentry/api", include_external=False)
+
+ # Alternatively, visualize by path
+ # visualize_by_path(codebase, "src/sentry/api")
+
+ # Or analyze circular dependencies
+ # analyze_circular_dependencies(codebase)
+
diff --git a/codegen-examples/examples/snapshot_event_handler/README.md b/codegen-examples/examples/snapshot_event_handler/README.md
index 8899580e1..447d154cd 100644
--- a/codegen-examples/examples/snapshot_event_handler/README.md
+++ b/codegen-examples/examples/snapshot_event_handler/README.md
@@ -1,6 +1,6 @@
# Event Handler with codebase snapshotting
-This project is designed to using Modal snapshotting to provide parsed codebase instances with minimal latency, make it more manageable to write event based handlers.
+This project is designed to use Modal snapshotting to provide parsed codebase instances with minimal latency, making it easier to write event-based handlers.
Follow the instructions below to set up and deploy the application.
@@ -9,7 +9,7 @@ Follow the instructions below to set up and deploy the application.
Before you begin, ensure you have the following installed and configured:
1. **uv**: A tool for managing virtual environments and syncing dependencies.
-2. **Modal**: Ensure you have Modal configured on your system.
+1. **Modal**: Ensure you have Modal configured on your system.
## Setup Instructions
@@ -23,7 +23,7 @@ Before you begin, ensure you have the following installed and configured:
source ./venv/bin/activate
```
-2. **Sync Dependencies**
+1. **Sync Dependencies**
Sync the project dependencies using `uv`:
@@ -31,7 +31,7 @@ Before you begin, ensure you have the following installed and configured:
uv sync
```
-3. **Deploy to Modal**
+1. **Deploy to Modal**
Deploy the application to Modal by running:
@@ -48,7 +48,6 @@ Before you begin, ensure you have the following installed and configured:
- `.env.template` and `.env`: Environment variable templates and configurations.
- `pyproject.toml`: Project configuration and dependencies.
-
## Integration
-Once deployed, you can use the deployed web_url as the webhook endpoint for your slack, linear, or github webhooks.
\ No newline at end of file
+Once deployed, you can use the deployed web_url as the webhook endpoint for your slack, linear, or github webhooks.
diff --git a/codegen-examples/examples/swebench_agent_run/.env.db.template b/codegen-examples/examples/swebench_agent_run/.env.db.template
new file mode 100644
index 000000000..e29dbd721
--- /dev/null
+++ b/codegen-examples/examples/swebench_agent_run/.env.db.template
@@ -0,0 +1,5 @@
+POSTGRES_HOST="localhost"
+POSTGRES_DATABASE="swebench"
+POSTGRES_USER="swebench"
+POSTGRES_PASSWORD="swebench"
+POSTGRES_PORT="5432"
diff --git a/codegen-examples/examples/swebench_agent_run/README.md b/codegen-examples/examples/swebench_agent_run/README.md
index ddbf86814..711d395e7 100644
--- a/codegen-examples/examples/swebench_agent_run/README.md
+++ b/codegen-examples/examples/swebench_agent_run/README.md
@@ -1,38 +1,94 @@
-# INSTRUCTIONS
+# SWE-bench Agent Runner
-1. Create a `.env` file in the `swebench_agent_run` directory (codegen-examples/examples/swebench_agent_run) and add your API keys.
+Tool for running and evaluating model fixes using SWE-bench.
-1. cd into the `codegen-examples/examples/swebench_agent_run` directory
+## Setup
-1. Create a `.venv` with `uv venv` and activate it with `source .venv/bin/activate`
+1. Using the `.env.template` reference, create a `.env` file in the project root and add your API keys:
-1. Install the dependencies with `uv pip install .`
+ ```env
+ OPENAI_API_KEY=your_key_here
+ MODAL_TOKEN_ID=your_token_id
+ MODAL_TOKEN_SECRET=your_token_secret
+ ```
-1. Install the codegen dependencies with `uv add codegen`
+1. Create and activate a virtual environment:
-- Note: If you'd like to install the dependencies using the global environment, use `uv pip install -e ../../../` instead of `uv pip install .`. This will allow you to test modifications to the codegen codebase. You will need to run `uv pip install -e ../../../` each time you make changes to the codebase.
+ ```bash
+ uv venv
+ source .venv/bin/activate
+ ```
-6. Ensure that you have a modal account and profile set up. If you don't have one, you can create one at https://modal.com/
+1. Install the package:
-1. Activate the appropriate modal profile `python -m modal profile activate `
+ ```bash
+ # Basic installation
+ uv pip install -e .
-1. Launch the modal app with `python -m modal deploy --env= entry_point.py`
+ # With metrics support
+ uv pip install -e ".[metrics]"
-1. Run the evaluation with `python -m run_eval` with the desired options:
+ # With development tools
+ uv pip install -e ".[dev]"
-- ```bash
- $ python run_eval.py --help
- Usage: run_eval.py [OPTIONS]
+ # Install everything
+ uv pip install -e ".[all]"
+ ```
- Options:
- --use-existing-preds TEXT The run ID of the existing predictions to
- use.
+1. Set up Modal:
+
+ - Create an account at https://modal.com/ if you don't have one
+ - Activate your Modal profile:
+ ```bash
+     python -m modal profile activate <profile-name>
+ ```
+
+## Usage
+
+The package provides two main command-line tools:
+
+### Testing SWE CodeAgent
+
+Run the agent on a specific repository:
+
+```bash
+# Using the installed command
+swe-agent --repo pallets/flask --prompt "Analyze the URL routing system"
+
+# Options
+swe-agent --help
+Options:
+ --agent-class [DefaultAgent|CustomAgent] Agent class to use
+ --repo TEXT Repository to analyze (owner/repo)
+ --prompt TEXT Prompt for the agent
+ --help Show this message and exit
+```
+
+### Running SWE-Bench Eval
+
+Deploy modal app
+
+```bash
+./deploy.sh
+```
+
+Run evaluations on model fixes:
+
+```bash
+# Using the installed command
+swe-eval --dataset lite --length 10
+
+# Options
+swe-eval --help
+Options:
+ --use-existing-preds TEXT Run ID of existing predictions
--dataset [lite|full|verified|lite_small|lite_medium|lite_large]
- The dataset to use.
- --length INTEGER The number of examples to process.
- --instance-id TEXT The instance ID of the example to process.
- --repo TEXT The repo to use.
+ --length INTEGER Number of examples to process
+ --instance-id TEXT Specific instance ID to process
+ --repo TEXT Specific repo to evaluate
+ --local Run evaluation locally
--instance-ids LIST_OF_STRINGS The instance IDs of the examples to process.
Example: --instance-ids ,,...
- --help Show this message and exit.
- ```
+ --push-metrics Push results to metrics database (Requires additional database environment variables)
+ --help Show this message and exit
+```
diff --git a/codegen-examples/examples/swebench_agent_run/agent_cli.py b/codegen-examples/examples/swebench_agent_run/agent_cli.py
new file mode 100644
index 000000000..223cea4cb
--- /dev/null
+++ b/codegen-examples/examples/swebench_agent_run/agent_cli.py
@@ -0,0 +1,55 @@
+import click
+import modal
+from codegen import CodeAgent, Codebase
+
+image = modal.Image.debian_slim(python_version="3.13").apt_install("git").pip_install("codegen")
+
+app = modal.App(
+ name="codegen-examples",
+ image=image,
+ secrets=[modal.Secret.from_dotenv()],
+)
+
+
+@app.function()
+def run_agent(repo_name: str, prompt: str) -> bool:
+ codebase = Codebase.from_repo(repo_full_name=repo_name)
+ agent = CodeAgent(codebase)
+ return agent.run(prompt=prompt)
+
+
+@click.command()
+@click.option(
+ "--repo",
+ type=str,
+ default="pallets/flask",
+ help="The repository to analyze (format: owner/repo)",
+)
+@click.option(
+ "--prompt",
+ type=str,
+ default="Tell me about the codebase and the files in it.",
+ help="The prompt to send to the agent",
+)
+def main(repo: str, prompt: str):
+ """Run a codegen agent on a GitHub repository."""
+    # Run the agent remotely on Modal and report the result
+
+ click.echo(f"Running on {repo}")
+ click.echo(f"Prompt: {prompt}")
+
+ try:
+ with app.run():
+ result = run_agent.remote(repo, prompt)
+ if result:
+ click.echo("✅ Analysis completed successfully:")
+ click.echo(result)
+ else:
+ click.echo("❌ Analysis failed")
+ except Exception as e:
+ click.echo(f"❌ Error: {str(e)}", err=True)
+ raise click.Abort()
+
+
+if __name__ == "__main__":
+ main()
diff --git a/codegen-examples/examples/swebench_agent_run/deploy.sh b/codegen-examples/examples/swebench_agent_run/deploy.sh
new file mode 100755
index 000000000..a1a681fb3
--- /dev/null
+++ b/codegen-examples/examples/swebench_agent_run/deploy.sh
@@ -0,0 +1,3 @@
+#! /bin/bash
+
+uv run modal deploy swebench_agent_run/modal_harness/entry_point.py
diff --git a/codegen-examples/examples/swebench_agent_run/entry_point.py b/codegen-examples/examples/swebench_agent_run/entry_point.py
deleted file mode 100644
index a364aaa19..000000000
--- a/codegen-examples/examples/swebench_agent_run/entry_point.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from codegen.extensions.swebench.utils import SweBenchExample
-from codegen.extensions.swebench.harness import run_agent_on_entry
-import modal
-
-image = (
- modal.Image.debian_slim(python_version="3.13")
- .apt_install(["git", "ripgrep"])
- .pip_install("fastapi[standard]")
- .copy_local_dir("../../../", "/root/codegen", ignore=[".venv", "**/.venv", "tests", "**/tests"])
- .run_commands("pip install -e /root/codegen")
-)
-
-app = modal.App(name="swebench-agent-run", image=image, secrets=[modal.Secret.from_dotenv()])
-
-
-@app.function(timeout=43200)
-async def run_agent_modal(entry: SweBenchExample, run_id: str, model: str):
- """Modal function to process a single example from the SWE-bench dataset."""
- return run_agent_on_entry(entry, run_id=run_id, model=model)
diff --git a/codegen-examples/examples/swebench_agent_run/eval_cli.py b/codegen-examples/examples/swebench_agent_run/eval_cli.py
new file mode 100644
index 000000000..3258235bc
--- /dev/null
+++ b/codegen-examples/examples/swebench_agent_run/eval_cli.py
@@ -0,0 +1,360 @@
+import json
+import traceback
+import uuid
+from dataclasses import asdict, dataclass
+from datetime import datetime
+from pathlib import Path
+from typing import Any, ClassVar, Dict, List, Optional, Tuple
+
+import click
+import modal
+from codegen.extensions.swebench.harness import run_agent_on_entry
+from codegen.extensions.swebench.utils import (
+ SWEBenchDataset,
+ SweBenchExample,
+ get_swe_bench_examples,
+)
+from codegen.sdk.core.codebase import Codebase
+
+from swebench_agent_run.constants import DATASET_DICT
+from swebench_agent_run.report import generate_report
+from swebench_agent_run.utils import track_batches
+
+# Constants
+PREDS_DNAME = Path(__file__).parent / "predictions"
+LOG_DIR = Path(__file__).parent / "logs"
+
+# Modal function setup
+run_agent_modal = modal.Function.from_name(
+ app_name="swebench-agent-run",
+ name="run_agent_modal",
+)
+
+
+# Type aliases
+@dataclass
+class ErrorInfo:
+ error_type: str
+ error_message: str
+    traceback: str = ""  # default needed: NullResult errors construct ErrorInfo without a traceback
+ modal_error_code: Optional[str] = None
+ modal_error_details: Optional[dict] = None
+
+ def format_error(self, example_id: str = "") -> Dict[str, Any]:
+ """Format error information into a structured dictionary."""
+ error_dict = {
+ "error_context": "Processing error"
+ if not example_id
+ else f"Error processing {example_id}",
+ "error_details": {
+ "type": self.error_type,
+ "message": self.error_message,
+ "traceback": self.traceback.split("\n"), # Split for better JSON formatting
+ },
+ }
+
+ if self.modal_error_code or self.modal_error_details:
+ error_dict["error_details"]["modal_specific"] = {
+ "error_code": self.modal_error_code,
+ "error_details": self.modal_error_details,
+ }
+
+ return error_dict
+
+
+@dataclass
+class ProcessingResult:
+ instance_id: str
+ status: Optional[str] = None
+ error_info: Optional[ErrorInfo] = None
+ result: Optional[dict] = None
+
+ ERROR_STATUS: ClassVar[str] = "error" # Class constant for error status
+
+ @classmethod
+ def create_error(cls, instance_id: str, error_info: ErrorInfo) -> "ProcessingResult":
+ """Create a ProcessingResult instance for an error case."""
+ return cls(instance_id=instance_id, status=cls.ERROR_STATUS, error_info=error_info)
+
+
+def create_error_info(error: Exception, example_id: str = "") -> ErrorInfo:
+ """Create standardized error information."""
+ traceback_str = (
+ "".join(traceback.format_exception(type(error), error, error.__traceback__))
+ if hasattr(error, "__traceback__")
+ else traceback.format_exc()
+ )
+
+ error_info = ErrorInfo(
+ error_type=type(error).__name__,
+ error_message=str(error),
+ traceback=traceback_str,
+ )
+
+ if isinstance(error, modal.exception.Error):
+ error_info.modal_error_code = getattr(error, "code", None)
+ error_info.modal_error_details = getattr(error, "details", None)
+
+ # Print formatted error as JSON
+ print(json.dumps(error_info.format_error(example_id), indent=2))
+
+ return error_info
+
+
+def process_modal(
+ examples: list[SweBenchExample],
+ model: str,
+ run_id: str,
+) -> List[ProcessingResult]:
+ """Process examples using Modal's parallel execution."""
+ results: List[ProcessingResult] = []
+
+ try:
+ batch_results = run_agent_modal.starmap(
+            [(ex, run_id, model) for ex in examples], return_exceptions=True
+ )
+
+ for example, result in zip(examples, batch_results):
+ if isinstance(result, Exception):
+ error_info = create_error_info(result, example.instance_id)
+ results.append(ProcessingResult.create_error(example.instance_id, error_info))
+ elif result is None:
+ print(f"Warning: Null result for {example.instance_id}")
+ results.append(
+ ProcessingResult.create_error(
+ example.instance_id,
+ ErrorInfo(
+ error_type="NullResult",
+ error_message="Process returned None",
+ ),
+ )
+ )
+ else:
+ results.append(ProcessingResult(instance_id=example.instance_id, result=result))
+
+ except Exception as e:
+ error_info = create_error_info(e)
+ # Mark all examples as failed
+ results.extend(
+ [ProcessingResult.create_error(example.instance_id, error_info) for example in examples]
+ )
+
+ return results
+
+
+def process_batch_local(
+ examples: list[SweBenchExample],
+ batch_size: int = 10,
+ codebases: dict[str, Codebase] = {},
+ model: str = "claude-3-7-sonnet-latest",
+ run_id: str | None = None,
+) -> List[ProcessingResult]:
+ """Process examples in local batches."""
+ results: List[ProcessingResult] = []
+
+ for _, batch in track_batches(examples, batch_size, desc="Processing examples"):
+ for example in batch:
+ try:
+ result = run_agent_on_entry(
+ example,
+ model=model,
+ codebase=codebases.get(example.instance_id),
+ run_id=run_id,
+ )
+ results.append(ProcessingResult(instance_id=example.instance_id, result=result))
+ except Exception as e:
+ error_info = create_error_info(e, example.instance_id)
+ results.append(ProcessingResult.create_error(example.instance_id, error_info))
+
+ return results
+
+
+def save_results(
+ results: List[ProcessingResult], predictions_dir: Path, timestamp: str
+) -> Tuple[Path, dict]:
+ """Save individual results and create summary."""
+ # Save individual results
+ for result in results:
+ output_file = predictions_dir / f"{result.instance_id}.json"
+ output_file.parent.mkdir(exist_ok=True, parents=True)
+ with open(output_file, "w") as f:
+ # Convert dataclass to dict for JSON serialization
+ json.dump(asdict(result), f, indent=4)
+
+ # Create and save summary
+ summary = {
+ "timestamp": timestamp,
+ "total_examples": len(results),
+ "successful": len([r for r in results if not r.status]), # No status means success
+ "failed": len([r for r in results if r.status == ProcessingResult.ERROR_STATUS]),
+ "error_types": {},
+ "results": [asdict(r) for r in results], # Convert all results to dict
+ }
+
+ # Collect error statistics
+ for result in results:
+ if result.status == ProcessingResult.ERROR_STATUS and result.error_info:
+ error_type = result.error_info.error_type
+ summary["error_types"][error_type] = summary["error_types"].get(error_type, 0) + 1
+
+ summary_file = predictions_dir / f"summary_{timestamp}.json"
+ with open(summary_file, "w") as f:
+ json.dump(summary, f, indent=4)
+
+ return summary_file, summary
+
+
+def print_summary(summary: dict, predictions_dir: Path, summary_file: Path) -> None:
+ """Print processing summary information."""
+ print("\nProcessing complete!")
+ print(f"Results saved to: {predictions_dir}")
+ print(f"Summary saved to: {summary_file}")
+ print(f"Successful: {summary['successful']}/{summary['total_examples']}")
+ print(f"Failed: {summary['failed']}/{summary['total_examples']}")
+
+ if summary["error_types"]:
+ print("\nError type distribution:")
+ for error_type, count in summary["error_types"].items():
+ print(f" {error_type}: {count}")
+
+
+def run_eval(
+ use_existing_preds: Optional[str],
+ dataset_enum: SWEBenchDataset,
+ length: int,
+ instance_id: Optional[str] = None,
+ local: bool = False,
+ codebases: Dict[str, Codebase] = {},
+ repo: Optional[str] = None,
+ model: str = "claude-3-7-sonnet-latest",
+ instance_ids: list[str] | None = None,
+) -> Tuple[Path, Path, SWEBenchDataset, str]:
+ """Main evaluation function."""
+ run_id = use_existing_preds or str(uuid.uuid4())
+ print(f"Run ID: {run_id}")
+
+ predictions_dir = PREDS_DNAME / f"results_{run_id}"
+
+ examples = get_swe_bench_examples(
+ dataset=dataset_enum,
+ length=length,
+ instance_id=instance_id,
+ repo=repo,
+ instance_ids=instance_ids or [],
+ )
+ print(
+ "Examples:\n" + "\n".join(f"{e.instance_id} - {e.repo} - {e.base_commit}" for e in examples)
+ )
+
+ try:
+ if use_existing_preds is None:
+ print(f"Repo: {repo}")
+ print(
+ f"Examples:\n{'\n'.join([f'{e.instance_id} - {e.repo} - {e.base_commit}' for e in examples])}"
+ )
+ print(f"Processing {len(examples)} examples...")
+
+ predictions_dir.mkdir(exist_ok=True, parents=True)
+ timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+
+ results = (
+ process_batch_local(
+ examples,
+ codebases=codebases,
+ model=model,
+ run_id=run_id,
+ )
+ if local
+ else process_modal(examples, model=model, run_id=run_id)
+ )
+ summary_file, summary = save_results(results, predictions_dir, timestamp)
+ print_summary(summary, predictions_dir, summary_file)
+
+ return predictions_dir, LOG_DIR, dataset_enum, run_id
+ except Exception:
+ traceback.print_exc()
+ raise
+
+
+def list_of_strings(value: str) -> list[str]:
+ if value == "":
+ return []
+ return value.split(",")
+
+
+@click.command()
+@click.option(
+ "--use-existing-preds",
+ help="The run ID of the existing predictions to use.",
+ type=str,
+ default=None,
+)
+@click.option(
+ "--dataset",
+ help="The dataset to use.",
+ type=click.Choice(["lite", "full", "verified"]),
+ default="lite",
+)
+@click.option("--length", help="The number of examples to process.", type=int, default=10)
+@click.option(
+ "--instance-id",
+ help="The instance ID of the example to process.",
+ type=str,
+ default=None,
+)
+@click.option("--local", help="Run the evaluation locally.", is_flag=True, default=False)
+@click.option("--push-metrics", help="Push metrics to the database.", is_flag=True, default=False)
+@click.option("--repo", help="The repo to use.", type=str, default=None)
+@click.option("--model", help="The model to use.", type=str, default="claude-3-7-sonnet-latest")
+@click.option(
+ "--instance-ids",
+    help="The instance IDs of the examples to process. Example: --instance-ids id1,id2,...",
+ type=list_of_strings,
+ default="",
+)
+def main(
+ use_existing_preds: Optional[str],
+ dataset: str,
+ length: int,
+ instance_id: Optional[str],
+ local: bool,
+ repo: Optional[str],
+ model: str,
+ push_metrics: bool,
+ instance_ids: list[str],
+) -> None:
+ """Command-line interface for running evaluations."""
+ print(f"Repo: {repo}")
+ result = run_eval(
+ use_existing_preds=use_existing_preds,
+ dataset_enum=DATASET_DICT[dataset],
+ length=length,
+ instance_id=instance_id,
+ local=local,
+ repo=repo,
+ model=model,
+ instance_ids=instance_ids,
+ )
+
+ generate_report(*result)
+
+ evaluation_result_file = Path(f"results.{result[3]}.json")
+
+ if push_metrics:
+ if not evaluation_result_file.exists() and use_existing_preds is None:
+ print("Evaluation was not run - no metrics were pushed")
+ return
+
+ try:
+ from swebench_agent_run.metrics import (
+ write_report_to_db, # delay import because of extras
+ )
+
+ write_report_to_db(str(evaluation_result_file.resolve()))
+ except Exception:
+ print("Error writing report to db")
+ traceback.print_exc()
+
+
+if __name__ == "__main__":
+ main()
diff --git a/codegen-examples/examples/swebench_agent_run/local_run.ipynb b/codegen-examples/examples/swebench_agent_run/local_run.ipynb
index 54d845c98..237732bbf 100644
--- a/codegen-examples/examples/swebench_agent_run/local_run.ipynb
+++ b/codegen-examples/examples/swebench_agent_run/local_run.ipynb
@@ -32,7 +32,14 @@
"metadata": {},
"outputs": [],
"source": [
- "await run_eval(use_existing_preds=None, dataset=\"lite\", length=20, repo=\"django/django\", num_workers=10, model=\"claude-3-7-sonnet-latest\")"
+ "await run_eval(\n",
+ " use_existing_preds=None,\n",
+ " dataset=\"lite\",\n",
+ " length=5,\n",
+ " repo=\"django/django\",\n",
+ " num_workers=10,\n",
+ " model=\"claude-3-7-sonnet-latest\",\n",
+ ")"
]
},
{
@@ -76,7 +83,12 @@
"source": [
"from codegen.agents.code_agent import CodeAgent\n",
"\n",
- "agent = CodeAgent(codebase=codebase, tags=[\"local_test\"], model_name=\"claude-3-5-sonnet-latest\", model_provider=\"anthropic\")"
+ "agent = CodeAgent(\n",
+ " codebase=codebase,\n",
+ " tags=[\"local_test\"],\n",
+ " model_name=\"claude-3-5-sonnet-latest\",\n",
+ " model_provider=\"anthropic\",\n",
+ ")"
]
},
{
diff --git a/codegen-examples/examples/swebench_agent_run/pyproject.toml b/codegen-examples/examples/swebench_agent_run/pyproject.toml
index fc612d4b1..640e252b5 100644
--- a/codegen-examples/examples/swebench_agent_run/pyproject.toml
+++ b/codegen-examples/examples/swebench_agent_run/pyproject.toml
@@ -1,10 +1,45 @@
[project]
name = "swebench-agent-run"
version = "0.1.0"
-description = "Add your description here"
+description = "SWE-bench agent runner for evaluating model fixes"
readme = "README.md"
requires-python = ">=3.12, <3.14"
-dependencies = ["modal>=0.73.25"]
+dependencies = [
+ "modal>=0.73.25",
+ "tqdm>=4.66.0",
+ "click>=8.1.0",
+ "codegen",
+ "swebench>=3.0.15",
+ "tenacity>=9.0.0",
+]
-[tool.setuptools]
-py-modules = ["entry_point", "run_eval"]
+[project.optional-dependencies]
+metrics = ["psycopg2-binary", "python-dotenv"]
+dev = ["ruff", "mypy"]
+all = ["swebench-agent-run[metrics,dev]"]
+
+[project.scripts]
+swe-agent = "agent_cli:main"
+swe-eval = "eval_cli:main"
+modal-deploy = "modal_harness:deploy"
+
+[tool.ruff]
+line-length = 100
+target-version = "py312"
+
+
+[tool.mypy]
+python_version = "3.12"
+strict = true
+warn_return_any = true
+warn_unused_configs = true
+
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[tool.hatch.metadata]
+allow-direct-references = true
+
+[tool.uv.sources]
+codegen = { path = "../../../" }
diff --git a/codegen-examples/examples/swebench_agent_run/run_eval.py b/codegen-examples/examples/swebench_agent_run/run_eval.py
deleted file mode 100644
index 98d300855..000000000
--- a/codegen-examples/examples/swebench_agent_run/run_eval.py
+++ /dev/null
@@ -1,385 +0,0 @@
-import asyncio
-import json
-import traceback
-from pathlib import Path
-import uuid
-import modal
-import click
-import time
-from codegen.extensions.swebench.enums import SWEBenchDataset, SWEBenchLiteSubset
-from constants import DATASET_DICT
-from codegen.extensions.swebench.harness import run_agent_on_entry
-from codegen.extensions.swebench.utils import SweBenchExample, get_swe_bench_examples
-from codegen.extensions.swebench.report import generate_report
-from codegen.sdk.core.codebase import Codebase
-
-PREDS_DNAME = Path(__file__).parent / "predictions"
-LOG_DIR = Path(__file__).parent / "logs"
-
-run_agent_modal = modal.Function.from_name(app_name="swebench-agent-run", name="run_agent_modal")
-
-
-async def process_batch_modal(examples: list[SweBenchExample], run_id: str, model: str, num_workers=5, min_workers=1, max_retries=3):
- """Process a batch of examples concurrently using a queue system with incremental worker scaling.
-
- Args:
- examples: List of SweBenchExample objects to process
- num_workers: Initial number of examples to process concurrently
- min_workers: Minimum number of concurrent workers to maintain
- max_retries: Maximum number of retries for failed requests
- """
- results = {}
- queue = asyncio.Queue()
-
- # Shared state for worker management
- state = {
- "active_workers": num_workers,
- "success_streak": 0,
- "last_scaling_time": time.time(),
- "scaling_cooldown": 0, # seconds between scaling operations
- "worker_tasks": [],
- "running": True,
- }
-
- # Use a lock to protect shared state during adjustments
- state_lock = asyncio.Lock()
-
- # Initialize the queue with (example, attempt) tuples
- for example in examples:
- await queue.put((example, 0)) # 0 represents first attempt
-
- async def scale_down_worker(task_to_cancel=None):
- """Remove a single worker when rate limiting is detected"""
- async with state_lock:
- # Only scale if cooldown period has passed and we're above min_workers
- current_time = time.time()
- if current_time - state["last_scaling_time"] < state["scaling_cooldown"] or state["active_workers"] <= min_workers:
- return False
-
- # Reset success streak when scaling down
- state["success_streak"] = 0
- state["last_scaling_time"] = current_time
-
- # If a specific task was provided, cancel it
- if task_to_cancel and task_to_cancel in state["worker_tasks"]:
- print(f"Rate limiting detected! Removing 1 worker, going from {state['active_workers']} to {state['active_workers'] - 1}")
- state["worker_tasks"].remove(task_to_cancel)
- task_to_cancel.cancel()
- state["active_workers"] -= 1
- return True
-
- # Otherwise, cancel the most recently added worker
- elif state["worker_tasks"]:
- print(f"Rate limiting detected! Removing 1 worker, going from {state['active_workers']} to {state['active_workers'] - 1}")
- task = state["worker_tasks"].pop()
- task.cancel()
- state["active_workers"] -= 1
- return True
-
- return False
-
- async def scale_up_worker():
- """Add a single worker when operations have been consistently successful"""
- async with state_lock:
- # Only scale if cooldown period has passed and we're below num_workers
- current_time = time.time()
- if current_time - state["last_scaling_time"] < state["scaling_cooldown"] or state["active_workers"] >= num_workers:
- return False
-
- # Add a worker after a streak of successful operations
- if state["success_streak"] >= 5:
- print(f"Operations succeeding! Adding 1 worker, going from {state['active_workers']} to {state['active_workers'] + 1}")
-
- # Create new worker
- if state["running"]:
- new_task = asyncio.create_task(worker())
- state["worker_tasks"].append(new_task)
- state["active_workers"] += 1
- state["success_streak"] = 0
- state["last_scaling_time"] = current_time
- return True
-
- return False
-
- async def is_rate_limit_error(error):
- """Determine if an error is due to rate limiting"""
- # Check for common rate limit error patterns
- if isinstance(error, modal.exception.Error):
- error_msg = str(error).lower()
- rate_limit_indicators = ["rate limit", "too many requests", "429", "throttle", "quota exceeded", "capacity", "limit exceeded"]
- return any(indicator in error_msg for indicator in rate_limit_indicators)
- return False
-
- async def process_example(example, attempt, current_task):
- try:
- result = await run_agent_modal.remote.aio(example, run_id=run_id, model=model)
-
- if result is None:
- print(f"Warning: Null result for {example.instance_id}")
- return {"status": "error", "instance_id": example.instance_id, "error_info": {"error_type": "NullResult", "error_message": "Process returned None"}}
-
- # Increment success streak and potentially scale up
- async with state_lock:
- state["success_streak"] += 1
-
- if state["success_streak"] % 5 == 0: # Check after every 5 successes
- await scale_up_worker()
-
- return result
-
- except Exception as e:
- error_type = type(e).__name__
- error_info = {
- "error_type": error_type,
- "error_message": str(e),
- "traceback": traceback.format_exception(type(e), e, e.__traceback__),
- }
-
- if isinstance(e, modal.exception.Error):
- error_info["modal_error_code"] = getattr(e, "code", None)
- error_info["modal_error_details"] = getattr(e, "details", None)
-
- print(f"Error processing {example.instance_id} (attempt {attempt + 1}):")
- print(f"Type: {error_type}")
- print(f"Message: {str(e)}")
-
- # Check if this is a rate limit error
- if await is_rate_limit_error(e):
- print(f"Rate limit detected on task for {example.instance_id}")
-
- # Scale down by removing this specific worker
- scaled_down = await scale_down_worker(current_task)
-
- # If we're removing this worker, we need to requeue the task for another worker
- if scaled_down:
- # Requeue this example with the same attempt count (not incrementing)
- await queue.put((example, attempt))
- return None
-
- # Otherwise add a small delay before retrying
- await asyncio.sleep(2 * (attempt + 1)) # Exponential backoff
-
- if attempt < max_retries:
- await queue.put((example, attempt + 1))
- return None
-
- return {"status": "error", "instance_id": example.instance_id, "error_info": error_info}
-
- async def worker():
- # Store this task reference to allow targeted cancellation
- current_task = asyncio.current_task()
-
- while state["running"]:
- try:
- # Use a timeout to allow worker to check if it should exit
- try:
- example, attempt = await asyncio.wait_for(queue.get(), timeout=1.0)
- except asyncio.TimeoutError:
- continue
-
- if example.instance_id in results:
- queue.task_done()
- continue
- print(f"Processing example {example.instance_id}")
- process_result = await process_example(example, attempt, current_task)
-
- # If we're still processing this task (not requeued due to rate limiting)
- if process_result is not None:
- results[example.instance_id] = {"instance_id": example.instance_id, **process_result}
- print(f"Processed example {example.instance_id}")
- queue.task_done()
-
- # If None is returned, the task was requeued due to rate limiting
- # and this worker is being shut down, so exit the loop
- else:
- print(f"Task for {example.instance_id} has been requeued")
- queue.task_done()
- if current_task not in state["worker_tasks"]:
- break
-
- except asyncio.CancelledError:
- # Handle graceful cancellation
- print("Worker task cancelled")
- break
- except Exception as e:
- print(f"Worker error: {str(e)}")
- traceback.print_exc()
- queue.task_done()
-
- # Start initial workers
- state["worker_tasks"] = [asyncio.create_task(worker()) for _ in range(num_workers)]
-
- # Wait for queue to be fully processed
- await queue.join()
-
- # Mark as not running and cancel remaining workers
- state["running"] = False
- for w in state["worker_tasks"]:
- w.cancel()
-
- # Wait for all workers to be cancelled
- await asyncio.gather(*state["worker_tasks"], return_exceptions=True)
-
- # Return results in the same order as input examples
- return [results.get(example.instance_id, {"instance_id": example.instance_id, "status": "missing"}) for example in examples]
-
-
-def process_batch_local(examples: list[SweBenchExample], model: str, num_workers=5, codebases: dict[str, Codebase] = {}, run_id: str | None = None):
- """Process a batch of examples synchronously.
-
- Args:
- examples: List of SweBenchExample objects to process
- num_workers: Number of examples to process in each batch.
- Default is 10 to avoid overwhelming the system.
- """
- results = []
-
- # Process examples in batches
- for i in range(0, len(examples), num_workers):
- batch = examples[i : i + num_workers]
- print(f"Processing batch {i // num_workers + 1}/{len(examples) // num_workers + 1} (examples {i + 1}-{min(i + num_workers, len(examples))})")
-
- # Process each example in the batch
- for example in batch:
- try:
- # Run the agent locally instead of using modal
- if codebases and example.instance_id in codebases:
- result = run_agent_on_entry(example, model=model, codebase=codebases[example.instance_id], run_id=run_id)
- else:
- result = run_agent_on_entry(example, model=model, run_id=run_id)
- results.append(result)
-
- except Exception as e:
- error_type = type(e).__name__
- error_info = {
- "error_type": error_type,
- "error_message": str(e),
- "traceback": traceback.format_exc(),
- }
-
- print(f"Error processing {example.instance_id}:")
- print(f"Type: {error_type}")
- print(f"Message: {str(e)}")
- print("Traceback:")
- print(error_info["traceback"])
-
- results.append({"instance_id": example.instance_id, "status": "error", "error_info": error_info})
-
- return results
-
-
-async def run_eval(
- dataset: str,
- use_existing_preds: str | None = None,
- length: int | None = None,
- instance_id: str | None = None,
- local: bool = False,
- codebases: dict[str, Codebase] = {},
- repo: str | None = None,
- num_workers: int = 2,
- model: str = "claude-3-7-sonnet-latest",
- instance_ids: list[str] = [],
-):
- run_id = use_existing_preds or str(uuid.uuid4())
- print(f"Run ID: {run_id}")
- predictions_dir = PREDS_DNAME / f"results_{run_id}"
-
- dataset_enum = DATASET_DICT[dataset]
- examples = get_swe_bench_examples(dataset=dataset_enum, length=length, instance_id=instance_id, repo=repo, instance_ids=instance_ids)
-
- try:
- if use_existing_preds is None:
- print(f"Repo: {repo}")
- print(f"Examples:\n{'\n'.join([f'{e.instance_id} - {e.repo} - {e.base_commit}' for e in examples])}")
- print(f"Processing {len(examples)} examples...")
- # Create output directory if it doesn't exist
- predictions_dir.mkdir(exist_ok=True, parents=True)
-
- # Create a timestamp for this run
- timestamp = time.strftime("%Y-%m-%d %H:%M %Z", time.localtime(time.time()))
-
- # Process all examples in parallel batches
- if local:
- results = process_batch_local(examples, model=model, codebases=codebases, run_id=run_id)
- else:
- results = await process_batch_modal(examples, model=model, run_id=run_id, num_workers=num_workers)
-
- # Save individual results
- for result in results:
- if result and "instance_id" in result:
- instance_id = result["instance_id"]
- output_file = predictions_dir / f"{instance_id}.json"
- output_file.parent.mkdir(exist_ok=True, parents=True)
- with open(output_file, "w") as f:
- json.dump(result, f, indent=4)
-
- # Save summary file
- summary_file = predictions_dir / f"summary_{timestamp}.json"
- summary = {
- "timestamp": timestamp,
- "total_examples": len(examples),
- "successful": len([r for r in results if r and "status" not in r]),
- "failed": len([r for r in results if r and "status" in r and r["status"] == "error"]),
- "error_types": {},
- "results": results,
- }
-
- # Collect error statistics
- for result in results:
- if result and "status" in result and result["status"] == "error":
- error_type = result.get("error_info", {}).get("error_type", "Unknown")
- summary["error_types"][error_type] = summary["error_types"].get(error_type, 0) + 1
-
- with open(summary_file, "w") as f:
- json.dump(summary, f, indent=4)
-
- print("\nProcessing complete!")
- print(f"Results saved to: {predictions_dir}")
- print(f"Summary saved to: {summary_file}")
- print(f"Successful: {summary['successful']}/{summary['total_examples']}")
- print(f"Failed: {summary['failed']}/{summary['total_examples']}")
- if summary["error_types"]:
- print("\nError type distribution:")
- for error_type, count in summary["error_types"].items():
- print(f" {error_type}: {count}")
-
- if isinstance(dataset_enum, SWEBenchLiteSubset):
- dataset_enum = SWEBenchDataset.LITE
- # Generate Report on Modal
- generate_report(predictions_dir, LOG_DIR, dataset_enum, run_id)
- except Exception:
- print("Fatal error in run_eval:")
- traceback.print_exc()
- raise
-
-
-def list_of_strings(value: str) -> list[str]:
- if value == "":
- return []
- return value.split(",")
-
-
-@click.command()
-@click.option("--dataset", help="The dataset to use.", type=click.Choice(["lite", "full", "verified", "lite_small", "lite_medium", "lite_large"]), default="lite")
-@click.option("--use-existing-preds", help="The run ID of the existing predictions to use.", type=str, default=None)
-@click.option("--length", help="The number of examples to process.", type=int, default=None)
-@click.option("--instance-id", help="The instance ID of the example to process.", type=str, default=None)
-@click.option("--local", help="Run the evaluation locally.", is_flag=True, default=False)
-@click.option("--repo", help="The repo to use.", type=str, default=None)
-@click.option(
- "--num-workers", help="The number of workers to use. This is the number of examples that will be processed concurrently. A large number may lead to rate limiting issues.", type=int, default=5
-)
-@click.option("--model", help="The model to use.", type=str, default="claude-3-7-sonnet-latest")
-@click.option("--instance-ids", help="The instance IDs of the examples to process. Example: --instance-ids ,,...", type=list_of_strings, default="")
-def run_eval_command(dataset, use_existing_preds, length, instance_id, local, repo, num_workers, model, instance_ids):
- print(f"Repo: {repo}")
- print(f"Model: {model}")
- asyncio.run(
- run_eval(
- dataset=dataset, use_existing_preds=use_existing_preds, length=length, instance_id=instance_id, local=local, repo=repo, num_workers=num_workers, model=model, instance_ids=instance_ids
- )
- )
-
-
-if __name__ == "__main__":
- run_eval_command()
diff --git a/codegen-examples/examples/swebench_agent_run/swebench_agent_run/__init__.py b/codegen-examples/examples/swebench_agent_run/swebench_agent_run/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/codegen-examples/examples/swebench_agent_run/constants.py b/codegen-examples/examples/swebench_agent_run/swebench_agent_run/constants.py
similarity index 100%
rename from codegen-examples/examples/swebench_agent_run/constants.py
rename to codegen-examples/examples/swebench_agent_run/swebench_agent_run/constants.py
diff --git a/codegen-examples/examples/swebench_agent_run/swebench_agent_run/metrics.py b/codegen-examples/examples/swebench_agent_run/swebench_agent_run/metrics.py
new file mode 100644
index 000000000..4052604d7
--- /dev/null
+++ b/codegen-examples/examples/swebench_agent_run/swebench_agent_run/metrics.py
@@ -0,0 +1,69 @@
+import json
+import os
+from importlib.metadata import version
+from pathlib import Path
+
+import psycopg2
+from dotenv import load_dotenv
+
+
+def write_report_to_db(report_file: str):
+ path = Path(__file__).parent.parent / ".env.db"
+ if not path.exists():
+ raise FileNotFoundError(f"DB credentials not found: {path}")
+ load_dotenv(str(path.resolve()))
+
+ postgres_host = os.getenv("POSTGRESQL_HOST")
+ postgres_database = os.getenv("POSTGRESQL_DATABASE")
+ postgres_user = os.getenv("POSTGRESQL_USER")
+ postgres_password = os.getenv("POSTGRESQL_PASSWORD")
+ postgres_port = os.getenv("POSTGRESQL_PORT")
+
+ try:
+ codegen_version = version("codegen")
+ except Exception:
+ codegen_version = "dev"
+
+ with open(report_file) as f:
+ report = json.load(f)
+
+ # Establish connection
+
+ conn = psycopg2.connect(
+ host=postgres_host,
+ database=postgres_database,
+ user=postgres_user,
+ password=postgres_password,
+ port=postgres_port,
+ )
+
+ # Create a cursor
+ cur = conn.cursor()
+
+ try:
+ # Single row insert
+ cur.execute(
+ "INSERT INTO swebench_output (codegen_version, submitted, completed_instances, resolved_instances, unresolved_instances, empty_patches, error_instances) VALUES (%s, %s, %s, %s, %s, %s, %s)",
+ (
+ codegen_version,
+ report["submitted_instances"],
+ report["completed_instances"],
+ report["resolved_instances"],
+ report["unresolved_instances"],
+ report["empty_patch_instances"],
+ report["error_instances"],
+ ),
+ )
+
+ # Commit the transaction
+ conn.commit()
+
+ except Exception as e:
+ # Rollback in case of error
+ conn.rollback()
+ print(f"Error: {e}")
+
+ finally:
+ # Close cursor and connection
+ cur.close()
+ conn.close()
diff --git a/codegen-examples/examples/swebench_agent_run/swebench_agent_run/modal_harness/__init__.py b/codegen-examples/examples/swebench_agent_run/swebench_agent_run/modal_harness/__init__.py
new file mode 100644
index 000000000..e26435103
--- /dev/null
+++ b/codegen-examples/examples/swebench_agent_run/swebench_agent_run/modal_harness/__init__.py
@@ -0,0 +1,3 @@
+from .entry_point import patched_swebench_eval
+
+__all__ = ["patched_swebench_eval"]
diff --git a/codegen-examples/examples/swebench_agent_run/swebench_agent_run/modal_harness/entry_point.py b/codegen-examples/examples/swebench_agent_run/swebench_agent_run/modal_harness/entry_point.py
new file mode 100644
index 000000000..d044af28f
--- /dev/null
+++ b/codegen-examples/examples/swebench_agent_run/swebench_agent_run/modal_harness/entry_point.py
@@ -0,0 +1,334 @@
+"""Largely copied from swebench/harness/modal_eval/run_evaluation_modal.py
+
+Points of difference:
+ - We added CGModalSandboxRuntime class that is used to populate the sandbox with the snapshot.
+ - We are adding custom post-processing of the TestOutput in run_instances_modal
+"""
+
+import json
+import time
+import traceback
+from contextlib import nullcontext
+from typing import TYPE_CHECKING
+from unittest.mock import patch
+
+import modal as modal_lib
+import tenacity
+from swebench.harness.constants import (
+ APPLY_PATCH_FAIL,
+ APPLY_PATCH_PASS,
+)
+from swebench.harness.docker_build import setup_logger
+from swebench.harness.grading import get_eval_report
+from swebench.harness.modal_eval.run_evaluation_modal import (
+ LOCAL_SANDBOX_ENTRYPOINT_PATH,
+ REMOTE_SANDBOX_ENTRYPOINT_PATH,
+ ModalSandboxRuntime,
+ TestOutput,
+ get_log_dir,
+ swebench_image,
+)
+from swebench.harness.run_evaluation import main
+from swebench.harness.test_spec.test_spec import TestSpec
+from swebench.harness.utils import EvaluationError
+
+if TYPE_CHECKING:
+ from codegen.extensions.swebench.utils import SweBenchExample
+
+image = (
+ modal_lib.Image.debian_slim(python_version="3.13")
+ .apt_install(["git", "ripgrep"])
+ .add_local_dir(
+ "../../../",
+ "/root/codegen",
+ ignore=[
+ "__pycache__",
+ "**/__pycache__",
+ ".venv",
+ "**/.venv",
+ "tests",
+ "**/tests",
+ "codegen-on-oss/",
+ "codegen-examples/",
+ "build/",
+ ".vscode/",
+ ".codegen/",
+ ".github/",
+ ".architecture/",
+ "docs/",
+ "*cache/",
+ ],
+ copy=True,
+ )
+ .add_local_dir(
+ ".",
+ "/root/swebench_agent_run",
+ ignore=[
+ "__pycache__",
+ "**/__pycache__",
+ ".venv",
+ "**/.venv",
+ ".env*",
+ ],
+ copy=True,
+ )
+ .run_commands(
+ "pip install -e /root/codegen",
+ "rm -r /root/codegen/.git",
+ "pip install -e /root/swebench_agent_run",
+ )
+)
+
+app = modal_lib.App(
+ name="swebench-agent-run", image=image, secrets=[modal_lib.Secret.from_dotenv()]
+)
+
+
+class ShouldRetry(Exception):
+ pass
+
+
+@app.function(timeout=43200, max_containers=10)
+async def run_agent_modal(entry: "SweBenchExample", run_id: str, model: str):
+ from codegen.extensions.swebench.harness import run_agent_on_entry
+
+ """Modal function to process a single example from the SWE-bench dataset."""
+ for attempt in tenacity.Retrying(
+ wait=tenacity.wait_exponential_jitter(max=600),
+ retry=tenacity.retry_if_exception_type(ShouldRetry),
+ ):
+ with attempt:
+ try:
+ return run_agent_on_entry(entry, run_id=run_id, model=model)
+ except Exception as e:
+ if any(
+ msg in str(e).lower()
+ for msg in (
+ "rate limit",
+ "too many requests",
+ "429",
+ "throttle",
+ "quota exceeded",
+ "capacity",
+ "limit exceeded",
+ )
+ ):
+ raise ShouldRetry() from e
+ else:
+ raise e
+
+
+@app.function(
+ image=swebench_image.add_local_file(
+ LOCAL_SANDBOX_ENTRYPOINT_PATH, REMOTE_SANDBOX_ENTRYPOINT_PATH, copy=True
+ ).add_local_python_source("eval_cli", "swebench_agent_run", copy=True),
+ timeout=120 * 60, # Much larger than default timeout to account for image build time
+)
+def run_instance_modal(
+ test_spec: TestSpec,
+ pred: dict,
+ run_id: str,
+ timeout: int | None = None,
+) -> TestOutput:
+ """Run a single instance with the given prediction.
+
+ Args:
+ test_spec (TestSpec): TestSpec instance
+ pred (dict): Prediction w/ model_name_or_path, model_patch, instance_id
+ run_id (str): Run ID
+ timeout (int): Timeout for running tests
+ """
+ instance_id = test_spec.instance_id
+ log_dir = get_log_dir(pred, run_id, instance_id)
+ log_dir.mkdir(parents=True, exist_ok=True)
+
+ log_file = log_dir / "run_instance.log"
+
+ logger = setup_logger(instance_id, log_file, add_stdout=True)
+
+ try:
+ runner = ModalSandboxRuntime(test_spec, timeout)
+ except Exception as e:
+ print(f"Error creating sandbox: {e}")
+ raise EvaluationError(
+ instance_id,
+ f"Error creating sandbox: {e}",
+ logger,
+ ) from e
+
+ patch_diff = pred.get("model_patch", "")
+
+ try:
+ patch_file = "/tmp/patch.diff"
+ runner.write_file(patch_file, patch_diff)
+
+ apply_patch_output, returncode = runner.exec(
+ "cd /testbed && git apply -v /tmp/patch.diff",
+ )
+
+ if returncode != 0:
+ logger.info("Failed to apply patch to container, trying again...")
+
+ apply_patch_output, returncode = runner.exec(
+ "cd /testbed && patch --batch --fuzz=5 -p1 -i /tmp/patch.diff",
+ )
+
+ if returncode != 0:
+ logger.info(f"{APPLY_PATCH_FAIL}:\n{apply_patch_output}")
+ raise EvaluationError(
+ instance_id,
+ f"{APPLY_PATCH_FAIL}:\n{apply_patch_output}",
+ logger,
+ )
+ else:
+ logger.info(f"{APPLY_PATCH_PASS}:\n{apply_patch_output}")
+ else:
+ logger.info(f"{APPLY_PATCH_PASS}:\n{apply_patch_output}")
+
+ # Get git diff before running eval script
+ git_diff_output_before, returncode = runner.exec(
+ "cd /testbed && git diff",
+ )
+ logger.info(f"Git diff before:\n{git_diff_output_before}")
+
+ eval_file = "/root/eval.sh"
+ eval_script = test_spec.eval_script
+ # django hack
+ eval_script = eval_script.replace("locale-gen", "locale-gen en_US.UTF-8")
+ runner.write_file(eval_file, eval_script)
+
+ start_time = time.time()
+
+ run_command = "cd /testbed"
+ # pylint hack
+ if "pylint" in test_spec.instance_id:
+ run_command += " && PYTHONPATH="
+ # increase recursion limit for testing
+ run_command += " && python3 -c 'import sys; sys.setrecursionlimit(10000)'"
+ # run eval script
+ run_command += " && /bin/bash /root/eval.sh"
+ test_output, returncode = runner.exec(run_command)
+
+ total_runtime = time.time() - start_time
+
+ test_output_path = log_dir / "test_output.txt"
+ logger.info(f"Test runtime: {total_runtime:_.2f} seconds")
+ with open(test_output_path, "w") as f:
+ f.write(test_output)
+ logger.info(f"Test output for {instance_id} written to {test_output_path}")
+ print(f"Test output for {instance_id} written to {test_output_path}")
+
+ # Get git diff after running eval script
+ git_diff_output_after, returncode = runner.exec("cd /testbed && git diff")
+
+ # Check if git diff changed after running eval script
+ logger.info(f"Git diff after:\n{git_diff_output_after}")
+ if git_diff_output_after != git_diff_output_before:
+ logger.info("Git diff changed after running eval script")
+
+ # Get report from test output
+ logger.info(f"Grading answer for {instance_id}...")
+ report = get_eval_report(
+ test_spec=test_spec,
+ prediction=pred,
+ test_log_path=test_output_path,
+ include_tests_status=True,
+ )
+ logger.info(
+ f"report: {report}\nResult for {instance_id}: resolved: {report[instance_id]['resolved']}"
+ )
+
+ return TestOutput(
+ instance_id=instance_id,
+ test_output=test_output,
+ report_json_str=json.dumps(report, indent=4),
+ run_instance_log=log_file.read_text(),
+ patch_diff=patch_diff,
+ log_dir=log_dir,
+ errored=False,
+ )
+ except modal_lib.exception.SandboxTimeoutError as e:
+ raise EvaluationError(
+ instance_id,
+ f"Test timed out after {timeout} seconds.",
+ logger,
+ ) from e
+ except EvaluationError:
+ error_msg = traceback.format_exc()
+ logger.info(error_msg)
+ return TestOutput(
+ instance_id=instance_id,
+ test_output="",
+ report_json_str="",
+ run_instance_log=log_file.read_text(),
+ patch_diff=patch_diff,
+ log_dir=log_dir,
+ errored=True,
+ )
+ except Exception as e:
+ error_msg = f"Error in evaluating model for {instance_id}: {e}\n{traceback.format_exc()}\nCheck ({logger.log_file}) for more information."
+ logger.exception(error_msg)
+ return TestOutput(
+ instance_id=instance_id,
+ test_output="",
+ report_json_str="",
+ run_instance_log=log_file.read_text(),
+ patch_diff=patch_diff,
+ log_dir=log_dir,
+ errored=True,
+ )
+
+
+def patched_swebench_eval( # Defaults from swebench harness
+ predictions_path, # Required argument
+ run_id, # Required argument
+ dataset_name="princeton-nlp/SWE-bench_Lite",
+ split="test",
+ instance_ids=None,
+ max_workers=4,
+ open_file_limit=4096,
+ timeout=1800,
+ force_rebuild=False,
+ cache_level="env",
+ clean=False,
+ namespace="swebench",
+ instance_image_tag="latest",
+ rewrite_reports=False,
+ report_dir=".",
+ modal=False,
+ **kwargs,
+):
+ with (
+ patch(
+ "swebench.harness.modal_eval.run_evaluation_modal.run_instance_modal",
+ modal_lib.Function.from_name(
+ app_name="swebench-agent-run",
+ name="run_instance_modal",
+ ),
+ ),
+ patch(
+ "swebench.harness.modal_eval.run_evaluation_modal.app",
+ app,
+ ),
+ ):
+ # Don't want swebench to run app.run() again
+ app.run = nullcontext
+ return main(
+ dataset_name=dataset_name,
+ split=split,
+ instance_ids=instance_ids,
+ predictions_path=predictions_path,
+ max_workers=max_workers,
+ force_rebuild=force_rebuild,
+ cache_level=cache_level,
+ clean=clean,
+ open_file_limit=open_file_limit,
+ run_id=run_id,
+ timeout=timeout,
+ namespace=namespace,
+ rewrite_reports=rewrite_reports,
+ modal=modal,
+ instance_image_tag=instance_image_tag,
+ report_dir=report_dir,
+ **kwargs,
+ )
diff --git a/codegen-examples/examples/swebench_agent_run/swebench_agent_run/modal_harness/sandbox.py b/codegen-examples/examples/swebench_agent_run/swebench_agent_run/modal_harness/sandbox.py
new file mode 100644
index 000000000..25664d1f5
--- /dev/null
+++ b/codegen-examples/examples/swebench_agent_run/swebench_agent_run/modal_harness/sandbox.py
@@ -0,0 +1,97 @@
+import io
+import json
+from collections import defaultdict
+
+import modal as modal_lib
+from swebench.harness.constants import (
+ SWEbenchInstance,
+)
+from swebench.harness.modal_eval.run_evaluation_modal import (
+ ModalSandboxRuntime,
+)
+from swebench.harness.test_spec.test_spec import make_test_spec
+
+
+class SnapshotManager:
+ def get_snapshot_uid(self, example: SWEbenchInstance) -> str:
+ msg = "Not implemented"
+ raise NotImplementedError(msg)
+
+ def save_snapshot_uid(self, example: SWEbenchInstance, snapshot_uid: str) -> None:
+ msg = "Not implemented"
+ raise NotImplementedError(msg)
+
+
+class VolumeSnapshotManager(SnapshotManager):
+ def __init__(self, volume_name: str = "swebench-agent-snapshot-volume"):
+ self.snapshot_volume = modal_lib.Volume.from_name(volume_name, create_if_missing=True)
+ self.snapshot_meta_file_path: str = "/root/snapshot_meta.json"
+
+ def get_snapshot_uid(self, example: SWEbenchInstance) -> str:
+ snapshot_meta = self.read_snapshot_meta()
+ return snapshot_meta[example.repo][example.environment_setup_commit]
+
+ def save_snapshot_uid(self, example: SWEbenchInstance, snapshot_uid: str) -> None:
+ snapshot_meta = self.read_snapshot_meta()
+ snapshot_meta[example.repo][example.environment_setup_commit] = snapshot_uid
+ with self.snapshot_volume.batch_upload() as upload:
+ upload.put_file(
+ io.BytesIO(json.dumps(snapshot_meta).encode("utf-8")),
+ self.snapshot_meta_file_path,
+ )
+ self.snapshot_volume.commit()
+
+ def read_snapshot_meta(self) -> dict[str, dict[str, str]]:
+ bytes_io = io.BytesIO()
+ try:
+ self.snapshot_volume.read_file_into_fileobj(self.snapshot_meta_file_path, bytes_io)
+ snapshot_meta = json.loads(bytes_io.getvalue().decode("utf-8"))
+ except FileNotFoundError:
+ snapshot_meta = {}
+ return defaultdict(lambda: defaultdict(lambda: None), snapshot_meta)
+
+
+class ModalDictSnapshotManager(SnapshotManager):
+ def __init__(self, name: str = "swebench-agent-snapshot-dict"):
+ self.snapshot_dict = modal_lib.Dict.from_name(name, create_if_missing=True)
+
+ def get_snapshot_uid(self, example: SWEbenchInstance) -> str | None:
+ try:
+ return self.snapshot_dict[(example.repo, example.environment_setup_commit)]
+ except KeyError:
+ return None
+
+ def save_snapshot_uid(self, example: SWEbenchInstance, snapshot_uid: str) -> None:
+ self.snapshot_dict[(example.repo, example.environment_setup_commit)] = snapshot_uid
+
+
+class CGModalSandboxRuntime(ModalSandboxRuntime):
+ def __init__(
+ self,
+ example: SWEbenchInstance,
+ timeout: int | None = None,
+ verbose: bool = True,
+ ):
+ self.example = example
+ self.snapshot_manager = ModalDictSnapshotManager()
+ self.test_spec = make_test_spec(example)
+ self.sandbox = self._get_sandbox(timeout)
+ self.verbose = verbose
+ self._stream_tasks = []
+
+ # Hack for pylint
+ self.write_file("/sys/fs/cgroup/cpu/cpu.shares", "2048")
+
+ @property
+ def image(self) -> modal_lib.Image:
+ return ModalSandboxRuntime.get_instance_image(self.test_spec)
+
+ def _get_sandbox(self, timeout: int | None = None):
+ """Populate sandbox ourselves"""
+ uid = self.snapshot_manager.get_snapshot_uid(self.example)
+ if uid is None:
+ sandbox = super()._get_sandbox(timeout)
+ snapshot = sandbox._experimental_snapshot()
+ self.snapshot_manager.save_snapshot_uid(self.example, snapshot.object_id)
+ else:
+ return modal_lib.Sandbox._experimental_from_snapshot(uid)
diff --git a/src/codegen/extensions/swebench/report.py b/codegen-examples/examples/swebench_agent_run/swebench_agent_run/report.py
similarity index 78%
rename from src/codegen/extensions/swebench/report.py
rename to codegen-examples/examples/swebench_agent_run/swebench_agent_run/report.py
index f8100e36d..580bc805a 100755
--- a/src/codegen/extensions/swebench/report.py
+++ b/codegen-examples/examples/swebench_agent_run/swebench_agent_run/report.py
@@ -1,31 +1,15 @@
#!/usr/bin/env python
import json
-import subprocess
from collections import defaultdict
from pathlib import Path
from codegen.extensions.swebench.enums import SWEBenchDataset
from codegen.extensions.swebench.tests import remove_patches_to_tests
-NUM_EVAL_PROCS = 5
-
-
-def run_evals(predictions_jsonl, logs_dir: Path, dataset: SWEBenchDataset, run_id: str):
- """Run the evaluations on the predictions on modal."""
- run_evals_cmd = f"""
-python -m swebench.harness.run_evaluation
- --predictions_path {predictions_jsonl}
- --run_id {run_id}
- --dataset_name {dataset.value}
- --cache_level instance
- --report_dir {logs_dir}
- --modal true
-"""
- run_evals_cmd = " ".join([line.strip() for line in run_evals_cmd.split() if line.strip()])
- print("Running evaluation command:", run_evals_cmd)
+from .modal_harness import patched_swebench_eval
- subprocess.run(run_evals_cmd.split(), check=True)
+NUM_EVAL_PROCS = 5
def get_report(predictions_jsonl, logs_dir: Path):
@@ -87,31 +71,32 @@ def update_pred_json(predictions, report, predictions_dir: Path):
return predictions
-def preds_to_jsonl(predictions, predictions_dir: Path):
- dname = predictions_dir
-
- predictions_jsonl = str(dname / "all_preds.jsonl")
+def preds_to_jsonl(predictions, predictions_jsonl: Path):
print(f"Creating JSONL file: {predictions_jsonl}")
# Use a default model name since it's not in the predictions
model_name = "results"
with open(predictions_jsonl, "w") as fh:
- for inst, pred in predictions.items():
+ for pred in predictions.values():
minimal_pred = {
"model_name_or_path": model_name, # Use default model name
- "model_patch": remove_patches_to_tests(pred["model_patch"]) if "model_patch" in pred else pred.get("patch", ""),
+ "model_patch": remove_patches_to_tests(
+ pred.get("result", {}).get("model_patch", "")
+ ),
"instance_id": pred["instance_id"],
}
fh.write(json.dumps(minimal_pred) + "\n")
return predictions_jsonl
-def generate_report(predictions_dir: Path, logs_dir: Path, dataset: SWEBenchDataset, run_id: str):
+def generate_report(
+ predictions_dir: Path, logs_dir: Path, dataset: SWEBenchDataset, run_id: str
+) -> str | None:
# Automatically find all JSON files in predictions/results
if not predictions_dir.exists():
print(f"Directory does not exist: {predictions_dir}")
- return 1
+ return None
predictions_jsonl = predictions_dir / "all_preds.jsonl"
existing_preds = predictions_jsonl.exists()
@@ -128,6 +113,7 @@ def generate_report(predictions_dir: Path, logs_dir: Path, dataset: SWEBenchData
except json.JSONDecodeError:
print(f"Error reading JSON from {file_path}")
continue
+
if not existing_preds:
if not predictions:
print("No valid predictions found")
@@ -135,15 +121,21 @@ def generate_report(predictions_dir: Path, logs_dir: Path, dataset: SWEBenchData
print(f"Successfully loaded {len(predictions)} predictions")
- predictions_jsonl = preds_to_jsonl(predictions, predictions_dir)
+ predictions_jsonl = preds_to_jsonl(predictions, predictions_jsonl)
# Setup log directory
log_dir = logs_dir / "results"
log_dir.mkdir(exist_ok=True, parents=True)
print(f"Using log directory: {log_dir}")
- # Run evaluations
- run_evals(predictions_jsonl, logs_dir, dataset, run_id)
+ evaluation_result_file = patched_swebench_eval(
+ str(predictions_jsonl),
+ run_id,
+ dataset_name=dataset.value,
+ cache_level="instance",
+ report_dir=logs_dir,
+ modal=True,
+ )
# Get and display report
report = get_report(predictions_jsonl, logs_dir)
@@ -151,4 +143,4 @@ def generate_report(predictions_dir: Path, logs_dir: Path, dataset: SWEBenchData
# Update prediction JSONs with results
predictions = update_pred_json(predictions, report, predictions_dir)
- return 0
+ return evaluation_result_file
diff --git a/codegen-examples/examples/swebench_agent_run/swebench_agent_run/utils.py b/codegen-examples/examples/swebench_agent_run/swebench_agent_run/utils.py
new file mode 100644
index 000000000..64ed1609b
--- /dev/null
+++ b/codegen-examples/examples/swebench_agent_run/swebench_agent_run/utils.py
@@ -0,0 +1,28 @@
+from itertools import batched
+from typing import Iterator, List, TypeVar
+
+from tqdm import tqdm
+
+T = TypeVar("T")
+
+
+def track_batches(
+ items: List[T], batch_size: int, desc: str = "Processing"
+) -> Iterator[tuple[int, List[T]]]:
+ """
+ Track batch progress with tqdm.
+ Returns tuples of (batch_number, batch_items).
+ """
+ total_items = len(items)
+ total_batches = (total_items + batch_size - 1) // batch_size
+
+ with tqdm(
+ total=total_items,
+ desc=desc,
+ unit="examples",
+ bar_format="{l_bar}{bar}| {n_fmt}/{total_fmt} examples [{elapsed}<{remaining}, {rate_fmt}]",
+ ) as pbar:
+ for batch_num, batch in enumerate(batched(items, batch_size), 1):
+ pbar.set_postfix({"batch": f"{batch_num}/{total_batches}", "batch_size": len(batch)})
+ yield batch_num, batch
+ pbar.update(len(batch))
diff --git a/codegen-examples/examples/swebench_agent_run/test.py b/codegen-examples/examples/swebench_agent_run/test.py
deleted file mode 100644
index fb6e4eb5a..000000000
--- a/codegen-examples/examples/swebench_agent_run/test.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from codegen import Codebase
-import modal
-
-image = modal.Image.debian_slim(python_version="3.13").apt_install("git").pip_install("fastapi[standard]").run_commands("pip install codegen")
-
-app = modal.App(name="codegen-examples", image=image, secrets=[modal.Secret.from_dotenv()])
-
-
-@app.function()
-def run_agent(AgentClass):
- codebase = Codebase.from_repo(repo_full_name="pallets/flask")
- agent = AgentClass(codebase)
- agent.run(prompt="Tell me about the codebase and the files in it.")
- return True
diff --git a/codegen-examples/examples/swebench_agent_run/uv.lock b/codegen-examples/examples/swebench_agent_run/uv.lock
new file mode 100644
index 000000000..d383e93de
--- /dev/null
+++ b/codegen-examples/examples/swebench_agent_run/uv.lock
@@ -0,0 +1,3675 @@
+version = 1
+requires-python = ">=3.12, <3.14"
+resolution-markers = [
+ "python_full_version >= '3.12.4'",
+ "python_full_version < '3.12.4'",
+]
+
+[[package]]
+name = "aiohappyeyeballs"
+version = "2.4.8"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/de/7c/79a15272e88d2563c9d63599fa59f05778975f35b255bf8f90c8b12b4ada/aiohappyeyeballs-2.4.8.tar.gz", hash = "sha256:19728772cb12263077982d2f55453babd8bec6a052a926cd5c0c42796da8bf62", size = 22337 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/52/0e/b187e2bb3eeb2644515109657c4474d65a84e7123de249bf1e8467d04a65/aiohappyeyeballs-2.4.8-py3-none-any.whl", hash = "sha256:6cac4f5dd6e34a9644e69cf9021ef679e4394f54e58a183056d12009e42ea9e3", size = 15005 },
+]
+
+[[package]]
+name = "aiohttp"
+version = "3.11.13"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "aiohappyeyeballs" },
+ { name = "aiosignal" },
+ { name = "attrs" },
+ { name = "frozenlist" },
+ { name = "multidict" },
+ { name = "propcache" },
+ { name = "yarl" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b3/3f/c4a667d184c69667b8f16e0704127efc5f1e60577df429382b4d95fd381e/aiohttp-3.11.13.tar.gz", hash = "sha256:8ce789231404ca8fff7f693cdce398abf6d90fd5dae2b1847477196c243b1fbb", size = 7674284 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/9a/a9/6657664a55f78db8767e396cc9723782ed3311eb57704b0a5dacfa731916/aiohttp-3.11.13-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:2eabb269dc3852537d57589b36d7f7362e57d1ece308842ef44d9830d2dc3c90", size = 705054 },
+ { url = "https://files.pythonhosted.org/packages/3b/06/f7df1fe062d16422f70af5065b76264f40b382605cf7477fa70553a9c9c1/aiohttp-3.11.13-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7b77ee42addbb1c36d35aca55e8cc6d0958f8419e458bb70888d8c69a4ca833d", size = 464440 },
+ { url = "https://files.pythonhosted.org/packages/22/3a/8773ea866735754004d9f79e501fe988bdd56cfac7fdecbc8de17fc093eb/aiohttp-3.11.13-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55789e93c5ed71832e7fac868167276beadf9877b85697020c46e9a75471f55f", size = 456394 },
+ { url = "https://files.pythonhosted.org/packages/7f/61/8e2f2af2327e8e475a2b0890f15ef0bbfd117e321cce1e1ed210df81bbac/aiohttp-3.11.13-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c929f9a7249a11e4aa5c157091cfad7f49cc6b13f4eecf9b747104befd9f56f2", size = 1682752 },
+ { url = "https://files.pythonhosted.org/packages/24/ed/84fce816bc8da39aa3f6c1196fe26e47065fea882b1a67a808282029c079/aiohttp-3.11.13-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d33851d85537bbf0f6291ddc97926a754c8f041af759e0aa0230fe939168852b", size = 1737375 },
+ { url = "https://files.pythonhosted.org/packages/d9/de/35a5ba9e3d21ebfda1ebbe66f6cc5cbb4d3ff9bd6a03e5e8a788954f8f27/aiohttp-3.11.13-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9229d8613bd8401182868fe95688f7581673e1c18ff78855671a4b8284f47bcb", size = 1793660 },
+ { url = "https://files.pythonhosted.org/packages/ff/fe/0f650a8c7c72c8a07edf8ab164786f936668acd71786dd5885fc4b1ca563/aiohttp-3.11.13-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:669dd33f028e54fe4c96576f406ebb242ba534dd3a981ce009961bf49960f117", size = 1692233 },
+ { url = "https://files.pythonhosted.org/packages/a8/20/185378b3483f968c6303aafe1e33b0da0d902db40731b2b2b2680a631131/aiohttp-3.11.13-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c1b20a1ace54af7db1f95af85da530fe97407d9063b7aaf9ce6a32f44730778", size = 1619708 },
+ { url = "https://files.pythonhosted.org/packages/a4/f9/d9c181750980b17e1e13e522d7e82a8d08d3d28a2249f99207ef5d8d738f/aiohttp-3.11.13-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5724cc77f4e648362ebbb49bdecb9e2b86d9b172c68a295263fa072e679ee69d", size = 1641802 },
+ { url = "https://files.pythonhosted.org/packages/50/c7/1cb46b72b1788710343b6e59eaab9642bd2422f2d87ede18b1996e0aed8f/aiohttp-3.11.13-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:aa36c35e94ecdb478246dd60db12aba57cfcd0abcad43c927a8876f25734d496", size = 1684678 },
+ { url = "https://files.pythonhosted.org/packages/71/87/89b979391de840c5d7c34e78e1148cc731b8aafa84b6a51d02f44b4c66e2/aiohttp-3.11.13-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9b5b37c863ad5b0892cc7a4ceb1e435e5e6acd3f2f8d3e11fa56f08d3c67b820", size = 1646921 },
+ { url = "https://files.pythonhosted.org/packages/a7/db/a463700ac85b72f8cf68093e988538faaf4e865e3150aa165cf80ee29d6e/aiohttp-3.11.13-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:e06cf4852ce8c4442a59bae5a3ea01162b8fcb49ab438d8548b8dc79375dad8a", size = 1702493 },
+ { url = "https://files.pythonhosted.org/packages/b8/32/1084e65da3adfb08c7e1b3e94f3e4ded8bd707dee265a412bc377b7cd000/aiohttp-3.11.13-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5194143927e494616e335d074e77a5dac7cd353a04755330c9adc984ac5a628e", size = 1735004 },
+ { url = "https://files.pythonhosted.org/packages/a0/bb/a634cbdd97ce5d05c2054a9a35bfc32792d7e4f69d600ad7e820571d095b/aiohttp-3.11.13-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:afcb6b275c2d2ba5d8418bf30a9654fa978b4f819c2e8db6311b3525c86fe637", size = 1694964 },
+ { url = "https://files.pythonhosted.org/packages/fd/cf/7d29db4e5c28ec316e5d2ac9ac9df0e2e278e9ea910e5c4205b9b64c2c42/aiohttp-3.11.13-cp312-cp312-win32.whl", hash = "sha256:7104d5b3943c6351d1ad7027d90bdd0ea002903e9f610735ac99df3b81f102ee", size = 411746 },
+ { url = "https://files.pythonhosted.org/packages/65/a9/13e69ad4fd62104ebd94617f9f2be58231b50bb1e6bac114f024303ac23b/aiohttp-3.11.13-cp312-cp312-win_amd64.whl", hash = "sha256:47dc018b1b220c48089b5b9382fbab94db35bef2fa192995be22cbad3c5730c8", size = 438078 },
+ { url = "https://files.pythonhosted.org/packages/87/dc/7d58d33cec693f1ddf407d4ab975445f5cb507af95600f137b81683a18d8/aiohttp-3.11.13-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9862d077b9ffa015dbe3ce6c081bdf35135948cb89116e26667dd183550833d1", size = 698372 },
+ { url = "https://files.pythonhosted.org/packages/84/e7/5d88514c9e24fbc8dd6117350a8ec4a9314f4adae6e89fe32e3e639b0c37/aiohttp-3.11.13-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fbfef0666ae9e07abfa2c54c212ac18a1f63e13e0760a769f70b5717742f3ece", size = 461057 },
+ { url = "https://files.pythonhosted.org/packages/96/1a/8143c48a929fa00c6324f85660cb0f47a55ed9385f0c1b72d4b8043acf8e/aiohttp-3.11.13-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:93a1f7d857c4fcf7cabb1178058182c789b30d85de379e04f64c15b7e88d66fb", size = 453340 },
+ { url = "https://files.pythonhosted.org/packages/2f/1c/b8010e4d65c5860d62681088e5376f3c0a940c5e3ca8989cae36ce8c3ea8/aiohttp-3.11.13-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba40b7ae0f81c7029583a338853f6607b6d83a341a3dcde8bed1ea58a3af1df9", size = 1665561 },
+ { url = "https://files.pythonhosted.org/packages/19/ed/a68c3ab2f92fdc17dfc2096117d1cfaa7f7bdded2a57bacbf767b104165b/aiohttp-3.11.13-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b5b95787335c483cd5f29577f42bbe027a412c5431f2f80a749c80d040f7ca9f", size = 1718335 },
+ { url = "https://files.pythonhosted.org/packages/27/4f/3a0b6160ce663b8ebdb65d1eedff60900cd7108838c914d25952fe2b909f/aiohttp-3.11.13-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7d474c5c1f0b9405c1565fafdc4429fa7d986ccbec7ce55bc6a330f36409cad", size = 1775522 },
+ { url = "https://files.pythonhosted.org/packages/0b/58/9da09291e19696c452e7224c1ce8c6d23a291fe8cd5c6b247b51bcda07db/aiohttp-3.11.13-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e83fb1991e9d8982b3b36aea1e7ad27ea0ce18c14d054c7a404d68b0319eebb", size = 1677566 },
+ { url = "https://files.pythonhosted.org/packages/3d/18/6184f2bf8bbe397acbbbaa449937d61c20a6b85765f48e5eddc6d84957fe/aiohttp-3.11.13-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4586a68730bd2f2b04a83e83f79d271d8ed13763f64b75920f18a3a677b9a7f0", size = 1603590 },
+ { url = "https://files.pythonhosted.org/packages/04/94/91e0d1ca0793012ccd927e835540aa38cca98bdce2389256ab813ebd64a3/aiohttp-3.11.13-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9fe4eb0e7f50cdb99b26250d9328faef30b1175a5dbcfd6d0578d18456bac567", size = 1618688 },
+ { url = "https://files.pythonhosted.org/packages/71/85/d13c3ea2e48a10b43668305d4903838834c3d4112e5229177fbcc23a56cd/aiohttp-3.11.13-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2a8a6bc19818ac3e5596310ace5aa50d918e1ebdcc204dc96e2f4d505d51740c", size = 1658053 },
+ { url = "https://files.pythonhosted.org/packages/12/6a/3242a35100de23c1e8d9e05e8605e10f34268dee91b00d9d1e278c58eb80/aiohttp-3.11.13-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7f27eec42f6c3c1df09cfc1f6786308f8b525b8efaaf6d6bd76c1f52c6511f6a", size = 1616917 },
+ { url = "https://files.pythonhosted.org/packages/f5/b3/3f99b6f0a9a79590a7ba5655dbde8408c685aa462247378c977603464d0a/aiohttp-3.11.13-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:2a4a13dfbb23977a51853b419141cd0a9b9573ab8d3a1455c6e63561387b52ff", size = 1685872 },
+ { url = "https://files.pythonhosted.org/packages/8a/2e/99672181751f280a85e24fcb9a2c2469e8b1a0de1746b7b5c45d1eb9a999/aiohttp-3.11.13-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:02876bf2f69b062584965507b07bc06903c2dc93c57a554b64e012d636952654", size = 1715719 },
+ { url = "https://files.pythonhosted.org/packages/7a/cd/68030356eb9a7d57b3e2823c8a852709d437abb0fbff41a61ebc351b7625/aiohttp-3.11.13-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b992778d95b60a21c4d8d4a5f15aaab2bd3c3e16466a72d7f9bfd86e8cea0d4b", size = 1673166 },
+ { url = "https://files.pythonhosted.org/packages/03/61/425397a9a2839c609d09fdb53d940472f316a2dbeaa77a35b2628dae6284/aiohttp-3.11.13-cp313-cp313-win32.whl", hash = "sha256:507ab05d90586dacb4f26a001c3abf912eb719d05635cbfad930bdbeb469b36c", size = 410615 },
+ { url = "https://files.pythonhosted.org/packages/9c/54/ebb815bc0fe057d8e7a11c086c479e972e827082f39aeebc6019dd4f0862/aiohttp-3.11.13-cp313-cp313-win_amd64.whl", hash = "sha256:5ceb81a4db2decdfa087381b5fc5847aa448244f973e5da232610304e199e7b2", size = 436452 },
+]
+
+[[package]]
+name = "aiosignal"
+version = "1.3.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "frozenlist" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ba/b5/6d55e80f6d8a08ce22b982eafa278d823b541c925f11ee774b0b9c43473d/aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54", size = 19424 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ec/6a/bc7e17a3e87a2985d3e8f4da4cd0f481060eb78fb08596c42be62c90a4d9/aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5", size = 7597 },
+]
+
+[[package]]
+name = "alabaster"
+version = "1.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a6/f8/d9c74d0daf3f742840fd818d69cfae176fa332022fd44e3469487d5a9420/alabaster-1.0.0.tar.gz", hash = "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e", size = 24210 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7e/b3/6b4067be973ae96ba0d615946e314c5ae35f9f993eca561b356540bb0c2b/alabaster-1.0.0-py3-none-any.whl", hash = "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b", size = 13929 },
+]
+
+[[package]]
+name = "annotated-types"
+version = "0.7.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 },
+]
+
+[[package]]
+name = "anthropic"
+version = "0.49.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "anyio" },
+ { name = "distro" },
+ { name = "httpx" },
+ { name = "jiter" },
+ { name = "pydantic" },
+ { name = "sniffio" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/86/e3/a88c8494ce4d1a88252b9e053607e885f9b14d0a32273d47b727cbee4228/anthropic-0.49.0.tar.gz", hash = "sha256:c09e885b0f674b9119b4f296d8508907f6cff0009bc20d5cf6b35936c40b4398", size = 210016 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/76/74/5d90ad14d55fbe3f9c474fdcb6e34b4bed99e3be8efac98734a5ddce88c1/anthropic-0.49.0-py3-none-any.whl", hash = "sha256:bbc17ad4e7094988d2fa86b87753ded8dce12498f4b85fe5810f208f454a8375", size = 243368 },
+]
+
+[[package]]
+name = "anyio"
+version = "4.8.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "idna" },
+ { name = "sniffio" },
+ { name = "typing-extensions", marker = "python_full_version < '3.13'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a3/73/199a98fc2dae33535d6b8e8e6ec01f8c1d76c9adb096c6b7d64823038cde/anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a", size = 181126 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/46/eb/e7f063ad1fec6b3178a3cd82d1a3c4de82cccf283fc42746168188e1cdd5/anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a", size = 96041 },
+]
+
+[[package]]
+name = "argcomplete"
+version = "3.5.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/0c/be/6c23d80cb966fb8f83fb1ebfb988351ae6b0554d0c3a613ee4531c026597/argcomplete-3.5.3.tar.gz", hash = "sha256:c12bf50eded8aebb298c7b7da7a5ff3ee24dffd9f5281867dfe1424b58c55392", size = 72999 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c4/08/2a4db06ec3d203124c967fc89295e85a202e5cbbcdc08fd6a64b65217d1e/argcomplete-3.5.3-py3-none-any.whl", hash = "sha256:2ab2c4a215c59fd6caaff41a869480a23e8f6a5f910b266c1808037f4e375b61", size = 43569 },
+]
+
+[[package]]
+name = "astor"
+version = "0.8.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/5a/21/75b771132fee241dfe601d39ade629548a9626d1d39f333fde31bc46febe/astor-0.8.1.tar.gz", hash = "sha256:6a6effda93f4e1ce9f618779b2dd1d9d84f1e32812c23a29b3fff6fd7f63fa5e", size = 35090 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c3/88/97eef84f48fa04fbd6750e62dcceafba6c63c81b7ac1420856c8dcc0a3f9/astor-0.8.1-py2.py3-none-any.whl", hash = "sha256:070a54e890cefb5b3739d19f30f5a5ec840ffc9c50ffa7d23cc9fc1a38ebbfc5", size = 27488 },
+]
+
+[[package]]
+name = "attrs"
+version = "25.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/49/7c/fdf464bcc51d23881d110abd74b512a42b3d5d376a55a831b44c603ae17f/attrs-25.1.0.tar.gz", hash = "sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e", size = 810562 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/fc/30/d4986a882011f9df997a55e6becd864812ccfcd821d64aac8570ee39f719/attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a", size = 63152 },
+]
+
+[[package]]
+name = "babel"
+version = "2.17.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/7d/6b/d52e42361e1aa00709585ecc30b3f9684b3ab62530771402248b1b1d6240/babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d", size = 9951852 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b7/b8/3fe70c75fe32afc4bb507f75563d39bc5642255d1d94f1f23604725780bf/babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2", size = 10182537 },
+]
+
+[[package]]
+name = "beautifulsoup4"
+version = "4.13.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "soupsieve" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/f0/3c/adaf39ce1fb4afdd21b611e3d530b183bb7759c9b673d60db0e347fd4439/beautifulsoup4-4.13.3.tar.gz", hash = "sha256:1bd32405dacc920b42b83ba01644747ed77456a65760e285fbc47633ceddaf8b", size = 619516 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f9/49/6abb616eb3cbab6a7cca303dc02fdf3836de2e0b834bf966a7f5271a34d8/beautifulsoup4-4.13.3-py3-none-any.whl", hash = "sha256:99045d7d3f08f91f0d656bc9b7efbae189426cd913d830294a15eefa0ea4df16", size = 186015 },
+]
+
+[[package]]
+name = "black"
+version = "25.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "click" },
+ { name = "mypy-extensions" },
+ { name = "packaging" },
+ { name = "pathspec" },
+ { name = "platformdirs" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/94/49/26a7b0f3f35da4b5a65f081943b7bcd22d7002f5f0fb8098ec1ff21cb6ef/black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666", size = 649449 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/83/71/3fe4741df7adf015ad8dfa082dd36c94ca86bb21f25608eb247b4afb15b2/black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b", size = 1650988 },
+ { url = "https://files.pythonhosted.org/packages/13/f3/89aac8a83d73937ccd39bbe8fc6ac8860c11cfa0af5b1c96d081facac844/black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc", size = 1453985 },
+ { url = "https://files.pythonhosted.org/packages/6f/22/b99efca33f1f3a1d2552c714b1e1b5ae92efac6c43e790ad539a163d1754/black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f", size = 1783816 },
+ { url = "https://files.pythonhosted.org/packages/18/7e/a27c3ad3822b6f2e0e00d63d58ff6299a99a5b3aee69fa77cd4b0076b261/black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba", size = 1440860 },
+ { url = "https://files.pythonhosted.org/packages/98/87/0edf98916640efa5d0696e1abb0a8357b52e69e82322628f25bf14d263d1/black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f", size = 1650673 },
+ { url = "https://files.pythonhosted.org/packages/52/e5/f7bf17207cf87fa6e9b676576749c6b6ed0d70f179a3d812c997870291c3/black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3", size = 1453190 },
+ { url = "https://files.pythonhosted.org/packages/e3/ee/adda3d46d4a9120772fae6de454c8495603c37c4c3b9c60f25b1ab6401fe/black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171", size = 1782926 },
+ { url = "https://files.pythonhosted.org/packages/cc/64/94eb5f45dcb997d2082f097a3944cfc7fe87e071907f677e80788a2d7b7a/black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18", size = 1442613 },
+ { url = "https://files.pythonhosted.org/packages/09/71/54e999902aed72baf26bca0d50781b01838251a462612966e9fc4891eadd/black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717", size = 207646 },
+]
+
+[[package]]
+name = "certifi"
+version = "2025.1.31"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/1c/ab/c9f1e32b7b1bf505bf26f0ef697775960db7932abeb7b516de930ba2705f/certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651", size = 167577 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/38/fc/bce832fd4fd99766c04d1ee0eead6b0ec6486fb100ae5e74c1d91292b982/certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe", size = 166393 },
+]
+
+[[package]]
+name = "cffi"
+version = "1.17.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pycparser" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178 },
+ { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840 },
+ { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803 },
+ { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850 },
+ { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729 },
+ { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256 },
+ { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424 },
+ { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568 },
+ { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736 },
+ { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448 },
+ { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976 },
+ { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989 },
+ { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802 },
+ { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792 },
+ { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893 },
+ { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810 },
+ { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200 },
+ { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447 },
+ { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358 },
+ { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469 },
+ { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475 },
+ { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009 },
+]
+
+[[package]]
+name = "cfgv"
+version = "3.4.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249 },
+]
+
+[[package]]
+name = "chardet"
+version = "5.2.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f3/0d/f7b6ab21ec75897ed80c17d79b15951a719226b9fababf1e40ea74d69079/chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7", size = 2069618 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/38/6f/f5fbc992a329ee4e0f288c1fe0e2ad9485ed064cac731ed2fe47dcc38cbf/chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970", size = 199385 },
+]
+
+[[package]]
+name = "charset-normalizer"
+version = "3.4.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/16/b0/572805e227f01586461c80e0fd25d65a2115599cc9dad142fee4b747c357/charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3", size = 123188 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0a/9a/dd1e1cdceb841925b7798369a09279bd1cf183cef0f9ddf15a3a6502ee45/charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545", size = 196105 },
+ { url = "https://files.pythonhosted.org/packages/d3/8c/90bfabf8c4809ecb648f39794cf2a84ff2e7d2a6cf159fe68d9a26160467/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7", size = 140404 },
+ { url = "https://files.pythonhosted.org/packages/ad/8f/e410d57c721945ea3b4f1a04b74f70ce8fa800d393d72899f0a40526401f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757", size = 150423 },
+ { url = "https://files.pythonhosted.org/packages/f0/b8/e6825e25deb691ff98cf5c9072ee0605dc2acfca98af70c2d1b1bc75190d/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa", size = 143184 },
+ { url = "https://files.pythonhosted.org/packages/3e/a2/513f6cbe752421f16d969e32f3583762bfd583848b763913ddab8d9bfd4f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d", size = 145268 },
+ { url = "https://files.pythonhosted.org/packages/74/94/8a5277664f27c3c438546f3eb53b33f5b19568eb7424736bdc440a88a31f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616", size = 147601 },
+ { url = "https://files.pythonhosted.org/packages/7c/5f/6d352c51ee763623a98e31194823518e09bfa48be2a7e8383cf691bbb3d0/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b", size = 141098 },
+ { url = "https://files.pythonhosted.org/packages/78/d4/f5704cb629ba5ab16d1d3d741396aec6dc3ca2b67757c45b0599bb010478/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d", size = 149520 },
+ { url = "https://files.pythonhosted.org/packages/c5/96/64120b1d02b81785f222b976c0fb79a35875457fa9bb40827678e54d1bc8/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a", size = 152852 },
+ { url = "https://files.pythonhosted.org/packages/84/c9/98e3732278a99f47d487fd3468bc60b882920cef29d1fa6ca460a1fdf4e6/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9", size = 150488 },
+ { url = "https://files.pythonhosted.org/packages/13/0e/9c8d4cb99c98c1007cc11eda969ebfe837bbbd0acdb4736d228ccaabcd22/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1", size = 146192 },
+ { url = "https://files.pythonhosted.org/packages/b2/21/2b6b5b860781a0b49427309cb8670785aa543fb2178de875b87b9cc97746/charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35", size = 95550 },
+ { url = "https://files.pythonhosted.org/packages/21/5b/1b390b03b1d16c7e382b561c5329f83cc06623916aab983e8ab9239c7d5c/charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f", size = 102785 },
+ { url = "https://files.pythonhosted.org/packages/38/94/ce8e6f63d18049672c76d07d119304e1e2d7c6098f0841b51c666e9f44a0/charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda", size = 195698 },
+ { url = "https://files.pythonhosted.org/packages/24/2e/dfdd9770664aae179a96561cc6952ff08f9a8cd09a908f259a9dfa063568/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313", size = 140162 },
+ { url = "https://files.pythonhosted.org/packages/24/4e/f646b9093cff8fc86f2d60af2de4dc17c759de9d554f130b140ea4738ca6/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9", size = 150263 },
+ { url = "https://files.pythonhosted.org/packages/5e/67/2937f8d548c3ef6e2f9aab0f6e21001056f692d43282b165e7c56023e6dd/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b", size = 142966 },
+ { url = "https://files.pythonhosted.org/packages/52/ed/b7f4f07de100bdb95c1756d3a4d17b90c1a3c53715c1a476f8738058e0fa/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11", size = 144992 },
+ { url = "https://files.pythonhosted.org/packages/96/2c/d49710a6dbcd3776265f4c923bb73ebe83933dfbaa841c5da850fe0fd20b/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f", size = 147162 },
+ { url = "https://files.pythonhosted.org/packages/b4/41/35ff1f9a6bd380303dea55e44c4933b4cc3c4850988927d4082ada230273/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd", size = 140972 },
+ { url = "https://files.pythonhosted.org/packages/fb/43/c6a0b685fe6910d08ba971f62cd9c3e862a85770395ba5d9cad4fede33ab/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2", size = 149095 },
+ { url = "https://files.pythonhosted.org/packages/4c/ff/a9a504662452e2d2878512115638966e75633519ec11f25fca3d2049a94a/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886", size = 152668 },
+ { url = "https://files.pythonhosted.org/packages/6c/71/189996b6d9a4b932564701628af5cee6716733e9165af1d5e1b285c530ed/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601", size = 150073 },
+ { url = "https://files.pythonhosted.org/packages/e4/93/946a86ce20790e11312c87c75ba68d5f6ad2208cfb52b2d6a2c32840d922/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd", size = 145732 },
+ { url = "https://files.pythonhosted.org/packages/cd/e5/131d2fb1b0dddafc37be4f3a2fa79aa4c037368be9423061dccadfd90091/charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407", size = 95391 },
+ { url = "https://files.pythonhosted.org/packages/27/f2/4f9a69cc7712b9b5ad8fdb87039fd89abba997ad5cbe690d1835d40405b0/charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971", size = 102702 },
+ { url = "https://files.pythonhosted.org/packages/0e/f6/65ecc6878a89bb1c23a086ea335ad4bf21a588990c3f535a227b9eea9108/charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", size = 49767 },
+]
+
+[[package]]
+name = "click"
+version = "8.1.8"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188 },
+]
+
+[[package]]
+name = "click-option-group"
+version = "0.5.6"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "click" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/e7/b8/91054601a2e05fd9060cb1baf56be5b24145817b059e078669e1099529c7/click-option-group-0.5.6.tar.gz", hash = "sha256:97d06703873518cc5038509443742b25069a3c7562d1ea72ff08bfadde1ce777", size = 16517 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/af/75/81ea958bc0f7e410257cb2a42531b93a7695a31930cde87192c010a52c50/click_option_group-0.5.6-py3-none-any.whl", hash = "sha256:38a26d963ee3ad93332ddf782f9259c5bdfe405e73408d943ef5e7d0c3767ec7", size = 12467 },
+]
+
+[[package]]
+name = "codegen"
+source = { directory = "../../../" }
+dependencies = [
+ { name = "anthropic" },
+ { name = "astor" },
+ { name = "click" },
+ { name = "codegen-sdk-pink" },
+ { name = "codeowners" },
+ { name = "colorlog" },
+ { name = "dataclasses-json" },
+ { name = "datamodel-code-generator" },
+ { name = "datasets" },
+ { name = "dicttoxml" },
+ { name = "docker" },
+ { name = "docstring-parser" },
+ { name = "fastapi", extra = ["standard"] },
+ { name = "gitpython" },
+ { name = "giturlparse" },
+ { name = "hatch-vcs" },
+ { name = "hatchling" },
+ { name = "httpx" },
+ { name = "humanize" },
+ { name = "langchain", extra = ["openai"] },
+ { name = "langchain-anthropic" },
+ { name = "langchain-core" },
+ { name = "langchain-openai" },
+ { name = "langchain-xai" },
+ { name = "langgraph" },
+ { name = "langgraph-prebuilt" },
+ { name = "langsmith" },
+ { name = "lazy-object-proxy" },
+ { name = "lox" },
+ { name = "mcp", extra = ["cli"] },
+ { name = "mini-racer" },
+ { name = "modal" },
+ { name = "neo4j" },
+ { name = "networkx" },
+ { name = "numpy" },
+ { name = "openai" },
+ { name = "packaging" },
+ { name = "pip" },
+ { name = "plotly" },
+ { name = "psutil" },
+ { name = "pydantic" },
+ { name = "pydantic-core" },
+ { name = "pydantic-settings" },
+ { name = "pygit2" },
+ { name = "pygithub" },
+ { name = "pyinstrument" },
+ { name = "pyjson5" },
+ { name = "pyright" },
+ { name = "pytest-snapshot" },
+ { name = "python-dotenv" },
+ { name = "python-levenshtein" },
+ { name = "python-semantic-release" },
+ { name = "requests" },
+ { name = "rich" },
+ { name = "rich-click" },
+ { name = "rustworkx" },
+ { name = "sentry-sdk" },
+ { name = "slack-sdk" },
+ { name = "starlette" },
+ { name = "tabulate" },
+ { name = "termcolor" },
+ { name = "tiktoken" },
+ { name = "tomlkit" },
+ { name = "tqdm" },
+ { name = "tree-sitter" },
+ { name = "tree-sitter-javascript" },
+ { name = "tree-sitter-python" },
+ { name = "tree-sitter-typescript" },
+ { name = "typing-extensions" },
+ { name = "unidiff" },
+ { name = "urllib3" },
+ { name = "uvicorn", extra = ["standard"] },
+ { name = "watchfiles" },
+ { name = "wrapt" },
+ { name = "xmltodict" },
+]
+
+[package.metadata]
+requires-dist = [
+ { name = "anthropic" },
+ { name = "astor", specifier = ">=0.8.1,<1.0.0" },
+ { name = "attrs", marker = "extra == 'lsp'", specifier = ">=25.1.0" },
+ { name = "click", specifier = ">=8.1.7" },
+ { name = "codegen-sdk-pink", specifier = ">=0.1.0" },
+ { name = "codeowners", specifier = ">=0.6.0,<1.0.0" },
+ { name = "colorlog", specifier = ">=6.9.0" },
+ { name = "dataclasses-json", specifier = ">=0.6.4,<1.0.0" },
+ { name = "datamodel-code-generator", specifier = ">=0.26.5" },
+ { name = "datasets" },
+ { name = "dicttoxml", specifier = ">=1.7.16,<2.0.0" },
+ { name = "docker", specifier = ">=6.1.3" },
+ { name = "docstring-parser", specifier = ">=0.16,<1.0" },
+ { name = "fastapi", extras = ["standard"], specifier = ">=0.115.2,<1.0.0" },
+ { name = "gitpython", specifier = "==3.1.44" },
+ { name = "giturlparse" },
+ { name = "hatch-vcs", specifier = ">=0.4.0" },
+ { name = "hatchling", specifier = ">=1.25.0" },
+ { name = "httpx", specifier = ">=0.28.1" },
+ { name = "humanize", specifier = ">=4.10.0,<5.0.0" },
+ { name = "langchain", extras = ["openai"] },
+ { name = "langchain-anthropic", specifier = ">=0.3.7" },
+ { name = "langchain-core" },
+ { name = "langchain-openai" },
+ { name = "langchain-xai", specifier = ">=0.2.1" },
+ { name = "langgraph" },
+ { name = "langgraph-prebuilt" },
+ { name = "langsmith" },
+ { name = "lazy-object-proxy", specifier = ">=0.0.0" },
+ { name = "lox", specifier = ">=0.12.0" },
+ { name = "lsprotocol", marker = "extra == 'lsp'", specifier = "==2024.0.0b1" },
+ { name = "mcp", extras = ["cli"] },
+ { name = "mini-racer", specifier = ">=0.12.4" },
+ { name = "modal", specifier = ">=0.73.45" },
+ { name = "neo4j" },
+ { name = "networkx", specifier = ">=3.4.1" },
+ { name = "numpy", specifier = ">=2.2.2" },
+ { name = "openai", specifier = "==1.66.3" },
+ { name = "packaging", specifier = ">=24.2" },
+ { name = "pip", specifier = ">=24.3.1" },
+ { name = "plotly", specifier = ">=5.24.0,<7.0.0" },
+ { name = "psutil", specifier = ">=5.8.0" },
+ { name = "pydantic", specifier = ">=2.9.2,<3.0.0" },
+ { name = "pydantic-core", specifier = ">=2.23.4" },
+ { name = "pydantic-settings", specifier = ">=2.0.0" },
+ { name = "pygit2", specifier = ">=1.16.0" },
+ { name = "pygithub", specifier = "==2.6.1" },
+ { name = "pygls", marker = "extra == 'lsp'", specifier = ">=2.0.0a2" },
+ { name = "pyinstrument", specifier = ">=5.0.0" },
+ { name = "pyjson5", specifier = "==1.6.8" },
+ { name = "pyright", specifier = ">=1.1.372,<2.0.0" },
+ { name = "pytest-snapshot", specifier = ">=0.9.0" },
+ { name = "python-dotenv", specifier = ">=1.0.1" },
+ { name = "python-levenshtein", specifier = ">=0.25.1,<1.0.0" },
+ { name = "python-semantic-release" },
+ { name = "requests", specifier = ">=2.32.3" },
+ { name = "rich", specifier = ">=13.7.1,<14.0.0" },
+ { name = "rich-click", specifier = ">=1.8.5" },
+ { name = "rustworkx", specifier = ">=0.15.1" },
+ { name = "sentry-sdk", specifier = "==2.22.0" },
+ { name = "slack-sdk" },
+ { name = "starlette", specifier = ">=0.16.0,<1.0.0" },
+ { name = "tabulate", specifier = ">=0.9.0,<1.0.0" },
+ { name = "termcolor", specifier = ">=2.4.0" },
+ { name = "tiktoken", specifier = ">=0.5.1,<1.0.0" },
+ { name = "tomlkit", specifier = ">=0.13.2" },
+ { name = "tqdm", specifier = ">=4.67.1" },
+ { name = "tree-sitter", specifier = ">=0.23.1" },
+ { name = "tree-sitter-javascript", specifier = ">=0.23.1" },
+ { name = "tree-sitter-python", specifier = ">=0.23.4" },
+ { name = "tree-sitter-typescript", specifier = ">=0.23.2" },
+ { name = "types-networkx", marker = "extra == 'types'", specifier = ">=3.2.1.20240918" },
+ { name = "types-requests", marker = "extra == 'types'", specifier = ">=2.32.0.20241016" },
+ { name = "types-tabulate", marker = "extra == 'types'", specifier = ">=0.9.0.20240106" },
+ { name = "types-toml", marker = "extra == 'types'", specifier = ">=0.10.8.20240310" },
+ { name = "typing-extensions", specifier = ">=4.12.2" },
+ { name = "unidiff", specifier = ">=0.7.5" },
+ { name = "urllib3", specifier = ">=2.0.0" },
+ { name = "uvicorn", extras = ["standard"], specifier = ">=0.30.0" },
+ { name = "watchfiles", specifier = ">=1.0.0,<1.1.0" },
+ { name = "wrapt", specifier = ">=1.16.0,<2.0.0" },
+ { name = "xmltodict", specifier = ">=0.13.0,<1.0.0" },
+]
+
+[package.metadata.requires-dev]
+dev = [
+ { name = "austin-dist", specifier = ">=3.7.0" },
+ { name = "austin-python", specifier = ">=1.7.1" },
+ { name = "autoflake", specifier = ">=2.3.1" },
+ { name = "black", specifier = ">=24.8.0" },
+ { name = "braintrust", specifier = ">=0.0.160" },
+ { name = "cibuildwheel", extras = ["uv"], specifier = ">=2.22.0" },
+ { name = "coverage", specifier = ">=7.6.1,<8.0.0" },
+ { name = "cython", specifier = ">=3.0.11" },
+ { name = "deptry", specifier = ">=0.22.0" },
+ { name = "emoji", specifier = ">=2.14.0" },
+ { name = "filelock", specifier = ">=3.15.4,<4.0.0" },
+ { name = "httpx", specifier = ">=0.28.1,<0.28.2" },
+ { name = "inflection", specifier = ">=0.5.1,<1.0.0" },
+ { name = "isort", specifier = ">=5.13.2" },
+ { name = "jsbeautifier", specifier = ">=1.15.1,<2.0.0" },
+ { name = "jupyterlab", specifier = ">=4.3.5" },
+ { name = "loguru", specifier = ">=0.7.3" },
+ { name = "modal", specifier = ">=0.73.25" },
+ { name = "mypy", extras = ["mypyc", "faster-cache"], specifier = ">=1.13.0" },
+ { name = "pre-commit", specifier = ">=4.0.1" },
+ { name = "pre-commit-uv", specifier = ">=4.1.4" },
+ { name = "pytest", specifier = ">=8.3.3" },
+ { name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
+ { name = "pytest-benchmark", extras = ["histogram"], specifier = ">=5.1.0" },
+ { name = "pytest-cov", specifier = ">=6.0.0,<6.0.1" },
+ { name = "pytest-lsp", specifier = ">=1.0.0b1" },
+ { name = "pytest-mock", specifier = ">=3.14.0,<4.0.0" },
+ { name = "pytest-timeout", specifier = ">=2.3.1" },
+ { name = "pytest-xdist", specifier = ">=3.6.1,<4.0.0" },
+ { name = "ruff", specifier = ">=0.6.8" },
+ { name = "ruff-lsp", specifier = ">=0.0.55,<1.0.0" },
+ { name = "sybil", extras = ["pytest"], specifier = ">=9.0.0" },
+ { name = "typer", specifier = ">=0.12.5" },
+ { name = "uv", specifier = ">=0.4.25" },
+]
+
+[[package]]
+name = "codegen-sdk-pink"
+version = "0.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/1d/da/0e968f5bd8d839ec30b58b681ba30781d5eb1b33a95d771e4b31f3a7cf08/codegen_sdk_pink-0.1.0.tar.gz", hash = "sha256:3be5c2caf47f40ec541cdd04558d8ddfb816ede7d7334e4a62ab3f6130f86afb", size = 322299 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/98/4c/6321af0699207ab63b750e82589f2c4d8726956da9413e30a42c7ea59641/codegen_sdk_pink-0.1.0-cp311-abi3-macosx_10_12_x86_64.whl", hash = "sha256:03f71cd48cd7547faf8233b90f01f4c41b750b4195a83a6a1b6427bee24a45a4", size = 5749136 },
+ { url = "https://files.pythonhosted.org/packages/c2/d0/39b35e45ce5683dace3e4b8c44e51a6471177708e5b3285fc1d764270ba1/codegen_sdk_pink-0.1.0-cp311-abi3-macosx_11_0_arm64.whl", hash = "sha256:c4872286a1328ec546798268ab9ff3bf368c223178fecf45903cf0c667290471", size = 5807261 },
+ { url = "https://files.pythonhosted.org/packages/db/19/5aff61ba06d877f385b206a8da88c87c77f6b7cd68f0aec7b8b16813e1a9/codegen_sdk_pink-0.1.0-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:64943be3bed917d506ece1e0b5492effaa500712c5109a3937266d440ee8bb53", size = 6387801 },
+ { url = "https://files.pythonhosted.org/packages/5e/e4/6a8f7b12b20ab4cd61b833f32bbc1f7c8c86ca7332364f01f08881a4a5e2/codegen_sdk_pink-0.1.0-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:345deecefa2de455dcf1fb2bdf5ad2e71e74476b4212b1bd51f57e6904c1d7e9", size = 6231083 },
+ { url = "https://files.pythonhosted.org/packages/0d/c3/b0f7106308e278b6774275c891bb82c08e04c41f1e9abf6bdf56757cc123/codegen_sdk_pink-0.1.0-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:7c5bcf0ad41644ac980590a37178f231ba275a75ce946dcfc31fa39330c098da", size = 6543302 },
+ { url = "https://files.pythonhosted.org/packages/e0/42/fedf5eec26a06d83de5cfb39fc7072261b72311b70d5fbbd4a75deec2457/codegen_sdk_pink-0.1.0-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b3ee15469ad58d0222dfa0ba5950cd0eb7b8b7c607912d1845950096ddcb7aad", size = 6682410 },
+ { url = "https://files.pythonhosted.org/packages/38/fc/b1479140f579bcd6bdc090e71033484fcfd3bbc76aa779906a322cb33834/codegen_sdk_pink-0.1.0-cp311-abi3-win_amd64.whl", hash = "sha256:10b9b00070b5561df80dd269524f106e44e222d1ab9a93f6cf6ca3565c0aa0f9", size = 4305666 },
+]
+
+[[package]]
+name = "codeowners"
+version = "0.7.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/75/66/ddba64473b0ce0b2c30cd0e1e32d923839834ed91948ad92bad23b2eadeb/codeowners-0.7.0.tar.gz", hash = "sha256:a842647b20968c14da6066e4de4fffac4fd7c1c30de9cfa8b2fc8f534b3d9f48", size = 7706 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/92/d1/4091c351ac4de65fa22da912bdb395011e6dc8e630f070348b7b3fdd885d/codeowners-0.7.0-py3-none-any.whl", hash = "sha256:0df5cd47299f984ba2e120dc4a0a7be68b528d53016ff39d06e86f85e33c7fc2", size = 8718 },
+]
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 },
+]
+
+[[package]]
+name = "colorlog"
+version = "6.9.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/d3/7a/359f4d5df2353f26172b3cc39ea32daa39af8de522205f512f458923e677/colorlog-6.9.0.tar.gz", hash = "sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2", size = 16624 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e3/51/9b208e85196941db2f0654ad0357ca6388ab3ed67efdbfc799f35d1f83aa/colorlog-6.9.0-py3-none-any.whl", hash = "sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff", size = 11424 },
+]
+
+[[package]]
+name = "cryptography"
+version = "44.0.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/cd/25/4ce80c78963834b8a9fd1cc1266be5ed8d1840785c0f2e1b73b8d128d505/cryptography-44.0.2.tar.gz", hash = "sha256:c63454aa261a0cf0c5b4718349629793e9e634993538db841165b3df74f37ec0", size = 710807 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/92/ef/83e632cfa801b221570c5f58c0369db6fa6cef7d9ff859feab1aae1a8a0f/cryptography-44.0.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:efcfe97d1b3c79e486554efddeb8f6f53a4cdd4cf6086642784fa31fc384e1d7", size = 6676361 },
+ { url = "https://files.pythonhosted.org/packages/30/ec/7ea7c1e4c8fc8329506b46c6c4a52e2f20318425d48e0fe597977c71dbce/cryptography-44.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29ecec49f3ba3f3849362854b7253a9f59799e3763b0c9d0826259a88efa02f1", size = 3952350 },
+ { url = "https://files.pythonhosted.org/packages/27/61/72e3afdb3c5ac510330feba4fc1faa0fe62e070592d6ad00c40bb69165e5/cryptography-44.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc821e161ae88bfe8088d11bb39caf2916562e0a2dc7b6d56714a48b784ef0bb", size = 4166572 },
+ { url = "https://files.pythonhosted.org/packages/26/e4/ba680f0b35ed4a07d87f9e98f3ebccb05091f3bf6b5a478b943253b3bbd5/cryptography-44.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3c00b6b757b32ce0f62c574b78b939afab9eecaf597c4d624caca4f9e71e7843", size = 3958124 },
+ { url = "https://files.pythonhosted.org/packages/9c/e8/44ae3e68c8b6d1cbc59040288056df2ad7f7f03bbcaca6b503c737ab8e73/cryptography-44.0.2-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7bdcd82189759aba3816d1f729ce42ffded1ac304c151d0a8e89b9996ab863d5", size = 3678122 },
+ { url = "https://files.pythonhosted.org/packages/27/7b/664ea5e0d1eab511a10e480baf1c5d3e681c7d91718f60e149cec09edf01/cryptography-44.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:4973da6ca3db4405c54cd0b26d328be54c7747e89e284fcff166132eb7bccc9c", size = 4191831 },
+ { url = "https://files.pythonhosted.org/packages/2a/07/79554a9c40eb11345e1861f46f845fa71c9e25bf66d132e123d9feb8e7f9/cryptography-44.0.2-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4e389622b6927d8133f314949a9812972711a111d577a5d1f4bee5e58736b80a", size = 3960583 },
+ { url = "https://files.pythonhosted.org/packages/bb/6d/858e356a49a4f0b591bd6789d821427de18432212e137290b6d8a817e9bf/cryptography-44.0.2-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f514ef4cd14bb6fb484b4a60203e912cfcb64f2ab139e88c2274511514bf7308", size = 4191753 },
+ { url = "https://files.pythonhosted.org/packages/b2/80/62df41ba4916067fa6b125aa8c14d7e9181773f0d5d0bd4dcef580d8b7c6/cryptography-44.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1bc312dfb7a6e5d66082c87c34c8a62176e684b6fe3d90fcfe1568de675e6688", size = 4079550 },
+ { url = "https://files.pythonhosted.org/packages/f3/cd/2558cc08f7b1bb40683f99ff4327f8dcfc7de3affc669e9065e14824511b/cryptography-44.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b721b8b4d948b218c88cb8c45a01793483821e709afe5f622861fc6182b20a7", size = 4298367 },
+ { url = "https://files.pythonhosted.org/packages/71/59/94ccc74788945bc3bd4cf355d19867e8057ff5fdbcac781b1ff95b700fb1/cryptography-44.0.2-cp37-abi3-win32.whl", hash = "sha256:51e4de3af4ec3899d6d178a8c005226491c27c4ba84101bfb59c901e10ca9f79", size = 2772843 },
+ { url = "https://files.pythonhosted.org/packages/ca/2c/0d0bbaf61ba05acb32f0841853cfa33ebb7a9ab3d9ed8bb004bd39f2da6a/cryptography-44.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:c505d61b6176aaf982c5717ce04e87da5abc9a36a5b39ac03905c4aafe8de7aa", size = 3209057 },
+ { url = "https://files.pythonhosted.org/packages/9e/be/7a26142e6d0f7683d8a382dd963745e65db895a79a280a30525ec92be890/cryptography-44.0.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8e0ddd63e6bf1161800592c71ac794d3fb8001f2caebe0966e77c5234fa9efc3", size = 6677789 },
+ { url = "https://files.pythonhosted.org/packages/06/88/638865be7198a84a7713950b1db7343391c6066a20e614f8fa286eb178ed/cryptography-44.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81276f0ea79a208d961c433a947029e1a15948966658cf6710bbabb60fcc2639", size = 3951919 },
+ { url = "https://files.pythonhosted.org/packages/d7/fc/99fe639bcdf58561dfad1faa8a7369d1dc13f20acd78371bb97a01613585/cryptography-44.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a1e657c0f4ea2a23304ee3f964db058c9e9e635cc7019c4aa21c330755ef6fd", size = 4167812 },
+ { url = "https://files.pythonhosted.org/packages/53/7b/aafe60210ec93d5d7f552592a28192e51d3c6b6be449e7fd0a91399b5d07/cryptography-44.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6210c05941994290f3f7f175a4a57dbbb2afd9273657614c506d5976db061181", size = 3958571 },
+ { url = "https://files.pythonhosted.org/packages/16/32/051f7ce79ad5a6ef5e26a92b37f172ee2d6e1cce09931646eef8de1e9827/cryptography-44.0.2-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1c3572526997b36f245a96a2b1713bf79ce99b271bbcf084beb6b9b075f29ea", size = 3679832 },
+ { url = "https://files.pythonhosted.org/packages/78/2b/999b2a1e1ba2206f2d3bca267d68f350beb2b048a41ea827e08ce7260098/cryptography-44.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b042d2a275c8cee83a4b7ae30c45a15e6a4baa65a179a0ec2d78ebb90e4f6699", size = 4193719 },
+ { url = "https://files.pythonhosted.org/packages/72/97/430e56e39a1356e8e8f10f723211a0e256e11895ef1a135f30d7d40f2540/cryptography-44.0.2-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d03806036b4f89e3b13b6218fefea8d5312e450935b1a2d55f0524e2ed7c59d9", size = 3960852 },
+ { url = "https://files.pythonhosted.org/packages/89/33/c1cf182c152e1d262cac56850939530c05ca6c8d149aa0dcee490b417e99/cryptography-44.0.2-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:c7362add18b416b69d58c910caa217f980c5ef39b23a38a0880dfd87bdf8cd23", size = 4193906 },
+ { url = "https://files.pythonhosted.org/packages/e1/99/87cf26d4f125380dc674233971069bc28d19b07f7755b29861570e513650/cryptography-44.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8cadc6e3b5a1f144a039ea08a0bdb03a2a92e19c46be3285123d32029f40a922", size = 4081572 },
+ { url = "https://files.pythonhosted.org/packages/b3/9f/6a3e0391957cc0c5f84aef9fbdd763035f2b52e998a53f99345e3ac69312/cryptography-44.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6f101b1f780f7fc613d040ca4bdf835c6ef3b00e9bd7125a4255ec574c7916e4", size = 4298631 },
+ { url = "https://files.pythonhosted.org/packages/e2/a5/5bc097adb4b6d22a24dea53c51f37e480aaec3465285c253098642696423/cryptography-44.0.2-cp39-abi3-win32.whl", hash = "sha256:3dc62975e31617badc19a906481deacdeb80b4bb454394b4098e3f2525a488c5", size = 2773792 },
+ { url = "https://files.pythonhosted.org/packages/33/cf/1f7649b8b9a3543e042d3f348e398a061923ac05b507f3f4d95f11938aa9/cryptography-44.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:5f6f90b72d8ccadb9c6e311c775c8305381db88374c65fa1a68250aa8a9cb3a6", size = 3210957 },
+]
+
+[[package]]
+name = "dataclasses-json"
+version = "0.6.7"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "marshmallow" },
+ { name = "typing-inspect" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/64/a4/f71d9cf3a5ac257c993b5ca3f93df5f7fb395c725e7f1e6479d2514173c3/dataclasses_json-0.6.7.tar.gz", hash = "sha256:b6b3e528266ea45b9535223bc53ca645f5208833c29229e847b3f26a1cc55fc0", size = 32227 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c3/be/d0d44e092656fe7a06b55e6103cbce807cdbdee17884a5367c68c9860853/dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a", size = 28686 },
+]
+
+[[package]]
+name = "datamodel-code-generator"
+version = "0.28.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "argcomplete" },
+ { name = "black" },
+ { name = "genson" },
+ { name = "inflect" },
+ { name = "isort" },
+ { name = "jinja2" },
+ { name = "packaging" },
+ { name = "pydantic" },
+ { name = "pyyaml" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/25/5f/74fac9f7262e7763eaf56bbcd64c31f712f68135f2c758bc02d15876c543/datamodel_code_generator-0.28.2.tar.gz", hash = "sha256:5f16fe4d6acee79c1366f9ee68016eeec544fc0a2fec25ce47d35f7b7767e0fe", size = 435017 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/78/a0/5ce4d9495355507dfb6093192d1762f745c3e824be6377fc3df8539f06dc/datamodel_code_generator-0.28.2-py3-none-any.whl", hash = "sha256:a2c425386c3f836c618ae276be57e460df323ac78f911b1b12d927ddffd70e73", size = 115645 },
+]
+
+[[package]]
+name = "datasets"
+version = "3.3.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "aiohttp" },
+ { name = "dill" },
+ { name = "filelock" },
+ { name = "fsspec", extra = ["http"] },
+ { name = "huggingface-hub" },
+ { name = "multiprocess" },
+ { name = "numpy" },
+ { name = "packaging" },
+ { name = "pandas" },
+ { name = "pyarrow" },
+ { name = "pyyaml" },
+ { name = "requests" },
+ { name = "tqdm" },
+ { name = "xxhash" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/73/0c/dc3d172104e78e68f7a60386664adbf61db5d10c2246b31ddad06c2d1cb3/datasets-3.3.2.tar.gz", hash = "sha256:20901a97da870fb80b407ccc45f034a7ac99accd07da897ed42f11641bdb8c6e", size = 564352 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/4c/37/22ef7675bef4ffe9577b937ddca2e22791534cbbe11c30714972a91532dc/datasets-3.3.2-py3-none-any.whl", hash = "sha256:fdaf3d5d70242621210b044e9b9b15a56e908bfc3e9d077bcf5605ac390f70bd", size = 485360 },
+]
+
+[[package]]
+name = "deprecated"
+version = "1.2.18"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "wrapt" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/98/97/06afe62762c9a8a86af0cfb7bfdab22a43ad17138b07af5b1a58442690a2/deprecated-1.2.18.tar.gz", hash = "sha256:422b6f6d859da6f2ef57857761bfb392480502a64c3028ca9bbe86085d72115d", size = 2928744 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/6e/c6/ac0b6c1e2d138f1002bcf799d330bd6d85084fece321e662a14223794041/Deprecated-1.2.18-py2.py3-none-any.whl", hash = "sha256:bd5011788200372a32418f888e326a09ff80d0214bd961147cfed01b5c018eec", size = 9998 },
+]
+
+[[package]]
+name = "dicttoxml"
+version = "1.7.16"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ee/c9/3132427f9e64d572688e6a1cbe3d542d1a03f676b81fb600f3d1fd7d2ec5/dicttoxml-1.7.16.tar.gz", hash = "sha256:6f36ce644881db5cd8940bee9b7cb3f3f6b7b327ba8a67d83d3e2caa0538bf9d", size = 39314 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/09/40/9d521973cae7f7ef8b1f0d0e28a3db0f851c1f1dca45d4c2ed5360bb7246/dicttoxml-1.7.16-py3-none-any.whl", hash = "sha256:8677671496d0d38e66c7179f82a7e9059f94887777955dc71b0ac602ee637c26", size = 24155 },
+]
+
+[[package]]
+name = "dill"
+version = "0.3.8"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/17/4d/ac7ffa80c69ea1df30a8aa11b3578692a5118e7cd1aa157e3ef73b092d15/dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca", size = 184847 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c9/7a/cef76fd8438a42f96db64ddaa85280485a9c395e7df3db8158cfec1eee34/dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7", size = 116252 },
+]
+
+[[package]]
+name = "distlib"
+version = "0.3.9"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/0d/dd/1bec4c5ddb504ca60fc29472f3d27e8d4da1257a854e1d96742f15c1d02d/distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403", size = 613923 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/91/a1/cf2472db20f7ce4a6be1253a81cfdf85ad9c7885ffbed7047fb72c24cf87/distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87", size = 468973 },
+]
+
+[[package]]
+name = "distro"
+version = "1.9.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277 },
+]
+
+[[package]]
+name = "dnspython"
+version = "2.7.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b5/4a/263763cb2ba3816dd94b08ad3a33d5fdae34ecb856678773cc40a3605829/dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1", size = 345197 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632 },
+]
+
+[[package]]
+name = "docker"
+version = "7.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pywin32", marker = "sys_platform == 'win32'" },
+ { name = "requests" },
+ { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774 },
+]
+
+[[package]]
+name = "docstring-parser"
+version = "0.16"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/08/12/9c22a58c0b1e29271051222d8906257616da84135af9ed167c9e28f85cb3/docstring_parser-0.16.tar.gz", hash = "sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e", size = 26565 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d5/7c/e9fcff7623954d86bdc17782036cbf715ecab1bec4847c008557affe1ca8/docstring_parser-0.16-py3-none-any.whl", hash = "sha256:bf0a1387354d3691d102edef7ec124f219ef639982d096e26e3b60aeffa90637", size = 36533 },
+]
+
+[[package]]
+name = "docutils"
+version = "0.21.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ae/ed/aefcc8cd0ba62a0560c3c18c33925362d46c6075480bfa4df87b28e169a9/docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f", size = 2204444 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/8f/d7/9322c609343d929e75e7e5e6255e614fcc67572cfd083959cdef3b7aad79/docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2", size = 587408 },
+]
+
+[[package]]
+name = "dotty-dict"
+version = "1.3.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/6a/ab/88d67f02024700b48cd8232579ad1316aa9df2272c63049c27cc094229d6/dotty_dict-1.3.1.tar.gz", hash = "sha256:4b016e03b8ae265539757a53eba24b9bfda506fb94fbce0bee843c6f05541a15", size = 7699 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/1a/91/e0d457ee03ec33d79ee2cd8d212debb1bc21dfb99728ae35efdb5832dc22/dotty_dict-1.3.1-py3-none-any.whl", hash = "sha256:5022d234d9922f13aa711b4950372a06a6d64cb6d6db9ba43d0ba133ebfce31f", size = 7014 },
+]
+
+[[package]]
+name = "email-validator"
+version = "2.2.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "dnspython" },
+ { name = "idna" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/48/ce/13508a1ec3f8bb981ae4ca79ea40384becc868bfae97fd1c942bb3a001b1/email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7", size = 48967 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d7/ee/bf0adb559ad3c786f12bcbc9296b3f5675f529199bef03e2df281fa1fadb/email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631", size = 33521 },
+]
+
+[[package]]
+name = "fastapi"
+version = "0.115.11"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pydantic" },
+ { name = "starlette" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b5/28/c5d26e5860df807241909a961a37d45e10533acef95fc368066c7dd186cd/fastapi-0.115.11.tar.gz", hash = "sha256:cc81f03f688678b92600a65a5e618b93592c65005db37157147204d8924bf94f", size = 294441 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b3/5d/4d8bbb94f0dbc22732350c06965e40740f4a92ca560e90bb566f4f73af41/fastapi-0.115.11-py3-none-any.whl", hash = "sha256:32e1541b7b74602e4ef4a0260ecaf3aadf9d4f19590bba3e1bf2ac4666aa2c64", size = 94926 },
+]
+
+[package.optional-dependencies]
+standard = [
+ { name = "email-validator" },
+ { name = "fastapi-cli", extra = ["standard"] },
+ { name = "httpx" },
+ { name = "jinja2" },
+ { name = "python-multipart" },
+ { name = "uvicorn", extra = ["standard"] },
+]
+
+[[package]]
+name = "fastapi-cli"
+version = "0.0.7"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "rich-toolkit" },
+ { name = "typer" },
+ { name = "uvicorn", extra = ["standard"] },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/fe/73/82a5831fbbf8ed75905bacf5b2d9d3dfd6f04d6968b29fe6f72a5ae9ceb1/fastapi_cli-0.0.7.tar.gz", hash = "sha256:02b3b65956f526412515907a0793c9094abd4bfb5457b389f645b0ea6ba3605e", size = 16753 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a1/e6/5daefc851b514ce2287d8f5d358ae4341089185f78f3217a69d0ce3a390c/fastapi_cli-0.0.7-py3-none-any.whl", hash = "sha256:d549368ff584b2804336c61f192d86ddea080c11255f375959627911944804f4", size = 10705 },
+]
+
+[package.optional-dependencies]
+standard = [
+ { name = "uvicorn", extra = ["standard"] },
+]
+
+[[package]]
+name = "fastcore"
+version = "1.7.29"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "packaging" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a1/a6/f457241a8a5c42b80ef50b96e7cc515dd93bdb9ea273133004bbc8a6aa96/fastcore-1.7.29.tar.gz", hash = "sha256:e7e734cbe58805a22c205341c6671de562a8abba54b13eeb24cdb4486d066e31", size = 80514 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d7/3a/a0b1c764426622287c9b6547d4ea637c406bc884141814df4a5ebab3ab9b/fastcore-1.7.29-py3-none-any.whl", hash = "sha256:76fd4815eabbed704faca3abfea4b7e1f98b6351ba6c869a2d405f37bc4b0074", size = 84208 },
+]
+
+[[package]]
+name = "filelock"
+version = "3.17.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/dc/9c/0b15fb47b464e1b663b1acd1253a062aa5feecb07d4e597daea542ebd2b5/filelock-3.17.0.tar.gz", hash = "sha256:ee4e77401ef576ebb38cd7f13b9b28893194acc20a8e68e18730ba9c0e54660e", size = 18027 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/89/ec/00d68c4ddfedfe64159999e5f8a98fb8442729a63e2077eb9dcd89623d27/filelock-3.17.0-py3-none-any.whl", hash = "sha256:533dc2f7ba78dc2f0f531fc6c4940addf7b70a481e269a5a3b93be94ffbe8338", size = 16164 },
+]
+
+[[package]]
+name = "frozenlist"
+version = "1.5.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/8f/ed/0f4cec13a93c02c47ec32d81d11c0c1efbadf4a471e3f3ce7cad366cbbd3/frozenlist-1.5.0.tar.gz", hash = "sha256:81d5af29e61b9c8348e876d442253723928dce6433e0e76cd925cd83f1b4b817", size = 39930 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/79/73/fa6d1a96ab7fd6e6d1c3500700963eab46813847f01ef0ccbaa726181dd5/frozenlist-1.5.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:31115ba75889723431aa9a4e77d5f398f5cf976eea3bdf61749731f62d4a4a21", size = 94026 },
+ { url = "https://files.pythonhosted.org/packages/ab/04/ea8bf62c8868b8eada363f20ff1b647cf2e93377a7b284d36062d21d81d1/frozenlist-1.5.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7437601c4d89d070eac8323f121fcf25f88674627505334654fd027b091db09d", size = 54150 },
+ { url = "https://files.pythonhosted.org/packages/d0/9a/8e479b482a6f2070b26bda572c5e6889bb3ba48977e81beea35b5ae13ece/frozenlist-1.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7948140d9f8ece1745be806f2bfdf390127cf1a763b925c4a805c603df5e697e", size = 51927 },
+ { url = "https://files.pythonhosted.org/packages/e3/12/2aad87deb08a4e7ccfb33600871bbe8f0e08cb6d8224371387f3303654d7/frozenlist-1.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feeb64bc9bcc6b45c6311c9e9b99406660a9c05ca8a5b30d14a78555088b0b3a", size = 282647 },
+ { url = "https://files.pythonhosted.org/packages/77/f2/07f06b05d8a427ea0060a9cef6e63405ea9e0d761846b95ef3fb3be57111/frozenlist-1.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:683173d371daad49cffb8309779e886e59c2f369430ad28fe715f66d08d4ab1a", size = 289052 },
+ { url = "https://files.pythonhosted.org/packages/bd/9f/8bf45a2f1cd4aa401acd271b077989c9267ae8463e7c8b1eb0d3f561b65e/frozenlist-1.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7d57d8f702221405a9d9b40f9da8ac2e4a1a8b5285aac6100f3393675f0a85ee", size = 291719 },
+ { url = "https://files.pythonhosted.org/packages/41/d1/1f20fd05a6c42d3868709b7604c9f15538a29e4f734c694c6bcfc3d3b935/frozenlist-1.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c72000fbcc35b129cb09956836c7d7abf78ab5416595e4857d1cae8d6251a6", size = 267433 },
+ { url = "https://files.pythonhosted.org/packages/af/f2/64b73a9bb86f5a89fb55450e97cd5c1f84a862d4ff90d9fd1a73ab0f64a5/frozenlist-1.5.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:000a77d6034fbad9b6bb880f7ec073027908f1b40254b5d6f26210d2dab1240e", size = 283591 },
+ { url = "https://files.pythonhosted.org/packages/29/e2/ffbb1fae55a791fd6c2938dd9ea779509c977435ba3940b9f2e8dc9d5316/frozenlist-1.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5d7f5a50342475962eb18b740f3beecc685a15b52c91f7d975257e13e029eca9", size = 273249 },
+ { url = "https://files.pythonhosted.org/packages/2e/6e/008136a30798bb63618a114b9321b5971172a5abddff44a100c7edc5ad4f/frozenlist-1.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:87f724d055eb4785d9be84e9ebf0f24e392ddfad00b3fe036e43f489fafc9039", size = 271075 },
+ { url = "https://files.pythonhosted.org/packages/ae/f0/4e71e54a026b06724cec9b6c54f0b13a4e9e298cc8db0f82ec70e151f5ce/frozenlist-1.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:6e9080bb2fb195a046e5177f10d9d82b8a204c0736a97a153c2466127de87784", size = 285398 },
+ { url = "https://files.pythonhosted.org/packages/4d/36/70ec246851478b1c0b59f11ef8ade9c482ff447c1363c2bd5fad45098b12/frozenlist-1.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b93d7aaa36c966fa42efcaf716e6b3900438632a626fb09c049f6a2f09fc631", size = 294445 },
+ { url = "https://files.pythonhosted.org/packages/37/e0/47f87544055b3349b633a03c4d94b405956cf2437f4ab46d0928b74b7526/frozenlist-1.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:52ef692a4bc60a6dd57f507429636c2af8b6046db8b31b18dac02cbc8f507f7f", size = 280569 },
+ { url = "https://files.pythonhosted.org/packages/f9/7c/490133c160fb6b84ed374c266f42800e33b50c3bbab1652764e6e1fc498a/frozenlist-1.5.0-cp312-cp312-win32.whl", hash = "sha256:29d94c256679247b33a3dc96cce0f93cbc69c23bf75ff715919332fdbb6a32b8", size = 44721 },
+ { url = "https://files.pythonhosted.org/packages/b1/56/4e45136ffc6bdbfa68c29ca56ef53783ef4c2fd395f7cbf99a2624aa9aaa/frozenlist-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:8969190d709e7c48ea386db202d708eb94bdb29207a1f269bab1196ce0dcca1f", size = 51329 },
+ { url = "https://files.pythonhosted.org/packages/da/3b/915f0bca8a7ea04483622e84a9bd90033bab54bdf485479556c74fd5eaf5/frozenlist-1.5.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a1a048f9215c90973402e26c01d1cff8a209e1f1b53f72b95c13db61b00f953", size = 91538 },
+ { url = "https://files.pythonhosted.org/packages/c7/d1/a7c98aad7e44afe5306a2b068434a5830f1470675f0e715abb86eb15f15b/frozenlist-1.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dd47a5181ce5fcb463b5d9e17ecfdb02b678cca31280639255ce9d0e5aa67af0", size = 52849 },
+ { url = "https://files.pythonhosted.org/packages/3a/c8/76f23bf9ab15d5f760eb48701909645f686f9c64fbb8982674c241fbef14/frozenlist-1.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1431d60b36d15cda188ea222033eec8e0eab488f39a272461f2e6d9e1a8e63c2", size = 50583 },
+ { url = "https://files.pythonhosted.org/packages/1f/22/462a3dd093d11df623179d7754a3b3269de3b42de2808cddef50ee0f4f48/frozenlist-1.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6482a5851f5d72767fbd0e507e80737f9c8646ae7fd303def99bfe813f76cf7f", size = 265636 },
+ { url = "https://files.pythonhosted.org/packages/80/cf/e075e407fc2ae7328155a1cd7e22f932773c8073c1fc78016607d19cc3e5/frozenlist-1.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44c49271a937625619e862baacbd037a7ef86dd1ee215afc298a417ff3270608", size = 270214 },
+ { url = "https://files.pythonhosted.org/packages/a1/58/0642d061d5de779f39c50cbb00df49682832923f3d2ebfb0fedf02d05f7f/frozenlist-1.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12f78f98c2f1c2429d42e6a485f433722b0061d5c0b0139efa64f396efb5886b", size = 273905 },
+ { url = "https://files.pythonhosted.org/packages/ab/66/3fe0f5f8f2add5b4ab7aa4e199f767fd3b55da26e3ca4ce2cc36698e50c4/frozenlist-1.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce3aa154c452d2467487765e3adc730a8c153af77ad84096bc19ce19a2400840", size = 250542 },
+ { url = "https://files.pythonhosted.org/packages/f6/b8/260791bde9198c87a465224e0e2bb62c4e716f5d198fc3a1dacc4895dbd1/frozenlist-1.5.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b7dc0c4338e6b8b091e8faf0db3168a37101943e687f373dce00959583f7439", size = 267026 },
+ { url = "https://files.pythonhosted.org/packages/2e/a4/3d24f88c527f08f8d44ade24eaee83b2627793fa62fa07cbb7ff7a2f7d42/frozenlist-1.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45e0896250900b5aa25180f9aec243e84e92ac84bd4a74d9ad4138ef3f5c97de", size = 257690 },
+ { url = "https://files.pythonhosted.org/packages/de/9a/d311d660420b2beeff3459b6626f2ab4fb236d07afbdac034a4371fe696e/frozenlist-1.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:561eb1c9579d495fddb6da8959fd2a1fca2c6d060d4113f5844b433fc02f2641", size = 253893 },
+ { url = "https://files.pythonhosted.org/packages/c6/23/e491aadc25b56eabd0f18c53bb19f3cdc6de30b2129ee0bc39cd387cd560/frozenlist-1.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:df6e2f325bfee1f49f81aaac97d2aa757c7646534a06f8f577ce184afe2f0a9e", size = 267006 },
+ { url = "https://files.pythonhosted.org/packages/08/c4/ab918ce636a35fb974d13d666dcbe03969592aeca6c3ab3835acff01f79c/frozenlist-1.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:140228863501b44b809fb39ec56b5d4071f4d0aa6d216c19cbb08b8c5a7eadb9", size = 276157 },
+ { url = "https://files.pythonhosted.org/packages/c0/29/3b7a0bbbbe5a34833ba26f686aabfe982924adbdcafdc294a7a129c31688/frozenlist-1.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7707a25d6a77f5d27ea7dc7d1fc608aa0a478193823f88511ef5e6b8a48f9d03", size = 264642 },
+ { url = "https://files.pythonhosted.org/packages/ab/42/0595b3dbffc2e82d7fe658c12d5a5bafcd7516c6bf2d1d1feb5387caa9c1/frozenlist-1.5.0-cp313-cp313-win32.whl", hash = "sha256:31a9ac2b38ab9b5a8933b693db4939764ad3f299fcaa931a3e605bc3460e693c", size = 44914 },
+ { url = "https://files.pythonhosted.org/packages/17/c4/b7db1206a3fea44bf3b838ca61deb6f74424a8a5db1dd53ecb21da669be6/frozenlist-1.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:11aabdd62b8b9c4b84081a3c246506d1cddd2dd93ff0ad53ede5defec7886b28", size = 51167 },
+ { url = "https://files.pythonhosted.org/packages/c6/c8/a5be5b7550c10858fcf9b0ea054baccab474da77d37f1e828ce043a3a5d4/frozenlist-1.5.0-py3-none-any.whl", hash = "sha256:d994863bba198a4a518b467bb971c56e1db3f180a25c6cf7bb1949c267f748c3", size = 11901 },
+]
+
+[[package]]
+name = "fsspec"
+version = "2024.12.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ee/11/de70dee31455c546fbc88301971ec03c328f3d1138cfba14263f651e9551/fsspec-2024.12.0.tar.gz", hash = "sha256:670700c977ed2fb51e0d9f9253177ed20cbde4a3e5c0283cc5385b5870c8533f", size = 291600 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/de/86/5486b0188d08aa643e127774a99bac51ffa6cf343e3deb0583956dca5b22/fsspec-2024.12.0-py3-none-any.whl", hash = "sha256:b520aed47ad9804237ff878b504267a3b0b441e97508bd6d2d8774e3db85cee2", size = 183862 },
+]
+
+[package.optional-dependencies]
+http = [
+ { name = "aiohttp" },
+]
+
+[[package]]
+name = "genson"
+version = "1.3.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/c5/cf/2303c8ad276dcf5ee2ad6cf69c4338fd86ef0f471a5207b069adf7a393cf/genson-1.3.0.tar.gz", hash = "sha256:e02db9ac2e3fd29e65b5286f7135762e2cd8a986537c075b06fc5f1517308e37", size = 34919 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f8/5c/e226de133afd8bb267ec27eead9ae3d784b95b39a287ed404caab39a5f50/genson-1.3.0-py3-none-any.whl", hash = "sha256:468feccd00274cc7e4c09e84b08704270ba8d95232aa280f65b986139cec67f7", size = 21470 },
+]
+
+[[package]]
+name = "ghapi"
+version = "1.0.6"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "fastcore" },
+ { name = "packaging" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/f8/88/97e6b0c94885db3530d04ccab7016c606dcaf08bf0581ced1193b9668d06/ghapi-1.0.6.tar.gz", hash = "sha256:64fdd9f06d8e3373065c42c2a03e067e2bbb9ca18b583cd6e38a28aaad0224f6", size = 65518 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/4c/ad/f7204c0c38175f300621af7880737ca6379dd633e9b7d1c0a8fc2748f0dc/ghapi-1.0.6-py3-none-any.whl", hash = "sha256:b3d96bf18fcaa2cb7131bad9de2948e2a1c2bb226377a25826f6c80950c57854", size = 62391 },
+]
+
+[[package]]
+name = "gitdb"
+version = "4.0.12"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "smmap" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/72/94/63b0fc47eb32792c7ba1fe1b694daec9a63620db1e313033d18140c2320a/gitdb-4.0.12.tar.gz", hash = "sha256:5ef71f855d191a3326fcfbc0d5da835f26b13fbcba60c32c21091c349ffdb571", size = 394684 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a0/61/5c78b91c3143ed5c14207f463aecfc8f9dbb5092fb2869baf37c273b2705/gitdb-4.0.12-py3-none-any.whl", hash = "sha256:67073e15955400952c6565cc3e707c554a4eea2e428946f7a4c162fab9bd9bcf", size = 62794 },
+]
+
+[[package]]
+name = "gitpython"
+version = "3.1.44"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "gitdb" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c0/89/37df0b71473153574a5cdef8f242de422a0f5d26d7a9e231e6f169b4ad14/gitpython-3.1.44.tar.gz", hash = "sha256:c87e30b26253bf5418b01b0660f818967f3c503193838337fe5e573331249269", size = 214196 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/1d/9a/4114a9057db2f1462d5c8f8390ab7383925fe1ac012eaa42402ad65c2963/GitPython-3.1.44-py3-none-any.whl", hash = "sha256:9e0e10cda9bed1ee64bc9a6de50e7e38a9c9943241cd7f585f6df3ed28011110", size = 207599 },
+]
+
+[[package]]
+name = "giturlparse"
+version = "0.12.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/37/5f/543dc54c82842376139748226e5aa61eb95093992f63dd495af9c6b4f076/giturlparse-0.12.0.tar.gz", hash = "sha256:c0fff7c21acc435491b1779566e038757a205c1ffdcb47e4f81ea52ad8c3859a", size = 14907 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/dd/94/c6ff3388b8e3225a014e55aed957188639aa0966443e0408d38f0c9614a7/giturlparse-0.12.0-py2.py3-none-any.whl", hash = "sha256:412b74f2855f1da2fefa89fd8dde62df48476077a72fc19b62039554d27360eb", size = 15752 },
+]
+
+[[package]]
+name = "greenlet"
+version = "3.1.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/2f/ff/df5fede753cc10f6a5be0931204ea30c35fa2f2ea7a35b25bdaf4fe40e46/greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467", size = 186022 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7d/ec/bad1ac26764d26aa1353216fcbfa4670050f66d445448aafa227f8b16e80/greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d", size = 274260 },
+ { url = "https://files.pythonhosted.org/packages/66/d4/c8c04958870f482459ab5956c2942c4ec35cac7fe245527f1039837c17a9/greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79", size = 649064 },
+ { url = "https://files.pythonhosted.org/packages/51/41/467b12a8c7c1303d20abcca145db2be4e6cd50a951fa30af48b6ec607581/greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa", size = 663420 },
+ { url = "https://files.pythonhosted.org/packages/27/8f/2a93cd9b1e7107d5c7b3b7816eeadcac2ebcaf6d6513df9abaf0334777f6/greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441", size = 658035 },
+ { url = "https://files.pythonhosted.org/packages/57/5c/7c6f50cb12be092e1dccb2599be5a942c3416dbcfb76efcf54b3f8be4d8d/greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36", size = 660105 },
+ { url = "https://files.pythonhosted.org/packages/f1/66/033e58a50fd9ec9df00a8671c74f1f3a320564c6415a4ed82a1c651654ba/greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9", size = 613077 },
+ { url = "https://files.pythonhosted.org/packages/19/c5/36384a06f748044d06bdd8776e231fadf92fc896bd12cb1c9f5a1bda9578/greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0", size = 1135975 },
+ { url = "https://files.pythonhosted.org/packages/38/f9/c0a0eb61bdf808d23266ecf1d63309f0e1471f284300ce6dac0ae1231881/greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942", size = 1163955 },
+ { url = "https://files.pythonhosted.org/packages/43/21/a5d9df1d21514883333fc86584c07c2b49ba7c602e670b174bd73cfc9c7f/greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01", size = 299655 },
+ { url = "https://files.pythonhosted.org/packages/f3/57/0db4940cd7bb461365ca8d6fd53e68254c9dbbcc2b452e69d0d41f10a85e/greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1", size = 272990 },
+ { url = "https://files.pythonhosted.org/packages/1c/ec/423d113c9f74e5e402e175b157203e9102feeb7088cee844d735b28ef963/greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff", size = 649175 },
+ { url = "https://files.pythonhosted.org/packages/a9/46/ddbd2db9ff209186b7b7c621d1432e2f21714adc988703dbdd0e65155c77/greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a", size = 663425 },
+ { url = "https://files.pythonhosted.org/packages/bc/f9/9c82d6b2b04aa37e38e74f0c429aece5eeb02bab6e3b98e7db89b23d94c6/greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e", size = 657736 },
+ { url = "https://files.pythonhosted.org/packages/d9/42/b87bc2a81e3a62c3de2b0d550bf91a86939442b7ff85abb94eec3fc0e6aa/greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4", size = 660347 },
+ { url = "https://files.pythonhosted.org/packages/37/fa/71599c3fd06336cdc3eac52e6871cfebab4d9d70674a9a9e7a482c318e99/greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e", size = 615583 },
+ { url = "https://files.pythonhosted.org/packages/4e/96/e9ef85de031703ee7a4483489b40cf307f93c1824a02e903106f2ea315fe/greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1", size = 1133039 },
+ { url = "https://files.pythonhosted.org/packages/87/76/b2b6362accd69f2d1889db61a18c94bc743e961e3cab344c2effaa4b4a25/greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c", size = 1160716 },
+ { url = "https://files.pythonhosted.org/packages/1f/1b/54336d876186920e185066d8c3024ad55f21d7cc3683c856127ddb7b13ce/greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761", size = 299490 },
+ { url = "https://files.pythonhosted.org/packages/5f/17/bea55bf36990e1638a2af5ba10c1640273ef20f627962cf97107f1e5d637/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011", size = 643731 },
+ { url = "https://files.pythonhosted.org/packages/78/d2/aa3d2157f9ab742a08e0fd8f77d4699f37c22adfbfeb0c610a186b5f75e0/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13", size = 649304 },
+ { url = "https://files.pythonhosted.org/packages/f1/8e/d0aeffe69e53ccff5a28fa86f07ad1d2d2d6537a9506229431a2a02e2f15/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475", size = 646537 },
+ { url = "https://files.pythonhosted.org/packages/05/79/e15408220bbb989469c8871062c97c6c9136770657ba779711b90870d867/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b", size = 642506 },
+ { url = "https://files.pythonhosted.org/packages/18/87/470e01a940307796f1d25f8167b551a968540fbe0551c0ebb853cb527dd6/greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822", size = 602753 },
+ { url = "https://files.pythonhosted.org/packages/e2/72/576815ba674eddc3c25028238f74d7b8068902b3968cbe456771b166455e/greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01", size = 1122731 },
+ { url = "https://files.pythonhosted.org/packages/ac/38/08cc303ddddc4b3d7c628c3039a61a3aae36c241ed01393d00c2fd663473/greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6", size = 1142112 },
+]
+
+[[package]]
+name = "grpclib"
+version = "0.4.7"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "h2" },
+ { name = "multidict" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/79/b9/55936e462a5925190d7427e880b3033601d1effd13809b483d13a926061a/grpclib-0.4.7.tar.gz", hash = "sha256:2988ef57c02b22b7a2e8e961792c41ccf97efc2ace91ae7a5b0de03c363823c3", size = 61254 }
+
+[[package]]
+name = "h11"
+version = "0.14.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f5/38/3af3d3633a34a3316095b39c8e8fb4853a28a536e55d347bd8d8e9a14b03/h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", size = 100418 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/95/04/ff642e65ad6b90db43e668d70ffb6736436c7ce41fcc549f4e9472234127/h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761", size = 58259 },
+]
+
+[[package]]
+name = "h2"
+version = "4.2.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "hpack" },
+ { name = "hyperframe" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/1b/38/d7f80fd13e6582fb8e0df8c9a653dcc02b03ca34f4d72f34869298c5baf8/h2-4.2.0.tar.gz", hash = "sha256:c8a52129695e88b1a0578d8d2cc6842bbd79128ac685463b887ee278126ad01f", size = 2150682 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d0/9e/984486f2d0a0bd2b024bf4bc1c62688fcafa9e61991f041fb0e2def4a982/h2-4.2.0-py3-none-any.whl", hash = "sha256:479a53ad425bb29af087f3458a61d30780bc818e4ebcf01f0b536ba916462ed0", size = 60957 },
+]
+
+[[package]]
+name = "hatch-vcs"
+version = "0.4.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "hatchling" },
+ { name = "setuptools-scm" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/f5/c9/54bb4fa27b4e4a014ef3bb17710cdf692b3aa2cbc7953da885f1bf7e06ea/hatch_vcs-0.4.0.tar.gz", hash = "sha256:093810748fe01db0d451fabcf2c1ac2688caefd232d4ede967090b1c1b07d9f7", size = 10917 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/82/0f/6cbd9976160bc334add63bc2e7a58b1433a31b34b7cda6c5de6dd983d9a7/hatch_vcs-0.4.0-py3-none-any.whl", hash = "sha256:b8a2b6bee54cf6f9fc93762db73890017ae59c9081d1038a41f16235ceaf8b2c", size = 8412 },
+]
+
+[[package]]
+name = "hatchling"
+version = "1.27.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "packaging" },
+ { name = "pathspec" },
+ { name = "pluggy" },
+ { name = "trove-classifiers" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/8f/8a/cc1debe3514da292094f1c3a700e4ca25442489731ef7c0814358816bb03/hatchling-1.27.0.tar.gz", hash = "sha256:971c296d9819abb3811112fc52c7a9751c8d381898f36533bb16f9791e941fd6", size = 54983 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/08/e7/ae38d7a6dfba0533684e0b2136817d667588ae3ec984c1a4e5df5eb88482/hatchling-1.27.0-py3-none-any.whl", hash = "sha256:d3a2f3567c4f926ea39849cdf924c7e99e6686c9c8e288ae1037c8fa2a5d937b", size = 75794 },
+]
+
+[[package]]
+name = "hpack"
+version = "4.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/2c/48/71de9ed269fdae9c8057e5a4c0aa7402e8bb16f2c6e90b3aa53327b113f8/hpack-4.1.0.tar.gz", hash = "sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca", size = 51276 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/07/c6/80c95b1b2b94682a72cbdbfb85b81ae2daffa4291fbfa1b1464502ede10d/hpack-4.1.0-py3-none-any.whl", hash = "sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496", size = 34357 },
+]
+
+[[package]]
+name = "httpcore"
+version = "1.0.7"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "certifi" },
+ { name = "h11" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/6a/41/d7d0a89eb493922c37d343b607bc1b5da7f5be7e383740b4753ad8943e90/httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c", size = 85196 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/87/f5/72347bc88306acb359581ac4d52f23c0ef445b57157adedb9aee0cd689d2/httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd", size = 78551 },
+]
+
+[[package]]
+name = "httptools"
+version = "0.6.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a7/9a/ce5e1f7e131522e6d3426e8e7a490b3a01f39a6696602e1c4f33f9e94277/httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c", size = 240639 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/bb/0e/d0b71465c66b9185f90a091ab36389a7352985fe857e352801c39d6127c8/httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2", size = 200683 },
+ { url = "https://files.pythonhosted.org/packages/e2/b8/412a9bb28d0a8988de3296e01efa0bd62068b33856cdda47fe1b5e890954/httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44", size = 104337 },
+ { url = "https://files.pythonhosted.org/packages/9b/01/6fb20be3196ffdc8eeec4e653bc2a275eca7f36634c86302242c4fbb2760/httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1", size = 508796 },
+ { url = "https://files.pythonhosted.org/packages/f7/d8/b644c44acc1368938317d76ac991c9bba1166311880bcc0ac297cb9d6bd7/httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2", size = 510837 },
+ { url = "https://files.pythonhosted.org/packages/52/d8/254d16a31d543073a0e57f1c329ca7378d8924e7e292eda72d0064987486/httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81", size = 485289 },
+ { url = "https://files.pythonhosted.org/packages/5f/3c/4aee161b4b7a971660b8be71a92c24d6c64372c1ab3ae7f366b3680df20f/httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f", size = 489779 },
+ { url = "https://files.pythonhosted.org/packages/12/b7/5cae71a8868e555f3f67a50ee7f673ce36eac970f029c0c5e9d584352961/httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970", size = 88634 },
+ { url = "https://files.pythonhosted.org/packages/94/a3/9fe9ad23fd35f7de6b91eeb60848986058bd8b5a5c1e256f5860a160cc3e/httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660", size = 197214 },
+ { url = "https://files.pythonhosted.org/packages/ea/d9/82d5e68bab783b632023f2fa31db20bebb4e89dfc4d2293945fd68484ee4/httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083", size = 102431 },
+ { url = "https://files.pythonhosted.org/packages/96/c1/cb499655cbdbfb57b577734fde02f6fa0bbc3fe9fb4d87b742b512908dff/httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3", size = 473121 },
+ { url = "https://files.pythonhosted.org/packages/af/71/ee32fd358f8a3bb199b03261f10921716990808a675d8160b5383487a317/httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071", size = 473805 },
+ { url = "https://files.pythonhosted.org/packages/8a/0a/0d4df132bfca1507114198b766f1737d57580c9ad1cf93c1ff673e3387be/httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5", size = 448858 },
+ { url = "https://files.pythonhosted.org/packages/1e/6a/787004fdef2cabea27bad1073bf6a33f2437b4dbd3b6fb4a9d71172b1c7c/httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0", size = 452042 },
+ { url = "https://files.pythonhosted.org/packages/4d/dc/7decab5c404d1d2cdc1bb330b1bf70e83d6af0396fd4fc76fc60c0d522bf/httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8", size = 87682 },
+]
+
+[[package]]
+name = "httpx"
+version = "0.28.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "anyio" },
+ { name = "certifi" },
+ { name = "httpcore" },
+ { name = "idna" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517 },
+]
+
+[[package]]
+name = "httpx-sse"
+version = "0.4.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/4c/60/8f4281fa9bbf3c8034fd54c0e7412e66edbab6bc74c4996bd616f8d0406e/httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721", size = 12624 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e1/9b/a181f281f65d776426002f330c31849b86b31fc9d848db62e16f03ff739f/httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f", size = 7819 },
+]
+
+[[package]]
+name = "huggingface-hub"
+version = "0.29.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "filelock" },
+ { name = "fsspec" },
+ { name = "packaging" },
+ { name = "pyyaml" },
+ { name = "requests" },
+ { name = "tqdm" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/22/37/797d6476f13e5ef6af5fc48a5d641d32b39c37e166ccf40c3714c5854a85/huggingface_hub-0.29.1.tar.gz", hash = "sha256:9524eae42077b8ff4fc459ceb7a514eca1c1232b775276b009709fe2a084f250", size = 389776 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ae/05/75b90de9093de0aadafc868bb2fa7c57651fd8f45384adf39bd77f63980d/huggingface_hub-0.29.1-py3-none-any.whl", hash = "sha256:352f69caf16566c7b6de84b54a822f6238e17ddd8ae3da4f8f2272aea5b198d5", size = 468049 },
+]
+
+[[package]]
+name = "humanize"
+version = "4.12.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/5b/8c/4f2f0784d08a383b5de3d3b1d65a6f204cc5dc487621c91c550388d756af/humanize-4.12.1.tar.gz", hash = "sha256:1338ba97415c96556758a6e2f65977ed406dddf4620d4c6db9bbdfd07f0f1232", size = 80827 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/32/30/5ef5994b090398f9284d2662f56853e5183ae2cb5d8e3db67e4f4cfea407/humanize-4.12.1-py3-none-any.whl", hash = "sha256:86014ca5c52675dffa1d404491952f1f5bf03b07c175a51891a343daebf01fea", size = 127409 },
+]
+
+[[package]]
+name = "hyperframe"
+version = "6.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/02/e7/94f8232d4a74cc99514c13a9f995811485a6903d48e5d952771ef6322e30/hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08", size = 26566 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/48/30/47d0bf6072f7252e6521f3447ccfa40b421b6824517f82854703d0f5a98b/hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5", size = 13007 },
+]
+
+[[package]]
+name = "identify"
+version = "2.6.8"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f9/fa/5eb460539e6f5252a7c5a931b53426e49258cde17e3d50685031c300a8fd/identify-2.6.8.tar.gz", hash = "sha256:61491417ea2c0c5c670484fd8abbb34de34cdae1e5f39a73ee65e48e4bb663fc", size = 99249 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/78/8c/4bfcab2d8286473b8d83ea742716f4b79290172e75f91142bc1534b05b9a/identify-2.6.8-py2.py3-none-any.whl", hash = "sha256:83657f0f766a3c8d0eaea16d4ef42494b39b34629a4b3192a9d020d349b3e255", size = 99109 },
+]
+
+[[package]]
+name = "idna"
+version = "3.10"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 },
+]
+
+[[package]]
+name = "imagesize"
+version = "1.4.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a7/84/62473fb57d61e31fef6e36d64a179c8781605429fd927b5dd608c997be31/imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a", size = 1280026 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ff/62/85c4c919272577931d407be5ba5d71c20f0b616d31a0befe0ae45bb79abd/imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b", size = 8769 },
+]
+
+[[package]]
+name = "importlib-resources"
+version = "6.5.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/cf/8c/f834fbf984f691b4f7ff60f50b514cc3de5cc08abfc3295564dd89c5e2e7/importlib_resources-6.5.2.tar.gz", hash = "sha256:185f87adef5bcc288449d98fb4fba07cea78bc036455dd44c5fc4a2fe78fed2c", size = 44693 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a4/ed/1f1afb2e9e7f38a545d628f864d562a5ae64fe6f7a10e28ffb9b185b4e89/importlib_resources-6.5.2-py3-none-any.whl", hash = "sha256:789cfdc3ed28c78b67a06acb8126751ced69a3d5f79c095a98298cd8a760ccec", size = 37461 },
+]
+
+[[package]]
+name = "inflect"
+version = "5.6.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/cb/db/cae5d8524c4b5e574c281895b212062f3b06d0e14186904ed71c538b4e90/inflect-5.6.2.tar.gz", hash = "sha256:aadc7ed73928f5e014129794bbac03058cca35d0a973a5fc4eb45c7fa26005f9", size = 69378 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/df/d8/3e1a32d305215166f5c32652c473aa766bd7809cd10b34c544dbc31facb5/inflect-5.6.2-py3-none-any.whl", hash = "sha256:b45d91a4a28a4e617ff1821117439b06eaa86e2a4573154af0149e9be6687238", size = 33704 },
+]
+
+[[package]]
+name = "iniconfig"
+version = "2.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892 },
+]
+
+[[package]]
+name = "isort"
+version = "6.0.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b8/21/1e2a441f74a653a144224d7d21afe8f4169e6c7c20bb13aec3a2dc3815e0/isort-6.0.1.tar.gz", hash = "sha256:1cb5df28dfbc742e490c5e41bad6da41b805b0a8be7bc93cd0fb2a8a890ac450", size = 821955 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c1/11/114d0a5f4dabbdcedc1125dee0888514c3c3b16d3e9facad87ed96fad97c/isort-6.0.1-py3-none-any.whl", hash = "sha256:2dc5d7f65c9678d94c88dfc29161a320eec67328bc97aad576874cb4be1e9615", size = 94186 },
+]
+
+[[package]]
+name = "jinja2"
+version = "3.1.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "markupsafe" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/af/92/b3130cbbf5591acf9ade8708c365f3238046ac7cb8ccba6e81abccb0ccff/jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb", size = 244674 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/bd/0f/2ba5fbcd631e3e88689309dbe978c5769e883e4b84ebfe7da30b43275c5a/jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb", size = 134596 },
+]
+
+[[package]]
+name = "jiter"
+version = "0.8.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f8/70/90bc7bd3932e651486861df5c8ffea4ca7c77d28e8532ddefe2abc561a53/jiter-0.8.2.tar.gz", hash = "sha256:cd73d3e740666d0e639f678adb176fad25c1bcbdae88d8d7b857e1783bb4212d", size = 163007 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a1/17/c8747af8ea4e045f57d6cfd6fc180752cab9bc3de0e8a0c9ca4e8af333b1/jiter-0.8.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:e6ec2be506e7d6f9527dae9ff4b7f54e68ea44a0ef6b098256ddf895218a2f8f", size = 302027 },
+ { url = "https://files.pythonhosted.org/packages/3c/c1/6da849640cd35a41e91085723b76acc818d4b7d92b0b6e5111736ce1dd10/jiter-0.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76e324da7b5da060287c54f2fabd3db5f76468006c811831f051942bf68c9d44", size = 310326 },
+ { url = "https://files.pythonhosted.org/packages/06/99/a2bf660d8ccffee9ad7ed46b4f860d2108a148d0ea36043fd16f4dc37e94/jiter-0.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:180a8aea058f7535d1c84183c0362c710f4750bef66630c05f40c93c2b152a0f", size = 334242 },
+ { url = "https://files.pythonhosted.org/packages/a7/5f/cea1c17864828731f11427b9d1ab7f24764dbd9aaf4648a7f851164d2718/jiter-0.8.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025337859077b41548bdcbabe38698bcd93cfe10b06ff66617a48ff92c9aec60", size = 356654 },
+ { url = "https://files.pythonhosted.org/packages/e9/13/62774b7e5e7f5d5043efe1d0f94ead66e6d0f894ae010adb56b3f788de71/jiter-0.8.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecff0dc14f409599bbcafa7e470c00b80f17abc14d1405d38ab02e4b42e55b57", size = 379967 },
+ { url = "https://files.pythonhosted.org/packages/ec/fb/096b34c553bb0bd3f2289d5013dcad6074948b8d55212aa13a10d44c5326/jiter-0.8.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffd9fee7d0775ebaba131f7ca2e2d83839a62ad65e8e02fe2bd8fc975cedeb9e", size = 389252 },
+ { url = "https://files.pythonhosted.org/packages/17/61/beea645c0bf398ced8b199e377b61eb999d8e46e053bb285c91c3d3eaab0/jiter-0.8.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14601dcac4889e0a1c75ccf6a0e4baf70dbc75041e51bcf8d0e9274519df6887", size = 345490 },
+ { url = "https://files.pythonhosted.org/packages/d5/df/834aa17ad5dcc3cf0118821da0a0cf1589ea7db9832589278553640366bc/jiter-0.8.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92249669925bc1c54fcd2ec73f70f2c1d6a817928480ee1c65af5f6b81cdf12d", size = 376991 },
+ { url = "https://files.pythonhosted.org/packages/67/80/87d140399d382fb4ea5b3d56e7ecaa4efdca17cd7411ff904c1517855314/jiter-0.8.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e725edd0929fa79f8349ab4ec7f81c714df51dc4e991539a578e5018fa4a7152", size = 510822 },
+ { url = "https://files.pythonhosted.org/packages/5c/37/3394bb47bac1ad2cb0465601f86828a0518d07828a650722e55268cdb7e6/jiter-0.8.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bf55846c7b7a680eebaf9c3c48d630e1bf51bdf76c68a5f654b8524335b0ad29", size = 503730 },
+ { url = "https://files.pythonhosted.org/packages/f9/e2/253fc1fa59103bb4e3aa0665d6ceb1818df1cd7bf3eb492c4dad229b1cd4/jiter-0.8.2-cp312-cp312-win32.whl", hash = "sha256:7efe4853ecd3d6110301665a5178b9856be7e2a9485f49d91aa4d737ad2ae49e", size = 203375 },
+ { url = "https://files.pythonhosted.org/packages/41/69/6d4bbe66b3b3b4507e47aa1dd5d075919ad242b4b1115b3f80eecd443687/jiter-0.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:83c0efd80b29695058d0fd2fa8a556490dbce9804eac3e281f373bbc99045f6c", size = 204740 },
+ { url = "https://files.pythonhosted.org/packages/6c/b0/bfa1f6f2c956b948802ef5a021281978bf53b7a6ca54bb126fd88a5d014e/jiter-0.8.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ca1f08b8e43dc3bd0594c992fb1fd2f7ce87f7bf0d44358198d6da8034afdf84", size = 301190 },
+ { url = "https://files.pythonhosted.org/packages/a4/8f/396ddb4e292b5ea57e45ade5dc48229556b9044bad29a3b4b2dddeaedd52/jiter-0.8.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5672a86d55416ccd214c778efccf3266b84f87b89063b582167d803246354be4", size = 309334 },
+ { url = "https://files.pythonhosted.org/packages/7f/68/805978f2f446fa6362ba0cc2e4489b945695940656edd844e110a61c98f8/jiter-0.8.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58dc9bc9767a1101f4e5e22db1b652161a225874d66f0e5cb8e2c7d1c438b587", size = 333918 },
+ { url = "https://files.pythonhosted.org/packages/b3/99/0f71f7be667c33403fa9706e5b50583ae5106d96fab997fa7e2f38ee8347/jiter-0.8.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:37b2998606d6dadbb5ccda959a33d6a5e853252d921fec1792fc902351bb4e2c", size = 356057 },
+ { url = "https://files.pythonhosted.org/packages/8d/50/a82796e421a22b699ee4d2ce527e5bcb29471a2351cbdc931819d941a167/jiter-0.8.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4ab9a87f3784eb0e098f84a32670cfe4a79cb6512fd8f42ae3d0709f06405d18", size = 379790 },
+ { url = "https://files.pythonhosted.org/packages/3c/31/10fb012b00f6d83342ca9e2c9618869ab449f1aa78c8f1b2193a6b49647c/jiter-0.8.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:79aec8172b9e3c6d05fd4b219d5de1ac616bd8da934107325a6c0d0e866a21b6", size = 388285 },
+ { url = "https://files.pythonhosted.org/packages/c8/81/f15ebf7de57be488aa22944bf4274962aca8092e4f7817f92ffa50d3ee46/jiter-0.8.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:711e408732d4e9a0208008e5892c2966b485c783cd2d9a681f3eb147cf36c7ef", size = 344764 },
+ { url = "https://files.pythonhosted.org/packages/b3/e8/0cae550d72b48829ba653eb348cdc25f3f06f8a62363723702ec18e7be9c/jiter-0.8.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:653cf462db4e8c41995e33d865965e79641ef45369d8a11f54cd30888b7e6ff1", size = 376620 },
+ { url = "https://files.pythonhosted.org/packages/b8/50/e5478ff9d82534a944c03b63bc217c5f37019d4a34d288db0f079b13c10b/jiter-0.8.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:9c63eaef32b7bebac8ebebf4dabebdbc6769a09c127294db6babee38e9f405b9", size = 510402 },
+ { url = "https://files.pythonhosted.org/packages/8e/1e/3de48bbebbc8f7025bd454cedc8c62378c0e32dd483dece5f4a814a5cb55/jiter-0.8.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:eb21aaa9a200d0a80dacc7a81038d2e476ffe473ffdd9c91eb745d623561de05", size = 503018 },
+ { url = "https://files.pythonhosted.org/packages/d5/cd/d5a5501d72a11fe3e5fd65c78c884e5164eefe80077680533919be22d3a3/jiter-0.8.2-cp313-cp313-win32.whl", hash = "sha256:789361ed945d8d42850f919342a8665d2dc79e7e44ca1c97cc786966a21f627a", size = 203190 },
+ { url = "https://files.pythonhosted.org/packages/51/bf/e5ca301245ba951447e3ad677a02a64a8845b185de2603dabd83e1e4b9c6/jiter-0.8.2-cp313-cp313-win_amd64.whl", hash = "sha256:ab7f43235d71e03b941c1630f4b6e3055d46b6cb8728a17663eaac9d8e83a865", size = 203551 },
+ { url = "https://files.pythonhosted.org/packages/2f/3c/71a491952c37b87d127790dd7a0b1ebea0514c6b6ad30085b16bbe00aee6/jiter-0.8.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b426f72cd77da3fec300ed3bc990895e2dd6b49e3bfe6c438592a3ba660e41ca", size = 308347 },
+ { url = "https://files.pythonhosted.org/packages/a0/4c/c02408042e6a7605ec063daed138e07b982fdb98467deaaf1c90950cf2c6/jiter-0.8.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2dd880785088ff2ad21ffee205e58a8c1ddabc63612444ae41e5e4b321b39c0", size = 342875 },
+ { url = "https://files.pythonhosted.org/packages/91/61/c80ef80ed8a0a21158e289ef70dac01e351d929a1c30cb0f49be60772547/jiter-0.8.2-cp313-cp313t-win_amd64.whl", hash = "sha256:3ac9f578c46f22405ff7f8b1f5848fb753cc4b8377fbec8470a7dc3997ca7566", size = 202374 },
+]
+
+[[package]]
+name = "jsonpatch"
+version = "1.33"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "jsonpointer" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/42/78/18813351fe5d63acad16aec57f94ec2b70a09e53ca98145589e185423873/jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c", size = 21699 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/73/07/02e16ed01e04a374e644b575638ec7987ae846d25ad97bcc9945a3ee4b0e/jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade", size = 12898 },
+]
+
+[[package]]
+name = "jsonpointer"
+version = "3.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/6a/0a/eebeb1fa92507ea94016a2a790b93c2ae41a7e18778f85471dc54475ed25/jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef", size = 9114 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/71/92/5e77f98553e9e75130c78900d000368476aed74276eb8ae8796f65f00918/jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942", size = 7595 },
+]
+
+[[package]]
+name = "langchain"
+version = "0.3.20"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "langchain-core" },
+ { name = "langchain-text-splitters" },
+ { name = "langsmith" },
+ { name = "pydantic" },
+ { name = "pyyaml" },
+ { name = "requests" },
+ { name = "sqlalchemy" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/2a/b0/5121cdd19cf99e684043f4eae528c893f56bd25e7711d4de89f27832a5f3/langchain-0.3.20.tar.gz", hash = "sha256:edcc3241703e1f6557ef5a5c35cd56f9ccc25ff12e38b4829c66d94971737a93", size = 10225276 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b5/d4/afe8174838bdd3baba5d6a19e9f3af4c54c5db1ab4d66ef0b650c6157919/langchain-0.3.20-py3-none-any.whl", hash = "sha256:273287f8e61ffdf7e811cf8799e6a71e9381325b8625fd6618900faba79cfdd0", size = 1011577 },
+]
+
+[package.optional-dependencies]
+openai = [
+ { name = "langchain-openai" },
+]
+
+[[package]]
+name = "langchain-anthropic"
+version = "0.3.9"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "anthropic" },
+ { name = "langchain-core" },
+ { name = "pydantic" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/be/0a/7ccb79c41575b04266fc4def50f41d0a4689361421d82a14350d9d5e783e/langchain_anthropic-0.3.9.tar.gz", hash = "sha256:e8012d7986ad1d8412df6914c56f3c0d2797f231766a03bb1ad22cc7023e6e1d", size = 42205 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b9/27/258565b4a487fca7db363ea95765e6f1f00c23baa83dc4ec19a009213658/langchain_anthropic-0.3.9-py3-none-any.whl", hash = "sha256:adbbfaf3ce9798d46fb43d6fc01105630238f375dc6043d35d0aafab61fdbb71", size = 24414 },
+]
+
+[[package]]
+name = "langchain-core"
+version = "0.3.41"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "jsonpatch" },
+ { name = "langsmith" },
+ { name = "packaging" },
+ { name = "pydantic" },
+ { name = "pyyaml" },
+ { name = "tenacity" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/2b/0a/aa5167a1a46094024b8fe50917e37f1df5bcd0034adb25452e121dae60e6/langchain_core-0.3.41.tar.gz", hash = "sha256:d3ee9f3616ebbe7943470ade23d4a04e1729b1512c0ec55a4a07bd2ac64dedb4", size = 528826 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/bc/a6/551de93e02b1ef4ec031f6e1c0ff31a70790096c1e7066168a7693e4efe5/langchain_core-0.3.41-py3-none-any.whl", hash = "sha256:1a27cca5333bae7597de4004fb634b5f3e71667a3da6493b94ce83bcf15a23bd", size = 415149 },
+]
+
+[[package]]
+name = "langchain-openai"
+version = "0.3.7"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "langchain-core" },
+ { name = "openai" },
+ { name = "tiktoken" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/8e/3c/08add067e46409d3e881933155f546edb08644e5e4e2360ff22c6a2104a8/langchain_openai-0.3.7.tar.gz", hash = "sha256:b8b51a3aaa1cc3bda060651ea41145f7728219e8a7150b5404fb1e8446de9cef", size = 256488 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/36/0e/816c5293eda67600d374bb8484a9adab873c9096489f6f91634581919f35/langchain_openai-0.3.7-py3-none-any.whl", hash = "sha256:0aefc7bdf8e7398d41e09c4313cace816df6438f2aa93d34f79523487310f0da", size = 55254 },
+]
+
+[[package]]
+name = "langchain-text-splitters"
+version = "0.3.6"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "langchain-core" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/0d/33/89912a07c63e4e818f9b0c8d52e4f9d600c97beca8a91db8c9dae6a1b28f/langchain_text_splitters-0.3.6.tar.gz", hash = "sha256:c537972f4b7c07451df431353a538019ad9dadff7a1073ea363946cea97e1bee", size = 40545 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/4c/f8/6b82af988e65af9697f6a2f25373fb173fd32d48b62772a8773c5184c870/langchain_text_splitters-0.3.6-py3-none-any.whl", hash = "sha256:e5d7b850f6c14259ea930be4a964a65fa95d9df7e1dbdd8bad8416db72292f4e", size = 31197 },
+]
+
+[[package]]
+name = "langchain-xai"
+version = "0.2.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "aiohttp" },
+ { name = "langchain-core" },
+ { name = "langchain-openai" },
+ { name = "requests" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a3/94/a633bf1b4bbf66e4516f4188adc1174480c465ae12fb98f06c3e23c98519/langchain_xai-0.2.1.tar.gz", hash = "sha256:143a6f52be7617b5e5c68ab10c9b7df90914f54a6b3098566ce22b5d8fd89da5", size = 7788 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7b/88/d8050e610fadabf97c1745d24f0987b3e53b72fca63c8038ab1e0c103da9/langchain_xai-0.2.1-py3-none-any.whl", hash = "sha256:87228125cb15131663979d627210fca47dcd6b9a28462e8b5fee47f73bbed9f4", size = 6263 },
+]
+
+[[package]]
+name = "langgraph"
+version = "0.3.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "langchain-core" },
+ { name = "langgraph-checkpoint" },
+ { name = "langgraph-prebuilt" },
+ { name = "langgraph-sdk" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/4e/fa/b1ecc95a2464bc7dbe5e67fbd21096013829119899c33236090b98c75508/langgraph-0.3.5.tar.gz", hash = "sha256:7c0d8e61aa02578b41036c9f7a599ccba2562d269f66ef76bacbba47a99a7eca", size = 114020 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a4/5f/1e1d9173b5c41eff54f88d9f4ee82c38eb4928120ab6a21a68a78d1c499e/langgraph-0.3.5-py3-none-any.whl", hash = "sha256:be313ec300633c857873ea3e44aece4dd7d0b11f131d385108b359d377a85bf7", size = 131527 },
+]
+
+[[package]]
+name = "langgraph-checkpoint"
+version = "2.0.16"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "langchain-core" },
+ { name = "msgpack" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/01/66/5d4a2013a84c511be289bb4a5ef91cbaad28c091b6b366fdb79710a1458b/langgraph_checkpoint-2.0.16.tar.gz", hash = "sha256:49ba8cfa12b2aae845ccc3b1fbd1d7a8d3a6c4a2e387ab3a92fca40dd3d4baa5", size = 34206 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7c/63/03bc3dd304ead45b53313cab8727329e1d139a2d220f2d030c72242c860e/langgraph_checkpoint-2.0.16-py3-none-any.whl", hash = "sha256:dfab51076a6eddb5f9e146cfe1b977e3dd6419168b2afa23ff3f4e47973bf06f", size = 38291 },
+]
+
+[[package]]
+name = "langgraph-prebuilt"
+version = "0.1.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "langchain-core" },
+ { name = "langgraph-checkpoint" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/22/15/848593ccace12e4f8b80cc0b159b0ba1da17605e1eecbda5f37d891748a3/langgraph_prebuilt-0.1.1.tar.gz", hash = "sha256:420a748ff93842f2b1a345a0c1ca3939d2bc7a2d46c20e9a9a0d8f148152cc47", size = 23257 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/3c/62/a424fdb892f578fa88b2ff4df0bfdebdc8b89501dacb8ca3b480305cbfef/langgraph_prebuilt-0.1.1-py3-none-any.whl", hash = "sha256:148a9558a36ec7e83cc6512f3521425c862b0463251ae0242ade52a448c54e78", size = 24622 },
+]
+
+[[package]]
+name = "langgraph-sdk"
+version = "0.1.53"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "httpx" },
+ { name = "orjson" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/39/b2/a261cfbf91a4499396ba0993cf5601076301dd22883d3c0901e905253917/langgraph_sdk-0.1.53.tar.gz", hash = "sha256:12906ed965905fa27e0c28d9fa07dc6fd89e6895ff321ff049fdf3965d057cc4", size = 42369 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/fc/97/3492a07b454cc74bf49938e83f0a95c608a8bc5c3dda338091d3c66e3ec5/langgraph_sdk-0.1.53-py3-none-any.whl", hash = "sha256:4fab62caad73661ffe4c3ababedcd0d7bfaaba986bee4416b9c28948458a3af5", size = 45441 },
+]
+
+[[package]]
+name = "langsmith"
+version = "0.3.11"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "httpx" },
+ { name = "orjson", marker = "platform_python_implementation != 'PyPy'" },
+ { name = "packaging" },
+ { name = "pydantic" },
+ { name = "requests" },
+ { name = "requests-toolbelt" },
+ { name = "zstandard" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ea/34/c4c0eddad03e00457cd6be1a88c288cd4419da8d368d8f519a29abe5392c/langsmith-0.3.11.tar.gz", hash = "sha256:ddf29d24352e99de79c9618aaf95679214324e146c5d3d9475a7ddd2870018b1", size = 323815 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ff/68/514ffa62860202a5a0a3acbf5c05017ef9df38d4437d2cb44a3cf93d617b/langsmith-0.3.11-py3-none-any.whl", hash = "sha256:0cca22737ef07d3b038a437c141deda37e00add56022582680188b681bec095e", size = 335265 },
+]
+
+[[package]]
+name = "lazy-object-proxy"
+version = "1.10.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/2c/f0/f02e2d150d581a294efded4020094a371bbab42423fe78625ac18854d89b/lazy-object-proxy-1.10.0.tar.gz", hash = "sha256:78247b6d45f43a52ef35c25b5581459e85117225408a4128a3daf8bf9648ac69", size = 43271 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d0/5d/768a7f2ccebb29604def61842fd54f6f5f75c79e366ee8748dda84de0b13/lazy_object_proxy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e98c8af98d5707dcdecc9ab0863c0ea6e88545d42ca7c3feffb6b4d1e370c7ba", size = 27560 },
+ { url = "https://files.pythonhosted.org/packages/b3/ce/f369815549dbfa4bebed541fa4e1561d69e4f268a1f6f77da886df182dab/lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:952c81d415b9b80ea261d2372d2a4a2332a3890c2b83e0535f263ddfe43f0d43", size = 72403 },
+ { url = "https://files.pythonhosted.org/packages/44/46/3771e0a4315044aa7b67da892b2fb1f59dfcf0eaff2c8967b2a0a85d5896/lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80b39d3a151309efc8cc48675918891b865bdf742a8616a337cb0090791a0de9", size = 72401 },
+ { url = "https://files.pythonhosted.org/packages/81/39/84ce4740718e1c700bd04d3457ac92b2e9ce76529911583e7a2bf4d96eb2/lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e221060b701e2aa2ea991542900dd13907a5c90fa80e199dbf5a03359019e7a3", size = 75375 },
+ { url = "https://files.pythonhosted.org/packages/86/3b/d6b65da2b864822324745c0a73fe7fd86c67ccea54173682c3081d7adea8/lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92f09ff65ecff3108e56526f9e2481b8116c0b9e1425325e13245abfd79bdb1b", size = 75466 },
+ { url = "https://files.pythonhosted.org/packages/f5/33/467a093bf004a70022cb410c590d937134bba2faa17bf9dc42a48f49af35/lazy_object_proxy-1.10.0-cp312-cp312-win32.whl", hash = "sha256:3ad54b9ddbe20ae9f7c1b29e52f123120772b06dbb18ec6be9101369d63a4074", size = 25914 },
+ { url = "https://files.pythonhosted.org/packages/77/ce/7956dc5ac2f8b62291b798c8363c81810e22a9effe469629d297d087e350/lazy_object_proxy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:127a789c75151db6af398b8972178afe6bda7d6f68730c057fbbc2e96b08d282", size = 27525 },
+ { url = "https://files.pythonhosted.org/packages/31/8b/94dc8d58704ab87b39faed6f2fc0090b9d90e2e2aa2bbec35c79f3d2a054/lazy_object_proxy-1.10.0-pp310.pp311.pp312.pp38.pp39-none-any.whl", hash = "sha256:80fa48bd89c8f2f456fc0765c11c23bf5af827febacd2f523ca5bc1893fcc09d", size = 16405 },
+]
+
+[[package]]
+name = "levenshtein"
+version = "0.27.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "rapidfuzz" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/7e/b3/b5f8011483ba9083a0bc74c4d58705e9cf465fbe55c948a1b1357d0a2aa8/levenshtein-0.27.1.tar.gz", hash = "sha256:3e18b73564cfc846eec94dd13fab6cb006b5d2e0cc56bad1fd7d5585881302e3", size = 382571 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0d/73/84a7126b9e6441c2547f1fbfd65f3c15c387d1fc04e0dd1d025a12107771/levenshtein-0.27.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:25fb540d8c55d1dc7bdc59b7de518ea5ed9df92eb2077e74bcb9bb6de7b06f69", size = 173953 },
+ { url = "https://files.pythonhosted.org/packages/8f/5c/06c01870c0cf336f9f29397bbfbfbbfd3a59918868716e7bb15828e89367/levenshtein-0.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f09cfab6387e9c908c7b37961c045e8e10eb9b7ec4a700367f8e080ee803a562", size = 156399 },
+ { url = "https://files.pythonhosted.org/packages/c7/4a/c1d3f27ec8b3fff5a96617251bf3f61c67972869ac0a0419558fc3e2cbe6/levenshtein-0.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dafa29c0e616f322b574e0b2aeb5b1ff2f8d9a1a6550f22321f3bd9bb81036e3", size = 151061 },
+ { url = "https://files.pythonhosted.org/packages/4d/8f/2521081e9a265891edf46aa30e1b59c1f347a452aed4c33baafbec5216fa/levenshtein-0.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be7a7642ea64392fa1e6ef7968c2e50ef2152c60948f95d0793361ed97cf8a6f", size = 183119 },
+ { url = "https://files.pythonhosted.org/packages/1f/a0/a63e3bce6376127596d04be7f57e672d2f3d5f540265b1e30b9dd9b3c5a9/levenshtein-0.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:060b48c45ed54bcea9582ce79c6365b20a1a7473767e0b3d6be712fa3a22929c", size = 185352 },
+ { url = "https://files.pythonhosted.org/packages/17/8c/8352e992063952b38fb61d49bad8d193a4a713e7eeceb3ae74b719d7863d/levenshtein-0.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:712f562c5e64dd0398d3570fe99f8fbb88acec7cc431f101cb66c9d22d74c542", size = 159879 },
+ { url = "https://files.pythonhosted.org/packages/69/b4/564866e2038acf47c3de3e9292fc7fc7cc18d2593fedb04f001c22ac6e15/levenshtein-0.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6141ad65cab49aa4527a3342d76c30c48adb2393b6cdfeca65caae8d25cb4b8", size = 245005 },
+ { url = "https://files.pythonhosted.org/packages/ba/f9/7367f87e3a6eed282f3654ec61a174b4d1b78a7a73f2cecb91f0ab675153/levenshtein-0.27.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:799b8d73cda3265331116f62932f553804eae16c706ceb35aaf16fc2a704791b", size = 1116865 },
+ { url = "https://files.pythonhosted.org/packages/f5/02/b5b3bfb4b4cd430e9d110bad2466200d51c6061dae7c5a64e36047c8c831/levenshtein-0.27.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ec99871d98e517e1cc4a15659c62d6ea63ee5a2d72c5ddbebd7bae8b9e2670c8", size = 1401723 },
+ { url = "https://files.pythonhosted.org/packages/ef/69/b93bccd093b3f06a99e67e11ebd6e100324735dc2834958ba5852a1b9fed/levenshtein-0.27.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8799164e1f83588dbdde07f728ea80796ea72196ea23484d78d891470241b222", size = 1226276 },
+ { url = "https://files.pythonhosted.org/packages/ab/32/37dd1bc5ce866c136716619e6f7081d7078d7dd1c1da7025603dcfd9cf5f/levenshtein-0.27.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:583943813898326516ab451a83f734c6f07488cda5c361676150d3e3e8b47927", size = 1420132 },
+ { url = "https://files.pythonhosted.org/packages/4b/08/f3bc828dd9f0f8433b26f37c4fceab303186ad7b9b70819f2ccb493d99fc/levenshtein-0.27.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5bb22956af44bb4eade93546bf95be610c8939b9a9d4d28b2dfa94abf454fed7", size = 1189144 },
+ { url = "https://files.pythonhosted.org/packages/2d/54/5ecd89066cf579223d504abe3ac37ba11f63b01a19fd12591083acc00eb6/levenshtein-0.27.1-cp312-cp312-win32.whl", hash = "sha256:d9099ed1bcfa7ccc5540e8ad27b5dc6f23d16addcbe21fdd82af6440f4ed2b6d", size = 88279 },
+ { url = "https://files.pythonhosted.org/packages/53/79/4f8fabcc5aca9305b494d1d6c7a98482e90a855e0050ae9ff5d7bf4ab2c6/levenshtein-0.27.1-cp312-cp312-win_amd64.whl", hash = "sha256:7f071ecdb50aa6c15fd8ae5bcb67e9da46ba1df7bba7c6bf6803a54c7a41fd96", size = 100659 },
+ { url = "https://files.pythonhosted.org/packages/cb/81/f8e4c0f571c2aac2e0c56a6e0e41b679937a2b7013e79415e4aef555cff0/levenshtein-0.27.1-cp312-cp312-win_arm64.whl", hash = "sha256:83b9033a984ccace7703f35b688f3907d55490182fd39b33a8e434d7b2e249e6", size = 88168 },
+ { url = "https://files.pythonhosted.org/packages/c6/d3/30485fb9aee848542ee2d01aba85106a7f5da982ebeeffc619f70ea593c7/levenshtein-0.27.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ab00c2cae2889166afb7e1af64af2d4e8c1b126f3902d13ef3740df00e54032d", size = 173397 },
+ { url = "https://files.pythonhosted.org/packages/df/9f/40a81c54cfe74b22737710e654bd25ad934a675f737b60b24f84099540e0/levenshtein-0.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c27e00bc7527e282f7c437817081df8da4eb7054e7ef9055b851fa3947896560", size = 155787 },
+ { url = "https://files.pythonhosted.org/packages/df/98/915f4e24e21982b6eca2c0203546c160f4a83853fa6a2ac6e2b208a54afc/levenshtein-0.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5b07de42bfc051136cc8e7f1e7ba2cb73666aa0429930f4218efabfdc5837ad", size = 150013 },
+ { url = "https://files.pythonhosted.org/packages/80/93/9b0773107580416b9de14bf6a12bd1dd2b2964f7a9f6fb0e40723e1f0572/levenshtein-0.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb11ad3c9dae3063405aa50d9c96923722ab17bb606c776b6817d70b51fd7e07", size = 181234 },
+ { url = "https://files.pythonhosted.org/packages/91/b1/3cd4f69af32d40de14808142cc743af3a1b737b25571bd5e8d2f46b885e0/levenshtein-0.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c5986fb46cb0c063305fd45b0a79924abf2959a6d984bbac2b511d3ab259f3f", size = 183697 },
+ { url = "https://files.pythonhosted.org/packages/bb/65/b691e502c6463f6965b7e0d8d84224c188aa35b53fbc85853c72a0e436c9/levenshtein-0.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75191e469269ddef2859bc64c4a8cfd6c9e063302766b5cb7e1e67f38cc7051a", size = 159964 },
+ { url = "https://files.pythonhosted.org/packages/0f/c0/89a922a47306a475fb6d8f2ab08668f143d3dc7dea4c39d09e46746e031c/levenshtein-0.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:51b3a7b2266933babc04e4d9821a495142eebd6ef709f90e24bc532b52b81385", size = 244759 },
+ { url = "https://files.pythonhosted.org/packages/b4/93/30283c6e69a6556b02e0507c88535df9613179f7b44bc49cdb4bc5e889a3/levenshtein-0.27.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bbac509794afc3e2a9e73284c9e3d0aab5b1d928643f42b172969c3eefa1f2a3", size = 1115955 },
+ { url = "https://files.pythonhosted.org/packages/0b/cf/7e19ea2c23671db02fbbe5a5a4aeafd1d471ee573a6251ae17008458c434/levenshtein-0.27.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8d68714785178347ecb272b94e85cbf7e638165895c4dd17ab57e7742d8872ec", size = 1400921 },
+ { url = "https://files.pythonhosted.org/packages/e3/f7/fb42bfe2f3b46ef91f0fc6fa217b44dbeb4ef8c72a9c1917bbbe1cafc0f8/levenshtein-0.27.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:8ee74ee31a5ab8f61cd6c6c6e9ade4488dde1285f3c12207afc018393c9b8d14", size = 1225037 },
+ { url = "https://files.pythonhosted.org/packages/74/25/c86f8874ac7b0632b172d0d1622ed3ab9608a7f8fe85d41d632b16f5948e/levenshtein-0.27.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:f2441b6365453ec89640b85344afd3d602b0d9972840b693508074c613486ce7", size = 1420601 },
+ { url = "https://files.pythonhosted.org/packages/20/fe/ebfbaadcd90ea7dfde987ae95b5c11dc27c2c5d55a2c4ccbbe4e18a8af7b/levenshtein-0.27.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a9be39640a46d8a0f9be729e641651d16a62b2c07d3f4468c36e1cc66b0183b9", size = 1188241 },
+ { url = "https://files.pythonhosted.org/packages/2e/1a/aa6b07316e10781a6c5a5a8308f9bdc22213dc3911b959daa6d7ff654fc6/levenshtein-0.27.1-cp313-cp313-win32.whl", hash = "sha256:a520af67d976761eb6580e7c026a07eb8f74f910f17ce60e98d6e492a1f126c7", size = 88103 },
+ { url = "https://files.pythonhosted.org/packages/9d/7b/9bbfd417f80f1047a28d0ea56a9b38b9853ba913b84dd5998785c5f98541/levenshtein-0.27.1-cp313-cp313-win_amd64.whl", hash = "sha256:7dd60aa49c2d8d23e0ef6452c8329029f5d092f386a177e3385d315cabb78f2a", size = 100579 },
+ { url = "https://files.pythonhosted.org/packages/8b/01/5f3ff775db7340aa378b250e2a31e6b4b038809a24ff0a3636ef20c7ca31/levenshtein-0.27.1-cp313-cp313-win_arm64.whl", hash = "sha256:149cd4f0baf5884ac5df625b7b0d281721b15de00f447080e38f5188106e1167", size = 87933 },
+]
+
+[[package]]
+name = "lox"
+version = "0.12.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pathos" },
+ { name = "sphinx-rtd-theme" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/0f/b5/2bfa8da2a1dd6647c3ea0b8d7ae366bbb36b49f9f3858a253199daacb860/lox-0.12.0.tar.gz", hash = "sha256:cc7d5f867afb4dc7c2bce7bd6e90f4665c6df492863f35ff63229300b7219977", size = 37579 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/02/9a/cc790ca4b853821b76acb5944d32036590a789e5f3b9e4f10a8962bcfda5/lox-0.12.0-py2.py3-none-any.whl", hash = "sha256:ac0a392662f3a75cc9097655d26169d5e3564e2670431fd9884a7a09a09f6921", size = 25372 },
+]
+
+[[package]]
+name = "markdown-it-py"
+version = "3.0.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "mdurl" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528 },
+]
+
+[[package]]
+name = "markupsafe"
+version = "3.0.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274 },
+ { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348 },
+ { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149 },
+ { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118 },
+ { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993 },
+ { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178 },
+ { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319 },
+ { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352 },
+ { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097 },
+ { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601 },
+ { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274 },
+ { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352 },
+ { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122 },
+ { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085 },
+ { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978 },
+ { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208 },
+ { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357 },
+ { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344 },
+ { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101 },
+ { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603 },
+ { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510 },
+ { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486 },
+ { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480 },
+ { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914 },
+ { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796 },
+ { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473 },
+ { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114 },
+ { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098 },
+ { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208 },
+ { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739 },
+]
+
+[[package]]
+name = "marshmallow"
+version = "3.26.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "packaging" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ab/5e/5e53d26b42ab75491cda89b871dab9e97c840bf12c63ec58a1919710cd06/marshmallow-3.26.1.tar.gz", hash = "sha256:e6d8affb6cb61d39d26402096dc0aee12d5a26d490a121f118d2e81dc0719dc6", size = 221825 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/34/75/51952c7b2d3873b44a0028b1bd26a25078c18f92f256608e8d1dc61b39fd/marshmallow-3.26.1-py3-none-any.whl", hash = "sha256:3350409f20a70a7e4e11a27661187b77cdcaeb20abca41c1454fe33636bea09c", size = 50878 },
+]
+
+[[package]]
+name = "mcp"
+version = "1.3.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "anyio" },
+ { name = "httpx" },
+ { name = "httpx-sse" },
+ { name = "pydantic" },
+ { name = "pydantic-settings" },
+ { name = "sse-starlette" },
+ { name = "starlette" },
+ { name = "uvicorn" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/6b/b6/81e5f2490290351fc97bf46c24ff935128cb7d34d68e3987b522f26f7ada/mcp-1.3.0.tar.gz", hash = "sha256:f409ae4482ce9d53e7ac03f3f7808bcab735bdfc0fba937453782efb43882d45", size = 150235 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d0/d2/a9e87b506b2094f5aa9becc1af5178842701b27217fa43877353da2577e3/mcp-1.3.0-py3-none-any.whl", hash = "sha256:2829d67ce339a249f803f22eba5e90385eafcac45c94b00cab6cef7e8f217211", size = 70672 },
+]
+
+[package.optional-dependencies]
+cli = [
+ { name = "python-dotenv" },
+ { name = "typer" },
+]
+
+[[package]]
+name = "mdurl"
+version = "0.1.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 },
+]
+
+[[package]]
+name = "mini-racer"
+version = "0.12.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/8c/2d/e051f58e17117b1b8b11a7d17622c1528fa9002c553943c6b677c1b412da/mini_racer-0.12.4.tar.gz", hash = "sha256:84c67553ce9f3736d4c617d8a3f882949d37a46cfb47fe11dab33dd6704e62a4", size = 447529 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/71/fe/1452b6c74cae9e8cd7b6a16d8b1ef08bba4dd0ed373a95f3b401c2e712ea/mini_racer-0.12.4-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:bce8a3cee946575a352f5e65335903bc148da42c036d0c738ac67e931600e455", size = 15701219 },
+ { url = "https://files.pythonhosted.org/packages/99/ae/c22478eff26e6136341e6b40d34f8d285f910ca4d2e2a0ca4703ef87be79/mini_racer-0.12.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:56c832e6ac2db6a304d1e8e80030615297aafbc6940f64f3479af4ba16abccd5", size = 14566436 },
+ { url = "https://files.pythonhosted.org/packages/44/89/f062aa116b14fcace91f0af86a37605f0ba7c07a01c8101b5ea104d489b1/mini_racer-0.12.4-py3-none-manylinux_2_31_aarch64.whl", hash = "sha256:b82c4bd2976e280ed0a72c9c2de01b13f18ccfbe6f4892cbc22aae04410fac3c", size = 14931664 },
+ { url = "https://files.pythonhosted.org/packages/9c/a1/09122c88a0dd0a2141b0ea068d70f5d31acd0015d6f3157b8efd3ff7e026/mini_racer-0.12.4-py3-none-manylinux_2_31_x86_64.whl", hash = "sha256:69a1c44d02a9069b881684cef15a2d747fe0743df29eadc881fda7002aae5fd2", size = 14955238 },
+ { url = "https://files.pythonhosted.org/packages/6c/3b/826e41f92631560e5c6ca2aa4ef9005bdccf9290c1e7ddebe05e0a3b8c7c/mini_racer-0.12.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:499dbc267dfe60e954bc1b6c3787f7b10fc41fe1975853c9a6ddb55eb83dc4d9", size = 15211136 },
+ { url = "https://files.pythonhosted.org/packages/e5/37/15b30316630d1f63b025f058dc92efa75931a37315c34ca07f80be2cc405/mini_racer-0.12.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:231f949f5787d18351939f1fe59e5a6fe134bccb5ecf8f836b9beab69d91c8d9", size = 15128684 },
+ { url = "https://files.pythonhosted.org/packages/5c/0e/a9943f90b4a8a6d3849b81a00a00d2db128d876365385af382a0e2caf191/mini_racer-0.12.4-py3-none-win_amd64.whl", hash = "sha256:9446e3bd6a4eb9fbedf1861326f7476080995a31c9b69308acef17e5b7ecaa1b", size = 13674040 },
+]
+
+[[package]]
+name = "modal"
+version = "0.73.87"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "aiohttp" },
+ { name = "certifi" },
+ { name = "click" },
+ { name = "fastapi" },
+ { name = "grpclib" },
+ { name = "protobuf" },
+ { name = "rich" },
+ { name = "synchronicity" },
+ { name = "toml" },
+ { name = "typer" },
+ { name = "types-certifi" },
+ { name = "types-toml" },
+ { name = "typing-extensions" },
+ { name = "watchfiles" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ea/60/5e8bdc689d0a966f72fa523fd8d0c335893c68a036c932be26d2d52f00b9/modal-0.73.87.tar.gz", hash = "sha256:07052bebfe043b411d4ce7fcac1a69b3c7840d19cda3f2320d4bad3c2bfcd7a5", size = 469486 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/28/94/13dccb2a54c44bd5c566f12f478de2d16a8d2c416d6c0c39505f05c5f838/modal-0.73.87-py3-none-any.whl", hash = "sha256:8a372003cbac173b9d28a7a583eece9cd9b083653be258fe266ff04e17b13c09", size = 535780 },
+]
+
+[[package]]
+name = "msgpack"
+version = "1.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/cb/d0/7555686ae7ff5731205df1012ede15dd9d927f6227ea151e901c7406af4f/msgpack-1.1.0.tar.gz", hash = "sha256:dd432ccc2c72b914e4cb77afce64aab761c1137cc698be3984eee260bcb2896e", size = 167260 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e1/d6/716b7ca1dbde63290d2973d22bbef1b5032ca634c3ff4384a958ec3f093a/msgpack-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d46cf9e3705ea9485687aa4001a76e44748b609d260af21c4ceea7f2212a501d", size = 152421 },
+ { url = "https://files.pythonhosted.org/packages/70/da/5312b067f6773429cec2f8f08b021c06af416bba340c912c2ec778539ed6/msgpack-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5dbad74103df937e1325cc4bfeaf57713be0b4f15e1c2da43ccdd836393e2ea2", size = 85277 },
+ { url = "https://files.pythonhosted.org/packages/28/51/da7f3ae4462e8bb98af0d5bdf2707f1b8c65a0d4f496e46b6afb06cbc286/msgpack-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58dfc47f8b102da61e8949708b3eafc3504509a5728f8b4ddef84bd9e16ad420", size = 82222 },
+ { url = "https://files.pythonhosted.org/packages/33/af/dc95c4b2a49cff17ce47611ca9ba218198806cad7796c0b01d1e332c86bb/msgpack-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676e5be1b472909b2ee6356ff425ebedf5142427842aa06b4dfd5117d1ca8a2", size = 392971 },
+ { url = "https://files.pythonhosted.org/packages/f1/54/65af8de681fa8255402c80eda2a501ba467921d5a7a028c9c22a2c2eedb5/msgpack-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17fb65dd0bec285907f68b15734a993ad3fc94332b5bb21b0435846228de1f39", size = 401403 },
+ { url = "https://files.pythonhosted.org/packages/97/8c/e333690777bd33919ab7024269dc3c41c76ef5137b211d776fbb404bfead/msgpack-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a51abd48c6d8ac89e0cfd4fe177c61481aca2d5e7ba42044fd218cfd8ea9899f", size = 385356 },
+ { url = "https://files.pythonhosted.org/packages/57/52/406795ba478dc1c890559dd4e89280fa86506608a28ccf3a72fbf45df9f5/msgpack-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2137773500afa5494a61b1208619e3871f75f27b03bcfca7b3a7023284140247", size = 383028 },
+ { url = "https://files.pythonhosted.org/packages/e7/69/053b6549bf90a3acadcd8232eae03e2fefc87f066a5b9fbb37e2e608859f/msgpack-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:398b713459fea610861c8a7b62a6fec1882759f308ae0795b5413ff6a160cf3c", size = 391100 },
+ { url = "https://files.pythonhosted.org/packages/23/f0/d4101d4da054f04274995ddc4086c2715d9b93111eb9ed49686c0f7ccc8a/msgpack-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:06f5fd2f6bb2a7914922d935d3b8bb4a7fff3a9a91cfce6d06c13bc42bec975b", size = 394254 },
+ { url = "https://files.pythonhosted.org/packages/1c/12/cf07458f35d0d775ff3a2dc5559fa2e1fcd06c46f1ef510e594ebefdca01/msgpack-1.1.0-cp312-cp312-win32.whl", hash = "sha256:ad33e8400e4ec17ba782f7b9cf868977d867ed784a1f5f2ab46e7ba53b6e1e1b", size = 69085 },
+ { url = "https://files.pythonhosted.org/packages/73/80/2708a4641f7d553a63bc934a3eb7214806b5b39d200133ca7f7afb0a53e8/msgpack-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:115a7af8ee9e8cddc10f87636767857e7e3717b7a2e97379dc2054712693e90f", size = 75347 },
+ { url = "https://files.pythonhosted.org/packages/c8/b0/380f5f639543a4ac413e969109978feb1f3c66e931068f91ab6ab0f8be00/msgpack-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:071603e2f0771c45ad9bc65719291c568d4edf120b44eb36324dcb02a13bfddf", size = 151142 },
+ { url = "https://files.pythonhosted.org/packages/c8/ee/be57e9702400a6cb2606883d55b05784fada898dfc7fd12608ab1fdb054e/msgpack-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0f92a83b84e7c0749e3f12821949d79485971f087604178026085f60ce109330", size = 84523 },
+ { url = "https://files.pythonhosted.org/packages/7e/3a/2919f63acca3c119565449681ad08a2f84b2171ddfcff1dba6959db2cceb/msgpack-1.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1964df7b81285d00a84da4e70cb1383f2e665e0f1f2a7027e683956d04b734", size = 81556 },
+ { url = "https://files.pythonhosted.org/packages/7c/43/a11113d9e5c1498c145a8925768ea2d5fce7cbab15c99cda655aa09947ed/msgpack-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59caf6a4ed0d164055ccff8fe31eddc0ebc07cf7326a2aaa0dbf7a4001cd823e", size = 392105 },
+ { url = "https://files.pythonhosted.org/packages/2d/7b/2c1d74ca6c94f70a1add74a8393a0138172207dc5de6fc6269483519d048/msgpack-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0907e1a7119b337971a689153665764adc34e89175f9a34793307d9def08e6ca", size = 399979 },
+ { url = "https://files.pythonhosted.org/packages/82/8c/cf64ae518c7b8efc763ca1f1348a96f0e37150061e777a8ea5430b413a74/msgpack-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65553c9b6da8166e819a6aa90ad15288599b340f91d18f60b2061f402b9a4915", size = 383816 },
+ { url = "https://files.pythonhosted.org/packages/69/86/a847ef7a0f5ef3fa94ae20f52a4cacf596a4e4a010197fbcc27744eb9a83/msgpack-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7a946a8992941fea80ed4beae6bff74ffd7ee129a90b4dd5cf9c476a30e9708d", size = 380973 },
+ { url = "https://files.pythonhosted.org/packages/aa/90/c74cf6e1126faa93185d3b830ee97246ecc4fe12cf9d2d31318ee4246994/msgpack-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4b51405e36e075193bc051315dbf29168d6141ae2500ba8cd80a522964e31434", size = 387435 },
+ { url = "https://files.pythonhosted.org/packages/7a/40/631c238f1f338eb09f4acb0f34ab5862c4e9d7eda11c1b685471a4c5ea37/msgpack-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4c01941fd2ff87c2a934ee6055bda4ed353a7846b8d4f341c428109e9fcde8c", size = 399082 },
+ { url = "https://files.pythonhosted.org/packages/e9/1b/fa8a952be252a1555ed39f97c06778e3aeb9123aa4cccc0fd2acd0b4e315/msgpack-1.1.0-cp313-cp313-win32.whl", hash = "sha256:7c9a35ce2c2573bada929e0b7b3576de647b0defbd25f5139dcdaba0ae35a4cc", size = 69037 },
+ { url = "https://files.pythonhosted.org/packages/b6/bc/8bd826dd03e022153bfa1766dcdec4976d6c818865ed54223d71f07862b3/msgpack-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:bce7d9e614a04d0883af0b3d4d501171fbfca038f12c77fa838d9f198147a23f", size = 75140 },
+]
+
+[[package]]
+name = "multidict"
+version = "6.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d6/be/504b89a5e9ca731cd47487e91c469064f8ae5af93b7259758dcfc2b9c848/multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a", size = 64002 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/fd/16/92057c74ba3b96d5e211b553895cd6dc7cc4d1e43d9ab8fafc727681ef71/multidict-6.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa", size = 48713 },
+ { url = "https://files.pythonhosted.org/packages/94/3d/37d1b8893ae79716179540b89fc6a0ee56b4a65fcc0d63535c6f5d96f217/multidict-6.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436", size = 29516 },
+ { url = "https://files.pythonhosted.org/packages/a2/12/adb6b3200c363062f805275b4c1e656be2b3681aada66c80129932ff0bae/multidict-6.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761", size = 29557 },
+ { url = "https://files.pythonhosted.org/packages/47/e9/604bb05e6e5bce1e6a5cf80a474e0f072e80d8ac105f1b994a53e0b28c42/multidict-6.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e", size = 130170 },
+ { url = "https://files.pythonhosted.org/packages/7e/13/9efa50801785eccbf7086b3c83b71a4fb501a4d43549c2f2f80b8787d69f/multidict-6.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef", size = 134836 },
+ { url = "https://files.pythonhosted.org/packages/bf/0f/93808b765192780d117814a6dfcc2e75de6dcc610009ad408b8814dca3ba/multidict-6.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95", size = 133475 },
+ { url = "https://files.pythonhosted.org/packages/d3/c8/529101d7176fe7dfe1d99604e48d69c5dfdcadb4f06561f465c8ef12b4df/multidict-6.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925", size = 131049 },
+ { url = "https://files.pythonhosted.org/packages/ca/0c/fc85b439014d5a58063e19c3a158a889deec399d47b5269a0f3b6a2e28bc/multidict-6.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966", size = 120370 },
+ { url = "https://files.pythonhosted.org/packages/db/46/d4416eb20176492d2258fbd47b4abe729ff3b6e9c829ea4236f93c865089/multidict-6.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305", size = 125178 },
+ { url = "https://files.pythonhosted.org/packages/5b/46/73697ad7ec521df7de5531a32780bbfd908ded0643cbe457f981a701457c/multidict-6.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2", size = 119567 },
+ { url = "https://files.pythonhosted.org/packages/cd/ed/51f060e2cb0e7635329fa6ff930aa5cffa17f4c7f5c6c3ddc3500708e2f2/multidict-6.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2", size = 129822 },
+ { url = "https://files.pythonhosted.org/packages/df/9e/ee7d1954b1331da3eddea0c4e08d9142da5f14b1321c7301f5014f49d492/multidict-6.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6", size = 128656 },
+ { url = "https://files.pythonhosted.org/packages/77/00/8538f11e3356b5d95fa4b024aa566cde7a38aa7a5f08f4912b32a037c5dc/multidict-6.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3", size = 125360 },
+ { url = "https://files.pythonhosted.org/packages/be/05/5d334c1f2462d43fec2363cd00b1c44c93a78c3925d952e9a71caf662e96/multidict-6.1.0-cp312-cp312-win32.whl", hash = "sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133", size = 26382 },
+ { url = "https://files.pythonhosted.org/packages/a3/bf/f332a13486b1ed0496d624bcc7e8357bb8053823e8cd4b9a18edc1d97e73/multidict-6.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1", size = 28529 },
+ { url = "https://files.pythonhosted.org/packages/22/67/1c7c0f39fe069aa4e5d794f323be24bf4d33d62d2a348acdb7991f8f30db/multidict-6.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008", size = 48771 },
+ { url = "https://files.pythonhosted.org/packages/3c/25/c186ee7b212bdf0df2519eacfb1981a017bda34392c67542c274651daf23/multidict-6.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f", size = 29533 },
+ { url = "https://files.pythonhosted.org/packages/67/5e/04575fd837e0958e324ca035b339cea174554f6f641d3fb2b4f2e7ff44a2/multidict-6.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28", size = 29595 },
+ { url = "https://files.pythonhosted.org/packages/d3/b2/e56388f86663810c07cfe4a3c3d87227f3811eeb2d08450b9e5d19d78876/multidict-6.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b", size = 130094 },
+ { url = "https://files.pythonhosted.org/packages/6c/ee/30ae9b4186a644d284543d55d491fbd4239b015d36b23fea43b4c94f7052/multidict-6.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c", size = 134876 },
+ { url = "https://files.pythonhosted.org/packages/84/c7/70461c13ba8ce3c779503c70ec9d0345ae84de04521c1f45a04d5f48943d/multidict-6.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3", size = 133500 },
+ { url = "https://files.pythonhosted.org/packages/4a/9f/002af221253f10f99959561123fae676148dd730e2daa2cd053846a58507/multidict-6.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44", size = 131099 },
+ { url = "https://files.pythonhosted.org/packages/82/42/d1c7a7301d52af79d88548a97e297f9d99c961ad76bbe6f67442bb77f097/multidict-6.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2", size = 120403 },
+ { url = "https://files.pythonhosted.org/packages/68/f3/471985c2c7ac707547553e8f37cff5158030d36bdec4414cb825fbaa5327/multidict-6.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3", size = 125348 },
+ { url = "https://files.pythonhosted.org/packages/67/2c/e6df05c77e0e433c214ec1d21ddd203d9a4770a1f2866a8ca40a545869a0/multidict-6.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa", size = 119673 },
+ { url = "https://files.pythonhosted.org/packages/c5/cd/bc8608fff06239c9fb333f9db7743a1b2eafe98c2666c9a196e867a3a0a4/multidict-6.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa", size = 129927 },
+ { url = "https://files.pythonhosted.org/packages/44/8e/281b69b7bc84fc963a44dc6e0bbcc7150e517b91df368a27834299a526ac/multidict-6.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4", size = 128711 },
+ { url = "https://files.pythonhosted.org/packages/12/a4/63e7cd38ed29dd9f1881d5119f272c898ca92536cdb53ffe0843197f6c85/multidict-6.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6", size = 125519 },
+ { url = "https://files.pythonhosted.org/packages/38/e0/4f5855037a72cd8a7a2f60a3952d9aa45feedb37ae7831642102604e8a37/multidict-6.1.0-cp313-cp313-win32.whl", hash = "sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81", size = 26426 },
+ { url = "https://files.pythonhosted.org/packages/7e/a5/17ee3a4db1e310b7405f5d25834460073a8ccd86198ce044dfaf69eac073/multidict-6.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774", size = 28531 },
+ { url = "https://files.pythonhosted.org/packages/99/b7/b9e70fde2c0f0c9af4cc5277782a89b66d35948ea3369ec9f598358c3ac5/multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506", size = 10051 },
+]
+
+[[package]]
+name = "multiprocess"
+version = "0.70.16"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "dill" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b5/ae/04f39c5d0d0def03247c2893d6f2b83c136bf3320a2154d7b8858f2ba72d/multiprocess-0.70.16.tar.gz", hash = "sha256:161af703d4652a0e1410be6abccecde4a7ddffd19341be0a7011b94aeb171ac1", size = 1772603 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/bc/f7/7ec7fddc92e50714ea3745631f79bd9c96424cb2702632521028e57d3a36/multiprocess-0.70.16-py310-none-any.whl", hash = "sha256:c4a9944c67bd49f823687463660a2d6daae94c289adff97e0f9d696ba6371d02", size = 134824 },
+ { url = "https://files.pythonhosted.org/packages/50/15/b56e50e8debaf439f44befec5b2af11db85f6e0f344c3113ae0be0593a91/multiprocess-0.70.16-py311-none-any.whl", hash = "sha256:af4cabb0dac72abfb1e794fa7855c325fd2b55a10a44628a3c1ad3311c04127a", size = 143519 },
+ { url = "https://files.pythonhosted.org/packages/0a/7d/a988f258104dcd2ccf1ed40fdc97e26c4ac351eeaf81d76e266c52d84e2f/multiprocess-0.70.16-py312-none-any.whl", hash = "sha256:fc0544c531920dde3b00c29863377f87e1632601092ea2daca74e4beb40faa2e", size = 146741 },
+ { url = "https://files.pythonhosted.org/packages/ea/89/38df130f2c799090c978b366cfdf5b96d08de5b29a4a293df7f7429fa50b/multiprocess-0.70.16-py38-none-any.whl", hash = "sha256:a71d82033454891091a226dfc319d0cfa8019a4e888ef9ca910372a446de4435", size = 132628 },
+ { url = "https://files.pythonhosted.org/packages/da/d9/f7f9379981e39b8c2511c9e0326d212accacb82f12fbfdc1aa2ce2a7b2b6/multiprocess-0.70.16-py39-none-any.whl", hash = "sha256:a0bafd3ae1b732eac64be2e72038231c1ba97724b60b09400d68f229fcc2fbf3", size = 133351 },
+]
+
+[[package]]
+name = "mypy"
+version = "1.15.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "mypy-extensions" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ce/43/d5e49a86afa64bd3839ea0d5b9c7103487007d728e1293f52525d6d5486a/mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43", size = 3239717 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/98/3a/03c74331c5eb8bd025734e04c9840532226775c47a2c39b56a0c8d4f128d/mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd", size = 10793981 },
+ { url = "https://files.pythonhosted.org/packages/f0/1a/41759b18f2cfd568848a37c89030aeb03534411eef981df621d8fad08a1d/mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f", size = 9749175 },
+ { url = "https://files.pythonhosted.org/packages/12/7e/873481abf1ef112c582db832740f4c11b2bfa510e829d6da29b0ab8c3f9c/mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464", size = 11455675 },
+ { url = "https://files.pythonhosted.org/packages/b3/d0/92ae4cde706923a2d3f2d6c39629134063ff64b9dedca9c1388363da072d/mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee", size = 12410020 },
+ { url = "https://files.pythonhosted.org/packages/46/8b/df49974b337cce35f828ba6fda228152d6db45fed4c86ba56ffe442434fd/mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e", size = 12498582 },
+ { url = "https://files.pythonhosted.org/packages/13/50/da5203fcf6c53044a0b699939f31075c45ae8a4cadf538a9069b165c1050/mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22", size = 9366614 },
+ { url = "https://files.pythonhosted.org/packages/6a/9b/fd2e05d6ffff24d912f150b87db9e364fa8282045c875654ce7e32fffa66/mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445", size = 10788592 },
+ { url = "https://files.pythonhosted.org/packages/74/37/b246d711c28a03ead1fd906bbc7106659aed7c089d55fe40dd58db812628/mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d", size = 9753611 },
+ { url = "https://files.pythonhosted.org/packages/a6/ac/395808a92e10cfdac8003c3de9a2ab6dc7cde6c0d2a4df3df1b815ffd067/mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5", size = 11438443 },
+ { url = "https://files.pythonhosted.org/packages/d2/8b/801aa06445d2de3895f59e476f38f3f8d610ef5d6908245f07d002676cbf/mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036", size = 12402541 },
+ { url = "https://files.pythonhosted.org/packages/c7/67/5a4268782eb77344cc613a4cf23540928e41f018a9a1ec4c6882baf20ab8/mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357", size = 12494348 },
+ { url = "https://files.pythonhosted.org/packages/83/3e/57bb447f7bbbfaabf1712d96f9df142624a386d98fb026a761532526057e/mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf", size = 9373648 },
+ { url = "https://files.pythonhosted.org/packages/09/4e/a7d65c7322c510de2c409ff3828b03354a7c43f5a8ed458a7a131b41c7b9/mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e", size = 2221777 },
+]
+
+[[package]]
+name = "mypy-extensions"
+version = "1.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782", size = 4433 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695 },
+]
+
+[[package]]
+name = "narwhals"
+version = "1.29.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e6/f7/caa23ebc4aed3ef2314441c44e1d842e701adc6af57587ffda9263c03b6e/narwhals-1.29.0.tar.gz", hash = "sha256:1021c345d56c66ff0cc8e6d03ca8c543d01ffc411630973a5cb69ee86824d823", size = 248349 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ee/f6/1fcd6b3d0e21d9b75e71ae68fbc92bbb9b9b1f4f33dd81c61d8f53378b30/narwhals-1.29.0-py3-none-any.whl", hash = "sha256:653aa8e5eb435816e7b50c8def17e7e5e3324c2ffd8a3eec03fef85792e9cf5e", size = 305214 },
+]
+
+[[package]]
+name = "neo4j"
+version = "5.28.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pytz" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/4b/20/733dac16f7cedc80b23093415822c9763302519cba0e7c8bcdb5c01fc512/neo4j-5.28.1.tar.gz", hash = "sha256:ae8e37a1d895099062c75bc359b2cce62099baac7be768d0eba7180c1298e214", size = 231094 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/6a/57/94225fe5e9dabdc0ff60c88cbfcedf11277f4b34e7ab1373d3e62dbdd207/neo4j-5.28.1-py3-none-any.whl", hash = "sha256:6755ef9e5f4e14b403aef1138fb6315b120631a0075c138b5ddb2a06b87b09fd", size = 312258 },
+]
+
+[[package]]
+name = "networkx"
+version = "3.4.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/fd/1d/06475e1cd5264c0b870ea2cc6fdb3e37177c1e565c43f56ff17a10e3937f/networkx-3.4.2.tar.gz", hash = "sha256:307c3669428c5362aab27c8a1260aa8f47c4e91d3891f48be0141738d8d053e1", size = 2151368 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b9/54/dd730b32ea14ea797530a4479b2ed46a6fb250f682a9cfb997e968bf0261/networkx-3.4.2-py3-none-any.whl", hash = "sha256:df5d4365b724cf81b8c6a7312509d0c22386097011ad1abe274afd5e9d3bbc5f", size = 1723263 },
+]
+
+[[package]]
+name = "nodeenv"
+version = "1.9.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314 },
+]
+
+[[package]]
+name = "numpy"
+version = "2.2.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/fb/90/8956572f5c4ae52201fdec7ba2044b2c882832dcec7d5d0922c9e9acf2de/numpy-2.2.3.tar.gz", hash = "sha256:dbdc15f0c81611925f382dfa97b3bd0bc2c1ce19d4fe50482cb0ddc12ba30020", size = 20262700 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/43/ec/43628dcf98466e087812142eec6d1c1a6c6bdfdad30a0aa07b872dc01f6f/numpy-2.2.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12c045f43b1d2915eca6b880a7f4a256f59d62df4f044788c8ba67709412128d", size = 20929458 },
+ { url = "https://files.pythonhosted.org/packages/9b/c0/2f4225073e99a5c12350954949ed19b5d4a738f541d33e6f7439e33e98e4/numpy-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:87eed225fd415bbae787f93a457af7f5990b92a334e346f72070bf569b9c9c95", size = 14115299 },
+ { url = "https://files.pythonhosted.org/packages/ca/fa/d2c5575d9c734a7376cc1592fae50257ec95d061b27ee3dbdb0b3b551eb2/numpy-2.2.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:712a64103d97c404e87d4d7c47fb0c7ff9acccc625ca2002848e0d53288b90ea", size = 5145723 },
+ { url = "https://files.pythonhosted.org/packages/eb/dc/023dad5b268a7895e58e791f28dc1c60eb7b6c06fcbc2af8538ad069d5f3/numpy-2.2.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:a5ae282abe60a2db0fd407072aff4599c279bcd6e9a2475500fc35b00a57c532", size = 6678797 },
+ { url = "https://files.pythonhosted.org/packages/3f/19/bcd641ccf19ac25abb6fb1dcd7744840c11f9d62519d7057b6ab2096eb60/numpy-2.2.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5266de33d4c3420973cf9ae3b98b54a2a6d53a559310e3236c4b2b06b9c07d4e", size = 14067362 },
+ { url = "https://files.pythonhosted.org/packages/39/04/78d2e7402fb479d893953fb78fa7045f7deb635ec095b6b4f0260223091a/numpy-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b787adbf04b0db1967798dba8da1af07e387908ed1553a0d6e74c084d1ceafe", size = 16116679 },
+ { url = "https://files.pythonhosted.org/packages/d0/a1/e90f7aa66512be3150cb9d27f3d9995db330ad1b2046474a13b7040dfd92/numpy-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:34c1b7e83f94f3b564b35f480f5652a47007dd91f7c839f404d03279cc8dd021", size = 15264272 },
+ { url = "https://files.pythonhosted.org/packages/dc/b6/50bd027cca494de4fa1fc7bf1662983d0ba5f256fa0ece2c376b5eb9b3f0/numpy-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4d8335b5f1b6e2bce120d55fb17064b0262ff29b459e8493d1785c18ae2553b8", size = 17880549 },
+ { url = "https://files.pythonhosted.org/packages/96/30/f7bf4acb5f8db10a96f73896bdeed7a63373137b131ca18bd3dab889db3b/numpy-2.2.3-cp312-cp312-win32.whl", hash = "sha256:4d9828d25fb246bedd31e04c9e75714a4087211ac348cb39c8c5f99dbb6683fe", size = 6293394 },
+ { url = "https://files.pythonhosted.org/packages/42/6e/55580a538116d16ae7c9aa17d4edd56e83f42126cb1dfe7a684da7925d2c/numpy-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:83807d445817326b4bcdaaaf8e8e9f1753da04341eceec705c001ff342002e5d", size = 12626357 },
+ { url = "https://files.pythonhosted.org/packages/0e/8b/88b98ed534d6a03ba8cddb316950fe80842885709b58501233c29dfa24a9/numpy-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bfdb06b395385ea9b91bf55c1adf1b297c9fdb531552845ff1d3ea6e40d5aba", size = 20916001 },
+ { url = "https://files.pythonhosted.org/packages/d9/b4/def6ec32c725cc5fbd8bdf8af80f616acf075fe752d8a23e895da8c67b70/numpy-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:23c9f4edbf4c065fddb10a4f6e8b6a244342d95966a48820c614891e5059bb50", size = 14130721 },
+ { url = "https://files.pythonhosted.org/packages/20/60/70af0acc86495b25b672d403e12cb25448d79a2b9658f4fc45e845c397a8/numpy-2.2.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:a0c03b6be48aaf92525cccf393265e02773be8fd9551a2f9adbe7db1fa2b60f1", size = 5130999 },
+ { url = "https://files.pythonhosted.org/packages/2e/69/d96c006fb73c9a47bcb3611417cf178049aae159afae47c48bd66df9c536/numpy-2.2.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:2376e317111daa0a6739e50f7ee2a6353f768489102308b0d98fcf4a04f7f3b5", size = 6665299 },
+ { url = "https://files.pythonhosted.org/packages/5a/3f/d8a877b6e48103733ac224ffa26b30887dc9944ff95dffdfa6c4ce3d7df3/numpy-2.2.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8fb62fe3d206d72fe1cfe31c4a1106ad2b136fcc1606093aeab314f02930fdf2", size = 14064096 },
+ { url = "https://files.pythonhosted.org/packages/e4/43/619c2c7a0665aafc80efca465ddb1f260287266bdbdce517396f2f145d49/numpy-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52659ad2534427dffcc36aac76bebdd02b67e3b7a619ac67543bc9bfe6b7cdb1", size = 16114758 },
+ { url = "https://files.pythonhosted.org/packages/d9/79/ee4fe4f60967ccd3897aa71ae14cdee9e3c097e3256975cc9575d393cb42/numpy-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1b416af7d0ed3271cad0f0a0d0bee0911ed7eba23e66f8424d9f3dfcdcae1304", size = 15259880 },
+ { url = "https://files.pythonhosted.org/packages/fb/c8/8b55cf05db6d85b7a7d414b3d1bd5a740706df00bfa0824a08bf041e52ee/numpy-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1402da8e0f435991983d0a9708b779f95a8c98c6b18a171b9f1be09005e64d9d", size = 17876721 },
+ { url = "https://files.pythonhosted.org/packages/21/d6/b4c2f0564b7dcc413117b0ffbb818d837e4b29996b9234e38b2025ed24e7/numpy-2.2.3-cp313-cp313-win32.whl", hash = "sha256:136553f123ee2951bfcfbc264acd34a2fc2f29d7cdf610ce7daf672b6fbaa693", size = 6290195 },
+ { url = "https://files.pythonhosted.org/packages/97/e7/7d55a86719d0de7a6a597949f3febefb1009435b79ba510ff32f05a8c1d7/numpy-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:5b732c8beef1d7bc2d9e476dbba20aaff6167bf205ad9aa8d30913859e82884b", size = 12619013 },
+ { url = "https://files.pythonhosted.org/packages/a6/1f/0b863d5528b9048fd486a56e0b97c18bf705e88736c8cea7239012119a54/numpy-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:435e7a933b9fda8126130b046975a968cc2d833b505475e588339e09f7672890", size = 20944621 },
+ { url = "https://files.pythonhosted.org/packages/aa/99/b478c384f7a0a2e0736177aafc97dc9152fc036a3fdb13f5a3ab225f1494/numpy-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:7678556eeb0152cbd1522b684dcd215250885993dd00adb93679ec3c0e6e091c", size = 14142502 },
+ { url = "https://files.pythonhosted.org/packages/fb/61/2d9a694a0f9cd0a839501d362de2a18de75e3004576a3008e56bdd60fcdb/numpy-2.2.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:2e8da03bd561504d9b20e7a12340870dfc206c64ea59b4cfee9fceb95070ee94", size = 5176293 },
+ { url = "https://files.pythonhosted.org/packages/33/35/51e94011b23e753fa33f891f601e5c1c9a3d515448659b06df9d40c0aa6e/numpy-2.2.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:c9aa4496fd0e17e3843399f533d62857cef5900facf93e735ef65aa4bbc90ef0", size = 6691874 },
+ { url = "https://files.pythonhosted.org/packages/ff/cf/06e37619aad98a9d03bd8d65b8e3041c3a639be0f5f6b0a0e2da544538d4/numpy-2.2.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4ca91d61a4bf61b0f2228f24bbfa6a9facd5f8af03759fe2a655c50ae2c6610", size = 14036826 },
+ { url = "https://files.pythonhosted.org/packages/0c/93/5d7d19955abd4d6099ef4a8ee006f9ce258166c38af259f9e5558a172e3e/numpy-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:deaa09cd492e24fd9b15296844c0ad1b3c976da7907e1c1ed3a0ad21dded6f76", size = 16096567 },
+ { url = "https://files.pythonhosted.org/packages/af/53/d1c599acf7732d81f46a93621dab6aa8daad914b502a7a115b3f17288ab2/numpy-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:246535e2f7496b7ac85deffe932896a3577be7af8fb7eebe7146444680297e9a", size = 15242514 },
+ { url = "https://files.pythonhosted.org/packages/53/43/c0f5411c7b3ea90adf341d05ace762dad8cb9819ef26093e27b15dd121ac/numpy-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:daf43a3d1ea699402c5a850e5313680ac355b4adc9770cd5cfc2940e7861f1bf", size = 17872920 },
+ { url = "https://files.pythonhosted.org/packages/5b/57/6dbdd45ab277aff62021cafa1e15f9644a52f5b5fc840bc7591b4079fb58/numpy-2.2.3-cp313-cp313t-win32.whl", hash = "sha256:cf802eef1f0134afb81fef94020351be4fe1d6681aadf9c5e862af6602af64ef", size = 6346584 },
+ { url = "https://files.pythonhosted.org/packages/97/9b/484f7d04b537d0a1202a5ba81c6f53f1846ae6c63c2127f8df869ed31342/numpy-2.2.3-cp313-cp313t-win_amd64.whl", hash = "sha256:aee2512827ceb6d7f517c8b85aa5d3923afe8fc7a57d028cffcd522f1c6fd082", size = 12706784 },
+]
+
+[[package]]
+name = "openai"
+version = "1.66.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "anyio" },
+ { name = "distro" },
+ { name = "httpx" },
+ { name = "jiter" },
+ { name = "pydantic" },
+ { name = "sniffio" },
+ { name = "tqdm" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a3/77/5172104ca1df35ed2ed8fb26dbc787f721c39498fc51d666c4db07756a0c/openai-1.66.3.tar.gz", hash = "sha256:8dde3aebe2d081258d4159c4cb27bdc13b5bb3f7ea2201d9bd940b9a89faf0c9", size = 397244 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/78/5a/e20182f7b6171642d759c548daa0ba20a1d3ac10d2bd0a13fd75704a9ac3/openai-1.66.3-py3-none-any.whl", hash = "sha256:a427c920f727711877ab17c11b95f1230b27767ba7a01e5b66102945141ceca9", size = 567400 },
+]
+
+[[package]]
+name = "orjson"
+version = "3.10.15"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ae/f9/5dea21763eeff8c1590076918a446ea3d6140743e0e36f58f369928ed0f4/orjson-3.10.15.tar.gz", hash = "sha256:05ca7fe452a2e9d8d9d706a2984c95b9c2ebc5db417ce0b7a49b91d50642a23e", size = 5282482 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/66/85/22fe737188905a71afcc4bf7cc4c79cd7f5bbe9ed1fe0aac4ce4c33edc30/orjson-3.10.15-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9d11c0714fc85bfcf36ada1179400862da3288fc785c30e8297844c867d7505a", size = 249504 },
+ { url = "https://files.pythonhosted.org/packages/48/b7/2622b29f3afebe938a0a9037e184660379797d5fd5234e5998345d7a5b43/orjson-3.10.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dba5a1e85d554e3897fa9fe6fbcff2ed32d55008973ec9a2b992bd9a65d2352d", size = 125080 },
+ { url = "https://files.pythonhosted.org/packages/ce/8f/0b72a48f4403d0b88b2a41450c535b3e8989e8a2d7800659a967efc7c115/orjson-3.10.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7723ad949a0ea502df656948ddd8b392780a5beaa4c3b5f97e525191b102fff0", size = 150121 },
+ { url = "https://files.pythonhosted.org/packages/06/ec/acb1a20cd49edb2000be5a0404cd43e3c8aad219f376ac8c60b870518c03/orjson-3.10.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6fd9bc64421e9fe9bd88039e7ce8e58d4fead67ca88e3a4014b143cec7684fd4", size = 139796 },
+ { url = "https://files.pythonhosted.org/packages/33/e1/f7840a2ea852114b23a52a1c0b2bea0a1ea22236efbcdb876402d799c423/orjson-3.10.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dadba0e7b6594216c214ef7894c4bd5f08d7c0135f4dd0145600be4fbcc16767", size = 154636 },
+ { url = "https://files.pythonhosted.org/packages/fa/da/31543337febd043b8fa80a3b67de627669b88c7b128d9ad4cc2ece005b7a/orjson-3.10.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48f59114fe318f33bbaee8ebeda696d8ccc94c9e90bc27dbe72153094e26f41", size = 130621 },
+ { url = "https://files.pythonhosted.org/packages/ed/78/66115dc9afbc22496530d2139f2f4455698be444c7c2475cb48f657cefc9/orjson-3.10.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:035fb83585e0f15e076759b6fedaf0abb460d1765b6a36f48018a52858443514", size = 138516 },
+ { url = "https://files.pythonhosted.org/packages/22/84/cd4f5fb5427ffcf823140957a47503076184cb1ce15bcc1165125c26c46c/orjson-3.10.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d13b7fe322d75bf84464b075eafd8e7dd9eae05649aa2a5354cfa32f43c59f17", size = 130762 },
+ { url = "https://files.pythonhosted.org/packages/93/1f/67596b711ba9f56dd75d73b60089c5c92057f1130bb3a25a0f53fb9a583b/orjson-3.10.15-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:7066b74f9f259849629e0d04db6609db4cf5b973248f455ba5d3bd58a4daaa5b", size = 414700 },
+ { url = "https://files.pythonhosted.org/packages/7c/0c/6a3b3271b46443d90efb713c3e4fe83fa8cd71cda0d11a0f69a03f437c6e/orjson-3.10.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:88dc3f65a026bd3175eb157fea994fca6ac7c4c8579fc5a86fc2114ad05705b7", size = 141077 },
+ { url = "https://files.pythonhosted.org/packages/3b/9b/33c58e0bfc788995eccd0d525ecd6b84b40d7ed182dd0751cd4c1322ac62/orjson-3.10.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b342567e5465bd99faa559507fe45e33fc76b9fb868a63f1642c6bc0735ad02a", size = 129898 },
+ { url = "https://files.pythonhosted.org/packages/01/c1/d577ecd2e9fa393366a1ea0a9267f6510d86e6c4bb1cdfb9877104cac44c/orjson-3.10.15-cp312-cp312-win32.whl", hash = "sha256:0a4f27ea5617828e6b58922fdbec67b0aa4bb844e2d363b9244c47fa2180e665", size = 142566 },
+ { url = "https://files.pythonhosted.org/packages/ed/eb/a85317ee1732d1034b92d56f89f1de4d7bf7904f5c8fb9dcdd5b1c83917f/orjson-3.10.15-cp312-cp312-win_amd64.whl", hash = "sha256:ef5b87e7aa9545ddadd2309efe6824bd3dd64ac101c15dae0f2f597911d46eaa", size = 133732 },
+ { url = "https://files.pythonhosted.org/packages/06/10/fe7d60b8da538e8d3d3721f08c1b7bff0491e8fa4dd3bf11a17e34f4730e/orjson-3.10.15-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:bae0e6ec2b7ba6895198cd981b7cca95d1487d0147c8ed751e5632ad16f031a6", size = 249399 },
+ { url = "https://files.pythonhosted.org/packages/6b/83/52c356fd3a61abd829ae7e4366a6fe8e8863c825a60d7ac5156067516edf/orjson-3.10.15-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f93ce145b2db1252dd86af37d4165b6faa83072b46e3995ecc95d4b2301b725a", size = 125044 },
+ { url = "https://files.pythonhosted.org/packages/55/b2/d06d5901408e7ded1a74c7c20d70e3a127057a6d21355f50c90c0f337913/orjson-3.10.15-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7c203f6f969210128af3acae0ef9ea6aab9782939f45f6fe02d05958fe761ef9", size = 150066 },
+ { url = "https://files.pythonhosted.org/packages/75/8c/60c3106e08dc593a861755781c7c675a566445cc39558677d505878d879f/orjson-3.10.15-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8918719572d662e18b8af66aef699d8c21072e54b6c82a3f8f6404c1f5ccd5e0", size = 139737 },
+ { url = "https://files.pythonhosted.org/packages/6a/8c/ae00d7d0ab8a4490b1efeb01ad4ab2f1982e69cc82490bf8093407718ff5/orjson-3.10.15-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f71eae9651465dff70aa80db92586ad5b92df46a9373ee55252109bb6b703307", size = 154804 },
+ { url = "https://files.pythonhosted.org/packages/22/86/65dc69bd88b6dd254535310e97bc518aa50a39ef9c5a2a5d518e7a223710/orjson-3.10.15-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e117eb299a35f2634e25ed120c37c641398826c2f5a3d3cc39f5993b96171b9e", size = 130583 },
+ { url = "https://files.pythonhosted.org/packages/bb/00/6fe01ededb05d52be42fabb13d93a36e51f1fd9be173bd95707d11a8a860/orjson-3.10.15-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:13242f12d295e83c2955756a574ddd6741c81e5b99f2bef8ed8d53e47a01e4b7", size = 138465 },
+ { url = "https://files.pythonhosted.org/packages/db/2f/4cc151c4b471b0cdc8cb29d3eadbce5007eb0475d26fa26ed123dca93b33/orjson-3.10.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7946922ada8f3e0b7b958cc3eb22cfcf6c0df83d1fe5521b4a100103e3fa84c8", size = 130742 },
+ { url = "https://files.pythonhosted.org/packages/9f/13/8a6109e4b477c518498ca37963d9c0eb1508b259725553fb53d53b20e2ea/orjson-3.10.15-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:b7155eb1623347f0f22c38c9abdd738b287e39b9982e1da227503387b81b34ca", size = 414669 },
+ { url = "https://files.pythonhosted.org/packages/22/7b/1d229d6d24644ed4d0a803de1b0e2df832032d5beda7346831c78191b5b2/orjson-3.10.15-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:208beedfa807c922da4e81061dafa9c8489c6328934ca2a562efa707e049e561", size = 141043 },
+ { url = "https://files.pythonhosted.org/packages/cc/d3/6dc91156cf12ed86bed383bcb942d84d23304a1e57b7ab030bf60ea130d6/orjson-3.10.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eca81f83b1b8c07449e1d6ff7074e82e3fd6777e588f1a6632127f286a968825", size = 129826 },
+ { url = "https://files.pythonhosted.org/packages/b3/38/c47c25b86f6996f1343be721b6ea4367bc1c8bc0fc3f6bbcd995d18cb19d/orjson-3.10.15-cp313-cp313-win32.whl", hash = "sha256:c03cd6eea1bd3b949d0d007c8d57049aa2b39bd49f58b4b2af571a5d3833d890", size = 142542 },
+ { url = "https://files.pythonhosted.org/packages/27/f1/1d7ec15b20f8ce9300bc850de1e059132b88990e46cd0ccac29cbf11e4f9/orjson-3.10.15-cp313-cp313-win_amd64.whl", hash = "sha256:fd56a26a04f6ba5fb2045b0acc487a63162a958ed837648c5781e1fe3316cfbf", size = 133444 },
+]
+
+[[package]]
+name = "packaging"
+version = "24.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", size = 163950 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451 },
+]
+
+[[package]]
+name = "pandas"
+version = "2.2.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "numpy" },
+ { name = "python-dateutil" },
+ { name = "pytz" },
+ { name = "tzdata" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/9c/d6/9f8431bacc2e19dca897724cd097b1bb224a6ad5433784a44b587c7c13af/pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667", size = 4399213 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/17/a3/fb2734118db0af37ea7433f57f722c0a56687e14b14690edff0cdb4b7e58/pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9", size = 12529893 },
+ { url = "https://files.pythonhosted.org/packages/e1/0c/ad295fd74bfac85358fd579e271cded3ac969de81f62dd0142c426b9da91/pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4", size = 11363475 },
+ { url = "https://files.pythonhosted.org/packages/c6/2a/4bba3f03f7d07207481fed47f5b35f556c7441acddc368ec43d6643c5777/pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3", size = 15188645 },
+ { url = "https://files.pythonhosted.org/packages/38/f8/d8fddee9ed0d0c0f4a2132c1dfcf0e3e53265055da8df952a53e7eaf178c/pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319", size = 12739445 },
+ { url = "https://files.pythonhosted.org/packages/20/e8/45a05d9c39d2cea61ab175dbe6a2de1d05b679e8de2011da4ee190d7e748/pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8", size = 16359235 },
+ { url = "https://files.pythonhosted.org/packages/1d/99/617d07a6a5e429ff90c90da64d428516605a1ec7d7bea494235e1c3882de/pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a", size = 14056756 },
+ { url = "https://files.pythonhosted.org/packages/29/d4/1244ab8edf173a10fd601f7e13b9566c1b525c4f365d6bee918e68381889/pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13", size = 11504248 },
+ { url = "https://files.pythonhosted.org/packages/64/22/3b8f4e0ed70644e85cfdcd57454686b9057c6c38d2f74fe4b8bc2527214a/pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015", size = 12477643 },
+ { url = "https://files.pythonhosted.org/packages/e4/93/b3f5d1838500e22c8d793625da672f3eec046b1a99257666c94446969282/pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28", size = 11281573 },
+ { url = "https://files.pythonhosted.org/packages/f5/94/6c79b07f0e5aab1dcfa35a75f4817f5c4f677931d4234afcd75f0e6a66ca/pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0", size = 15196085 },
+ { url = "https://files.pythonhosted.org/packages/e8/31/aa8da88ca0eadbabd0a639788a6da13bb2ff6edbbb9f29aa786450a30a91/pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24", size = 12711809 },
+ { url = "https://files.pythonhosted.org/packages/ee/7c/c6dbdb0cb2a4344cacfb8de1c5808ca885b2e4dcfde8008266608f9372af/pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659", size = 16356316 },
+ { url = "https://files.pythonhosted.org/packages/57/b7/8b757e7d92023b832869fa8881a992696a0bfe2e26f72c9ae9f255988d42/pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb", size = 14022055 },
+ { url = "https://files.pythonhosted.org/packages/3b/bc/4b18e2b8c002572c5a441a64826252ce5da2aa738855747247a971988043/pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d", size = 11481175 },
+ { url = "https://files.pythonhosted.org/packages/76/a3/a5d88146815e972d40d19247b2c162e88213ef51c7c25993942c39dbf41d/pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468", size = 12615650 },
+ { url = "https://files.pythonhosted.org/packages/9c/8c/f0fd18f6140ddafc0c24122c8a964e48294acc579d47def376fef12bcb4a/pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18", size = 11290177 },
+ { url = "https://files.pythonhosted.org/packages/ed/f9/e995754eab9c0f14c6777401f7eece0943840b7a9fc932221c19d1abee9f/pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2", size = 14651526 },
+ { url = "https://files.pythonhosted.org/packages/25/b0/98d6ae2e1abac4f35230aa756005e8654649d305df9a28b16b9ae4353bff/pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4", size = 11871013 },
+ { url = "https://files.pythonhosted.org/packages/cc/57/0f72a10f9db6a4628744c8e8f0df4e6e21de01212c7c981d31e50ffc8328/pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d", size = 15711620 },
+ { url = "https://files.pythonhosted.org/packages/ab/5f/b38085618b950b79d2d9164a711c52b10aefc0ae6833b96f626b7021b2ed/pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a", size = 13098436 },
+]
+
+[[package]]
+name = "pathos"
+version = "0.3.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "dill" },
+ { name = "multiprocess" },
+ { name = "pox" },
+ { name = "ppft" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/be/99/7fcb91495e40735958a576b9bde930cc402d594e9ad5277bdc9b6326e1c8/pathos-0.3.2.tar.gz", hash = "sha256:4f2a42bc1e10ccf0fe71961e7145fc1437018b6b21bd93b2446abc3983e49a7a", size = 166506 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f4/7f/cea34872c000d17972dad998575d14656d7c6bcf1a08a8d66d73c1ef2cca/pathos-0.3.2-py3-none-any.whl", hash = "sha256:d669275e6eb4b3fbcd2846d7a6d1bba315fe23add0c614445ba1408d8b38bafe", size = 82075 },
+]
+
+[[package]]
+name = "pathspec"
+version = "0.12.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191 },
+]
+
+[[package]]
+name = "pip"
+version = "25.0.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/70/53/b309b4a497b09655cb7e07088966881a57d082f48ac3cb54ea729fd2c6cf/pip-25.0.1.tar.gz", hash = "sha256:88f96547ea48b940a3a385494e181e29fb8637898f88d88737c5049780f196ea", size = 1950850 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c9/bc/b7db44f5f39f9d0494071bddae6880eb645970366d0a200022a1a93d57f5/pip-25.0.1-py3-none-any.whl", hash = "sha256:c46efd13b6aa8279f33f2864459c8ce587ea6a1a59ee20de055868d8f7688f7f", size = 1841526 },
+]
+
+[[package]]
+name = "platformdirs"
+version = "4.3.6"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/13/fc/128cc9cb8f03208bdbf93d3aa862e16d376844a14f9a0ce5cf4507372de4/platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907", size = 21302 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/3c/a6/bc1012356d8ece4d66dd75c4b9fc6c1f6650ddd5991e421177d9f8f671be/platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb", size = 18439 },
+]
+
+[[package]]
+name = "plotly"
+version = "6.0.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "narwhals" },
+ { name = "packaging" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/9c/80/761c14012d6daf18e12b6d1e4f6b218e999bcceb694d7a9b180154f9e4db/plotly-6.0.0.tar.gz", hash = "sha256:c4aad38b8c3d65e4a5e7dd308b084143b9025c2cc9d5317fc1f1d30958db87d3", size = 8111782 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0e/77/a946f38b57fb88e736c71fbdd737a1aebd27b532bda0779c137f357cf5fc/plotly-6.0.0-py3-none-any.whl", hash = "sha256:f708871c3a9349a68791ff943a5781b1ec04de7769ea69068adcd9202e57653a", size = 14805949 },
+]
+
+[[package]]
+name = "pluggy"
+version = "1.5.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 },
+]
+
+[[package]]
+name = "pox"
+version = "0.3.5"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/2e/0d/f2eb94b4d1358a60f3539a6abcbbd757fbcb78538fe8d4cfa49850356ccf/pox-0.3.5.tar.gz", hash = "sha256:8120ee4c94e950e6e0483e050a4f0e56076e590ba0a9add19524c254bd23c2d1", size = 119452 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/1d/4c/490d8f7825f38fa77bff188c568163f222d01f6c6d76f574429135edfc49/pox-0.3.5-py3-none-any.whl", hash = "sha256:9e82bcc9e578b43e80a99cad80f0d8f44f4d424f0ee4ee8d4db27260a6aa365a", size = 29492 },
+]
+
+[[package]]
+name = "ppft"
+version = "1.7.6.9"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/2b/06/305532df3e1b0c601f60854b6e080991835809d077934cf41976d0f224ce/ppft-1.7.6.9.tar.gz", hash = "sha256:73161c67474ea9d81d04bcdad166d399cff3f084d5d2dc21ebdd46c075bbc265", size = 136395 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/02/b3/45a04dabc39d93ad4836d99625e7c5350257b48e9ae2c5b701f3d5da6960/ppft-1.7.6.9-py3-none-any.whl", hash = "sha256:dab36548db5ca3055067fbe6b1a17db5fee29f3c366c579a9a27cebb52ed96f0", size = 56792 },
+]
+
+[[package]]
+name = "pre-commit"
+version = "4.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "cfgv" },
+ { name = "identify" },
+ { name = "nodeenv" },
+ { name = "pyyaml" },
+ { name = "virtualenv" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/2a/13/b62d075317d8686071eb843f0bb1f195eb332f48869d3c31a4c6f1e063ac/pre_commit-4.1.0.tar.gz", hash = "sha256:ae3f018575a588e30dfddfab9a05448bfbd6b73d78709617b5a2b853549716d4", size = 193330 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/43/b3/df14c580d82b9627d173ceea305ba898dca135feb360b6d84019d0803d3b/pre_commit-4.1.0-py2.py3-none-any.whl", hash = "sha256:d29e7cb346295bcc1cc75fc3e92e343495e3ea0196c9ec6ba53f49f10ab6ae7b", size = 220560 },
+]
+
+[[package]]
+name = "propcache"
+version = "0.3.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/92/76/f941e63d55c0293ff7829dd21e7cf1147e90a526756869a9070f287a68c9/propcache-0.3.0.tar.gz", hash = "sha256:a8fd93de4e1d278046345f49e2238cdb298589325849b2645d4a94c53faeffc5", size = 42722 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/8d/2c/921f15dc365796ec23975b322b0078eae72995c7b4d49eba554c6a308d70/propcache-0.3.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e53d19c2bf7d0d1e6998a7e693c7e87300dd971808e6618964621ccd0e01fe4e", size = 79867 },
+ { url = "https://files.pythonhosted.org/packages/11/a5/4a6cc1a559d1f2fb57ea22edc4245158cdffae92f7f92afcee2913f84417/propcache-0.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a61a68d630e812b67b5bf097ab84e2cd79b48c792857dc10ba8a223f5b06a2af", size = 46109 },
+ { url = "https://files.pythonhosted.org/packages/e1/6d/28bfd3af3a567ad7d667348e7f46a520bda958229c4d545ba138a044232f/propcache-0.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fb91d20fa2d3b13deea98a690534697742029f4fb83673a3501ae6e3746508b5", size = 45635 },
+ { url = "https://files.pythonhosted.org/packages/73/20/d75b42eaffe5075eac2f4e168f6393d21c664c91225288811d85451b2578/propcache-0.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67054e47c01b7b349b94ed0840ccae075449503cf1fdd0a1fdd98ab5ddc2667b", size = 242159 },
+ { url = "https://files.pythonhosted.org/packages/a5/fb/4b537dd92f9fd4be68042ec51c9d23885ca5fafe51ec24c58d9401034e5f/propcache-0.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:997e7b8f173a391987df40f3b52c423e5850be6f6df0dcfb5376365440b56667", size = 248163 },
+ { url = "https://files.pythonhosted.org/packages/e7/af/8a9db04ac596d531ca0ef7dde518feaadfcdabef7b17d6a5ec59ee3effc2/propcache-0.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d663fd71491dde7dfdfc899d13a067a94198e90695b4321084c6e450743b8c7", size = 248794 },
+ { url = "https://files.pythonhosted.org/packages/9d/c4/ecfc988879c0fd9db03228725b662d76cf484b6b46f7e92fee94e4b52490/propcache-0.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8884ba1a0fe7210b775106b25850f5e5a9dc3c840d1ae9924ee6ea2eb3acbfe7", size = 243912 },
+ { url = "https://files.pythonhosted.org/packages/04/a2/298dd27184faa8b7d91cc43488b578db218b3cc85b54d912ed27b8c5597a/propcache-0.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa806bbc13eac1ab6291ed21ecd2dd426063ca5417dd507e6be58de20e58dfcf", size = 229402 },
+ { url = "https://files.pythonhosted.org/packages/be/0d/efe7fec316ca92dbf4bc4a9ba49ca889c43ca6d48ab1d6fa99fc94e5bb98/propcache-0.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6f4d7a7c0aff92e8354cceca6fe223973ddf08401047920df0fcb24be2bd5138", size = 226896 },
+ { url = "https://files.pythonhosted.org/packages/60/63/72404380ae1d9c96d96e165aa02c66c2aae6072d067fc4713da5cde96762/propcache-0.3.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:9be90eebc9842a93ef8335291f57b3b7488ac24f70df96a6034a13cb58e6ff86", size = 221447 },
+ { url = "https://files.pythonhosted.org/packages/9d/18/b8392cab6e0964b67a30a8f4dadeaff64dc7022b5a34bb1d004ea99646f4/propcache-0.3.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:bf15fc0b45914d9d1b706f7c9c4f66f2b7b053e9517e40123e137e8ca8958b3d", size = 222440 },
+ { url = "https://files.pythonhosted.org/packages/6f/be/105d9ceda0f97eff8c06bac1673448b2db2a497444de3646464d3f5dc881/propcache-0.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5a16167118677d94bb48bfcd91e420088854eb0737b76ec374b91498fb77a70e", size = 234104 },
+ { url = "https://files.pythonhosted.org/packages/cb/c9/f09a4ec394cfcce4053d8b2a04d622b5f22d21ba9bb70edd0cad061fa77b/propcache-0.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:41de3da5458edd5678b0f6ff66691507f9885f5fe6a0fb99a5d10d10c0fd2d64", size = 239086 },
+ { url = "https://files.pythonhosted.org/packages/ea/aa/96f7f9ed6def82db67c972bdb7bd9f28b95d7d98f7e2abaf144c284bf609/propcache-0.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:728af36011bb5d344c4fe4af79cfe186729efb649d2f8b395d1572fb088a996c", size = 230991 },
+ { url = "https://files.pythonhosted.org/packages/5a/11/bee5439de1307d06fad176f7143fec906e499c33d7aff863ea8428b8e98b/propcache-0.3.0-cp312-cp312-win32.whl", hash = "sha256:6b5b7fd6ee7b54e01759f2044f936dcf7dea6e7585f35490f7ca0420fe723c0d", size = 40337 },
+ { url = "https://files.pythonhosted.org/packages/e4/17/e5789a54a0455a61cb9efc4ca6071829d992220c2998a27c59aeba749f6f/propcache-0.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:2d15bc27163cd4df433e75f546b9ac31c1ba7b0b128bfb1b90df19082466ff57", size = 44404 },
+ { url = "https://files.pythonhosted.org/packages/3a/0f/a79dd23a0efd6ee01ab0dc9750d8479b343bfd0c73560d59d271eb6a99d4/propcache-0.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a2b9bf8c79b660d0ca1ad95e587818c30ccdb11f787657458d6f26a1ea18c568", size = 77287 },
+ { url = "https://files.pythonhosted.org/packages/b8/51/76675703c90de38ac75adb8deceb3f3ad99b67ff02a0fa5d067757971ab8/propcache-0.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b0c1a133d42c6fc1f5fbcf5c91331657a1ff822e87989bf4a6e2e39b818d0ee9", size = 44923 },
+ { url = "https://files.pythonhosted.org/packages/01/9b/fd5ddbee66cf7686e73c516227c2fd9bf471dbfed0f48329d095ea1228d3/propcache-0.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bb2f144c6d98bb5cbc94adeb0447cfd4c0f991341baa68eee3f3b0c9c0e83767", size = 44325 },
+ { url = "https://files.pythonhosted.org/packages/13/1c/6961f11eb215a683b34b903b82bde486c606516c1466bf1fa67f26906d51/propcache-0.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1323cd04d6e92150bcc79d0174ce347ed4b349d748b9358fd2e497b121e03c8", size = 225116 },
+ { url = "https://files.pythonhosted.org/packages/ef/ea/f8410c40abcb2e40dffe9adeed017898c930974650a63e5c79b886aa9f73/propcache-0.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b812b3cb6caacd072276ac0492d249f210006c57726b6484a1e1805b3cfeea0", size = 229905 },
+ { url = "https://files.pythonhosted.org/packages/ef/5a/a9bf90894001468bf8e6ea293bb00626cc9ef10f8eb7996e9ec29345c7ed/propcache-0.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:742840d1d0438eb7ea4280f3347598f507a199a35a08294afdcc560c3739989d", size = 233221 },
+ { url = "https://files.pythonhosted.org/packages/dd/ce/fffdddd9725b690b01d345c1156b4c2cc6dca09ab5c23a6d07b8f37d6e2f/propcache-0.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c6e7e4f9167fddc438cd653d826f2222222564daed4116a02a184b464d3ef05", size = 227627 },
+ { url = "https://files.pythonhosted.org/packages/58/ae/45c89a5994a334735a3032b48e8e4a98c05d9536ddee0719913dc27da548/propcache-0.3.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a94ffc66738da99232ddffcf7910e0f69e2bbe3a0802e54426dbf0714e1c2ffe", size = 214217 },
+ { url = "https://files.pythonhosted.org/packages/01/84/bc60188c3290ff8f5f4a92b9ca2d93a62e449c8daf6fd11ad517ad136926/propcache-0.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3c6ec957025bf32b15cbc6b67afe233c65b30005e4c55fe5768e4bb518d712f1", size = 212921 },
+ { url = "https://files.pythonhosted.org/packages/14/b3/39d60224048feef7a96edabb8217dc3f75415457e5ebbef6814f8b2a27b5/propcache-0.3.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:549722908de62aa0b47a78b90531c022fa6e139f9166be634f667ff45632cc92", size = 208200 },
+ { url = "https://files.pythonhosted.org/packages/9d/b3/0a6720b86791251273fff8a01bc8e628bc70903513bd456f86cde1e1ef84/propcache-0.3.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5d62c4f6706bff5d8a52fd51fec6069bef69e7202ed481486c0bc3874912c787", size = 208400 },
+ { url = "https://files.pythonhosted.org/packages/e9/4f/bb470f3e687790547e2e78105fb411f54e0cdde0d74106ccadd2521c6572/propcache-0.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:24c04f8fbf60094c531667b8207acbae54146661657a1b1be6d3ca7773b7a545", size = 218116 },
+ { url = "https://files.pythonhosted.org/packages/34/71/277f7f9add469698ac9724c199bfe06f85b199542121a71f65a80423d62a/propcache-0.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:7c5f5290799a3f6539cc5e6f474c3e5c5fbeba74a5e1e5be75587746a940d51e", size = 222911 },
+ { url = "https://files.pythonhosted.org/packages/92/e3/a7b9782aef5a2fc765b1d97da9ec7aed2f25a4e985703608e73232205e3f/propcache-0.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4fa0e7c9c3cf7c276d4f6ab9af8adddc127d04e0fcabede315904d2ff76db626", size = 216563 },
+ { url = "https://files.pythonhosted.org/packages/ab/76/0583ca2c551aa08ffcff87b2c6849c8f01c1f6fb815a5226f0c5c202173e/propcache-0.3.0-cp313-cp313-win32.whl", hash = "sha256:ee0bd3a7b2e184e88d25c9baa6a9dc609ba25b76daae942edfb14499ac7ec374", size = 39763 },
+ { url = "https://files.pythonhosted.org/packages/80/ec/c6a84f9a36f608379b95f0e786c111d5465926f8c62f12be8cdadb02b15c/propcache-0.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:1c8f7d896a16da9455f882870a507567d4f58c53504dc2d4b1e1d386dfe4588a", size = 43650 },
+ { url = "https://files.pythonhosted.org/packages/ee/95/7d32e3560f5bf83fc2f2a4c1b0c181d327d53d5f85ebd045ab89d4d97763/propcache-0.3.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e560fd75aaf3e5693b91bcaddd8b314f4d57e99aef8a6c6dc692f935cc1e6bbf", size = 82140 },
+ { url = "https://files.pythonhosted.org/packages/86/89/752388f12e6027a5e63f5d075f15291ded48e2d8311314fff039da5a9b11/propcache-0.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:65a37714b8ad9aba5780325228598a5b16c47ba0f8aeb3dc0514701e4413d7c0", size = 47296 },
+ { url = "https://files.pythonhosted.org/packages/1b/4c/b55c98d586c69180d3048984a57a5ea238bdeeccf82dbfcd598e935e10bb/propcache-0.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:07700939b2cbd67bfb3b76a12e1412405d71019df00ca5697ce75e5ef789d829", size = 46724 },
+ { url = "https://files.pythonhosted.org/packages/0f/b6/67451a437aed90c4e951e320b5b3d7eb584ade1d5592f6e5e8f678030989/propcache-0.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c0fdbdf6983526e269e5a8d53b7ae3622dd6998468821d660d0daf72779aefa", size = 291499 },
+ { url = "https://files.pythonhosted.org/packages/ee/ff/e4179facd21515b24737e1e26e02615dfb5ed29416eed4cf5bc6ac5ce5fb/propcache-0.3.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:794c3dd744fad478b6232289c866c25406ecdfc47e294618bdf1697e69bd64a6", size = 293911 },
+ { url = "https://files.pythonhosted.org/packages/76/8d/94a8585992a064a23bd54f56c5e58c3b8bf0c0a06ae10e56f2353ae16c3d/propcache-0.3.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4544699674faf66fb6b4473a1518ae4999c1b614f0b8297b1cef96bac25381db", size = 293301 },
+ { url = "https://files.pythonhosted.org/packages/b0/b8/2c860c92b4134f68c7716c6f30a0d723973f881c32a6d7a24c4ddca05fdf/propcache-0.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fddb8870bdb83456a489ab67c6b3040a8d5a55069aa6f72f9d872235fbc52f54", size = 281947 },
+ { url = "https://files.pythonhosted.org/packages/cd/72/b564be7411b525d11757b713c757c21cd4dc13b6569c3b2b8f6d3c96fd5e/propcache-0.3.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f857034dc68d5ceb30fb60afb6ff2103087aea10a01b613985610e007053a121", size = 268072 },
+ { url = "https://files.pythonhosted.org/packages/37/68/d94649e399e8d7fc051e5a4f2334efc567993525af083db145a70690a121/propcache-0.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:02df07041e0820cacc8f739510078f2aadcfd3fc57eaeeb16d5ded85c872c89e", size = 275190 },
+ { url = "https://files.pythonhosted.org/packages/d8/3c/446e125f5bbbc1922964dd67cb541c01cdb678d811297b79a4ff6accc843/propcache-0.3.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f47d52fd9b2ac418c4890aad2f6d21a6b96183c98021f0a48497a904199f006e", size = 254145 },
+ { url = "https://files.pythonhosted.org/packages/f4/80/fd3f741483dc8e59f7ba7e05eaa0f4e11677d7db2077522b92ff80117a2a/propcache-0.3.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9ff4e9ecb6e4b363430edf2c6e50173a63e0820e549918adef70515f87ced19a", size = 257163 },
+ { url = "https://files.pythonhosted.org/packages/dc/cf/6292b5ce6ed0017e6a89024a827292122cc41b6259b30ada0c6732288513/propcache-0.3.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ecc2920630283e0783c22e2ac94427f8cca29a04cfdf331467d4f661f4072dac", size = 280249 },
+ { url = "https://files.pythonhosted.org/packages/e8/f0/fd9b8247b449fe02a4f96538b979997e229af516d7462b006392badc59a1/propcache-0.3.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:c441c841e82c5ba7a85ad25986014be8d7849c3cfbdb6004541873505929a74e", size = 288741 },
+ { url = "https://files.pythonhosted.org/packages/64/71/cf831fdc2617f86cfd7f414cfc487d018e722dac8acc098366ce9bba0941/propcache-0.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6c929916cbdb540d3407c66f19f73387f43e7c12fa318a66f64ac99da601bcdf", size = 277061 },
+ { url = "https://files.pythonhosted.org/packages/42/78/9432542a35d944abeca9e02927a0de38cd7a298466d8ffa171536e2381c3/propcache-0.3.0-cp313-cp313t-win32.whl", hash = "sha256:0c3e893c4464ebd751b44ae76c12c5f5c1e4f6cbd6fbf67e3783cd93ad221863", size = 42252 },
+ { url = "https://files.pythonhosted.org/packages/6f/45/960365f4f8978f48ebb56b1127adf33a49f2e69ecd46ac1f46d6cf78a79d/propcache-0.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:75e872573220d1ee2305b35c9813626e620768248425f58798413e9c39741f46", size = 46425 },
+ { url = "https://files.pythonhosted.org/packages/b5/35/6c4c6fc8774a9e3629cd750dc24a7a4fb090a25ccd5c3246d127b70f9e22/propcache-0.3.0-py3-none-any.whl", hash = "sha256:67dda3c7325691c2081510e92c561f465ba61b975f481735aefdfc845d2cd043", size = 12101 },
+]
+
+[[package]]
+name = "protobuf"
+version = "5.29.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f7/d1/e0a911544ca9993e0f17ce6d3cc0932752356c1b0a834397f28e63479344/protobuf-5.29.3.tar.gz", hash = "sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620", size = 424945 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/dc/7a/1e38f3cafa022f477ca0f57a1f49962f21ad25850c3ca0acd3b9d0091518/protobuf-5.29.3-cp310-abi3-win32.whl", hash = "sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888", size = 422708 },
+ { url = "https://files.pythonhosted.org/packages/61/fa/aae8e10512b83de633f2646506a6d835b151edf4b30d18d73afd01447253/protobuf-5.29.3-cp310-abi3-win_amd64.whl", hash = "sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a", size = 434508 },
+ { url = "https://files.pythonhosted.org/packages/dd/04/3eaedc2ba17a088961d0e3bd396eac764450f431621b58a04ce898acd126/protobuf-5.29.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e", size = 417825 },
+ { url = "https://files.pythonhosted.org/packages/4f/06/7c467744d23c3979ce250397e26d8ad8eeb2bea7b18ca12ad58313c1b8d5/protobuf-5.29.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84", size = 319573 },
+ { url = "https://files.pythonhosted.org/packages/a8/45/2ebbde52ad2be18d3675b6bee50e68cd73c9e0654de77d595540b5129df8/protobuf-5.29.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f", size = 319672 },
+ { url = "https://files.pythonhosted.org/packages/fd/b2/ab07b09e0f6d143dfb839693aa05765257bceaa13d03bf1a696b78323e7a/protobuf-5.29.3-py3-none-any.whl", hash = "sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f", size = 172550 },
+]
+
+[[package]]
+name = "psutil"
+version = "7.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/2a/80/336820c1ad9286a4ded7e845b2eccfcb27851ab8ac6abece774a6ff4d3de/psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456", size = 497003 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ed/e6/2d26234410f8b8abdbf891c9da62bee396583f713fb9f3325a4760875d22/psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25", size = 238051 },
+ { url = "https://files.pythonhosted.org/packages/04/8b/30f930733afe425e3cbfc0e1468a30a18942350c1a8816acfade80c005c4/psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da", size = 239535 },
+ { url = "https://files.pythonhosted.org/packages/2a/ed/d362e84620dd22876b55389248e522338ed1bf134a5edd3b8231d7207f6d/psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91", size = 275004 },
+ { url = "https://files.pythonhosted.org/packages/bf/b9/b0eb3f3cbcb734d930fdf839431606844a825b23eaf9a6ab371edac8162c/psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34", size = 277986 },
+ { url = "https://files.pythonhosted.org/packages/eb/a2/709e0fe2f093556c17fbafda93ac032257242cabcc7ff3369e2cb76a97aa/psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993", size = 279544 },
+ { url = "https://files.pythonhosted.org/packages/50/e6/eecf58810b9d12e6427369784efe814a1eec0f492084ce8eb8f4d89d6d61/psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99", size = 241053 },
+ { url = "https://files.pythonhosted.org/packages/50/1b/6921afe68c74868b4c9fa424dad3be35b095e16687989ebbb50ce4fceb7c/psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553", size = 244885 },
+]
+
+[[package]]
+name = "psycopg2-binary"
+version = "2.9.10"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/cb/0e/bdc8274dc0585090b4e3432267d7be4dfbfd8971c0fa59167c711105a6bf/psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2", size = 385764 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/49/7d/465cc9795cf76f6d329efdafca74693714556ea3891813701ac1fee87545/psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0", size = 3044771 },
+ { url = "https://files.pythonhosted.org/packages/8b/31/6d225b7b641a1a2148e3ed65e1aa74fc86ba3fee850545e27be9e1de893d/psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a", size = 3275336 },
+ { url = "https://files.pythonhosted.org/packages/30/b7/a68c2b4bff1cbb1728e3ec864b2d92327c77ad52edcd27922535a8366f68/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539", size = 2851637 },
+ { url = "https://files.pythonhosted.org/packages/0b/b1/cfedc0e0e6f9ad61f8657fd173b2f831ce261c02a08c0b09c652b127d813/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526", size = 3082097 },
+ { url = "https://files.pythonhosted.org/packages/18/ed/0a8e4153c9b769f59c02fb5e7914f20f0b2483a19dae7bf2db54b743d0d0/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1", size = 3264776 },
+ { url = "https://files.pythonhosted.org/packages/10/db/d09da68c6a0cdab41566b74e0a6068a425f077169bed0946559b7348ebe9/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e", size = 3020968 },
+ { url = "https://files.pythonhosted.org/packages/94/28/4d6f8c255f0dfffb410db2b3f9ac5218d959a66c715c34cac31081e19b95/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f", size = 2872334 },
+ { url = "https://files.pythonhosted.org/packages/05/f7/20d7bf796593c4fea95e12119d6cc384ff1f6141a24fbb7df5a668d29d29/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00", size = 2822722 },
+ { url = "https://files.pythonhosted.org/packages/4d/e4/0c407ae919ef626dbdb32835a03b6737013c3cc7240169843965cada2bdf/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5", size = 2920132 },
+ { url = "https://files.pythonhosted.org/packages/2d/70/aa69c9f69cf09a01da224909ff6ce8b68faeef476f00f7ec377e8f03be70/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47", size = 2959312 },
+ { url = "https://files.pythonhosted.org/packages/d3/bd/213e59854fafe87ba47814bf413ace0dcee33a89c8c8c814faca6bc7cf3c/psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64", size = 1025191 },
+ { url = "https://files.pythonhosted.org/packages/92/29/06261ea000e2dc1e22907dbbc483a1093665509ea586b29b8986a0e56733/psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0", size = 1164031 },
+ { url = "https://files.pythonhosted.org/packages/3e/30/d41d3ba765609c0763505d565c4d12d8f3c79793f0d0f044ff5a28bf395b/psycopg2_binary-2.9.10-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d", size = 3044699 },
+ { url = "https://files.pythonhosted.org/packages/35/44/257ddadec7ef04536ba71af6bc6a75ec05c5343004a7ec93006bee66c0bc/psycopg2_binary-2.9.10-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb", size = 3275245 },
+ { url = "https://files.pythonhosted.org/packages/1b/11/48ea1cd11de67f9efd7262085588790a95d9dfcd9b8a687d46caf7305c1a/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7", size = 2851631 },
+ { url = "https://files.pythonhosted.org/packages/62/e0/62ce5ee650e6c86719d621a761fe4bc846ab9eff8c1f12b1ed5741bf1c9b/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d", size = 3082140 },
+ { url = "https://files.pythonhosted.org/packages/27/ce/63f946c098611f7be234c0dd7cb1ad68b0b5744d34f68062bb3c5aa510c8/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73", size = 3264762 },
+ { url = "https://files.pythonhosted.org/packages/43/25/c603cd81402e69edf7daa59b1602bd41eb9859e2824b8c0855d748366ac9/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673", size = 3020967 },
+ { url = "https://files.pythonhosted.org/packages/5f/d6/8708d8c6fca531057fa170cdde8df870e8b6a9b136e82b361c65e42b841e/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f", size = 2872326 },
+ { url = "https://files.pythonhosted.org/packages/ce/ac/5b1ea50fc08a9df82de7e1771537557f07c2632231bbab652c7e22597908/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909", size = 2822712 },
+ { url = "https://files.pythonhosted.org/packages/c4/fc/504d4503b2abc4570fac3ca56eb8fed5e437bf9c9ef13f36b6621db8ef00/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1", size = 2920155 },
+ { url = "https://files.pythonhosted.org/packages/b2/d1/323581e9273ad2c0dbd1902f3fb50c441da86e894b6e25a73c3fda32c57e/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567", size = 2959356 },
+ { url = "https://files.pythonhosted.org/packages/08/50/d13ea0a054189ae1bc21af1d85b6f8bb9bbc5572991055d70ad9006fe2d6/psycopg2_binary-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:27422aa5f11fbcd9b18da48373eb67081243662f9b46e6fd07c3eb46e4535142", size = 2569224 },
+]
+
+[[package]]
+name = "pyarrow"
+version = "19.0.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/7f/09/a9046344212690f0632b9c709f9bf18506522feb333c894d0de81d62341a/pyarrow-19.0.1.tar.gz", hash = "sha256:3bf266b485df66a400f282ac0b6d1b500b9d2ae73314a153dbe97d6d5cc8a99e", size = 1129437 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/78/b4/94e828704b050e723f67d67c3535cf7076c7432cd4cf046e4bb3b96a9c9d/pyarrow-19.0.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:80b2ad2b193e7d19e81008a96e313fbd53157945c7be9ac65f44f8937a55427b", size = 30670749 },
+ { url = "https://files.pythonhosted.org/packages/7e/3b/4692965e04bb1df55e2c314c4296f1eb12b4f3052d4cf43d29e076aedf66/pyarrow-19.0.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:ee8dec072569f43835932a3b10c55973593abc00936c202707a4ad06af7cb294", size = 32128007 },
+ { url = "https://files.pythonhosted.org/packages/22/f7/2239af706252c6582a5635c35caa17cb4d401cd74a87821ef702e3888957/pyarrow-19.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d5d1ec7ec5324b98887bdc006f4d2ce534e10e60f7ad995e7875ffa0ff9cb14", size = 41144566 },
+ { url = "https://files.pythonhosted.org/packages/fb/e3/c9661b2b2849cfefddd9fd65b64e093594b231b472de08ff658f76c732b2/pyarrow-19.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ad4c0eb4e2a9aeb990af6c09e6fa0b195c8c0e7b272ecc8d4d2b6574809d34", size = 42202991 },
+ { url = "https://files.pythonhosted.org/packages/fe/4f/a2c0ed309167ef436674782dfee4a124570ba64299c551e38d3fdaf0a17b/pyarrow-19.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:d383591f3dcbe545f6cc62daaef9c7cdfe0dff0fb9e1c8121101cabe9098cfa6", size = 40507986 },
+ { url = "https://files.pythonhosted.org/packages/27/2e/29bb28a7102a6f71026a9d70d1d61df926887e36ec797f2e6acfd2dd3867/pyarrow-19.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b4c4156a625f1e35d6c0b2132635a237708944eb41df5fbe7d50f20d20c17832", size = 42087026 },
+ { url = "https://files.pythonhosted.org/packages/16/33/2a67c0f783251106aeeee516f4806161e7b481f7d744d0d643d2f30230a5/pyarrow-19.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:5bd1618ae5e5476b7654c7b55a6364ae87686d4724538c24185bbb2952679960", size = 25250108 },
+ { url = "https://files.pythonhosted.org/packages/2b/8d/275c58d4b00781bd36579501a259eacc5c6dfb369be4ddeb672ceb551d2d/pyarrow-19.0.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:e45274b20e524ae5c39d7fc1ca2aa923aab494776d2d4b316b49ec7572ca324c", size = 30653552 },
+ { url = "https://files.pythonhosted.org/packages/a0/9e/e6aca5cc4ef0c7aec5f8db93feb0bde08dbad8c56b9014216205d271101b/pyarrow-19.0.1-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:d9dedeaf19097a143ed6da37f04f4051aba353c95ef507764d344229b2b740ae", size = 32103413 },
+ { url = "https://files.pythonhosted.org/packages/6a/fa/a7033f66e5d4f1308c7eb0dfcd2ccd70f881724eb6fd1776657fdf65458f/pyarrow-19.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ebfb5171bb5f4a52319344ebbbecc731af3f021e49318c74f33d520d31ae0c4", size = 41134869 },
+ { url = "https://files.pythonhosted.org/packages/2d/92/34d2569be8e7abdc9d145c98dc410db0071ac579b92ebc30da35f500d630/pyarrow-19.0.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a21d39fbdb948857f67eacb5bbaaf36802de044ec36fbef7a1c8f0dd3a4ab2", size = 42192626 },
+ { url = "https://files.pythonhosted.org/packages/0a/1f/80c617b1084fc833804dc3309aa9d8daacd46f9ec8d736df733f15aebe2c/pyarrow-19.0.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:99bc1bec6d234359743b01e70d4310d0ab240c3d6b0da7e2a93663b0158616f6", size = 40496708 },
+ { url = "https://files.pythonhosted.org/packages/e6/90/83698fcecf939a611c8d9a78e38e7fed7792dcc4317e29e72cf8135526fb/pyarrow-19.0.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1b93ef2c93e77c442c979b0d596af45e4665d8b96da598db145b0fec014b9136", size = 42075728 },
+ { url = "https://files.pythonhosted.org/packages/40/49/2325f5c9e7a1c125c01ba0c509d400b152c972a47958768e4e35e04d13d8/pyarrow-19.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:d9d46e06846a41ba906ab25302cf0fd522f81aa2a85a71021826f34639ad31ef", size = 25242568 },
+ { url = "https://files.pythonhosted.org/packages/3f/72/135088d995a759d4d916ec4824cb19e066585b4909ebad4ab196177aa825/pyarrow-19.0.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:c0fe3dbbf054a00d1f162fda94ce236a899ca01123a798c561ba307ca38af5f0", size = 30702371 },
+ { url = "https://files.pythonhosted.org/packages/2e/01/00beeebd33d6bac701f20816a29d2018eba463616bbc07397fdf99ac4ce3/pyarrow-19.0.1-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:96606c3ba57944d128e8a8399da4812f56c7f61de8c647e3470b417f795d0ef9", size = 32116046 },
+ { url = "https://files.pythonhosted.org/packages/1f/c9/23b1ea718dfe967cbd986d16cf2a31fe59d015874258baae16d7ea0ccabc/pyarrow-19.0.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f04d49a6b64cf24719c080b3c2029a3a5b16417fd5fd7c4041f94233af732f3", size = 41091183 },
+ { url = "https://files.pythonhosted.org/packages/3a/d4/b4a3aa781a2c715520aa8ab4fe2e7fa49d33a1d4e71c8fc6ab7b5de7a3f8/pyarrow-19.0.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a9137cf7e1640dce4c190551ee69d478f7121b5c6f323553b319cac936395f6", size = 42171896 },
+ { url = "https://files.pythonhosted.org/packages/23/1b/716d4cd5a3cbc387c6e6745d2704c4b46654ba2668260d25c402626c5ddb/pyarrow-19.0.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:7c1bca1897c28013db5e4c83944a2ab53231f541b9e0c3f4791206d0c0de389a", size = 40464851 },
+ { url = "https://files.pythonhosted.org/packages/ed/bd/54907846383dcc7ee28772d7e646f6c34276a17da740002a5cefe90f04f7/pyarrow-19.0.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:58d9397b2e273ef76264b45531e9d552d8ec8a6688b7390b5be44c02a37aade8", size = 42085744 },
+]
+
+[[package]]
+name = "pycparser"
+version = "2.22"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552 },
+]
+
+[[package]]
+name = "pydantic"
+version = "2.10.6"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "annotated-types" },
+ { name = "pydantic-core" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b7/ae/d5220c5c52b158b1de7ca89fc5edb72f304a70a4c540c84c8844bf4008de/pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236", size = 761681 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f4/3c/8cc1cc84deffa6e25d2d0c688ebb80635dfdbf1dbea3e30c541c8cf4d860/pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584", size = 431696 },
+]
+
+[[package]]
+name = "pydantic-core"
+version = "2.27.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/fc/01/f3e5ac5e7c25833db5eb555f7b7ab24cd6f8c322d3a3ad2d67a952dc0abc/pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39", size = 413443 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d6/74/51c8a5482ca447871c93e142d9d4a92ead74de6c8dc5e66733e22c9bba89/pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0", size = 1893127 },
+ { url = "https://files.pythonhosted.org/packages/d3/f3/c97e80721735868313c58b89d2de85fa80fe8dfeeed84dc51598b92a135e/pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef", size = 1811340 },
+ { url = "https://files.pythonhosted.org/packages/9e/91/840ec1375e686dbae1bd80a9e46c26a1e0083e1186abc610efa3d9a36180/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7", size = 1822900 },
+ { url = "https://files.pythonhosted.org/packages/f6/31/4240bc96025035500c18adc149aa6ffdf1a0062a4b525c932065ceb4d868/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934", size = 1869177 },
+ { url = "https://files.pythonhosted.org/packages/fa/20/02fbaadb7808be578317015c462655c317a77a7c8f0ef274bc016a784c54/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6", size = 2038046 },
+ { url = "https://files.pythonhosted.org/packages/06/86/7f306b904e6c9eccf0668248b3f272090e49c275bc488a7b88b0823444a4/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c", size = 2685386 },
+ { url = "https://files.pythonhosted.org/packages/8d/f0/49129b27c43396581a635d8710dae54a791b17dfc50c70164866bbf865e3/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2", size = 1997060 },
+ { url = "https://files.pythonhosted.org/packages/0d/0f/943b4af7cd416c477fd40b187036c4f89b416a33d3cc0ab7b82708a667aa/pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4", size = 2004870 },
+ { url = "https://files.pythonhosted.org/packages/35/40/aea70b5b1a63911c53a4c8117c0a828d6790483f858041f47bab0b779f44/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3", size = 1999822 },
+ { url = "https://files.pythonhosted.org/packages/f2/b3/807b94fd337d58effc5498fd1a7a4d9d59af4133e83e32ae39a96fddec9d/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4", size = 2130364 },
+ { url = "https://files.pythonhosted.org/packages/fc/df/791c827cd4ee6efd59248dca9369fb35e80a9484462c33c6649a8d02b565/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57", size = 2158303 },
+ { url = "https://files.pythonhosted.org/packages/9b/67/4e197c300976af185b7cef4c02203e175fb127e414125916bf1128b639a9/pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc", size = 1834064 },
+ { url = "https://files.pythonhosted.org/packages/1f/ea/cd7209a889163b8dcca139fe32b9687dd05249161a3edda62860430457a5/pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9", size = 1989046 },
+ { url = "https://files.pythonhosted.org/packages/bc/49/c54baab2f4658c26ac633d798dab66b4c3a9bbf47cff5284e9c182f4137a/pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b", size = 1885092 },
+ { url = "https://files.pythonhosted.org/packages/41/b1/9bc383f48f8002f99104e3acff6cba1231b29ef76cfa45d1506a5cad1f84/pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b", size = 1892709 },
+ { url = "https://files.pythonhosted.org/packages/10/6c/e62b8657b834f3eb2961b49ec8e301eb99946245e70bf42c8817350cbefc/pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154", size = 1811273 },
+ { url = "https://files.pythonhosted.org/packages/ba/15/52cfe49c8c986e081b863b102d6b859d9defc63446b642ccbbb3742bf371/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9", size = 1823027 },
+ { url = "https://files.pythonhosted.org/packages/b1/1c/b6f402cfc18ec0024120602bdbcebc7bdd5b856528c013bd4d13865ca473/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9", size = 1868888 },
+ { url = "https://files.pythonhosted.org/packages/bd/7b/8cb75b66ac37bc2975a3b7de99f3c6f355fcc4d89820b61dffa8f1e81677/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1", size = 2037738 },
+ { url = "https://files.pythonhosted.org/packages/c8/f1/786d8fe78970a06f61df22cba58e365ce304bf9b9f46cc71c8c424e0c334/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a", size = 2685138 },
+ { url = "https://files.pythonhosted.org/packages/a6/74/d12b2cd841d8724dc8ffb13fc5cef86566a53ed358103150209ecd5d1999/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e", size = 1997025 },
+ { url = "https://files.pythonhosted.org/packages/a0/6e/940bcd631bc4d9a06c9539b51f070b66e8f370ed0933f392db6ff350d873/pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4", size = 2004633 },
+ { url = "https://files.pythonhosted.org/packages/50/cc/a46b34f1708d82498c227d5d80ce615b2dd502ddcfd8376fc14a36655af1/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27", size = 1999404 },
+ { url = "https://files.pythonhosted.org/packages/ca/2d/c365cfa930ed23bc58c41463bae347d1005537dc8db79e998af8ba28d35e/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee", size = 2130130 },
+ { url = "https://files.pythonhosted.org/packages/f4/d7/eb64d015c350b7cdb371145b54d96c919d4db516817f31cd1c650cae3b21/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1", size = 2157946 },
+ { url = "https://files.pythonhosted.org/packages/a4/99/bddde3ddde76c03b65dfd5a66ab436c4e58ffc42927d4ff1198ffbf96f5f/pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130", size = 1834387 },
+ { url = "https://files.pythonhosted.org/packages/71/47/82b5e846e01b26ac6f1893d3c5f9f3a2eb6ba79be26eef0b759b4fe72946/pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee", size = 1990453 },
+ { url = "https://files.pythonhosted.org/packages/51/b2/b2b50d5ecf21acf870190ae5d093602d95f66c9c31f9d5de6062eb329ad1/pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b", size = 1885186 },
+]
+
+[[package]]
+name = "pydantic-settings"
+version = "2.8.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pydantic" },
+ { name = "python-dotenv" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/88/82/c79424d7d8c29b994fb01d277da57b0a9b09cc03c3ff875f9bd8a86b2145/pydantic_settings-2.8.1.tar.gz", hash = "sha256:d5c663dfbe9db9d5e1c646b2e161da12f0d734d422ee56f567d0ea2cee4e8585", size = 83550 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0b/53/a64f03044927dc47aafe029c42a5b7aabc38dfb813475e0e1bf71c4a59d0/pydantic_settings-2.8.1-py3-none-any.whl", hash = "sha256:81942d5ac3d905f7f3ee1a70df5dfb62d5569c12f51a5a647defc1c3d9ee2e9c", size = 30839 },
+]
+
+[[package]]
+name = "pygit2"
+version = "1.17.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "cffi" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b7/ea/17aa8ca38750f1ba69511ceeb41d29961f90eb2e0a242b668c70311efd4e/pygit2-1.17.0.tar.gz", hash = "sha256:fa2bc050b2c2d3e73b54d6d541c792178561a344f07e409f532d5bb97ac7b894", size = 769002 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ee/53/8286256d077a0a38837c4ceee73a3c2b2d6caed3ec86e8bf7b32580e5ed0/pygit2-1.17.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f7224d89a7dda7290e458393941e500c8682f375f41e6d80ee423958a5d4013d", size = 5465330 },
+ { url = "https://files.pythonhosted.org/packages/dd/a0/060ebb435d2590c1188ad6bc7ea0d5f0561e09a13db02baec8252b507390/pygit2-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ae1967b0c8a2438b3b0e4a63307b5c22c80024a2f09b28d14dfde0001fed8dc", size = 5683366 },
+ { url = "https://files.pythonhosted.org/packages/21/92/fedc77806ff06b502a82ddbb857a5749429ce7bf638e3007b82bd10b4244/pygit2-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:507343fa142a82028c8448c2626317dc19885985aba8ea27d381777ac484eefb", size = 5645689 },
+ { url = "https://files.pythonhosted.org/packages/14/a9/3405b991f3264163e3d93c16b43929e0e765e559ca83f8697008c7f65587/pygit2-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc04917a680591c6e801df912d7fb722c253b5ac68178ff37b5666dafd06999", size = 5457766 },
+ { url = "https://files.pythonhosted.org/packages/71/bb/40c37e00994727efb1a68bfd1f0b505207ec066ef8004b7e258210f230cc/pygit2-1.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7bb1b623cbd16962c3a1ec7f8e1012fa224c9e9642758c65e8e656ecc7ff1574", size = 5400609 },
+ { url = "https://files.pythonhosted.org/packages/db/55/7781d8997632ebfe2682a8f80668710eb4bc8c99a80e0691243b020f7391/pygit2-1.17.0-cp312-cp312-win32.whl", hash = "sha256:3029331ddf56a6908547278ab4c354b2d6932eb6a53be81e0093adc98a0ae540", size = 1219823 },
+ { url = "https://files.pythonhosted.org/packages/7c/73/166aae3a12a0c5252619df37a033c8a3c9756a6af4e49640769492d14893/pygit2-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:1011236bab7317b82e6cbc3dff4be8467923b1dcf2ffe28bf2e64805dcb37749", size = 1305143 },
+ { url = "https://files.pythonhosted.org/packages/3d/09/d79f99cc25b895a891eab10697fecde3c2552fdfd467b9b72b388f9a1ad9/pygit2-1.17.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ce938e7a4fdfc816ffceb62babad65fb62e1a5ad261e880b9a072e8da144ccca", size = 5465211 },
+ { url = "https://files.pythonhosted.org/packages/a6/85/74e786da47ee2face731fb892fe87c04ae257d3b5136966f8f839727d130/pygit2-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61ff2c8b0fc96fdf45a7a5239cc262b0293a5171f68d67eea239a42c3b2226cb", size = 5687159 },
+ { url = "https://files.pythonhosted.org/packages/58/61/b502b240ba91a3dec58e4936eb85c4c17d682dfb4872c197c2212fc13bc1/pygit2-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8101aa723c292892ba46303b19487a9fb0de50d9e30f4c1c2a76e3383b6e4b6d", size = 5649303 },
+ { url = "https://files.pythonhosted.org/packages/5a/33/e359c7c938df5b1cef2acb4dcf72cb153677f2185db8bfd0bb06a7ab96f9/pygit2-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36e3e9225e3f01bb6a2d4589c126900bbc571cd0876ca9c01372a6e3d3693c0e", size = 5461433 },
+ { url = "https://files.pythonhosted.org/packages/98/8e/6885fd4ce98aedb84fe4459a3c85f3b866577aec9343becfca4a0e50a1eb/pygit2-1.17.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:614cfddbf048900da19b016787f153d44ea9fd7ef80f9e03a77024aa1555d5f4", size = 5402395 },
+ { url = "https://files.pythonhosted.org/packages/9f/62/51b84a6c80742e73ecd562f45234c6ef23e833864583bc759d8c6770f493/pygit2-1.17.0-cp313-cp313-win32.whl", hash = "sha256:1391762153af9715ed1d0586e3f207c518f03f5874e1f5b8e398697d006a0a82", size = 1219803 },
+ { url = "https://files.pythonhosted.org/packages/7d/69/8dfe160c7166cec689d985e6efb52198c2c2fd5b722196e4beb920f9f460/pygit2-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:d677d6fb85c426c5f5f8409bdc5a2e391016c99f73b97779b284c4ad25aa75fa", size = 1305156 },
+]
+
+[[package]]
+name = "pygithub"
+version = "2.6.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "deprecated" },
+ { name = "pyjwt", extra = ["crypto"] },
+ { name = "pynacl" },
+ { name = "requests" },
+ { name = "typing-extensions" },
+ { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c0/88/e08ab18dc74b2916f48703ed1a797d57cb64eca0e23b0a9254e13cfe3911/pygithub-2.6.1.tar.gz", hash = "sha256:b5c035392991cca63959e9453286b41b54d83bf2de2daa7d7ff7e4312cebf3bf", size = 3659473 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ac/fc/a444cd19ccc8c4946a512f3827ed0b3565c88488719d800d54a75d541c0b/PyGithub-2.6.1-py3-none-any.whl", hash = "sha256:6f2fa6d076ccae475f9fc392cc6cdbd54db985d4f69b8833a28397de75ed6ca3", size = 410451 },
+]
+
+[[package]]
+name = "pygments"
+version = "2.19.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293 },
+]
+
+[[package]]
+name = "pyinstrument"
+version = "5.0.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/64/6e/85c2722e40cab4fd9df6bbe68a0d032e237cf8cfada71e5f067e4e433214/pyinstrument-5.0.1.tar.gz", hash = "sha256:f4fd0754d02959c113a4b1ebed02f4627b6e2c138719ddf43244fd95f201c8c9", size = 263162 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e1/09/696e29364503393c5bd0471f1c396d41820167b3f496bf8b128dc981f30d/pyinstrument-5.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:cfd7b7dc56501a1f30aa059cc2f1746ece6258a841d2e4609882581f9c17f824", size = 128903 },
+ { url = "https://files.pythonhosted.org/packages/b5/dd/36d1641414eb0ab3fb50815de8d927b74924a9bfb1e409c53e9aad4a16de/pyinstrument-5.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fe1f33178a2b0ddb3c6d2321406228bdad41286774e65314d511dcf4a71b83e4", size = 121440 },
+ { url = "https://files.pythonhosted.org/packages/9e/3f/05196fb514735aceef9a9439f56bcaa5ccb8b440685aa4f13fdb9e925182/pyinstrument-5.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0519d02dee55a87afcf6d787f8d8f5a16d2b89f7ba9533064a986a2d31f27340", size = 144783 },
+ { url = "https://files.pythonhosted.org/packages/73/4b/1b041b974e7e465ca311e712beb8be0bc9cf769bcfc6660b1b2ba630c27c/pyinstrument-5.0.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f59ed9ac9466ff9b30eb7285160fa794aa3f8ce2bcf58a94142f945882d28ab", size = 143717 },
+ { url = "https://files.pythonhosted.org/packages/4a/dc/3fa73e2dde1588b6281e494a14c183a27e1a67db7401fddf9c528fb8e1a9/pyinstrument-5.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbf3114d332e499ba35ca4aedc1ef95bc6fb15c8d819729b5c0aeb35c8b64dd2", size = 145082 },
+ { url = "https://files.pythonhosted.org/packages/91/24/b86d4273cc524a4f334a610a1c4b157146c808d8935e85d44dff3a6b75ee/pyinstrument-5.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:20f8054e85dd710f5a8c4d6b738867366ceef89671db09c87690ba1b5c66bd67", size = 144737 },
+ { url = "https://files.pythonhosted.org/packages/3c/39/6025a71082122bfbfee4eac6649635e4c688954bdf306bcd3629457c49b2/pyinstrument-5.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:63e8d75ffa50c3cf6d980844efce0334659e934dcc3832bad08c23c171c545ff", size = 144488 },
+ { url = "https://files.pythonhosted.org/packages/da/ce/679b0e9a278004defc93c277c3f81b456389dd530f89e28a45bd9dae203e/pyinstrument-5.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a3ca9c8540051513dd633de9d7eac9fee2eda50b78b6eedeaa7e5a7be66026b5", size = 144895 },
+ { url = "https://files.pythonhosted.org/packages/58/d8/cf80bb278e2a071325e4fb244127eb68dce9d0520d20c1fda75414f119ee/pyinstrument-5.0.1-cp312-cp312-win32.whl", hash = "sha256:b549d910b846757ffbf74d94528d1a694a3848a6cfc6a6cab2ce697ee71e4548", size = 123027 },
+ { url = "https://files.pythonhosted.org/packages/39/49/9251fe641d242d4c0dc49178b064f22da1c542d80e4040561428a9f8dd1c/pyinstrument-5.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:86f20b680223697a8ac5c061fb40a63d3ee519c7dfb1097627bd4480711216d9", size = 123818 },
+ { url = "https://files.pythonhosted.org/packages/0f/ae/f8f84ecd0dc2c4f0d84920cb4ffdbea52a66e4b4abc2110f18879b57f538/pyinstrument-5.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f5065639dfedc3b8e537161f9aaa8c550c8717c935a962e9bf1e843bf0e8791f", size = 128900 },
+ { url = "https://files.pythonhosted.org/packages/23/2f/b742c46d86d4c1f74ec0819f091bbc2fad0bab786584a18d89d9178802f1/pyinstrument-5.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b5d20802b0c2bd1ddb95b2e96ebd3e9757dbab1e935792c2629166f1eb267bb2", size = 121445 },
+ { url = "https://files.pythonhosted.org/packages/d9/e0/297dc8454ed437aec0fbdc3cc1a6a5fdf6701935b91dd31caf38c5e3ff92/pyinstrument-5.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e6f5655d580429e7992c37757cc5f6e74ca81b0f2768b833d9711631a8cb2f7", size = 144904 },
+ { url = "https://files.pythonhosted.org/packages/8b/df/e4faff09fdbad7e685ceb0f96066d434fc8350382acf8df47577653f702b/pyinstrument-5.0.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4c8c9ad93f62f0bf2ddc7fb6fce3a91c008d422873824e01c5e5e83467fd1fb", size = 143801 },
+ { url = "https://files.pythonhosted.org/packages/b1/63/ed2955d980bbebf17155119e2687ac15e170b6221c4bb5f5c37f41323fe5/pyinstrument-5.0.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db15d1854b360182d242da8de89761a0ffb885eea61cb8652e40b5b9a4ef44bc", size = 145204 },
+ { url = "https://files.pythonhosted.org/packages/c4/18/31b8dcdade9767afc7a36a313d8cf9c5690b662e9755fe7bd0523125e06f/pyinstrument-5.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c803f7b880394b7bba5939ff8a59d6962589e9a0140fc33c3a6a345c58846106", size = 144881 },
+ { url = "https://files.pythonhosted.org/packages/1f/14/cd19894eb03dd28093f564e8bcf7ae4edc8e315ce962c8155cf795fc0784/pyinstrument-5.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:84e37ffabcf26fe820d354a1f7e9fc26949f953addab89b590c5000b3ffa60d0", size = 144643 },
+ { url = "https://files.pythonhosted.org/packages/80/54/3dd08f5a869d3b654ff7e4e4c9d2b34f8de73fb0f2f792fac5024a312e0f/pyinstrument-5.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a0d23d3763ec95da0beb390c2f7df7cbe36ea62b6a4d5b89c4eaab81c1c649cf", size = 145070 },
+ { url = "https://files.pythonhosted.org/packages/5d/dc/ac8e798235a1dbccefc1b204a16709cef36f02c07587763ba8eb510fc8bc/pyinstrument-5.0.1-cp313-cp313-win32.whl", hash = "sha256:967f84bd82f14425543a983956ff9cfcf1e3762755ffcec8cd835c6be22a7a0a", size = 123030 },
+ { url = "https://files.pythonhosted.org/packages/52/59/adcb3e85c9105c59382723a67f682012aa7f49027e270e721f2d59f63fcf/pyinstrument-5.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:70b16b5915534d8df40dcf04a7cc78d3290464c06fa358a4bc324280af4c74e0", size = 123825 },
+]
+
+[[package]]
+name = "pyjson5"
+version = "1.6.8"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/8e/27/76ff4f9c71b353b8171fe9a8bda20612b7b12f9728d619a5c6df1e279bce/pyjson5-1.6.8.tar.gz", hash = "sha256:b3ecee050a8a4b03cc4f1a7e9a0c478be757b46578fda1ea0f16ac8a24ba8e7a", size = 300019 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ff/3a/0ed2cdfdb67eaaa73dc28686eebee1805bd7edfa0e8f85cc0f0a7d71641e/pyjson5-1.6.8-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d7b4a4b36a8748011c7586d4bba3eb403d82bdb62605e7478f2c8b11c7e01711", size = 327150 },
+ { url = "https://files.pythonhosted.org/packages/60/60/c9e84e3b2520f7b67412173c7d17d98ab24fbef874bcfcf51eb83622fa9a/pyjson5-1.6.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9ee2f077cf05daa9aaf3c750b63cce5b5671cf8fa848b29beaf1030a08d94fda", size = 173668 },
+ { url = "https://files.pythonhosted.org/packages/ae/dd/4c9569654dc42c42d2a029e77e4371687bfb6f9f4afda6f1c8adda5d655d/pyjson5-1.6.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2bbfdeeb531f79730899ef674d80dd6b6bc7c29fe3789660115f0ba66eef834f", size = 162740 },
+ { url = "https://files.pythonhosted.org/packages/fb/6f/976aed9c5fe81cafda04bb470196c790fec78bfc057ea0a8a5e84ef4671e/pyjson5-1.6.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8fe8ba077a6ef01e6493696c27455eeae64e39ff4bd71a1a7bb66af40be7232c", size = 174476 },
+ { url = "https://files.pythonhosted.org/packages/da/8b/ab7fcfe3c07ecd1d71dec2b1062755950d8e211808f602ff60cf31264820/pyjson5-1.6.8-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:701db0660e434fae000e5d4d49efc0b80fbeedf938cbcc8b6d72c229d395feca", size = 177611 },
+ { url = "https://files.pythonhosted.org/packages/6a/64/8e52e7950da4855adbcbffa4a89864685995b692802a768ea31675e2c5c7/pyjson5-1.6.8-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:515c89e7063100bcc7c67292559bdd926da19b59fe00281e9dd2fa83f30747f1", size = 195618 },
+ { url = "https://files.pythonhosted.org/packages/dd/1a/957fea06a1e6ba34767411f2a4c6a926b32f5181a16e5505de9aca85847f/pyjson5-1.6.8-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d622733cf671c8104a2936b3ff589903fa4e2fec5db4e2679297219446d944a7", size = 175521 },
+ { url = "https://files.pythonhosted.org/packages/dc/7d/cc11b4283a6f255bea76458d663d1d41de396bc50100f2f7af603dbe6d65/pyjson5-1.6.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4577a18545f3f4461df46d3d38d85659b16a77ca8975289ef6f21e1c228f7bf", size = 185277 },
+ { url = "https://files.pythonhosted.org/packages/94/21/5187cc7105934e7ac1dfbfabd33bc517618f62a78c7357544f53653bf373/pyjson5-1.6.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0cd98871646bfb2236cfdc0ae87f8ae8f1f631133b99fef5e74307248c4ae8d", size = 196515 },
+ { url = "https://files.pythonhosted.org/packages/6d/05/2f4943349dd6814f3f24ce515ef06864f9d0351b20d69c978dd66c07fa1f/pyjson5-1.6.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5a379911161545aa57bd6cd97f249cabcfe5990688f4dff9a8f328f5f6f231d3", size = 1119222 },
+ { url = "https://files.pythonhosted.org/packages/40/62/1d78786fbd998937849e9364dc034f68fd43fa1e619dbfc71a0b57e50031/pyjson5-1.6.8-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:24c6206f508c169034fd851eb87af3aec893d2eca3bf14df65eecc520da16883", size = 997285 },
+ { url = "https://files.pythonhosted.org/packages/ad/3a/c57b9724b471e61d38123eef69eed09b6ec7fd2a144f56e49c96b11a7458/pyjson5-1.6.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fd21ce9dd4733347b6a426f4f943dd20547befbd6ef502b7480944c84a1425a3", size = 1276952 },
+ { url = "https://files.pythonhosted.org/packages/db/fa/81257989504d1442d272e86e03b9d1c4b7e355e0034c0d6c51f1ac5e3229/pyjson5-1.6.8-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7a11d3cd6114de90364c24876f1cd47dcecaffb47184ffffb01eb585c8810f4b", size = 1229440 },
+ { url = "https://files.pythonhosted.org/packages/89/88/8d63d86d871bd60ec43030509ea58e216a635fdf723290071e159689e4e2/pyjson5-1.6.8-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4a58185b9ac3adfed0adf539be7293d76fe0f7c515b6f9982b225c8084027255", size = 1318444 },
+ { url = "https://files.pythonhosted.org/packages/e4/59/1a89268f650c9d8ef73f97ff9adeab1e0f40b8bf09d82fac840e26f8154d/pyjson5-1.6.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f4724dcb646c2d40ad45d5aa7a5af86d54dc38c78e27b795418ecca23248bb", size = 1177145 },
+ { url = "https://files.pythonhosted.org/packages/e1/45/cc1967749b08a701ddeb743cd432a9a6ddbff188a1b1294d061823d22993/pyjson5-1.6.8-cp312-cp312-win32.whl", hash = "sha256:cc414b6ab28ed75d761c825f1150c19dd9a8f9b2268ee6af0173d148f018a8c5", size = 127509 },
+ { url = "https://files.pythonhosted.org/packages/d6/07/430e3a960daf322e7f4b82515ec64d6f2febccdeba31a421c2daab8a1786/pyjson5-1.6.8-cp312-cp312-win_amd64.whl", hash = "sha256:3fd513eaffba7b72d56bd5b26a92e2edb3694602adcaf3414a9f7d6c4c5d9be7", size = 143885 },
+ { url = "https://files.pythonhosted.org/packages/74/17/1a2002b6ee6b6bd7abba860afa7c8f76f6cde88a8493f7db6e14b5681fcb/pyjson5-1.6.8-cp312-cp312-win_arm64.whl", hash = "sha256:f8d5a208b8954758c75f8e8ae28d195bac3fae24ce9b51f6261b401e4ccce116", size = 127142 },
+ { url = "https://files.pythonhosted.org/packages/ee/e1/2d85c838a9a702f6d4134cbccc85f8811f96f0889ca0f642dd4e1cecae66/pyjson5-1.6.8-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:681e52df0705056dc39cf7d7bec4161e2769437fdf89f55084a4b060e9bbbfc9", size = 325120 },
+ { url = "https://files.pythonhosted.org/packages/42/43/3b2a26ca84573209616675d63ffe559a6e8b73488d6c11e4a45f0204fc3e/pyjson5-1.6.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1550dc70199401056f80acfc503da36de2df70dd4364a0efb654ffe7e9246ac6", size = 172648 },
+ { url = "https://files.pythonhosted.org/packages/9d/cd/ad93170f8b7934b13e5a340daed934e7a8591e5d08abf3f50ab144a2663d/pyjson5-1.6.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:77005662014809a7b8b78f984131a3751295ff102f4c62b452bbdac946360166", size = 161830 },
+ { url = "https://files.pythonhosted.org/packages/21/d3/dffd61a6b17680f39d5aaea24297ddf13d03064fb9ab5987de4bb619bd79/pyjson5-1.6.8-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65f2922cc8fd6b1e9cc8ff7e5fe975f7bf111c03eb06ed9b2ee793e6870d3212", size = 173697 },
+ { url = "https://files.pythonhosted.org/packages/b8/72/9566b6ec24c11293d2bb91be24492afaf9e339781057b355129a7d262050/pyjson5-1.6.8-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d83e0bc87d94baa39703c1d7139c5ce7ff025a53a34251762128713a294cf147", size = 177518 },
+ { url = "https://files.pythonhosted.org/packages/4b/2c/e615aca4b7e8f1c3b4d5520b8ec6b808a5320e19be8ccd6828b016e46b77/pyjson5-1.6.8-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:72fa22291149e8731c4bbc225cf75a41a049a54903018ca670c849658c1edc04", size = 193327 },
+ { url = "https://files.pythonhosted.org/packages/62/64/f06dec3ec3c7501d5a969d9aec1403898b70a2817225db749c8219203229/pyjson5-1.6.8-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3948742ff2d2f222ab87cc77d8c6ce8a9ef063fe2904f8fa88309611a128147a", size = 174453 },
+ { url = "https://files.pythonhosted.org/packages/d4/ca/f5b147b8a186e37a9339290dd9c8271aae94eab0307169124ec83c74aa99/pyjson5-1.6.8-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94e1b9d219f40bebbb6285840b094eca523481cf199cd46154044dae333d492d", size = 184161 },
+ { url = "https://files.pythonhosted.org/packages/1e/9d/7e7d2eaef592e350e8988a68b4d38f358894a1fb05237b6aef5cd25fea8a/pyjson5-1.6.8-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2dea723f88e89dba1d4a6542c5527cac7ecff6755291ad2eb60e1c2f578bb69f", size = 195307 },
+ { url = "https://files.pythonhosted.org/packages/51/c1/1538a2064599e6e77b96e5a58dc212d0fabf18442363a0224f5fdc31a51e/pyjson5-1.6.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:06b857a5a36f2bad52267d1a57a880cd62c3b0d3f3a719ab8599a1d5465e2417", size = 1121719 },
+ { url = "https://files.pythonhosted.org/packages/21/36/4af2c28aa6a0a9c2f839d2f63613605c11d0294d5a8dadcf65cc6b7e4f5c/pyjson5-1.6.8-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:aebdd4c5a878f125fea8b192244b1e64532561a315725502eee8d7629598882f", size = 995812 },
+ { url = "https://files.pythonhosted.org/packages/55/63/1c7c7797113aee8fd6bbebf56ac2603681635dd7bab73bd14d5ad34b48d1/pyjson5-1.6.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:10688e75fd9f18e34dddd111cafd87cca6727837469b8bfb61f2d2685490f976", size = 1279088 },
+ { url = "https://files.pythonhosted.org/packages/b4/c1/1121519c37ce70e4d1d4e5f714f5e0121313b79421ba8495a130cdad5d1e/pyjson5-1.6.8-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e3aee51ef5feb4409ff36713f70251265b04c18c8322bc91d2578759225e918d", size = 1229957 },
+ { url = "https://files.pythonhosted.org/packages/84/39/3618b8e0dbc53233afd99c867d0f4fa7d8cc36489949d18dc833e692f7f3/pyjson5-1.6.8-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:5e7f5b92460dc69ce27814d4ab546e3bae84b9b2e26f29701ad7fab637e6bf2f", size = 1318799 },
+ { url = "https://files.pythonhosted.org/packages/90/ae/353ce74183d884b56407d29ebc3aab63d23ca7dfb9e9a75208737a917e11/pyjson5-1.6.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b77c94296cd0763bc2d7d276cb53dbc97edeacfbc50c02103521d586ca91ff37", size = 1180476 },
+ { url = "https://files.pythonhosted.org/packages/8c/df/f8afe0318b0b628a8c8abce57ffccb7afd0df9aab08bb08f4c2de5008854/pyjson5-1.6.8-cp313-cp313-win32.whl", hash = "sha256:260b6f2d7148f5fa23d817b82e9960a75a44678116d6a5513bed4e88d6697343", size = 127415 },
+ { url = "https://files.pythonhosted.org/packages/67/d9/9bd17bc0c99d2d917900114d548414f609ea81947e58f6525068d673fc77/pyjson5-1.6.8-cp313-cp313-win_amd64.whl", hash = "sha256:fe03568ca61050f00c951501d70aaf68064ab5fecb3d84961ce743102cc81036", size = 143519 },
+ { url = "https://files.pythonhosted.org/packages/ee/6d/8f35cab314cab3b67681ec072e7acb6432bee3ebc45dcf11fd8b6535cb57/pyjson5-1.6.8-cp313-cp313-win_arm64.whl", hash = "sha256:f984d06902b2096206d15bcbc6f0c75c024de295294ca04c8c11aedc871e2da0", size = 126843 },
+]
+
+[[package]]
+name = "pyjwt"
+version = "2.10.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997 },
+]
+
+[package.optional-dependencies]
+crypto = [
+ { name = "cryptography" },
+]
+
+[[package]]
+name = "pynacl"
+version = "1.5.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "cffi" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a7/22/27582568be639dfe22ddb3902225f91f2f17ceff88ce80e4db396c8986da/PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba", size = 3392854 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ce/75/0b8ede18506041c0bf23ac4d8e2971b4161cd6ce630b177d0a08eb0d8857/PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1", size = 349920 },
+ { url = "https://files.pythonhosted.org/packages/59/bb/fddf10acd09637327a97ef89d2a9d621328850a72f1fdc8c08bdf72e385f/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92", size = 601722 },
+ { url = "https://files.pythonhosted.org/packages/5d/70/87a065c37cca41a75f2ce113a5a2c2aa7533be648b184ade58971b5f7ccc/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394", size = 680087 },
+ { url = "https://files.pythonhosted.org/packages/ee/87/f1bb6a595f14a327e8285b9eb54d41fef76c585a0edef0a45f6fc95de125/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d", size = 856678 },
+ { url = "https://files.pythonhosted.org/packages/66/28/ca86676b69bf9f90e710571b67450508484388bfce09acf8a46f0b8c785f/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858", size = 1133660 },
+ { url = "https://files.pythonhosted.org/packages/3d/85/c262db650e86812585e2bc59e497a8f59948a005325a11bbbc9ecd3fe26b/PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b", size = 663824 },
+ { url = "https://files.pythonhosted.org/packages/fd/1a/cc308a884bd299b651f1633acb978e8596c71c33ca85e9dc9fa33a5399b9/PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff", size = 1117912 },
+ { url = "https://files.pythonhosted.org/packages/25/2d/b7df6ddb0c2a33afdb358f8af6ea3b8c4d1196ca45497dd37a56f0c122be/PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543", size = 204624 },
+ { url = "https://files.pythonhosted.org/packages/5e/22/d3db169895faaf3e2eda892f005f433a62db2decbcfbc2f61e6517adfa87/PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93", size = 212141 },
+]
+
+[[package]]
+name = "pyright"
+version = "1.1.396"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "nodeenv" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/bd/73/f20cb1dea1bdc1774e7f860fb69dc0718c7d8dea854a345faec845eb086a/pyright-1.1.396.tar.gz", hash = "sha256:142901f5908f5a0895be3d3befcc18bedcdb8cc1798deecaec86ef7233a29b03", size = 3814400 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/80/be/ecb7cfb42d242b7ee764b52e6ff4782beeec00e3b943a3ec832b281f9da6/pyright-1.1.396-py3-none-any.whl", hash = "sha256:c635e473095b9138c471abccca22b9fedbe63858e0b40d4fc4b67da041891844", size = 5689355 },
+]
+
+[[package]]
+name = "pytest"
+version = "8.3.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+ { name = "iniconfig" },
+ { name = "packaging" },
+ { name = "pluggy" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ae/3c/c9d525a414d506893f0cd8a8d0de7706446213181570cdbd766691164e40/pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845", size = 1450891 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/30/3d/64ad57c803f1fa1e963a7946b6e0fea4a70df53c1a7fed304586539c2bac/pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", size = 343634 },
+]
+
+[[package]]
+name = "pytest-snapshot"
+version = "0.9.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pytest" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/9b/7b/ab8f1fc1e687218aa66acec1c3674d9c443f6a2dc8cb6a50f464548ffa34/pytest-snapshot-0.9.0.tar.gz", hash = "sha256:c7013c3abc3e860f9feff899f8b4debe3708650d8d8242a61bf2625ff64db7f3", size = 19877 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/cb/29/518f32faf6edad9f56d6e0107217f7de6b79f297a47170414a2bd4be7f01/pytest_snapshot-0.9.0-py3-none-any.whl", hash = "sha256:4b9fe1c21c868fe53a545e4e3184d36bc1c88946e3f5c1d9dd676962a9b3d4ab", size = 10715 },
+]
+
+[[package]]
+name = "python-dateutil"
+version = "2.9.0.post0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "six" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 },
+]
+
+[[package]]
+name = "python-dotenv"
+version = "1.0.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/bc/57/e84d88dfe0aec03b7a2d4327012c1627ab5f03652216c63d49846d7a6c58/python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", size = 39115 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863 },
+]
+
+[[package]]
+name = "python-gitlab"
+version = "4.13.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "requests" },
+ { name = "requests-toolbelt" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c4/ea/e2cde926d63526935c1df259177371a195089b631d67a577fe5c39fbc7e1/python_gitlab-4.13.0.tar.gz", hash = "sha256:576bfb0901faca0c6b2d1ff2592e02944a6ec3e086c3129fb43c2a0df56a1c67", size = 484996 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/6b/5e/5fb4dcae9f5af5463c16952823d446ca449cce920efe8669871f600f0ab9/python_gitlab-4.13.0-py3-none-any.whl", hash = "sha256:8299a054fb571da16e1a8c1868fff01f34ac41ea1410c713a4647b3bbb2aa279", size = 145254 },
+]
+
+[[package]]
+name = "python-levenshtein"
+version = "0.27.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "levenshtein" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/13/f6/d865a565b7eeef4b5f9a18accafb03d5730c712420fc84a3a40555f7ea6b/python_levenshtein-0.27.1.tar.gz", hash = "sha256:3a5314a011016d373d309a68e875fd029caaa692ad3f32e78319299648045f11", size = 12326 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2a/95/8c8fd923b0a702388da4f9e0368f490d123cc5224279e6a083984304a15e/python_levenshtein-0.27.1-py3-none-any.whl", hash = "sha256:e1a4bc2a70284b2ebc4c505646142fecd0f831e49aa04ed972995895aec57396", size = 9426 },
+]
+
+[[package]]
+name = "python-multipart"
+version = "0.0.20"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546 },
+]
+
+[[package]]
+name = "python-semantic-release"
+version = "9.21.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "click" },
+ { name = "click-option-group" },
+ { name = "deprecated" },
+ { name = "dotty-dict" },
+ { name = "gitpython" },
+ { name = "importlib-resources" },
+ { name = "jinja2" },
+ { name = "pydantic" },
+ { name = "python-gitlab" },
+ { name = "requests" },
+ { name = "rich" },
+ { name = "shellingham" },
+ { name = "tomlkit" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/01/21/d64b81fa9e7326b8c25765ecf0e0f1458dd098a94a9e80d0e6671c827880/python_semantic_release-9.21.0.tar.gz", hash = "sha256:d8673d25cab2acdfeb34f791e271bb8a02ecc63650c5aa5c03d520ddf0cbe887", size = 307256 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/10/75/24ad6ed3832e4616ea9d97fe9644d5efb98c9014f25cd6c83e8dc10ef574/python_semantic_release-9.21.0-py3-none-any.whl", hash = "sha256:1ecf9753283835f1c6cda4702e419d9702863a51b03fa11955429139234f063c", size = 132564 },
+]
+
+[[package]]
+name = "pytz"
+version = "2025.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/5f/57/df1c9157c8d5a05117e455d66fd7cf6dbc46974f832b1058ed4856785d8a/pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e", size = 319617 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/eb/38/ac33370d784287baa1c3d538978b5e2ea064d4c1b93ffbd12826c190dd10/pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57", size = 507930 },
+]
+
+[[package]]
+name = "pywin32"
+version = "308"
+source = { registry = "https://pypi.org/simple" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/00/7c/d00d6bdd96de4344e06c4afbf218bc86b54436a94c01c71a8701f613aa56/pywin32-308-cp312-cp312-win32.whl", hash = "sha256:587f3e19696f4bf96fde9d8a57cec74a57021ad5f204c9e627e15c33ff568897", size = 5939729 },
+ { url = "https://files.pythonhosted.org/packages/21/27/0c8811fbc3ca188f93b5354e7c286eb91f80a53afa4e11007ef661afa746/pywin32-308-cp312-cp312-win_amd64.whl", hash = "sha256:00b3e11ef09ede56c6a43c71f2d31857cf7c54b0ab6e78ac659497abd2834f47", size = 6543015 },
+ { url = "https://files.pythonhosted.org/packages/9d/0f/d40f8373608caed2255781a3ad9a51d03a594a1248cd632d6a298daca693/pywin32-308-cp312-cp312-win_arm64.whl", hash = "sha256:9b4de86c8d909aed15b7011182c8cab38c8850de36e6afb1f0db22b8959e3091", size = 7976033 },
+ { url = "https://files.pythonhosted.org/packages/a9/a4/aa562d8935e3df5e49c161b427a3a2efad2ed4e9cf81c3de636f1fdddfd0/pywin32-308-cp313-cp313-win32.whl", hash = "sha256:1c44539a37a5b7b21d02ab34e6a4d314e0788f1690d65b48e9b0b89f31abbbed", size = 5938579 },
+ { url = "https://files.pythonhosted.org/packages/c7/50/b0efb8bb66210da67a53ab95fd7a98826a97ee21f1d22949863e6d588b22/pywin32-308-cp313-cp313-win_amd64.whl", hash = "sha256:fd380990e792eaf6827fcb7e187b2b4b1cede0585e3d0c9e84201ec27b9905e4", size = 6542056 },
+ { url = "https://files.pythonhosted.org/packages/26/df/2b63e3e4f2df0224f8aaf6d131f54fe4e8c96400eb9df563e2aae2e1a1f9/pywin32-308-cp313-cp313-win_arm64.whl", hash = "sha256:ef313c46d4c18dfb82a2431e3051ac8f112ccee1a34f29c263c583c568db63cd", size = 7974986 },
+]
+
+[[package]]
+name = "pyyaml"
+version = "6.0.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873 },
+ { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302 },
+ { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154 },
+ { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223 },
+ { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542 },
+ { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164 },
+ { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611 },
+ { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591 },
+ { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338 },
+ { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309 },
+ { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679 },
+ { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428 },
+ { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361 },
+ { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523 },
+ { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660 },
+ { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597 },
+ { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527 },
+ { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446 },
+]
+
+[[package]]
+name = "rapidfuzz"
+version = "3.12.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f9/be/8dff25a6157dfbde9867720b1282157fe7b809e085130bb89d7655c62186/rapidfuzz-3.12.2.tar.gz", hash = "sha256:b0ba1ccc22fff782e7152a3d3d0caca44ec4e32dc48ba01c560b8593965b5aa3", size = 57907839 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a7/d2/e071753227c9e9f7f3550b983f30565f6e994581529815fa5a8879e7cd10/rapidfuzz-3.12.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1d982a651253ffe8434d9934ff0c1089111d60502228464721a2a4587435e159", size = 1944403 },
+ { url = "https://files.pythonhosted.org/packages/aa/d1/4a10d21cc97aa36f4019af24382b5b4dc5ea6444499883c1c1286c6089ba/rapidfuzz-3.12.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:02e6466caa0222d5233b1f05640873671cd99549a5c5ba4c29151634a1e56080", size = 1430287 },
+ { url = "https://files.pythonhosted.org/packages/6a/2d/76d39ab0beeb884d432096fe288c41850e37608e0145264081d0cb809f3c/rapidfuzz-3.12.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e956b3f053e474abae69ac693a52742109d860ac2375fe88e9387d3277f4c96c", size = 1403693 },
+ { url = "https://files.pythonhosted.org/packages/85/1a/719b0f6498c003627e4b83b841bdcd48b11de8a9908a9051c4d2a0bc2245/rapidfuzz-3.12.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2dee7d740a2d5418d4f964f39ab8d89923e6b945850db833e798a1969b19542a", size = 5555878 },
+ { url = "https://files.pythonhosted.org/packages/af/48/14d952a73254b4b0e517141acd27979bd23948adaf197f6ca2dc722fde6a/rapidfuzz-3.12.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a057cdb0401e42c84b6516c9b1635f7aedd5e430c6e388bd5f6bcd1d6a0686bb", size = 1655301 },
+ { url = "https://files.pythonhosted.org/packages/db/3f/b093e154e9752325d7459aa6dca43b7acbcaffa05133507e2403676e3e75/rapidfuzz-3.12.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dccf8d4fb5b86d39c581a59463c596b1d09df976da26ff04ae219604223d502f", size = 1678069 },
+ { url = "https://files.pythonhosted.org/packages/d6/7e/88853ecae5b5456eb1a1d8a01cbd534e25b671735d5d974609cbae082542/rapidfuzz-3.12.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21d5b3793c6f5aecca595cd24164bf9d3c559e315ec684f912146fc4e769e367", size = 3137119 },
+ { url = "https://files.pythonhosted.org/packages/4d/d2/b1f809b815aaf682ddac9c57929149f740b90feeb4f8da2f535c196de821/rapidfuzz-3.12.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:46a616c0e13cff2de1761b011e0b14bb73b110182f009223f1453d505c9a975c", size = 2491639 },
+ { url = "https://files.pythonhosted.org/packages/61/e4/a908d7b8db6e52ba2f80f6f0d0709ef9fdedb767db4307084331742b67f0/rapidfuzz-3.12.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:19fa5bc4301a1ee55400d4a38a8ecf9522b0391fc31e6da5f4d68513fe5c0026", size = 7821561 },
+ { url = "https://files.pythonhosted.org/packages/f3/83/0250c49deefff15c46f5e590d8ee6abbd0f056e20b85994db55c16ac6ead/rapidfuzz-3.12.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:544a47190a0d25971658a9365dba7095397b4ce3e897f7dd0a77ca2cf6fa984e", size = 2874048 },
+ { url = "https://files.pythonhosted.org/packages/6c/3f/8d433d964c6e476476ee53eae5fa77b9f16b38d312eb1571e9099a6a3b12/rapidfuzz-3.12.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f21af27c5e001f0ba1b88c36a0936437dfe034c452548d998891c21125eb640f", size = 3522801 },
+ { url = "https://files.pythonhosted.org/packages/82/85/4931bfa41ef837b1544838e46e0556640d18114b3da9cf05e10defff00ae/rapidfuzz-3.12.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b63170d9db00629b5b3f2862114d8d6ee19127eaba0eee43762d62a25817dbe0", size = 4567304 },
+ { url = "https://files.pythonhosted.org/packages/b1/fe/fdae322869885115dd19a38c1da71b73a8832aa77757c93f460743d4f54c/rapidfuzz-3.12.2-cp312-cp312-win32.whl", hash = "sha256:6c7152d77b2eb6bfac7baa11f2a9c45fd5a2d848dbb310acd0953b3b789d95c9", size = 1845332 },
+ { url = "https://files.pythonhosted.org/packages/ca/a4/2ccebda5fb8a266d163d57a42c2a6ef6f91815df5d89cf38c12e8aa6ed0b/rapidfuzz-3.12.2-cp312-cp312-win_amd64.whl", hash = "sha256:1a314d170ee272ac87579f25a6cf8d16a031e1f7a7b07663434b41a1473bc501", size = 1617926 },
+ { url = "https://files.pythonhosted.org/packages/a5/bc/aa8a4dc4ebff966dd039cce017c614cfd202049b4d1a2daafee7d018521b/rapidfuzz-3.12.2-cp312-cp312-win_arm64.whl", hash = "sha256:d41e8231326e94fd07c4d8f424f6bed08fead6f5e6688d1e6e787f1443ae7631", size = 864737 },
+ { url = "https://files.pythonhosted.org/packages/96/59/2ea3b5bb82798eae73d6ee892264ebfe42727626c1f0e96c77120f0d5cf6/rapidfuzz-3.12.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:941f31038dba5d3dedcfcceba81d61570ad457c873a24ceb13f4f44fcb574260", size = 1936870 },
+ { url = "https://files.pythonhosted.org/packages/54/85/4e486bf9ea05e771ad231731305ed701db1339157f630b76b246ce29cf71/rapidfuzz-3.12.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:fe2dfc454ee51ba168a67b1e92b72aad251e45a074972cef13340bbad2fd9438", size = 1424231 },
+ { url = "https://files.pythonhosted.org/packages/dc/60/aeea3eed402c40a8cf055d554678769fbee0dd95c22f04546070a22bb90e/rapidfuzz-3.12.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78fafaf7f5a48ee35ccd7928339080a0136e27cf97396de45259eca1d331b714", size = 1398055 },
+ { url = "https://files.pythonhosted.org/packages/33/6b/757106f4c21fe3f20ce13ba3df560da60e52fe0dc390fd22bf613761669c/rapidfuzz-3.12.2-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0c7989ff32c077bb8fd53253fd6ca569d1bfebc80b17557e60750e6909ba4fe", size = 5526188 },
+ { url = "https://files.pythonhosted.org/packages/1e/a2/7c680cdc5532746dba67ecf302eed975252657094e50ae334fa9268352e8/rapidfuzz-3.12.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96fa00bc105caa34b6cd93dca14a29243a3a7f0c336e4dcd36348d38511e15ac", size = 1648483 },
+ { url = "https://files.pythonhosted.org/packages/f6/b0/ce942a1448b1a75d64af230dd746dede502224dd29ca9001665bbfd4bee6/rapidfuzz-3.12.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bccfb30c668620c5bc3490f2dc7d7da1cca0ead5a9da8b755e2e02e2ef0dff14", size = 1676076 },
+ { url = "https://files.pythonhosted.org/packages/ba/71/81f77b08333200be6984b6cdf2bdfd7cfca4943f16b478a2f7838cba8d66/rapidfuzz-3.12.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f9b0adc3d894beb51f5022f64717b6114a6fabaca83d77e93ac7675911c8cc5", size = 3114169 },
+ { url = "https://files.pythonhosted.org/packages/01/16/f3f34b207fdc8c61a33f9d2d61fc96b62c7dadca88bda1df1be4b94afb0b/rapidfuzz-3.12.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:32691aa59577f42864d5535cb6225d0f47e2c7bff59cf4556e5171e96af68cc1", size = 2485317 },
+ { url = "https://files.pythonhosted.org/packages/b2/a6/b954f0766f644eb8dd8df44703e024ab4f5f15a8f8f5ea969963dd036f50/rapidfuzz-3.12.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:758b10380ad34c1f51753a070d7bb278001b5e6fcf544121c6df93170952d705", size = 7844495 },
+ { url = "https://files.pythonhosted.org/packages/fb/8f/1dc604d05e07150a02b56a8ffc47df75ce316c65467259622c9edf098451/rapidfuzz-3.12.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:50a9c54c0147b468363119132d514c5024fbad1ed8af12bd8bd411b0119f9208", size = 2873242 },
+ { url = "https://files.pythonhosted.org/packages/78/a9/9c649ace4b7f885e0a5fdcd1f33b057ebd83ecc2837693e6659bd944a2bb/rapidfuzz-3.12.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e3ceb87c11d2d0fbe8559bb795b0c0604b84cfc8bb7b8720b5c16e9e31e00f41", size = 3519124 },
+ { url = "https://files.pythonhosted.org/packages/f5/81/ce0b774e540a2e22ec802e383131d7ead18347197304d584c4ccf7b8861a/rapidfuzz-3.12.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f7c9a003002434889255ff5676ca0f8934a478065ab5e702f75dc42639505bba", size = 4557831 },
+ { url = "https://files.pythonhosted.org/packages/13/28/7bf0ee8d35efa7ab14e83d1795cdfd54833aa0428b6f87e987893136c372/rapidfuzz-3.12.2-cp313-cp313-win32.whl", hash = "sha256:cf165a76870cd875567941cf861dfd361a0a6e6a56b936c5d30042ddc9def090", size = 1842802 },
+ { url = "https://files.pythonhosted.org/packages/ef/7e/792d609484776c8a40e1695ebd28b62196be9f8347b785b9104604dc7268/rapidfuzz-3.12.2-cp313-cp313-win_amd64.whl", hash = "sha256:55bcc003541f5f16ec0a73bf6de758161973f9e8d75161954380738dd147f9f2", size = 1615808 },
+ { url = "https://files.pythonhosted.org/packages/4b/43/ca3d1018b392f49131843648e10b08ace23afe8dad3bee5f136e4346b7cd/rapidfuzz-3.12.2-cp313-cp313-win_arm64.whl", hash = "sha256:69f6ecdf1452139f2b947d0c169a605de578efdb72cbb2373cb0a94edca1fd34", size = 863535 },
+]
+
+[[package]]
+name = "regex"
+version = "2024.11.6"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/8e/5f/bd69653fbfb76cf8604468d3b4ec4c403197144c7bfe0e6a5fc9e02a07cb/regex-2024.11.6.tar.gz", hash = "sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519", size = 399494 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ba/30/9a87ce8336b172cc232a0db89a3af97929d06c11ceaa19d97d84fa90a8f8/regex-2024.11.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:52fb28f528778f184f870b7cf8f225f5eef0a8f6e3778529bdd40c7b3920796a", size = 483781 },
+ { url = "https://files.pythonhosted.org/packages/01/e8/00008ad4ff4be8b1844786ba6636035f7ef926db5686e4c0f98093612add/regex-2024.11.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdd6028445d2460f33136c55eeb1f601ab06d74cb3347132e1c24250187500d9", size = 288455 },
+ { url = "https://files.pythonhosted.org/packages/60/85/cebcc0aff603ea0a201667b203f13ba75d9fc8668fab917ac5b2de3967bc/regex-2024.11.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805e6b60c54bf766b251e94526ebad60b7de0c70f70a4e6210ee2891acb70bf2", size = 284759 },
+ { url = "https://files.pythonhosted.org/packages/94/2b/701a4b0585cb05472a4da28ee28fdfe155f3638f5e1ec92306d924e5faf0/regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4", size = 794976 },
+ { url = "https://files.pythonhosted.org/packages/4b/bf/fa87e563bf5fee75db8915f7352e1887b1249126a1be4813837f5dbec965/regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577", size = 833077 },
+ { url = "https://files.pythonhosted.org/packages/a1/56/7295e6bad94b047f4d0834e4779491b81216583c00c288252ef625c01d23/regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3", size = 823160 },
+ { url = "https://files.pythonhosted.org/packages/fb/13/e3b075031a738c9598c51cfbc4c7879e26729c53aa9cca59211c44235314/regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e", size = 796896 },
+ { url = "https://files.pythonhosted.org/packages/24/56/0b3f1b66d592be6efec23a795b37732682520b47c53da5a32c33ed7d84e3/regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe", size = 783997 },
+ { url = "https://files.pythonhosted.org/packages/f9/a1/eb378dada8b91c0e4c5f08ffb56f25fcae47bf52ad18f9b2f33b83e6d498/regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e", size = 781725 },
+ { url = "https://files.pythonhosted.org/packages/83/f2/033e7dec0cfd6dda93390089864732a3409246ffe8b042e9554afa9bff4e/regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29", size = 789481 },
+ { url = "https://files.pythonhosted.org/packages/83/23/15d4552ea28990a74e7696780c438aadd73a20318c47e527b47a4a5a596d/regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39", size = 852896 },
+ { url = "https://files.pythonhosted.org/packages/e3/39/ed4416bc90deedbfdada2568b2cb0bc1fdb98efe11f5378d9892b2a88f8f/regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51", size = 860138 },
+ { url = "https://files.pythonhosted.org/packages/93/2d/dd56bb76bd8e95bbce684326302f287455b56242a4f9c61f1bc76e28360e/regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad", size = 787692 },
+ { url = "https://files.pythonhosted.org/packages/0b/55/31877a249ab7a5156758246b9c59539abbeba22461b7d8adc9e8475ff73e/regex-2024.11.6-cp312-cp312-win32.whl", hash = "sha256:32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54", size = 262135 },
+ { url = "https://files.pythonhosted.org/packages/38/ec/ad2d7de49a600cdb8dd78434a1aeffe28b9d6fc42eb36afab4a27ad23384/regex-2024.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b", size = 273567 },
+ { url = "https://files.pythonhosted.org/packages/90/73/bcb0e36614601016552fa9344544a3a2ae1809dc1401b100eab02e772e1f/regex-2024.11.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a6ba92c0bcdf96cbf43a12c717eae4bc98325ca3730f6b130ffa2e3c3c723d84", size = 483525 },
+ { url = "https://files.pythonhosted.org/packages/0f/3f/f1a082a46b31e25291d830b369b6b0c5576a6f7fb89d3053a354c24b8a83/regex-2024.11.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:525eab0b789891ac3be914d36893bdf972d483fe66551f79d3e27146191a37d4", size = 288324 },
+ { url = "https://files.pythonhosted.org/packages/09/c9/4e68181a4a652fb3ef5099e077faf4fd2a694ea6e0f806a7737aff9e758a/regex-2024.11.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:086a27a0b4ca227941700e0b31425e7a28ef1ae8e5e05a33826e17e47fbfdba0", size = 284617 },
+ { url = "https://files.pythonhosted.org/packages/fc/fd/37868b75eaf63843165f1d2122ca6cb94bfc0271e4428cf58c0616786dce/regex-2024.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde01f35767c4a7899b7eb6e823b125a64de314a8ee9791367c9a34d56af18d0", size = 795023 },
+ { url = "https://files.pythonhosted.org/packages/c4/7c/d4cd9c528502a3dedb5c13c146e7a7a539a3853dc20209c8e75d9ba9d1b2/regex-2024.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b583904576650166b3d920d2bcce13971f6f9e9a396c673187f49811b2769dc7", size = 833072 },
+ { url = "https://files.pythonhosted.org/packages/4f/db/46f563a08f969159c5a0f0e722260568425363bea43bb7ae370becb66a67/regex-2024.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c4de13f06a0d54fa0d5ab1b7138bfa0d883220965a29616e3ea61b35d5f5fc7", size = 823130 },
+ { url = "https://files.pythonhosted.org/packages/db/60/1eeca2074f5b87df394fccaa432ae3fc06c9c9bfa97c5051aed70e6e00c2/regex-2024.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cde6e9f2580eb1665965ce9bf17ff4952f34f5b126beb509fee8f4e994f143c", size = 796857 },
+ { url = "https://files.pythonhosted.org/packages/10/db/ac718a08fcee981554d2f7bb8402f1faa7e868c1345c16ab1ebec54b0d7b/regex-2024.11.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d7f453dca13f40a02b79636a339c5b62b670141e63efd511d3f8f73fba162b3", size = 784006 },
+ { url = "https://files.pythonhosted.org/packages/c2/41/7da3fe70216cea93144bf12da2b87367590bcf07db97604edeea55dac9ad/regex-2024.11.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59dfe1ed21aea057a65c6b586afd2a945de04fc7db3de0a6e3ed5397ad491b07", size = 781650 },
+ { url = "https://files.pythonhosted.org/packages/a7/d5/880921ee4eec393a4752e6ab9f0fe28009435417c3102fc413f3fe81c4e5/regex-2024.11.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b97c1e0bd37c5cd7902e65f410779d39eeda155800b65fc4d04cc432efa9bc6e", size = 789545 },
+ { url = "https://files.pythonhosted.org/packages/dc/96/53770115e507081122beca8899ab7f5ae28ae790bfcc82b5e38976df6a77/regex-2024.11.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d1e379028e0fc2ae3654bac3cbbef81bf3fd571272a42d56c24007979bafb6", size = 853045 },
+ { url = "https://files.pythonhosted.org/packages/31/d3/1372add5251cc2d44b451bd94f43b2ec78e15a6e82bff6a290ef9fd8f00a/regex-2024.11.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:13291b39131e2d002a7940fb176e120bec5145f3aeb7621be6534e46251912c4", size = 860182 },
+ { url = "https://files.pythonhosted.org/packages/ed/e3/c446a64984ea9f69982ba1a69d4658d5014bc7a0ea468a07e1a1265db6e2/regex-2024.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f51f88c126370dcec4908576c5a627220da6c09d0bff31cfa89f2523843316d", size = 787733 },
+ { url = "https://files.pythonhosted.org/packages/2b/f1/e40c8373e3480e4f29f2692bd21b3e05f296d3afebc7e5dcf21b9756ca1c/regex-2024.11.6-cp313-cp313-win32.whl", hash = "sha256:63b13cfd72e9601125027202cad74995ab26921d8cd935c25f09c630436348ff", size = 262122 },
+ { url = "https://files.pythonhosted.org/packages/45/94/bc295babb3062a731f52621cdc992d123111282e291abaf23faa413443ea/regex-2024.11.6-cp313-cp313-win_amd64.whl", hash = "sha256:2b3361af3198667e99927da8b84c1b010752fa4b1115ee30beaa332cabc3ef1a", size = 273545 },
+]
+
+[[package]]
+name = "requests"
+version = "2.32.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "certifi" },
+ { name = "charset-normalizer" },
+ { name = "idna" },
+ { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 },
+]
+
+[[package]]
+name = "requests-toolbelt"
+version = "1.0.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "requests" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/f3/61/d7545dafb7ac2230c70d38d31cbfe4cc64f7144dc41f6e4e4b78ecd9f5bb/requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6", size = 206888 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06", size = 54481 },
+]
+
+[[package]]
+name = "rich"
+version = "13.9.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "markdown-it-py" },
+ { name = "pygments" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ab/3a/0316b28d0761c6734d6bc14e770d85506c986c85ffb239e688eeaab2c2bc/rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098", size = 223149 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/19/71/39c7c0d87f8d4e6c020a393182060eaefeeae6c01dab6a84ec346f2567df/rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90", size = 242424 },
+]
+
+[[package]]
+name = "rich-click"
+version = "1.8.6"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "click" },
+ { name = "rich" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ea/e3/ff1c715b673ec9e01f4482d8d0edfd9adf891f3630d83e695b38337a3889/rich_click-1.8.6.tar.gz", hash = "sha256:8a2448fd80e3d4e16fcb3815bfbc19be9bae75c9bb6aedf637901e45f3555752", size = 38247 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7e/09/c20b04b6c9cf273995753f226ca51656e00f8a37f1e723f8c713b93b2ad4/rich_click-1.8.6-py3-none-any.whl", hash = "sha256:55fb571bad7d3d69ac43ca45f05b44616fd019616161b1815ff053567b9a8e22", size = 35076 },
+]
+
+[[package]]
+name = "rich-toolkit"
+version = "0.13.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "click" },
+ { name = "rich" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/5b/8a/71cfbf6bf6257ea785d1f030c22468f763eea1b3e5417620f2ba9abd6dca/rich_toolkit-0.13.2.tar.gz", hash = "sha256:fea92557530de7c28f121cbed572ad93d9e0ddc60c3ca643f1b831f2f56b95d3", size = 72288 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7e/1b/1c2f43af46456050b27810a7a013af8a7e12bc545a0cdc00eb0df55eb769/rich_toolkit-0.13.2-py3-none-any.whl", hash = "sha256:f3f6c583e5283298a2f7dbd3c65aca18b7f818ad96174113ab5bec0b0e35ed61", size = 13566 },
+]
+
+[[package]]
+name = "roman-numerals-py"
+version = "3.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/30/76/48fd56d17c5bdbdf65609abbc67288728a98ed4c02919428d4f52d23b24b/roman_numerals_py-3.1.0.tar.gz", hash = "sha256:be4bf804f083a4ce001b5eb7e3c0862479d10f94c936f6c4e5f250aa5ff5bd2d", size = 9017 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/53/97/d2cbbaa10c9b826af0e10fdf836e1bf344d9f0abb873ebc34d1f49642d3f/roman_numerals_py-3.1.0-py3-none-any.whl", hash = "sha256:9da2ad2fb670bcf24e81070ceb3be72f6c11c440d73bd579fbeca1e9f330954c", size = 7742 },
+]
+
+[[package]]
+name = "ruff"
+version = "0.9.9"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/6f/c3/418441a8170e8d53d05c0b9dad69760dbc7b8a12c10dbe6db1e1205d2377/ruff-0.9.9.tar.gz", hash = "sha256:0062ed13f22173e85f8f7056f9a24016e692efeea8704d1a5e8011b8aa850933", size = 3717448 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/bc/c3/2c4afa9ba467555d074b146d9aed0633a56ccdb900839fb008295d037b89/ruff-0.9.9-py3-none-linux_armv6l.whl", hash = "sha256:628abb5ea10345e53dff55b167595a159d3e174d6720bf19761f5e467e68d367", size = 10027252 },
+ { url = "https://files.pythonhosted.org/packages/33/d1/439e58487cf9eac26378332e25e7d5ade4b800ce1eec7dc2cfc9b0d7ca96/ruff-0.9.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b6cd1428e834b35d7493354723543b28cc11dc14d1ce19b685f6e68e07c05ec7", size = 10840721 },
+ { url = "https://files.pythonhosted.org/packages/50/44/fead822c38281ba0122f1b76b460488a175a9bd48b130650a6fb6dbcbcf9/ruff-0.9.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5ee162652869120ad260670706f3cd36cd3f32b0c651f02b6da142652c54941d", size = 10161439 },
+ { url = "https://files.pythonhosted.org/packages/11/ae/d404a2ab8e61ddf6342e09cc6b7f7846cce6b243e45c2007dbe0ca928a5d/ruff-0.9.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3aa0f6b75082c9be1ec5a1db78c6d4b02e2375c3068438241dc19c7c306cc61a", size = 10336264 },
+ { url = "https://files.pythonhosted.org/packages/6a/4e/7c268aa7d84cd709fb6f046b8972313142cffb40dfff1d2515c5e6288d54/ruff-0.9.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:584cc66e89fb5f80f84b05133dd677a17cdd86901d6479712c96597a3f28e7fe", size = 9908774 },
+ { url = "https://files.pythonhosted.org/packages/cc/26/c618a878367ef1b76270fd027ca93692657d3f6122b84ba48911ef5f2edc/ruff-0.9.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abf3369325761a35aba75cd5c55ba1b5eb17d772f12ab168fbfac54be85cf18c", size = 11428127 },
+ { url = "https://files.pythonhosted.org/packages/d7/9a/c5588a93d9bfed29f565baf193fe802fa676a0c837938137ea6cf0576d8c/ruff-0.9.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:3403a53a32a90ce929aa2f758542aca9234befa133e29f4933dcef28a24317be", size = 12133187 },
+ { url = "https://files.pythonhosted.org/packages/3e/ff/e7980a7704a60905ed7e156a8d73f604c846d9bd87deda9cabfa6cba073a/ruff-0.9.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:18454e7fa4e4d72cffe28a37cf6a73cb2594f81ec9f4eca31a0aaa9ccdfb1590", size = 11602937 },
+ { url = "https://files.pythonhosted.org/packages/24/78/3690444ad9e3cab5c11abe56554c35f005b51d1d118b429765249095269f/ruff-0.9.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fadfe2c88724c9617339f62319ed40dcdadadf2888d5afb88bf3adee7b35bfb", size = 13771698 },
+ { url = "https://files.pythonhosted.org/packages/6e/bf/e477c2faf86abe3988e0b5fd22a7f3520e820b2ee335131aca2e16120038/ruff-0.9.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6df104d08c442a1aabcfd254279b8cc1e2cbf41a605aa3e26610ba1ec4acf0b0", size = 11249026 },
+ { url = "https://files.pythonhosted.org/packages/f7/82/cdaffd59e5a8cb5b14c408c73d7a555a577cf6645faaf83e52fe99521715/ruff-0.9.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d7c62939daf5b2a15af48abbd23bea1efdd38c312d6e7c4cedf5a24e03207e17", size = 10220432 },
+ { url = "https://files.pythonhosted.org/packages/fe/a4/2507d0026225efa5d4412b6e294dfe54725a78652a5c7e29e6bd0fc492f3/ruff-0.9.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:9494ba82a37a4b81b6a798076e4a3251c13243fc37967e998efe4cce58c8a8d1", size = 9874602 },
+ { url = "https://files.pythonhosted.org/packages/d5/be/f3aab1813846b476c4bcffe052d232244979c3cd99d751c17afb530ca8e4/ruff-0.9.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:4efd7a96ed6d36ef011ae798bf794c5501a514be369296c672dab7921087fa57", size = 10851212 },
+ { url = "https://files.pythonhosted.org/packages/8b/45/8e5fd559bea0d2f57c4e12bf197a2fade2fac465aa518284f157dfbca92b/ruff-0.9.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ab90a7944c5a1296f3ecb08d1cbf8c2da34c7e68114b1271a431a3ad30cb660e", size = 11327490 },
+ { url = "https://files.pythonhosted.org/packages/42/55/e6c90f13880aeef327746052907e7e930681f26a164fe130ddac28b08269/ruff-0.9.9-py3-none-win32.whl", hash = "sha256:6b4c376d929c25ecd6d87e182a230fa4377b8e5125a4ff52d506ee8c087153c1", size = 10227912 },
+ { url = "https://files.pythonhosted.org/packages/35/b2/da925693cb82a1208aa34966c0f36cb222baca94e729dd22a587bc22d0f3/ruff-0.9.9-py3-none-win_amd64.whl", hash = "sha256:837982ea24091d4c1700ddb2f63b7070e5baec508e43b01de013dc7eff974ff1", size = 11355632 },
+ { url = "https://files.pythonhosted.org/packages/31/d8/de873d1c1b020d668d8ec9855d390764cb90cf8f6486c0983da52be8b7b7/ruff-0.9.9-py3-none-win_arm64.whl", hash = "sha256:3ac78f127517209fe6d96ab00f3ba97cafe38718b23b1db3e96d8b2d39e37ddf", size = 10435860 },
+]
+
+[[package]]
+name = "rustworkx"
+version = "0.16.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "numpy" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a5/c4/6d6ef39e57610d54c5f106dc3dece9eebce8b9d52d561ae092e3aede1b66/rustworkx-0.16.0.tar.gz", hash = "sha256:9f0dcb83f38d5ca2c3a683eb9b6951c8aec3262fbfe5141946a7ee5ba37e0bb6", size = 349524 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f8/70/36f5916aee41ffe4f604ad75742eb1bb1b849fb568e010555f9d159cd93e/rustworkx-0.16.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:476a6c67b0142acd941691943750cc6737a48372304489969c2b62d30aaf4c27", size = 2141999 },
+ { url = "https://files.pythonhosted.org/packages/94/47/7e7c37fb73efcc87be6414b235534605c4008a4cdbd92a61db23b878eecd/rustworkx-0.16.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:bef2ef42870f806af93979b457e240f6dfa4f867ca33965c620f3a804409ed3a", size = 1940309 },
+ { url = "https://files.pythonhosted.org/packages/c6/42/a6d6b3137be55ef1d887becdf6b64b0917c7d437bd483065a88500a55603/rustworkx-0.16.0-cp39-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0db3a73bf68b3e66c08322a2fc95d3aa663d037d9b4e49c3509da4898d3529cc", size = 2195350 },
+ { url = "https://files.pythonhosted.org/packages/59/d2/1bc99df831c132c4b7420a85ce9150e065f4c993798f31b6a4229f238398/rustworkx-0.16.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f12a13d7486234fa2a84746d5e41f436bf9df43548043e7a232f48804ff8c61", size = 1971689 },
+ { url = "https://files.pythonhosted.org/packages/b5/3b/1125e7eb834f4408bcec3cee79947efd504c715fb7ab1876f8cd4bbca497/rustworkx-0.16.0-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:89efd5c3a4653ddacc55ca39f28b261d43deec7d678f8f8fc6b76b5087f1dfea", size = 3297342 },
+ { url = "https://files.pythonhosted.org/packages/4f/e2/e21187b255c6211d71db0d08a44fc16771038b2af41712d66c408d9bec16/rustworkx-0.16.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec0c12aac8c54910ace20ac6ada4b890cd39f95f69100514715f8ad7af9041e4", size = 2110107 },
+ { url = "https://files.pythonhosted.org/packages/3c/79/e3fcff21f31253ea85ef196bf2fcabad7802b11468f7d3a5d592cd0ac789/rustworkx-0.16.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:d650e39fc1a1534335f7517358ebfc3478bb235428463cfcd7c5750d50377b33", size = 2007544 },
+ { url = "https://files.pythonhosted.org/packages/67/04/741ed09c2b0dc0f360f85270c1179ed433785372ac9ab6ab26d3dd3ae02d/rustworkx-0.16.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:293180b83509ee9bff4c3af7ccc1024f6528d61b65d0cb7320bd31924f10cb71", size = 2172787 },
+ { url = "https://files.pythonhosted.org/packages/6d/fd/9c71e90f8cde76fed95dbc1e7d019977b89a29492f49ded232c6fad3055f/rustworkx-0.16.0-cp39-abi3-win32.whl", hash = "sha256:040c4368729cf502f756a3b0ff5f1c6915fc389f74dcc6afc6c3833688c97c01", size = 1840183 },
+ { url = "https://files.pythonhosted.org/packages/3e/79/9bdd52d2a33d468c81c1827de1b588080cb055d1d3561b194ab7bf2635b5/rustworkx-0.16.0-cp39-abi3-win_amd64.whl", hash = "sha256:905df608843c32fa45ac023687769fe13056edf7584474c801d5c50705d76e9b", size = 1953559 },
+]
+
+[[package]]
+name = "sentry-sdk"
+version = "2.22.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "certifi" },
+ { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/81/b6/662988ecd2345bf6c3a5c306a9a3590852742eff91d0a78a143398b816f3/sentry_sdk-2.22.0.tar.gz", hash = "sha256:b4bf43bb38f547c84b2eadcefbe389b36ef75f3f38253d7a74d6b928c07ae944", size = 303539 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/12/7f/0e4459173e9671ba5f75a48dda2442bcc48a12c79e54e5789381c8c6a9bc/sentry_sdk-2.22.0-py2.py3-none-any.whl", hash = "sha256:3d791d631a6c97aad4da7074081a57073126c69487560c6f8bffcf586461de66", size = 325815 },
+]
+
+[[package]]
+name = "setuptools"
+version = "75.8.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d1/53/43d99d7687e8cdef5ab5f9ec5eaf2c0423c2b35133a2b7e7bc276fc32b21/setuptools-75.8.2.tar.gz", hash = "sha256:4880473a969e5f23f2a2be3646b2dfd84af9028716d398e46192f84bc36900d2", size = 1344083 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a9/38/7d7362e031bd6dc121e5081d8cb6aa6f6fedf2b67bf889962134c6da4705/setuptools-75.8.2-py3-none-any.whl", hash = "sha256:558e47c15f1811c1fa7adbd0096669bf76c1d3f433f58324df69f3f5ecac4e8f", size = 1229385 },
+]
+
+[[package]]
+name = "setuptools-scm"
+version = "8.2.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "packaging" },
+ { name = "setuptools" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/4b/bd/c5d16dd95900567e09744af92119da7abc5f447320d53ec1d9415ec30263/setuptools_scm-8.2.0.tar.gz", hash = "sha256:a18396a1bc0219c974d1a74612b11f9dce0d5bd8b1dc55c65f6ac7fd609e8c28", size = 77572 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/10/7c/5a9799042320242c383c4485a2771a37d49e8ce2312ca647653d2fd1a7a4/setuptools_scm-8.2.0-py3-none-any.whl", hash = "sha256:136e2b1d393d709d2bcf26f275b8dec06c48b811154167b0fd6bb002aad17d6d", size = 43944 },
+]
+
+[[package]]
+name = "shellingham"
+version = "1.5.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755 },
+]
+
+[[package]]
+name = "sigtools"
+version = "4.0.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "attrs" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/5f/db/669ca14166814da187b3087b908ca924cf83f5b504fe23b3859a3ef67d4f/sigtools-4.0.1.tar.gz", hash = "sha256:4b8e135a9cd4d2ea00da670c093372d74e672ba3abb87f4c98d8e73dea54445c", size = 71910 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/1f/91/853dbf6ec096197dba9cd5fd0c836c5fc19142038b7db60ebe6332b1bab1/sigtools-4.0.1-py2.py3-none-any.whl", hash = "sha256:d216b4cf920bbab0fce636ddc429ed8463a5b533d9e1492acb45a2a1bc36ac6c", size = 76419 },
+]
+
+[[package]]
+name = "six"
+version = "1.17.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 },
+]
+
+[[package]]
+name = "slack-sdk"
+version = "3.34.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/6e/ff/6eb67fd5bd179fa804dbd859d88d872d3ae343955e63a319a73a132d406f/slack_sdk-3.34.0.tar.gz", hash = "sha256:ff61db7012160eed742285ea91f11c72b7a38a6500a7f6c5335662b4bc6b853d", size = 233629 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/25/2d/8724ef191cb64907de1e4e4436462955501e00f859a53d0aa794d0d060ff/slack_sdk-3.34.0-py2.py3-none-any.whl", hash = "sha256:c61f57f310d85be83466db5a98ab6ae3bb2e5587437b54fa0daa8fae6a0feffa", size = 292480 },
+]
+
+[[package]]
+name = "smmap"
+version = "5.0.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/44/cd/a040c4b3119bbe532e5b0732286f805445375489fceaec1f48306068ee3b/smmap-5.0.2.tar.gz", hash = "sha256:26ea65a03958fa0c8a1c7e8c7a58fdc77221b8910f6be2131affade476898ad5", size = 22329 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/04/be/d09147ad1ec7934636ad912901c5fd7667e1c858e19d355237db0d0cd5e4/smmap-5.0.2-py3-none-any.whl", hash = "sha256:b30115f0def7d7531d22a0fb6502488d879e75b260a9db4d0819cfb25403af5e", size = 24303 },
+]
+
+[[package]]
+name = "sniffio"
+version = "1.3.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 },
+]
+
+[[package]]
+name = "snowballstemmer"
+version = "2.2.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/44/7b/af302bebf22c749c56c9c3e8ae13190b5b5db37a33d9068652e8f73b7089/snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1", size = 86699 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ed/dc/c02e01294f7265e63a7315fe086dd1df7dacb9f840a804da846b96d01b96/snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a", size = 93002 },
+]
+
+[[package]]
+name = "soupsieve"
+version = "2.6"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d7/ce/fbaeed4f9fb8b2daa961f90591662df6a86c1abf25c548329a86920aedfb/soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb", size = 101569 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d1/c2/fe97d779f3ef3b15f05c94a2f1e3d21732574ed441687474db9d342a7315/soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9", size = 36186 },
+]
+
+[[package]]
+name = "sphinx"
+version = "8.2.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "alabaster" },
+ { name = "babel" },
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+ { name = "docutils" },
+ { name = "imagesize" },
+ { name = "jinja2" },
+ { name = "packaging" },
+ { name = "pygments" },
+ { name = "requests" },
+ { name = "roman-numerals-py" },
+ { name = "snowballstemmer" },
+ { name = "sphinxcontrib-applehelp" },
+ { name = "sphinxcontrib-devhelp" },
+ { name = "sphinxcontrib-htmlhelp" },
+ { name = "sphinxcontrib-jsmath" },
+ { name = "sphinxcontrib-qthelp" },
+ { name = "sphinxcontrib-serializinghtml" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/38/ad/4360e50ed56cb483667b8e6dadf2d3fda62359593faabbe749a27c4eaca6/sphinx-8.2.3.tar.gz", hash = "sha256:398ad29dee7f63a75888314e9424d40f52ce5a6a87ae88e7071e80af296ec348", size = 8321876 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/31/53/136e9eca6e0b9dc0e1962e2c908fbea2e5ac000c2a2fbd9a35797958c48b/sphinx-8.2.3-py3-none-any.whl", hash = "sha256:4405915165f13521d875a8c29c8970800a0141c14cc5416a38feca4ea5d9b9c3", size = 3589741 },
+]
+
+[[package]]
+name = "sphinx-rtd-theme"
+version = "3.0.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "docutils" },
+ { name = "sphinx" },
+ { name = "sphinxcontrib-jquery" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/91/44/c97faec644d29a5ceddd3020ae2edffa69e7d00054a8c7a6021e82f20335/sphinx_rtd_theme-3.0.2.tar.gz", hash = "sha256:b7457bc25dda723b20b086a670b9953c859eab60a2a03ee8eb2bb23e176e5f85", size = 7620463 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/85/77/46e3bac77b82b4df5bb5b61f2de98637724f246b4966cfc34bc5895d852a/sphinx_rtd_theme-3.0.2-py2.py3-none-any.whl", hash = "sha256:422ccc750c3a3a311de4ae327e82affdaf59eb695ba4936538552f3b00f4ee13", size = 7655561 },
+]
+
+[[package]]
+name = "sphinxcontrib-applehelp"
+version = "2.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ba/6e/b837e84a1a704953c62ef8776d45c3e8d759876b4a84fe14eba2859106fe/sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1", size = 20053 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5d/85/9ebeae2f76e9e77b952f4b274c27238156eae7979c5421fba91a28f4970d/sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5", size = 119300 },
+]
+
+[[package]]
+name = "sphinxcontrib-devhelp"
+version = "2.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f6/d2/5beee64d3e4e747f316bae86b55943f51e82bb86ecd325883ef65741e7da/sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad", size = 12967 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/35/7a/987e583882f985fe4d7323774889ec58049171828b58c2217e7f79cdf44e/sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2", size = 82530 },
+]
+
+[[package]]
+name = "sphinxcontrib-htmlhelp"
+version = "2.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/43/93/983afd9aa001e5201eab16b5a444ed5b9b0a7a010541e0ddfbbfd0b2470c/sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9", size = 22617 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0a/7b/18a8c0bcec9182c05a0b3ec2a776bba4ead82750a55ff798e8d406dae604/sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8", size = 98705 },
+]
+
+[[package]]
+name = "sphinxcontrib-jquery"
+version = "4.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "sphinx" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/de/f3/aa67467e051df70a6330fe7770894b3e4f09436dea6881ae0b4f3d87cad8/sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a", size = 122331 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/76/85/749bd22d1a68db7291c89e2ebca53f4306c3f205853cf31e9de279034c3c/sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae", size = 121104 },
+]
+
+[[package]]
+name = "sphinxcontrib-jsmath"
+version = "1.0.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b2/e8/9ed3830aeed71f17c026a07a5097edcf44b692850ef215b161b8ad875729/sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8", size = 5787 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c2/42/4c8646762ee83602e3fb3fbe774c2fac12f317deb0b5dbeeedd2d3ba4b77/sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", size = 5071 },
+]
+
+[[package]]
+name = "sphinxcontrib-qthelp"
+version = "2.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/68/bc/9104308fc285eb3e0b31b67688235db556cd5b0ef31d96f30e45f2e51cae/sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab", size = 17165 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/27/83/859ecdd180cacc13b1f7e857abf8582a64552ea7a061057a6c716e790fce/sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb", size = 88743 },
+]
+
+[[package]]
+name = "sphinxcontrib-serializinghtml"
+version = "2.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/3b/44/6716b257b0aa6bfd51a1b31665d1c205fb12cb5ad56de752dfa15657de2f/sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d", size = 16080 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/52/a7/d2782e4e3f77c8450f727ba74a8f12756d5ba823d81b941f1b04da9d033a/sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331", size = 92072 },
+]
+
+[[package]]
+name = "sqlalchemy"
+version = "2.0.38"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/e4/08/9a90962ea72acd532bda71249a626344d855c4032603924b1b547694b837/sqlalchemy-2.0.38.tar.gz", hash = "sha256:e5a4d82bdb4bf1ac1285a68eab02d253ab73355d9f0fe725a97e1e0fa689decb", size = 9634782 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5a/f8/6d0424af1442c989b655a7b5f608bc2ae5e4f94cdf6df9f6054f629dc587/SQLAlchemy-2.0.38-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12d5b06a1f3aeccf295a5843c86835033797fea292c60e72b07bcb5d820e6dd3", size = 2104927 },
+ { url = "https://files.pythonhosted.org/packages/25/80/fc06e65fca0a19533e2bfab633a5633ed8b6ee0b9c8d580acf84609ce4da/SQLAlchemy-2.0.38-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e036549ad14f2b414c725349cce0772ea34a7ab008e9cd67f9084e4f371d1f32", size = 2095317 },
+ { url = "https://files.pythonhosted.org/packages/98/2d/5d66605f76b8e344813237dc160a01f03b987201e974b46056a7fb94a874/SQLAlchemy-2.0.38-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee3bee874cb1fadee2ff2b79fc9fc808aa638670f28b2145074538d4a6a5028e", size = 3244735 },
+ { url = "https://files.pythonhosted.org/packages/73/8d/b0539e8dce90861efc38fea3eefb15a5d0cfeacf818614762e77a9f192f9/SQLAlchemy-2.0.38-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e185ea07a99ce8b8edfc788c586c538c4b1351007e614ceb708fd01b095ef33e", size = 3255581 },
+ { url = "https://files.pythonhosted.org/packages/ac/a5/94e1e44bf5bdffd1782807fcc072542b110b950f0be53f49e68b5f5eca1b/SQLAlchemy-2.0.38-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b79ee64d01d05a5476d5cceb3c27b5535e6bb84ee0f872ba60d9a8cd4d0e6579", size = 3190877 },
+ { url = "https://files.pythonhosted.org/packages/91/13/f08b09996dce945aec029c64f61c13b4788541ac588d9288e31e0d3d8850/SQLAlchemy-2.0.38-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:afd776cf1ebfc7f9aa42a09cf19feadb40a26366802d86c1fba080d8e5e74bdd", size = 3217485 },
+ { url = "https://files.pythonhosted.org/packages/13/8f/8cfe2ba5ba6d8090f4de0e658330c53be6b7bf430a8df1b141c2b180dcdf/SQLAlchemy-2.0.38-cp312-cp312-win32.whl", hash = "sha256:a5645cd45f56895cfe3ca3459aed9ff2d3f9aaa29ff7edf557fa7a23515a3725", size = 2075254 },
+ { url = "https://files.pythonhosted.org/packages/c2/5c/e3c77fae41862be1da966ca98eec7fbc07cdd0b00f8b3e1ef2a13eaa6cca/SQLAlchemy-2.0.38-cp312-cp312-win_amd64.whl", hash = "sha256:1052723e6cd95312f6a6eff9a279fd41bbae67633415373fdac3c430eca3425d", size = 2100865 },
+ { url = "https://files.pythonhosted.org/packages/21/77/caa875a1f5a8a8980b564cc0e6fee1bc992d62d29101252561d0a5e9719c/SQLAlchemy-2.0.38-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ecef029b69843b82048c5b347d8e6049356aa24ed644006c9a9d7098c3bd3bfd", size = 2100201 },
+ { url = "https://files.pythonhosted.org/packages/f4/ec/94bb036ec78bf9a20f8010c807105da9152dd84f72e8c51681ad2f30b3fd/SQLAlchemy-2.0.38-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c8bcad7fc12f0cc5896d8e10fdf703c45bd487294a986903fe032c72201596b", size = 2090678 },
+ { url = "https://files.pythonhosted.org/packages/7b/61/63ff1893f146e34d3934c0860209fdd3925c25ee064330e6c2152bacc335/SQLAlchemy-2.0.38-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a0ef3f98175d77180ffdc623d38e9f1736e8d86b6ba70bff182a7e68bed7727", size = 3177107 },
+ { url = "https://files.pythonhosted.org/packages/a9/4f/b933bea41a602b5f274065cc824fae25780ed38664d735575192490a021b/SQLAlchemy-2.0.38-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b0ac78898c50e2574e9f938d2e5caa8fe187d7a5b69b65faa1ea4648925b096", size = 3190435 },
+ { url = "https://files.pythonhosted.org/packages/f5/23/9e654b4059e385988de08c5d3b38a369ea042f4c4d7c8902376fd737096a/SQLAlchemy-2.0.38-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9eb4fa13c8c7a2404b6a8e3772c17a55b1ba18bc711e25e4d6c0c9f5f541b02a", size = 3123648 },
+ { url = "https://files.pythonhosted.org/packages/83/59/94c6d804e76ebc6412a08d2b086a8cb3e5a056cd61508e18ddaf3ec70100/SQLAlchemy-2.0.38-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5dba1cdb8f319084f5b00d41207b2079822aa8d6a4667c0f369fce85e34b0c86", size = 3151789 },
+ { url = "https://files.pythonhosted.org/packages/b2/27/17f143013aabbe1256dce19061eafdce0b0142465ce32168cdb9a18c04b1/SQLAlchemy-2.0.38-cp313-cp313-win32.whl", hash = "sha256:eae27ad7580529a427cfdd52c87abb2dfb15ce2b7a3e0fc29fbb63e2ed6f8120", size = 2073023 },
+ { url = "https://files.pythonhosted.org/packages/e2/3e/259404b03c3ed2e7eee4c179e001a07d9b61070334be91124cf4ad32eec7/SQLAlchemy-2.0.38-cp313-cp313-win_amd64.whl", hash = "sha256:b335a7c958bc945e10c522c069cd6e5804f4ff20f9a744dd38e748eb602cbbda", size = 2096908 },
+ { url = "https://files.pythonhosted.org/packages/aa/e4/592120713a314621c692211eba034d09becaf6bc8848fabc1dc2a54d8c16/SQLAlchemy-2.0.38-py3-none-any.whl", hash = "sha256:63178c675d4c80def39f1febd625a6333f44c0ba269edd8a468b156394b27753", size = 1896347 },
+]
+
+[[package]]
+name = "sse-starlette"
+version = "2.2.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "anyio" },
+ { name = "starlette" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/71/a4/80d2a11af59fe75b48230846989e93979c892d3a20016b42bb44edb9e398/sse_starlette-2.2.1.tar.gz", hash = "sha256:54470d5f19274aeed6b2d473430b08b4b379ea851d953b11d7f1c4a2c118b419", size = 17376 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d9/e0/5b8bd393f27f4a62461c5cf2479c75a2cc2ffa330976f9f00f5f6e4f50eb/sse_starlette-2.2.1-py3-none-any.whl", hash = "sha256:6410a3d3ba0c89e7675d4c273a301d64649c03a5ef1ca101f10b47f895fd0e99", size = 10120 },
+]
+
+[[package]]
+name = "starlette"
+version = "0.46.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "anyio" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/44/b6/fb9a32e3c5d59b1e383c357534c63c2d3caa6f25bf3c59dd89d296ecbaec/starlette-0.46.0.tar.gz", hash = "sha256:b359e4567456b28d473d0193f34c0de0ed49710d75ef183a74a5ce0499324f50", size = 2575568 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/41/94/8af675a62e3c91c2dee47cf92e602cfac86e8767b1a1ac3caf1b327c2ab0/starlette-0.46.0-py3-none-any.whl", hash = "sha256:913f0798bd90ba90a9156383bcf1350a17d6259451d0d8ee27fc0cf2db609038", size = 71991 },
+]
+
+[[package]]
+name = "swebench"
+version = "3.0.15"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "beautifulsoup4" },
+ { name = "chardet" },
+ { name = "datasets" },
+ { name = "docker" },
+ { name = "ghapi" },
+ { name = "gitpython" },
+ { name = "modal" },
+ { name = "pre-commit" },
+ { name = "python-dotenv" },
+ { name = "requests" },
+ { name = "rich" },
+ { name = "tenacity" },
+ { name = "tqdm" },
+ { name = "unidiff" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a8/05/c163c2ee93f306110b27ddcdc7800ca1932c7489a35973e11c113d64d767/swebench-3.0.15.tar.gz", hash = "sha256:24e734fbcce34082665a25719075e6899382b7135103dd8c6cc09a6e23789101", size = 108523 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/62/6c/febe6bb4398e03aa48d50c555b36d2ac26b2e6d3c427ff9dba499b2557a2/swebench-3.0.15-py3-none-any.whl", hash = "sha256:dd694356f9c155a55d3d2e113fe58446f7385eea0574230af5e2504426f8b85b", size = 125151 },
+]
+
+[[package]]
+name = "swebench-agent-run"
+version = "0.1.0"
+source = { editable = "." }
+dependencies = [
+ { name = "click" },
+ { name = "codegen" },
+ { name = "modal" },
+ { name = "swebench" },
+ { name = "tenacity" },
+ { name = "tqdm" },
+]
+
+[package.optional-dependencies]
+all = [
+ { name = "mypy" },
+ { name = "psycopg2-binary" },
+ { name = "ruff" },
+]
+dev = [
+ { name = "mypy" },
+ { name = "ruff" },
+]
+metrics = [
+ { name = "psycopg2-binary" },
+]
+
+[package.metadata]
+requires-dist = [
+ { name = "click", specifier = ">=8.1.0" },
+ { name = "codegen", directory = "../../../" },
+ { name = "modal", specifier = ">=0.73.25" },
+ { name = "mypy", marker = "extra == 'dev'" },
+ { name = "psycopg2-binary", marker = "extra == 'metrics'" },
+ { name = "ruff", marker = "extra == 'dev'" },
+ { name = "swebench", specifier = ">=3.0.15" },
+ { name = "swebench-agent-run", extras = ["metrics", "dev"], marker = "extra == 'all'" },
+ { name = "tenacity", specifier = ">=9.0.0" },
+ { name = "tqdm", specifier = ">=4.66.0" },
+]
+
+[[package]]
+name = "synchronicity"
+version = "0.9.11"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "sigtools" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b5/52/f34a9ab6d514e0808d0f572affb360411d596b3439107318c00889277dd6/synchronicity-0.9.11.tar.gz", hash = "sha256:cb5dbbcb43d637e516ae50db05a776da51a705d1e1a9c0e301f6049afc3c2cae", size = 50323 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f2/d5/7675cd9b8e18f05b9ea261acad5d197fcb8027d2a65b1a750427ec084593/synchronicity-0.9.11-py3-none-any.whl", hash = "sha256:231129654d2f56b1aa148e85ebd8545231be135771f6d2196d414175b1594ef6", size = 36827 },
+]
+
+[[package]]
+name = "tabulate"
+version = "0.9.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ec/fe/802052aecb21e3797b8f7902564ab6ea0d60ff8ca23952079064155d1ae1/tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c", size = 81090 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f", size = 35252 },
+]
+
+[[package]]
+name = "tenacity"
+version = "9.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/cd/94/91fccdb4b8110642462e653d5dcb27e7b674742ad68efd146367da7bdb10/tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b", size = 47421 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b6/cb/b86984bed139586d01532a587464b5805f12e397594f19f931c4c2fbfa61/tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539", size = 28169 },
+]
+
+[[package]]
+name = "termcolor"
+version = "2.5.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/37/72/88311445fd44c455c7d553e61f95412cf89054308a1aa2434ab835075fc5/termcolor-2.5.0.tar.gz", hash = "sha256:998d8d27da6d48442e8e1f016119076b690d962507531df4890fcd2db2ef8a6f", size = 13057 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7f/be/df630c387a0a054815d60be6a97eb4e8f17385d5d6fe660e1c02750062b4/termcolor-2.5.0-py3-none-any.whl", hash = "sha256:37b17b5fc1e604945c2642c872a3764b5d547a48009871aea3edd3afa180afb8", size = 7755 },
+]
+
+[[package]]
+name = "tiktoken"
+version = "0.9.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "regex" },
+ { name = "requests" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ea/cf/756fedf6981e82897f2d570dd25fa597eb3f4459068ae0572d7e888cfd6f/tiktoken-0.9.0.tar.gz", hash = "sha256:d02a5ca6a938e0490e1ff957bc48c8b078c88cb83977be1625b1fd8aac792c5d", size = 35991 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/cf/e5/21ff33ecfa2101c1bb0f9b6df750553bd873b7fb532ce2cb276ff40b197f/tiktoken-0.9.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e88f121c1c22b726649ce67c089b90ddda8b9662545a8aeb03cfef15967ddd03", size = 1065073 },
+ { url = "https://files.pythonhosted.org/packages/8e/03/a95e7b4863ee9ceec1c55983e4cc9558bcfd8f4f80e19c4f8a99642f697d/tiktoken-0.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a6600660f2f72369acb13a57fb3e212434ed38b045fd8cc6cdd74947b4b5d210", size = 1008075 },
+ { url = "https://files.pythonhosted.org/packages/40/10/1305bb02a561595088235a513ec73e50b32e74364fef4de519da69bc8010/tiktoken-0.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95e811743b5dfa74f4b227927ed86cbc57cad4df859cb3b643be797914e41794", size = 1140754 },
+ { url = "https://files.pythonhosted.org/packages/1b/40/da42522018ca496432ffd02793c3a72a739ac04c3794a4914570c9bb2925/tiktoken-0.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99376e1370d59bcf6935c933cb9ba64adc29033b7e73f5f7569f3aad86552b22", size = 1196678 },
+ { url = "https://files.pythonhosted.org/packages/5c/41/1e59dddaae270ba20187ceb8aa52c75b24ffc09f547233991d5fd822838b/tiktoken-0.9.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:badb947c32739fb6ddde173e14885fb3de4d32ab9d8c591cbd013c22b4c31dd2", size = 1259283 },
+ { url = "https://files.pythonhosted.org/packages/5b/64/b16003419a1d7728d0d8c0d56a4c24325e7b10a21a9dd1fc0f7115c02f0a/tiktoken-0.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:5a62d7a25225bafed786a524c1b9f0910a1128f4232615bf3f8257a73aaa3b16", size = 894897 },
+ { url = "https://files.pythonhosted.org/packages/7a/11/09d936d37f49f4f494ffe660af44acd2d99eb2429d60a57c71318af214e0/tiktoken-0.9.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2b0e8e05a26eda1249e824156d537015480af7ae222ccb798e5234ae0285dbdb", size = 1064919 },
+ { url = "https://files.pythonhosted.org/packages/80/0e/f38ba35713edb8d4197ae602e80837d574244ced7fb1b6070b31c29816e0/tiktoken-0.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:27d457f096f87685195eea0165a1807fae87b97b2161fe8c9b1df5bd74ca6f63", size = 1007877 },
+ { url = "https://files.pythonhosted.org/packages/fe/82/9197f77421e2a01373e27a79dd36efdd99e6b4115746ecc553318ecafbf0/tiktoken-0.9.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cf8ded49cddf825390e36dd1ad35cd49589e8161fdcb52aa25f0583e90a3e01", size = 1140095 },
+ { url = "https://files.pythonhosted.org/packages/f2/bb/4513da71cac187383541facd0291c4572b03ec23c561de5811781bbd988f/tiktoken-0.9.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc156cb314119a8bb9748257a2eaebd5cc0753b6cb491d26694ed42fc7cb3139", size = 1195649 },
+ { url = "https://files.pythonhosted.org/packages/fa/5c/74e4c137530dd8504e97e3a41729b1103a4ac29036cbfd3250b11fd29451/tiktoken-0.9.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:cd69372e8c9dd761f0ab873112aba55a0e3e506332dd9f7522ca466e817b1b7a", size = 1258465 },
+ { url = "https://files.pythonhosted.org/packages/de/a8/8f499c179ec900783ffe133e9aab10044481679bb9aad78436d239eee716/tiktoken-0.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:5ea0edb6f83dc56d794723286215918c1cde03712cbbafa0348b33448faf5b95", size = 894669 },
+]
+
+[[package]]
+name = "toml"
+version = "0.10.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/be/ba/1f744cdc819428fc6b5084ec34d9b30660f6f9daaf70eead706e3203ec3c/toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f", size = 22253 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", size = 16588 },
+]
+
+[[package]]
+name = "tomlkit"
+version = "0.13.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b1/09/a439bec5888f00a54b8b9f05fa94d7f901d6735ef4e55dcec9bc37b5d8fa/tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79", size = 192885 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f9/b6/a447b5e4ec71e13871be01ba81f5dfc9d0af7e473da256ff46bc0e24026f/tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde", size = 37955 },
+]
+
+[[package]]
+name = "tqdm"
+version = "4.67.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540 },
+]
+
+[[package]]
+name = "tree-sitter"
+version = "0.24.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a7/a2/698b9d31d08ad5558f8bfbfe3a0781bd4b1f284e89bde3ad18e05101a892/tree-sitter-0.24.0.tar.gz", hash = "sha256:abd95af65ca2f4f7eca356343391ed669e764f37748b5352946f00f7fc78e734", size = 168304 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e9/57/3a590f287b5aa60c07d5545953912be3d252481bf5e178f750db75572bff/tree_sitter-0.24.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:14beeff5f11e223c37be7d5d119819880601a80d0399abe8c738ae2288804afc", size = 140788 },
+ { url = "https://files.pythonhosted.org/packages/61/0b/fc289e0cba7dbe77c6655a4dd949cd23c663fd62a8b4d8f02f97e28d7fe5/tree_sitter-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:26a5b130f70d5925d67b47db314da209063664585a2fd36fa69e0717738efaf4", size = 133945 },
+ { url = "https://files.pythonhosted.org/packages/86/d7/80767238308a137e0b5b5c947aa243e3c1e3e430e6d0d5ae94b9a9ffd1a2/tree_sitter-0.24.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fc5c3c26d83c9d0ecb4fc4304fba35f034b7761d35286b936c1db1217558b4e", size = 564819 },
+ { url = "https://files.pythonhosted.org/packages/bf/b3/6c5574f4b937b836601f5fb556b24804b0a6341f2eb42f40c0e6464339f4/tree_sitter-0.24.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:772e1bd8c0931c866b848d0369b32218ac97c24b04790ec4b0e409901945dd8e", size = 579303 },
+ { url = "https://files.pythonhosted.org/packages/0a/f4/bd0ddf9abe242ea67cca18a64810f8af230fc1ea74b28bb702e838ccd874/tree_sitter-0.24.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:24a8dd03b0d6b8812425f3b84d2f4763322684e38baf74e5bb766128b5633dc7", size = 581054 },
+ { url = "https://files.pythonhosted.org/packages/8c/1c/ff23fa4931b6ef1bbeac461b904ca7e49eaec7e7e5398584e3eef836ec96/tree_sitter-0.24.0-cp312-cp312-win_amd64.whl", hash = "sha256:f9e8b1605ab60ed43803100f067eed71b0b0e6c1fb9860a262727dbfbbb74751", size = 120221 },
+ { url = "https://files.pythonhosted.org/packages/b2/2a/9979c626f303177b7612a802237d0533155bf1e425ff6f73cc40f25453e2/tree_sitter-0.24.0-cp312-cp312-win_arm64.whl", hash = "sha256:f733a83d8355fc95561582b66bbea92ffd365c5d7a665bc9ebd25e049c2b2abb", size = 108234 },
+ { url = "https://files.pythonhosted.org/packages/61/cd/2348339c85803330ce38cee1c6cbbfa78a656b34ff58606ebaf5c9e83bd0/tree_sitter-0.24.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0d4a6416ed421c4210f0ca405a4834d5ccfbb8ad6692d4d74f7773ef68f92071", size = 140781 },
+ { url = "https://files.pythonhosted.org/packages/8b/a3/1ea9d8b64e8dcfcc0051028a9c84a630301290995cd6e947bf88267ef7b1/tree_sitter-0.24.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e0992d483677e71d5c5d37f30dfb2e3afec2f932a9c53eec4fca13869b788c6c", size = 133928 },
+ { url = "https://files.pythonhosted.org/packages/fe/ae/55c1055609c9428a4aedf4b164400ab9adb0b1bf1538b51f4b3748a6c983/tree_sitter-0.24.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57277a12fbcefb1c8b206186068d456c600dbfbc3fd6c76968ee22614c5cd5ad", size = 564497 },
+ { url = "https://files.pythonhosted.org/packages/ce/d0/f2ffcd04882c5aa28d205a787353130cbf84b2b8a977fd211bdc3b399ae3/tree_sitter-0.24.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25fa22766d63f73716c6fec1a31ee5cf904aa429484256bd5fdf5259051ed74", size = 578917 },
+ { url = "https://files.pythonhosted.org/packages/af/82/aebe78ea23a2b3a79324993d4915f3093ad1af43d7c2208ee90be9273273/tree_sitter-0.24.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7d5d9537507e1c8c5fa9935b34f320bfec4114d675e028f3ad94f11cf9db37b9", size = 581148 },
+ { url = "https://files.pythonhosted.org/packages/a1/b4/6b0291a590c2b0417cfdb64ccb8ea242f270a46ed429c641fbc2bfab77e0/tree_sitter-0.24.0-cp313-cp313-win_amd64.whl", hash = "sha256:f58bb4956917715ec4d5a28681829a8dad5c342cafd4aea269f9132a83ca9b34", size = 120207 },
+ { url = "https://files.pythonhosted.org/packages/a8/18/542fd844b75272630229c9939b03f7db232c71a9d82aadc59c596319ea6a/tree_sitter-0.24.0-cp313-cp313-win_arm64.whl", hash = "sha256:23641bd25dcd4bb0b6fa91b8fb3f46cc9f1c9f475efe4d536d3f1f688d1b84c8", size = 108232 },
+]
+
+[[package]]
+name = "tree-sitter-javascript"
+version = "0.23.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/cd/dc/1c55c33cc6bbe754359b330534cf9f261c1b9b2c26ddf23aef3c5fa67759/tree_sitter_javascript-0.23.1.tar.gz", hash = "sha256:b2059ce8b150162cda05a457ca3920450adbf915119c04b8c67b5241cd7fcfed", size = 110058 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/20/d3/c67d7d49967344b51208ad19f105233be1afdf07d3dcb35b471900265227/tree_sitter_javascript-0.23.1-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6ca583dad4bd79d3053c310b9f7208cd597fd85f9947e4ab2294658bb5c11e35", size = 59333 },
+ { url = "https://files.pythonhosted.org/packages/a5/db/ea0ee1547679d1750e80a0c4bc60b3520b166eeaf048764cfdd1ba3fd5e5/tree_sitter_javascript-0.23.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:94100e491a6a247aa4d14caf61230c171b6376c863039b6d9cd71255c2d815ec", size = 61071 },
+ { url = "https://files.pythonhosted.org/packages/67/6e/07c4857e08be37bfb55bfb269863df8ec908b2f6a3f1893cd852b893ecab/tree_sitter_javascript-0.23.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a6bc1055b061c5055ec58f39ee9b2e9efb8e6e0ae970838af74da0afb811f0a", size = 96999 },
+ { url = "https://files.pythonhosted.org/packages/5f/f5/4de730afe8b9422845bc2064020a8a8f49ebd1695c04261c38d1b3e3edec/tree_sitter_javascript-0.23.1-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:056dc04fb6b24293f8c5fec43c14e7e16ba2075b3009c643abf8c85edc4c7c3c", size = 94020 },
+ { url = "https://files.pythonhosted.org/packages/77/0a/f980520da86c4eff8392867840a945578ef43372c9d4a37922baa6b121fe/tree_sitter_javascript-0.23.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a11ca1c0f736da42967586b568dff8a465ee148a986c15ebdc9382806e0ce871", size = 92927 },
+ { url = "https://files.pythonhosted.org/packages/ff/5c/36a98d512aa1d1082409d6b7eda5d26b820bd4477a54100ad9f62212bc55/tree_sitter_javascript-0.23.1-cp39-abi3-win_amd64.whl", hash = "sha256:041fa22b34250ea6eb313d33104d5303f79504cb259d374d691e38bbdc49145b", size = 58824 },
+ { url = "https://files.pythonhosted.org/packages/dc/79/ceb21988e6de615355a63eebcf806cd2a0fe875bec27b429d58b63e7fb5f/tree_sitter_javascript-0.23.1-cp39-abi3-win_arm64.whl", hash = "sha256:eb28130cd2fb30d702d614cbf61ef44d1c7f6869e7d864a9cc17111e370be8f7", size = 57027 },
+]
+
+[[package]]
+name = "tree-sitter-python"
+version = "0.23.6"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/1c/30/6766433b31be476fda6569a3a374c2220e45ffee0bff75460038a57bf23b/tree_sitter_python-0.23.6.tar.gz", hash = "sha256:354bfa0a2f9217431764a631516f85173e9711af2c13dbd796a8815acfe505d9", size = 155868 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ab/67/577a02acae5f776007c924ca86ef14c19c12e71de0aa9d2a036f3c248e7b/tree_sitter_python-0.23.6-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:28fbec8f74eeb2b30292d97715e60fac9ccf8a8091ce19b9d93e9b580ed280fb", size = 74361 },
+ { url = "https://files.pythonhosted.org/packages/d2/a6/194b3625a7245c532ad418130d63077ce6cd241152524152f533e4d6edb0/tree_sitter_python-0.23.6-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:680b710051b144fedf61c95197db0094f2245e82551bf7f0c501356333571f7a", size = 76436 },
+ { url = "https://files.pythonhosted.org/packages/d0/62/1da112689d6d282920e62c40e67ab39ea56463b0e7167bfc5e81818a770e/tree_sitter_python-0.23.6-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a9dcef55507b6567207e8ee0a6b053d0688019b47ff7f26edc1764b7f4dc0a4", size = 112060 },
+ { url = "https://files.pythonhosted.org/packages/5d/62/c9358584c96e38318d69b6704653684fd8467601f7b74e88aa44f4e6903f/tree_sitter_python-0.23.6-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29dacdc0cd2f64e55e61d96c6906533ebb2791972bec988450c46cce60092f5d", size = 112338 },
+ { url = "https://files.pythonhosted.org/packages/1a/58/c5e61add45e34fb8ecbf057c500bae9d96ed7c9ca36edb7985da8ae45526/tree_sitter_python-0.23.6-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7e048733c36f564b379831689006801feb267d8194f9e793fbb395ef1723335d", size = 109382 },
+ { url = "https://files.pythonhosted.org/packages/e9/f3/9b30893cae9b3811fe652dc6f90aaadfda12ae0b2757f5722fc7266f423c/tree_sitter_python-0.23.6-cp39-abi3-win_amd64.whl", hash = "sha256:a24027248399fb41594b696f929f9956828ae7cc85596d9f775e6c239cd0c2be", size = 75904 },
+ { url = "https://files.pythonhosted.org/packages/87/cb/ce35a65f83a47b510d8a2f1eddf3bdbb0d57aabc87351c8788caf3309f76/tree_sitter_python-0.23.6-cp39-abi3-win_arm64.whl", hash = "sha256:71334371bd73d5fe080aed39fbff49ed8efb9506edebe16795b0c7567ed6a272", size = 73649 },
+]
+
+[[package]]
+name = "tree-sitter-typescript"
+version = "0.23.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/1e/fc/bb52958f7e399250aee093751e9373a6311cadbe76b6e0d109b853757f35/tree_sitter_typescript-0.23.2.tar.gz", hash = "sha256:7b167b5827c882261cb7a50dfa0fb567975f9b315e87ed87ad0a0a3aedb3834d", size = 773053 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/28/95/4c00680866280e008e81dd621fd4d3f54aa3dad1b76b857a19da1b2cc426/tree_sitter_typescript-0.23.2-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:3cd752d70d8e5371fdac6a9a4df9d8924b63b6998d268586f7d374c9fba2a478", size = 286677 },
+ { url = "https://files.pythonhosted.org/packages/8f/2f/1f36fda564518d84593f2740d5905ac127d590baf5c5753cef2a88a89c15/tree_sitter_typescript-0.23.2-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:c7cc1b0ff5d91bac863b0e38b1578d5505e718156c9db577c8baea2557f66de8", size = 302008 },
+ { url = "https://files.pythonhosted.org/packages/96/2d/975c2dad292aa9994f982eb0b69cc6fda0223e4b6c4ea714550477d8ec3a/tree_sitter_typescript-0.23.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b1eed5b0b3a8134e86126b00b743d667ec27c63fc9de1b7bb23168803879e31", size = 351987 },
+ { url = "https://files.pythonhosted.org/packages/49/d1/a71c36da6e2b8a4ed5e2970819b86ef13ba77ac40d9e333cb17df6a2c5db/tree_sitter_typescript-0.23.2-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e96d36b85bcacdeb8ff5c2618d75593ef12ebaf1b4eace3477e2bdb2abb1752c", size = 344960 },
+ { url = "https://files.pythonhosted.org/packages/7f/cb/f57b149d7beed1a85b8266d0c60ebe4c46e79c9ba56bc17b898e17daf88e/tree_sitter_typescript-0.23.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:8d4f0f9bcb61ad7b7509d49a1565ff2cc363863644a234e1e0fe10960e55aea0", size = 340245 },
+ { url = "https://files.pythonhosted.org/packages/8b/ab/dd84f0e2337296a5f09749f7b5483215d75c8fa9e33738522e5ed81f7254/tree_sitter_typescript-0.23.2-cp39-abi3-win_amd64.whl", hash = "sha256:3f730b66396bc3e11811e4465c41ee45d9e9edd6de355a58bbbc49fa770da8f9", size = 278015 },
+ { url = "https://files.pythonhosted.org/packages/9f/e4/81f9a935789233cf412a0ed5fe04c883841d2c8fb0b7e075958a35c65032/tree_sitter_typescript-0.23.2-cp39-abi3-win_arm64.whl", hash = "sha256:05db58f70b95ef0ea126db5560f3775692f609589ed6f8dd0af84b7f19f1cbb7", size = 274052 },
+]
+
+[[package]]
+name = "trove-classifiers"
+version = "2025.3.3.18"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/08/e9/eb59303bac7aca949c4a4b0fa03a9b270be165d303a84cf2733d35a840ce/trove_classifiers-2025.3.3.18.tar.gz", hash = "sha256:3ffcfa90a428adfde1a5d90e3aa1b87fe474c5dbdbf5ccbca74ed69ba83c5ca7", size = 16239 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ef/bf/44195f3d9c3c4fe4cccf1c261c80d50781b9e8a0a6febf084c09c66740ff/trove_classifiers-2025.3.3.18-py3-none-any.whl", hash = "sha256:215630da61cf8757c373f81b602fc1283ec5a691cf12c5f9f96f11d6ad5fc7f2", size = 13629 },
+]
+
+[[package]]
+name = "typer"
+version = "0.15.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "click" },
+ { name = "rich" },
+ { name = "shellingham" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/8b/6f/3991f0f1c7fcb2df31aef28e0594d8d54b05393a0e4e34c65e475c2a5d41/typer-0.15.2.tar.gz", hash = "sha256:ab2fab47533a813c49fe1f16b1a370fd5819099c00b119e0633df65f22144ba5", size = 100711 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7f/fc/5b29fea8cee020515ca82cc68e3b8e1e34bb19a3535ad854cac9257b414c/typer-0.15.2-py3-none-any.whl", hash = "sha256:46a499c6107d645a9c13f7ee46c5d5096cae6f5fc57dd11eccbbb9ae3e44ddfc", size = 45061 },
+]
+
+[[package]]
+name = "types-certifi"
+version = "2021.10.8.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/52/68/943c3aeaf14624712a0357c4a67814dba5cea36d194f5c764dad7959a00c/types-certifi-2021.10.8.3.tar.gz", hash = "sha256:72cf7798d165bc0b76e1c10dd1ea3097c7063c42c21d664523b928e88b554a4f", size = 2095 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b5/63/2463d89481e811f007b0e1cd0a91e52e141b47f9de724d20db7b861dcfec/types_certifi-2021.10.8.3-py3-none-any.whl", hash = "sha256:b2d1e325e69f71f7c78e5943d410e650b4707bb0ef32e4ddf3da37f54176e88a", size = 2136 },
+]
+
+[[package]]
+name = "types-toml"
+version = "0.10.8.20240310"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/86/47/3e4c75042792bff8e90d7991aa5c51812cc668828cc6cce711e97f63a607/types-toml-0.10.8.20240310.tar.gz", hash = "sha256:3d41501302972436a6b8b239c850b26689657e25281b48ff0ec06345b8830331", size = 4392 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/da/a2/d32ab58c0b216912638b140ab2170ee4b8644067c293b170e19fba340ccc/types_toml-0.10.8.20240310-py3-none-any.whl", hash = "sha256:627b47775d25fa29977d9c70dc0cbab3f314f32c8d8d0c012f2ef5de7aaec05d", size = 4777 },
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.12.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 },
+]
+
+[[package]]
+name = "typing-inspect"
+version = "0.9.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "mypy-extensions" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/dc/74/1789779d91f1961fa9438e9a8710cdae6bd138c80d7303996933d117264a/typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78", size = 13825 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/65/f3/107a22063bf27bdccf2024833d3445f4eea42b2e598abfbd46f6a63b6cb0/typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f", size = 8827 },
+]
+
+[[package]]
+name = "tzdata"
+version = "2025.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/43/0f/fa4723f22942480be4ca9527bbde8d43f6c3f2fe8412f00e7f5f6746bc8b/tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694", size = 194950 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0f/dd/84f10e23edd882c6f968c21c2434fe67bd4a528967067515feca9e611e5e/tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639", size = 346762 },
+]
+
+[[package]]
+name = "unidiff"
+version = "0.7.5"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a3/48/81be0ac96e423a877754153699731ef439fd7b80b4c8b5425c94ed079ebd/unidiff-0.7.5.tar.gz", hash = "sha256:2e5f0162052248946b9f0970a40e9e124236bf86c82b70821143a6fc1dea2574", size = 20931 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/8a/54/57c411a6e8f7bd7848c8b66e4dcaffa586bf4c02e63f2280db0327a4e6eb/unidiff-0.7.5-py2.py3-none-any.whl", hash = "sha256:c93bf2265cc1ba2a520e415ab05da587370bc2a3ae9e0414329f54f0c2fc09e8", size = 14386 },
+]
+
+[[package]]
+name = "urllib3"
+version = "2.3.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/aa/63/e53da845320b757bf29ef6a9062f5c669fe997973f966045cb019c3f4b66/urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d", size = 307268 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c8/19/4ec628951a74043532ca2cf5d97b7b14863931476d117c471e8e2b1eb39f/urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df", size = 128369 },
+]
+
+[[package]]
+name = "uvicorn"
+version = "0.34.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "click" },
+ { name = "h11" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/4b/4d/938bd85e5bf2edeec766267a5015ad969730bb91e31b44021dfe8b22df6c/uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9", size = 76568 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4", size = 62315 },
+]
+
+[package.optional-dependencies]
+standard = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+ { name = "httptools" },
+ { name = "python-dotenv" },
+ { name = "pyyaml" },
+ { name = "uvloop", marker = "platform_python_implementation != 'PyPy' and sys_platform != 'cygwin' and sys_platform != 'win32'" },
+ { name = "watchfiles" },
+ { name = "websockets" },
+]
+
+[[package]]
+name = "uvloop"
+version = "0.21.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/af/c0/854216d09d33c543f12a44b393c402e89a920b1a0a7dc634c42de91b9cf6/uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3", size = 2492741 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/8c/4c/03f93178830dc7ce8b4cdee1d36770d2f5ebb6f3d37d354e061eefc73545/uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c", size = 1471284 },
+ { url = "https://files.pythonhosted.org/packages/43/3e/92c03f4d05e50f09251bd8b2b2b584a2a7f8fe600008bcc4523337abe676/uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2", size = 821349 },
+ { url = "https://files.pythonhosted.org/packages/a6/ef/a02ec5da49909dbbfb1fd205a9a1ac4e88ea92dcae885e7c961847cd51e2/uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d", size = 4580089 },
+ { url = "https://files.pythonhosted.org/packages/06/a7/b4e6a19925c900be9f98bec0a75e6e8f79bb53bdeb891916609ab3958967/uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc", size = 4693770 },
+ { url = "https://files.pythonhosted.org/packages/ce/0c/f07435a18a4b94ce6bd0677d8319cd3de61f3a9eeb1e5f8ab4e8b5edfcb3/uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb", size = 4451321 },
+ { url = "https://files.pythonhosted.org/packages/8f/eb/f7032be105877bcf924709c97b1bf3b90255b4ec251f9340cef912559f28/uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f", size = 4659022 },
+ { url = "https://files.pythonhosted.org/packages/3f/8d/2cbef610ca21539f0f36e2b34da49302029e7c9f09acef0b1c3b5839412b/uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281", size = 1468123 },
+ { url = "https://files.pythonhosted.org/packages/93/0d/b0038d5a469f94ed8f2b2fce2434a18396d8fbfb5da85a0a9781ebbdec14/uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af", size = 819325 },
+ { url = "https://files.pythonhosted.org/packages/50/94/0a687f39e78c4c1e02e3272c6b2ccdb4e0085fda3b8352fecd0410ccf915/uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6", size = 4582806 },
+ { url = "https://files.pythonhosted.org/packages/d2/19/f5b78616566ea68edd42aacaf645adbf71fbd83fc52281fba555dc27e3f1/uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816", size = 4701068 },
+ { url = "https://files.pythonhosted.org/packages/47/57/66f061ee118f413cd22a656de622925097170b9380b30091b78ea0c6ea75/uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc", size = 4454428 },
+ { url = "https://files.pythonhosted.org/packages/63/9a/0962b05b308494e3202d3f794a6e85abe471fe3cafdbcf95c2e8c713aabd/uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553", size = 4660018 },
+]
+
+[[package]]
+name = "virtualenv"
+version = "20.29.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "distlib" },
+ { name = "filelock" },
+ { name = "platformdirs" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/f1/88/dacc875dd54a8acadb4bcbfd4e3e86df8be75527116c91d8f9784f5e9cab/virtualenv-20.29.2.tar.gz", hash = "sha256:fdaabebf6d03b5ba83ae0a02cfe96f48a716f4fae556461d180825866f75b728", size = 4320272 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/93/fa/849483d56773ae29740ae70043ad88e068f98a6401aa819b5d6bee604683/virtualenv-20.29.2-py3-none-any.whl", hash = "sha256:febddfc3d1ea571bdb1dc0f98d7b45d24def7428214d4fb73cc486c9568cce6a", size = 4301478 },
+]
+
+[[package]]
+name = "watchfiles"
+version = "1.0.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "anyio" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/f5/26/c705fc77d0a9ecdb9b66f1e2976d95b81df3cae518967431e7dbf9b5e219/watchfiles-1.0.4.tar.gz", hash = "sha256:6ba473efd11062d73e4f00c2b730255f9c1bdd73cd5f9fe5b5da8dbd4a717205", size = 94625 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5b/1a/8f4d9a1461709756ace48c98f07772bc6d4519b1e48b5fa24a4061216256/watchfiles-1.0.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:229e6ec880eca20e0ba2f7e2249c85bae1999d330161f45c78d160832e026ee2", size = 391345 },
+ { url = "https://files.pythonhosted.org/packages/bc/d2/6750b7b3527b1cdaa33731438432e7238a6c6c40a9924049e4cebfa40805/watchfiles-1.0.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5717021b199e8353782dce03bd8a8f64438832b84e2885c4a645f9723bf656d9", size = 381515 },
+ { url = "https://files.pythonhosted.org/packages/4e/17/80500e42363deef1e4b4818729ed939aaddc56f82f4e72b2508729dd3c6b/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0799ae68dfa95136dde7c472525700bd48777875a4abb2ee454e3ab18e9fc712", size = 449767 },
+ { url = "https://files.pythonhosted.org/packages/10/37/1427fa4cfa09adbe04b1e97bced19a29a3462cc64c78630787b613a23f18/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:43b168bba889886b62edb0397cab5b6490ffb656ee2fcb22dec8bfeb371a9e12", size = 455677 },
+ { url = "https://files.pythonhosted.org/packages/c5/7a/39e9397f3a19cb549a7d380412fd9e507d4854eddc0700bfad10ef6d4dba/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb2c46e275fbb9f0c92e7654b231543c7bbfa1df07cdc4b99fa73bedfde5c844", size = 482219 },
+ { url = "https://files.pythonhosted.org/packages/45/2d/7113931a77e2ea4436cad0c1690c09a40a7f31d366f79c6f0a5bc7a4f6d5/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:857f5fc3aa027ff5e57047da93f96e908a35fe602d24f5e5d8ce64bf1f2fc733", size = 518830 },
+ { url = "https://files.pythonhosted.org/packages/f9/1b/50733b1980fa81ef3c70388a546481ae5fa4c2080040100cd7bf3bf7b321/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55ccfd27c497b228581e2838d4386301227fc0cb47f5a12923ec2fe4f97b95af", size = 497997 },
+ { url = "https://files.pythonhosted.org/packages/2b/b4/9396cc61b948ef18943e7c85ecfa64cf940c88977d882da57147f62b34b1/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c11ea22304d17d4385067588123658e9f23159225a27b983f343fcffc3e796a", size = 452249 },
+ { url = "https://files.pythonhosted.org/packages/fb/69/0c65a5a29e057ad0dc691c2fa6c23b2983c7dabaa190ba553b29ac84c3cc/watchfiles-1.0.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:74cb3ca19a740be4caa18f238298b9d472c850f7b2ed89f396c00a4c97e2d9ff", size = 614412 },
+ { url = "https://files.pythonhosted.org/packages/7f/b9/319fcba6eba5fad34327d7ce16a6b163b39741016b1996f4a3c96b8dd0e1/watchfiles-1.0.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c7cce76c138a91e720d1df54014a047e680b652336e1b73b8e3ff3158e05061e", size = 611982 },
+ { url = "https://files.pythonhosted.org/packages/f1/47/143c92418e30cb9348a4387bfa149c8e0e404a7c5b0585d46d2f7031b4b9/watchfiles-1.0.4-cp312-cp312-win32.whl", hash = "sha256:b045c800d55bc7e2cadd47f45a97c7b29f70f08a7c2fa13241905010a5493f94", size = 271822 },
+ { url = "https://files.pythonhosted.org/packages/ea/94/b0165481bff99a64b29e46e07ac2e0df9f7a957ef13bec4ceab8515f44e3/watchfiles-1.0.4-cp312-cp312-win_amd64.whl", hash = "sha256:c2acfa49dd0ad0bf2a9c0bb9a985af02e89345a7189be1efc6baa085e0f72d7c", size = 285441 },
+ { url = "https://files.pythonhosted.org/packages/11/de/09fe56317d582742d7ca8c2ca7b52a85927ebb50678d9b0fa8194658f536/watchfiles-1.0.4-cp312-cp312-win_arm64.whl", hash = "sha256:22bb55a7c9e564e763ea06c7acea24fc5d2ee5dfc5dafc5cfbedfe58505e9f90", size = 277141 },
+ { url = "https://files.pythonhosted.org/packages/08/98/f03efabec64b5b1fa58c0daab25c68ef815b0f320e54adcacd0d6847c339/watchfiles-1.0.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:8012bd820c380c3d3db8435e8cf7592260257b378b649154a7948a663b5f84e9", size = 390954 },
+ { url = "https://files.pythonhosted.org/packages/16/09/4dd49ba0a32a45813debe5fb3897955541351ee8142f586303b271a02b40/watchfiles-1.0.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:aa216f87594f951c17511efe5912808dfcc4befa464ab17c98d387830ce07b60", size = 381133 },
+ { url = "https://files.pythonhosted.org/packages/76/59/5aa6fc93553cd8d8ee75c6247763d77c02631aed21551a97d94998bf1dae/watchfiles-1.0.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62c9953cf85529c05b24705639ffa390f78c26449e15ec34d5339e8108c7c407", size = 449516 },
+ { url = "https://files.pythonhosted.org/packages/4c/aa/df4b6fe14b6317290b91335b23c96b488d365d65549587434817e06895ea/watchfiles-1.0.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7cf684aa9bba4cd95ecb62c822a56de54e3ae0598c1a7f2065d51e24637a3c5d", size = 454820 },
+ { url = "https://files.pythonhosted.org/packages/5e/71/185f8672f1094ce48af33252c73e39b48be93b761273872d9312087245f6/watchfiles-1.0.4-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f44a39aee3cbb9b825285ff979ab887a25c5d336e5ec3574f1506a4671556a8d", size = 481550 },
+ { url = "https://files.pythonhosted.org/packages/85/d7/50ebba2c426ef1a5cb17f02158222911a2e005d401caf5d911bfca58f4c4/watchfiles-1.0.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38320582736922be8c865d46520c043bff350956dfc9fbaee3b2df4e1740a4b", size = 518647 },
+ { url = "https://files.pythonhosted.org/packages/f0/7a/4c009342e393c545d68987e8010b937f72f47937731225b2b29b7231428f/watchfiles-1.0.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39f4914548b818540ef21fd22447a63e7be6e24b43a70f7642d21f1e73371590", size = 497547 },
+ { url = "https://files.pythonhosted.org/packages/0f/7c/1cf50b35412d5c72d63b2bf9a4fffee2e1549a245924960dd087eb6a6de4/watchfiles-1.0.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f12969a3765909cf5dc1e50b2436eb2c0e676a3c75773ab8cc3aa6175c16e902", size = 452179 },
+ { url = "https://files.pythonhosted.org/packages/d6/a9/3db1410e1c1413735a9a472380e4f431ad9a9e81711cda2aaf02b7f62693/watchfiles-1.0.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:0986902677a1a5e6212d0c49b319aad9cc48da4bd967f86a11bde96ad9676ca1", size = 614125 },
+ { url = "https://files.pythonhosted.org/packages/f2/e1/0025d365cf6248c4d1ee4c3d2e3d373bdd3f6aff78ba4298f97b4fad2740/watchfiles-1.0.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:308ac265c56f936636e3b0e3f59e059a40003c655228c131e1ad439957592303", size = 611911 },
+ { url = "https://files.pythonhosted.org/packages/55/55/035838277d8c98fc8c917ac9beeb0cd6c59d675dc2421df5f9fcf44a0070/watchfiles-1.0.4-cp313-cp313-win32.whl", hash = "sha256:aee397456a29b492c20fda2d8961e1ffb266223625346ace14e4b6d861ba9c80", size = 271152 },
+ { url = "https://files.pythonhosted.org/packages/f0/e5/96b8e55271685ddbadc50ce8bc53aa2dff278fb7ac4c2e473df890def2dc/watchfiles-1.0.4-cp313-cp313-win_amd64.whl", hash = "sha256:d6097538b0ae5c1b88c3b55afa245a66793a8fec7ada6755322e465fb1a0e8cc", size = 285216 },
+]
+
+[[package]]
+name = "websockets"
+version = "15.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/2e/7a/8bc4d15af7ff30f7ba34f9a172063bfcee9f5001d7cef04bee800a658f33/websockets-15.0.tar.gz", hash = "sha256:ca36151289a15b39d8d683fd8b7abbe26fc50be311066c5f8dcf3cb8cee107ab", size = 175574 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/22/1e/92c4547d7b2a93f848aedaf37e9054111bc00dc11bff4385ca3f80dbb412/websockets-15.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:cccc18077acd34c8072578394ec79563664b1c205f7a86a62e94fafc7b59001f", size = 174709 },
+ { url = "https://files.pythonhosted.org/packages/9f/37/eae4830a28061ba552516d84478686b637cd9e57d6a90b45ad69e89cb0af/websockets-15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d4c22992e24f12de340ca5f824121a5b3e1a37ad4360b4e1aaf15e9d1c42582d", size = 172372 },
+ { url = "https://files.pythonhosted.org/packages/46/2f/b409f8b8aa9328d5a47f7a301a43319d540d70cf036d1e6443675978a988/websockets-15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1206432cc6c644f6fc03374b264c5ff805d980311563202ed7fef91a38906276", size = 172607 },
+ { url = "https://files.pythonhosted.org/packages/d6/81/d7e2e4542d4b4df849b0110df1b1f94f2647b71ab4b65d672090931ad2bb/websockets-15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d3cc75ef3e17490042c47e0523aee1bcc4eacd2482796107fd59dd1100a44bc", size = 182422 },
+ { url = "https://files.pythonhosted.org/packages/b6/91/3b303160938d123eea97f58be363f7dbec76e8c59d587e07b5bc257dd584/websockets-15.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b89504227a5311610e4be16071465885a0a3d6b0e82e305ef46d9b064ce5fb72", size = 181362 },
+ { url = "https://files.pythonhosted.org/packages/f2/8b/df6807f1ca339c567aba9a7ab03bfdb9a833f625e8d2b4fc7529e4c701de/websockets-15.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56e3efe356416bc67a8e093607315951d76910f03d2b3ad49c4ade9207bf710d", size = 181787 },
+ { url = "https://files.pythonhosted.org/packages/21/37/e6d3d5ebb0ebcaf98ae84904205c9dcaf3e0fe93e65000b9f08631ed7309/websockets-15.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0f2205cdb444a42a7919690238fb5979a05439b9dbb73dd47c863d39640d85ab", size = 182058 },
+ { url = "https://files.pythonhosted.org/packages/c9/df/6aca296f2be4c638ad20908bb3d7c94ce7afc8d9b4b2b0780d1fc59b359c/websockets-15.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:aea01f40995fa0945c020228ab919b8dfc93fc8a9f2d3d705ab5b793f32d9e99", size = 181434 },
+ { url = "https://files.pythonhosted.org/packages/88/f1/75717a982bab39bbe63c83f9df0e7753e5c98bab907eb4fb5d97fe5c8c11/websockets-15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a9f8e33747b1332db11cf7fcf4a9512bef9748cb5eb4d3f7fbc8c30d75dc6ffc", size = 181431 },
+ { url = "https://files.pythonhosted.org/packages/e7/15/cee9e63ed9ac5bfc1a3ae8fc6c02c41745023c21eed622eef142d8fdd749/websockets-15.0-cp312-cp312-win32.whl", hash = "sha256:32e02a2d83f4954aa8c17e03fe8ec6962432c39aca4be7e8ee346b05a3476904", size = 175678 },
+ { url = "https://files.pythonhosted.org/packages/4e/00/993974c60f40faabb725d4dbae8b072ef73b4c4454bd261d3b1d34ace41f/websockets-15.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffc02b159b65c05f2ed9ec176b715b66918a674bd4daed48a9a7a590dd4be1aa", size = 176119 },
+ { url = "https://files.pythonhosted.org/packages/12/23/be28dc1023707ac51768f848d28a946443041a348ee3a54abdf9f6283372/websockets-15.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d2244d8ab24374bed366f9ff206e2619345f9cd7fe79aad5225f53faac28b6b1", size = 174714 },
+ { url = "https://files.pythonhosted.org/packages/8f/ff/02b5e9fbb078e7666bf3d25c18c69b499747a12f3e7f2776063ef3fb7061/websockets-15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3a302241fbe825a3e4fe07666a2ab513edfdc6d43ce24b79691b45115273b5e7", size = 172374 },
+ { url = "https://files.pythonhosted.org/packages/8e/61/901c8d4698e0477eff4c3c664d53f898b601fa83af4ce81946650ec2a4cb/websockets-15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:10552fed076757a70ba2c18edcbc601c7637b30cdfe8c24b65171e824c7d6081", size = 172605 },
+ { url = "https://files.pythonhosted.org/packages/d2/4b/dc47601a80dff317aecf8da7b4ab278d11d3494b2c373b493e4887561f90/websockets-15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c53f97032b87a406044a1c33d1e9290cc38b117a8062e8a8b285175d7e2f99c9", size = 182380 },
+ { url = "https://files.pythonhosted.org/packages/83/f7/b155d2b38f05ed47a0b8de1c9ea245fcd7fc625d89f35a37eccba34b42de/websockets-15.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1caf951110ca757b8ad9c4974f5cac7b8413004d2f29707e4d03a65d54cedf2b", size = 181325 },
+ { url = "https://files.pythonhosted.org/packages/d3/ff/040a20c01c294695cac0e361caf86f33347acc38f164f6d2be1d3e007d9f/websockets-15.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bf1ab71f9f23b0a1d52ec1682a3907e0c208c12fef9c3e99d2b80166b17905f", size = 181763 },
+ { url = "https://files.pythonhosted.org/packages/cb/6a/af23e93678fda8341ac8775e85123425e45c608389d3514863c702896ea5/websockets-15.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bfcd3acc1a81f106abac6afd42327d2cf1e77ec905ae11dc1d9142a006a496b6", size = 182097 },
+ { url = "https://files.pythonhosted.org/packages/7e/3e/1069e159c30129dc03c01513b5830237e576f47cedb888777dd885cae583/websockets-15.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c8c5c8e1bac05ef3c23722e591ef4f688f528235e2480f157a9cfe0a19081375", size = 181485 },
+ { url = "https://files.pythonhosted.org/packages/9a/a7/c91c47103f1cd941b576bbc452601e9e01f67d5c9be3e0a9abe726491ab5/websockets-15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:86bfb52a9cfbcc09aba2b71388b0a20ea5c52b6517c0b2e316222435a8cdab72", size = 181466 },
+ { url = "https://files.pythonhosted.org/packages/16/32/a4ca6e3d56c24aac46b0cf5c03b841379f6409d07fc2044b244f90f54105/websockets-15.0-cp313-cp313-win32.whl", hash = "sha256:26ba70fed190708551c19a360f9d7eca8e8c0f615d19a574292b7229e0ae324c", size = 175673 },
+ { url = "https://files.pythonhosted.org/packages/c0/31/25a417a23e985b61ffa5544f9facfe4a118cb64d664c886f1244a8baeca5/websockets-15.0-cp313-cp313-win_amd64.whl", hash = "sha256:ae721bcc8e69846af00b7a77a220614d9b2ec57d25017a6bbde3a99473e41ce8", size = 176115 },
+ { url = "https://files.pythonhosted.org/packages/e8/b2/31eec524b53f01cd8343f10a8e429730c52c1849941d1f530f8253b6d934/websockets-15.0-py3-none-any.whl", hash = "sha256:51ffd53c53c4442415b613497a34ba0aa7b99ac07f1e4a62db5dcd640ae6c3c3", size = 169023 },
+]
+
+[[package]]
+name = "wrapt"
+version = "1.17.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/c3/fc/e91cc220803d7bc4db93fb02facd8461c37364151b8494762cc88b0fbcef/wrapt-1.17.2.tar.gz", hash = "sha256:41388e9d4d1522446fe79d3213196bd9e3b301a336965b9e27ca2788ebd122f3", size = 55531 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a1/bd/ab55f849fd1f9a58ed7ea47f5559ff09741b25f00c191231f9f059c83949/wrapt-1.17.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d5e2439eecc762cd85e7bd37161d4714aa03a33c5ba884e26c81559817ca0925", size = 53799 },
+ { url = "https://files.pythonhosted.org/packages/53/18/75ddc64c3f63988f5a1d7e10fb204ffe5762bc663f8023f18ecaf31a332e/wrapt-1.17.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fc7cb4c1c744f8c05cd5f9438a3caa6ab94ce8344e952d7c45a8ed59dd88392", size = 38821 },
+ { url = "https://files.pythonhosted.org/packages/48/2a/97928387d6ed1c1ebbfd4efc4133a0633546bec8481a2dd5ec961313a1c7/wrapt-1.17.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fdbdb757d5390f7c675e558fd3186d590973244fab0c5fe63d373ade3e99d40", size = 38919 },
+ { url = "https://files.pythonhosted.org/packages/73/54/3bfe5a1febbbccb7a2f77de47b989c0b85ed3a6a41614b104204a788c20e/wrapt-1.17.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bb1d0dbf99411f3d871deb6faa9aabb9d4e744d67dcaaa05399af89d847a91d", size = 88721 },
+ { url = "https://files.pythonhosted.org/packages/25/cb/7262bc1b0300b4b64af50c2720ef958c2c1917525238d661c3e9a2b71b7b/wrapt-1.17.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d18a4865f46b8579d44e4fe1e2bcbc6472ad83d98e22a26c963d46e4c125ef0b", size = 80899 },
+ { url = "https://files.pythonhosted.org/packages/2a/5a/04cde32b07a7431d4ed0553a76fdb7a61270e78c5fd5a603e190ac389f14/wrapt-1.17.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc570b5f14a79734437cb7b0500376b6b791153314986074486e0b0fa8d71d98", size = 89222 },
+ { url = "https://files.pythonhosted.org/packages/09/28/2e45a4f4771fcfb109e244d5dbe54259e970362a311b67a965555ba65026/wrapt-1.17.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6d9187b01bebc3875bac9b087948a2bccefe464a7d8f627cf6e48b1bbae30f82", size = 86707 },
+ { url = "https://files.pythonhosted.org/packages/c6/d2/dcb56bf5f32fcd4bd9aacc77b50a539abdd5b6536872413fd3f428b21bed/wrapt-1.17.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9e8659775f1adf02eb1e6f109751268e493c73716ca5761f8acb695e52a756ae", size = 79685 },
+ { url = "https://files.pythonhosted.org/packages/80/4e/eb8b353e36711347893f502ce91c770b0b0929f8f0bed2670a6856e667a9/wrapt-1.17.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8b2816ebef96d83657b56306152a93909a83f23994f4b30ad4573b00bd11bb9", size = 87567 },
+ { url = "https://files.pythonhosted.org/packages/17/27/4fe749a54e7fae6e7146f1c7d914d28ef599dacd4416566c055564080fe2/wrapt-1.17.2-cp312-cp312-win32.whl", hash = "sha256:468090021f391fe0056ad3e807e3d9034e0fd01adcd3bdfba977b6fdf4213ea9", size = 36672 },
+ { url = "https://files.pythonhosted.org/packages/15/06/1dbf478ea45c03e78a6a8c4be4fdc3c3bddea5c8de8a93bc971415e47f0f/wrapt-1.17.2-cp312-cp312-win_amd64.whl", hash = "sha256:ec89ed91f2fa8e3f52ae53cd3cf640d6feff92ba90d62236a81e4e563ac0e991", size = 38865 },
+ { url = "https://files.pythonhosted.org/packages/ce/b9/0ffd557a92f3b11d4c5d5e0c5e4ad057bd9eb8586615cdaf901409920b14/wrapt-1.17.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6ed6ffac43aecfe6d86ec5b74b06a5be33d5bb9243d055141e8cabb12aa08125", size = 53800 },
+ { url = "https://files.pythonhosted.org/packages/c0/ef/8be90a0b7e73c32e550c73cfb2fa09db62234227ece47b0e80a05073b375/wrapt-1.17.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:35621ae4c00e056adb0009f8e86e28eb4a41a4bfa8f9bfa9fca7d343fe94f998", size = 38824 },
+ { url = "https://files.pythonhosted.org/packages/36/89/0aae34c10fe524cce30fe5fc433210376bce94cf74d05b0d68344c8ba46e/wrapt-1.17.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a604bf7a053f8362d27eb9fefd2097f82600b856d5abe996d623babd067b1ab5", size = 38920 },
+ { url = "https://files.pythonhosted.org/packages/3b/24/11c4510de906d77e0cfb5197f1b1445d4fec42c9a39ea853d482698ac681/wrapt-1.17.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cbabee4f083b6b4cd282f5b817a867cf0b1028c54d445b7ec7cfe6505057cf8", size = 88690 },
+ { url = "https://files.pythonhosted.org/packages/71/d7/cfcf842291267bf455b3e266c0c29dcb675b5540ee8b50ba1699abf3af45/wrapt-1.17.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49703ce2ddc220df165bd2962f8e03b84c89fee2d65e1c24a7defff6f988f4d6", size = 80861 },
+ { url = "https://files.pythonhosted.org/packages/d5/66/5d973e9f3e7370fd686fb47a9af3319418ed925c27d72ce16b791231576d/wrapt-1.17.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8112e52c5822fc4253f3901b676c55ddf288614dc7011634e2719718eaa187dc", size = 89174 },
+ { url = "https://files.pythonhosted.org/packages/a7/d3/8e17bb70f6ae25dabc1aaf990f86824e4fd98ee9cadf197054e068500d27/wrapt-1.17.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9fee687dce376205d9a494e9c121e27183b2a3df18037f89d69bd7b35bcf59e2", size = 86721 },
+ { url = "https://files.pythonhosted.org/packages/6f/54/f170dfb278fe1c30d0ff864513cff526d624ab8de3254b20abb9cffedc24/wrapt-1.17.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:18983c537e04d11cf027fbb60a1e8dfd5190e2b60cc27bc0808e653e7b218d1b", size = 79763 },
+ { url = "https://files.pythonhosted.org/packages/4a/98/de07243751f1c4a9b15c76019250210dd3486ce098c3d80d5f729cba029c/wrapt-1.17.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:703919b1633412ab54bcf920ab388735832fdcb9f9a00ae49387f0fe67dad504", size = 87585 },
+ { url = "https://files.pythonhosted.org/packages/f9/f0/13925f4bd6548013038cdeb11ee2cbd4e37c30f8bfd5db9e5a2a370d6e20/wrapt-1.17.2-cp313-cp313-win32.whl", hash = "sha256:abbb9e76177c35d4e8568e58650aa6926040d6a9f6f03435b7a522bf1c487f9a", size = 36676 },
+ { url = "https://files.pythonhosted.org/packages/bf/ae/743f16ef8c2e3628df3ddfd652b7d4c555d12c84b53f3d8218498f4ade9b/wrapt-1.17.2-cp313-cp313-win_amd64.whl", hash = "sha256:69606d7bb691b50a4240ce6b22ebb319c1cfb164e5f6569835058196e0f3a845", size = 38871 },
+ { url = "https://files.pythonhosted.org/packages/3d/bc/30f903f891a82d402ffb5fda27ec1d621cc97cb74c16fea0b6141f1d4e87/wrapt-1.17.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:4a721d3c943dae44f8e243b380cb645a709ba5bd35d3ad27bc2ed947e9c68192", size = 56312 },
+ { url = "https://files.pythonhosted.org/packages/8a/04/c97273eb491b5f1c918857cd26f314b74fc9b29224521f5b83f872253725/wrapt-1.17.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:766d8bbefcb9e00c3ac3b000d9acc51f1b399513f44d77dfe0eb026ad7c9a19b", size = 40062 },
+ { url = "https://files.pythonhosted.org/packages/4e/ca/3b7afa1eae3a9e7fefe499db9b96813f41828b9fdb016ee836c4c379dadb/wrapt-1.17.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e496a8ce2c256da1eb98bd15803a79bee00fc351f5dfb9ea82594a3f058309e0", size = 40155 },
+ { url = "https://files.pythonhosted.org/packages/89/be/7c1baed43290775cb9030c774bc53c860db140397047cc49aedaf0a15477/wrapt-1.17.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d615e4fe22f4ad3528448c193b218e077656ca9ccb22ce2cb20db730f8d306", size = 113471 },
+ { url = "https://files.pythonhosted.org/packages/32/98/4ed894cf012b6d6aae5f5cc974006bdeb92f0241775addad3f8cd6ab71c8/wrapt-1.17.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5aaeff38654462bc4b09023918b7f21790efb807f54c000a39d41d69cf552cb", size = 101208 },
+ { url = "https://files.pythonhosted.org/packages/ea/fd/0c30f2301ca94e655e5e057012e83284ce8c545df7661a78d8bfca2fac7a/wrapt-1.17.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7d15bbd2bc99e92e39f49a04653062ee6085c0e18b3b7512a4f2fe91f2d681", size = 109339 },
+ { url = "https://files.pythonhosted.org/packages/75/56/05d000de894c4cfcb84bcd6b1df6214297b8089a7bd324c21a4765e49b14/wrapt-1.17.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e3890b508a23299083e065f435a492b5435eba6e304a7114d2f919d400888cc6", size = 110232 },
+ { url = "https://files.pythonhosted.org/packages/53/f8/c3f6b2cf9b9277fb0813418e1503e68414cd036b3b099c823379c9575e6d/wrapt-1.17.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8c8b293cd65ad716d13d8dd3624e42e5a19cc2a2f1acc74b30c2c13f15cb61a6", size = 100476 },
+ { url = "https://files.pythonhosted.org/packages/a7/b1/0bb11e29aa5139d90b770ebbfa167267b1fc548d2302c30c8f7572851738/wrapt-1.17.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c82b8785d98cdd9fed4cac84d765d234ed3251bd6afe34cb7ac523cb93e8b4f", size = 106377 },
+ { url = "https://files.pythonhosted.org/packages/6a/e1/0122853035b40b3f333bbb25f1939fc1045e21dd518f7f0922b60c156f7c/wrapt-1.17.2-cp313-cp313t-win32.whl", hash = "sha256:13e6afb7fe71fe7485a4550a8844cc9ffbe263c0f1a1eea569bc7091d4898555", size = 37986 },
+ { url = "https://files.pythonhosted.org/packages/09/5e/1655cf481e079c1f22d0cabdd4e51733679932718dc23bf2db175f329b76/wrapt-1.17.2-cp313-cp313t-win_amd64.whl", hash = "sha256:eaf675418ed6b3b31c7a989fd007fa7c3be66ce14e5c3b27336383604c9da85c", size = 40750 },
+ { url = "https://files.pythonhosted.org/packages/2d/82/f56956041adef78f849db6b289b282e72b55ab8045a75abad81898c28d19/wrapt-1.17.2-py3-none-any.whl", hash = "sha256:b18f2d1533a71f069c7f82d524a52599053d4c7166e9dd374ae2136b7f40f7c8", size = 23594 },
+]
+
+[[package]]
+name = "xmltodict"
+version = "0.14.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/50/05/51dcca9a9bf5e1bce52582683ce50980bcadbc4fa5143b9f2b19ab99958f/xmltodict-0.14.2.tar.gz", hash = "sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553", size = 51942 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d6/45/fc303eb433e8a2a271739c98e953728422fa61a3c1f36077a49e395c972e/xmltodict-0.14.2-py2.py3-none-any.whl", hash = "sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac", size = 9981 },
+]
+
+[[package]]
+name = "xxhash"
+version = "3.5.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/00/5e/d6e5258d69df8b4ed8c83b6664f2b47d30d2dec551a29ad72a6c69eafd31/xxhash-3.5.0.tar.gz", hash = "sha256:84f2caddf951c9cbf8dc2e22a89d4ccf5d86391ac6418fe81e3c67d0cf60b45f", size = 84241 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/07/0e/1bfce2502c57d7e2e787600b31c83535af83746885aa1a5f153d8c8059d6/xxhash-3.5.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:14470ace8bd3b5d51318782cd94e6f94431974f16cb3b8dc15d52f3b69df8e00", size = 31969 },
+ { url = "https://files.pythonhosted.org/packages/3f/d6/8ca450d6fe5b71ce521b4e5db69622383d039e2b253e9b2f24f93265b52c/xxhash-3.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:59aa1203de1cb96dbeab595ded0ad0c0056bb2245ae11fac11c0ceea861382b9", size = 30787 },
+ { url = "https://files.pythonhosted.org/packages/5b/84/de7c89bc6ef63d750159086a6ada6416cc4349eab23f76ab870407178b93/xxhash-3.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08424f6648526076e28fae6ea2806c0a7d504b9ef05ae61d196d571e5c879c84", size = 220959 },
+ { url = "https://files.pythonhosted.org/packages/fe/86/51258d3e8a8545ff26468c977101964c14d56a8a37f5835bc0082426c672/xxhash-3.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:61a1ff00674879725b194695e17f23d3248998b843eb5e933007ca743310f793", size = 200006 },
+ { url = "https://files.pythonhosted.org/packages/02/0a/96973bd325412feccf23cf3680fd2246aebf4b789122f938d5557c54a6b2/xxhash-3.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2f2c61bee5844d41c3eb015ac652a0229e901074951ae48581d58bfb2ba01be", size = 428326 },
+ { url = "https://files.pythonhosted.org/packages/11/a7/81dba5010f7e733de88af9555725146fc133be97ce36533867f4c7e75066/xxhash-3.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d32a592cac88d18cc09a89172e1c32d7f2a6e516c3dfde1b9adb90ab5df54a6", size = 194380 },
+ { url = "https://files.pythonhosted.org/packages/fb/7d/f29006ab398a173f4501c0e4977ba288f1c621d878ec217b4ff516810c04/xxhash-3.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70dabf941dede727cca579e8c205e61121afc9b28516752fd65724be1355cc90", size = 207934 },
+ { url = "https://files.pythonhosted.org/packages/8a/6e/6e88b8f24612510e73d4d70d9b0c7dff62a2e78451b9f0d042a5462c8d03/xxhash-3.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e5d0ddaca65ecca9c10dcf01730165fd858533d0be84c75c327487c37a906a27", size = 216301 },
+ { url = "https://files.pythonhosted.org/packages/af/51/7862f4fa4b75a25c3b4163c8a873f070532fe5f2d3f9b3fc869c8337a398/xxhash-3.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e5b5e16c5a480fe5f59f56c30abdeba09ffd75da8d13f6b9b6fd224d0b4d0a2", size = 203351 },
+ { url = "https://files.pythonhosted.org/packages/22/61/8d6a40f288f791cf79ed5bb113159abf0c81d6efb86e734334f698eb4c59/xxhash-3.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149b7914451eb154b3dfaa721315117ea1dac2cc55a01bfbd4df7c68c5dd683d", size = 210294 },
+ { url = "https://files.pythonhosted.org/packages/17/02/215c4698955762d45a8158117190261b2dbefe9ae7e5b906768c09d8bc74/xxhash-3.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:eade977f5c96c677035ff39c56ac74d851b1cca7d607ab3d8f23c6b859379cab", size = 414674 },
+ { url = "https://files.pythonhosted.org/packages/31/5c/b7a8db8a3237cff3d535261325d95de509f6a8ae439a5a7a4ffcff478189/xxhash-3.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fa9f547bd98f5553d03160967866a71056a60960be00356a15ecc44efb40ba8e", size = 192022 },
+ { url = "https://files.pythonhosted.org/packages/78/e3/dd76659b2811b3fd06892a8beb850e1996b63e9235af5a86ea348f053e9e/xxhash-3.5.0-cp312-cp312-win32.whl", hash = "sha256:f7b58d1fd3551b8c80a971199543379be1cee3d0d409e1f6d8b01c1a2eebf1f8", size = 30170 },
+ { url = "https://files.pythonhosted.org/packages/d9/6b/1c443fe6cfeb4ad1dcf231cdec96eb94fb43d6498b4469ed8b51f8b59a37/xxhash-3.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:fa0cafd3a2af231b4e113fba24a65d7922af91aeb23774a8b78228e6cd785e3e", size = 30040 },
+ { url = "https://files.pythonhosted.org/packages/0f/eb/04405305f290173acc0350eba6d2f1a794b57925df0398861a20fbafa415/xxhash-3.5.0-cp312-cp312-win_arm64.whl", hash = "sha256:586886c7e89cb9828bcd8a5686b12e161368e0064d040e225e72607b43858ba2", size = 26796 },
+ { url = "https://files.pythonhosted.org/packages/c9/b8/e4b3ad92d249be5c83fa72916c9091b0965cb0faeff05d9a0a3870ae6bff/xxhash-3.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:37889a0d13b0b7d739cfc128b1c902f04e32de17b33d74b637ad42f1c55101f6", size = 31795 },
+ { url = "https://files.pythonhosted.org/packages/fc/d8/b3627a0aebfbfa4c12a41e22af3742cf08c8ea84f5cc3367b5de2d039cce/xxhash-3.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:97a662338797c660178e682f3bc180277b9569a59abfb5925e8620fba00b9fc5", size = 30792 },
+ { url = "https://files.pythonhosted.org/packages/c3/cc/762312960691da989c7cd0545cb120ba2a4148741c6ba458aa723c00a3f8/xxhash-3.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f85e0108d51092bdda90672476c7d909c04ada6923c14ff9d913c4f7dc8a3bc", size = 220950 },
+ { url = "https://files.pythonhosted.org/packages/fe/e9/cc266f1042c3c13750e86a535496b58beb12bf8c50a915c336136f6168dc/xxhash-3.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2fd827b0ba763ac919440042302315c564fdb797294d86e8cdd4578e3bc7f3", size = 199980 },
+ { url = "https://files.pythonhosted.org/packages/bf/85/a836cd0dc5cc20376de26b346858d0ac9656f8f730998ca4324921a010b9/xxhash-3.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82085c2abec437abebf457c1d12fccb30cc8b3774a0814872511f0f0562c768c", size = 428324 },
+ { url = "https://files.pythonhosted.org/packages/b4/0e/15c243775342ce840b9ba34aceace06a1148fa1630cd8ca269e3223987f5/xxhash-3.5.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07fda5de378626e502b42b311b049848c2ef38784d0d67b6f30bb5008642f8eb", size = 194370 },
+ { url = "https://files.pythonhosted.org/packages/87/a1/b028bb02636dfdc190da01951d0703b3d904301ed0ef6094d948983bef0e/xxhash-3.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c279f0d2b34ef15f922b77966640ade58b4ccdfef1c4d94b20f2a364617a493f", size = 207911 },
+ { url = "https://files.pythonhosted.org/packages/80/d5/73c73b03fc0ac73dacf069fdf6036c9abad82de0a47549e9912c955ab449/xxhash-3.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:89e66ceed67b213dec5a773e2f7a9e8c58f64daeb38c7859d8815d2c89f39ad7", size = 216352 },
+ { url = "https://files.pythonhosted.org/packages/b6/2a/5043dba5ddbe35b4fe6ea0a111280ad9c3d4ba477dd0f2d1fe1129bda9d0/xxhash-3.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bcd51708a633410737111e998ceb3b45d3dbc98c0931f743d9bb0a209033a326", size = 203410 },
+ { url = "https://files.pythonhosted.org/packages/a2/b2/9a8ded888b7b190aed75b484eb5c853ddd48aa2896e7b59bbfbce442f0a1/xxhash-3.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3ff2c0a34eae7df88c868be53a8dd56fbdf592109e21d4bfa092a27b0bf4a7bf", size = 210322 },
+ { url = "https://files.pythonhosted.org/packages/98/62/440083fafbc917bf3e4b67c2ade621920dd905517e85631c10aac955c1d2/xxhash-3.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:4e28503dccc7d32e0b9817aa0cbfc1f45f563b2c995b7a66c4c8a0d232e840c7", size = 414725 },
+ { url = "https://files.pythonhosted.org/packages/75/db/009206f7076ad60a517e016bb0058381d96a007ce3f79fa91d3010f49cc2/xxhash-3.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a6c50017518329ed65a9e4829154626f008916d36295b6a3ba336e2458824c8c", size = 192070 },
+ { url = "https://files.pythonhosted.org/packages/1f/6d/c61e0668943a034abc3a569cdc5aeae37d686d9da7e39cf2ed621d533e36/xxhash-3.5.0-cp313-cp313-win32.whl", hash = "sha256:53a068fe70301ec30d868ece566ac90d873e3bb059cf83c32e76012c889b8637", size = 30172 },
+ { url = "https://files.pythonhosted.org/packages/96/14/8416dce965f35e3d24722cdf79361ae154fa23e2ab730e5323aa98d7919e/xxhash-3.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:80babcc30e7a1a484eab952d76a4f4673ff601f54d5142c26826502740e70b43", size = 30041 },
+ { url = "https://files.pythonhosted.org/packages/27/ee/518b72faa2073f5aa8e3262408d284892cb79cf2754ba0c3a5870645ef73/xxhash-3.5.0-cp313-cp313-win_arm64.whl", hash = "sha256:4811336f1ce11cac89dcbd18f3a25c527c16311709a89313c3acaf771def2d4b", size = 26801 },
+]
+
+[[package]]
+name = "yarl"
+version = "1.18.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "idna" },
+ { name = "multidict" },
+ { name = "propcache" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b7/9d/4b94a8e6d2b51b599516a5cb88e5bc99b4d8d4583e468057eaa29d5f0918/yarl-1.18.3.tar.gz", hash = "sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1", size = 181062 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/33/85/bd2e2729752ff4c77338e0102914897512e92496375e079ce0150a6dc306/yarl-1.18.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50", size = 142644 },
+ { url = "https://files.pythonhosted.org/packages/ff/74/1178322cc0f10288d7eefa6e4a85d8d2e28187ccab13d5b844e8b5d7c88d/yarl-1.18.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576", size = 94962 },
+ { url = "https://files.pythonhosted.org/packages/be/75/79c6acc0261e2c2ae8a1c41cf12265e91628c8c58ae91f5ff59e29c0787f/yarl-1.18.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640", size = 92795 },
+ { url = "https://files.pythonhosted.org/packages/6b/32/927b2d67a412c31199e83fefdce6e645247b4fb164aa1ecb35a0f9eb2058/yarl-1.18.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2", size = 332368 },
+ { url = "https://files.pythonhosted.org/packages/19/e5/859fca07169d6eceeaa4fde1997c91d8abde4e9a7c018e371640c2da2b71/yarl-1.18.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75", size = 342314 },
+ { url = "https://files.pythonhosted.org/packages/08/75/76b63ccd91c9e03ab213ef27ae6add2e3400e77e5cdddf8ed2dbc36e3f21/yarl-1.18.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512", size = 341987 },
+ { url = "https://files.pythonhosted.org/packages/1a/e1/a097d5755d3ea8479a42856f51d97eeff7a3a7160593332d98f2709b3580/yarl-1.18.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba", size = 336914 },
+ { url = "https://files.pythonhosted.org/packages/0b/42/e1b4d0e396b7987feceebe565286c27bc085bf07d61a59508cdaf2d45e63/yarl-1.18.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb", size = 325765 },
+ { url = "https://files.pythonhosted.org/packages/7e/18/03a5834ccc9177f97ca1bbb245b93c13e58e8225276f01eedc4cc98ab820/yarl-1.18.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272", size = 344444 },
+ { url = "https://files.pythonhosted.org/packages/c8/03/a713633bdde0640b0472aa197b5b86e90fbc4c5bc05b727b714cd8a40e6d/yarl-1.18.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6", size = 340760 },
+ { url = "https://files.pythonhosted.org/packages/eb/99/f6567e3f3bbad8fd101886ea0276c68ecb86a2b58be0f64077396cd4b95e/yarl-1.18.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e", size = 346484 },
+ { url = "https://files.pythonhosted.org/packages/8e/a9/84717c896b2fc6cb15bd4eecd64e34a2f0a9fd6669e69170c73a8b46795a/yarl-1.18.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb", size = 359864 },
+ { url = "https://files.pythonhosted.org/packages/1e/2e/d0f5f1bef7ee93ed17e739ec8dbcb47794af891f7d165fa6014517b48169/yarl-1.18.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393", size = 364537 },
+ { url = "https://files.pythonhosted.org/packages/97/8a/568d07c5d4964da5b02621a517532adb8ec5ba181ad1687191fffeda0ab6/yarl-1.18.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285", size = 357861 },
+ { url = "https://files.pythonhosted.org/packages/7d/e3/924c3f64b6b3077889df9a1ece1ed8947e7b61b0a933f2ec93041990a677/yarl-1.18.3-cp312-cp312-win32.whl", hash = "sha256:f91c4803173928a25e1a55b943c81f55b8872f0018be83e3ad4938adffb77dd2", size = 84097 },
+ { url = "https://files.pythonhosted.org/packages/34/45/0e055320daaabfc169b21ff6174567b2c910c45617b0d79c68d7ab349b02/yarl-1.18.3-cp312-cp312-win_amd64.whl", hash = "sha256:7e2ee16578af3b52ac2f334c3b1f92262f47e02cc6193c598502bd46f5cd1477", size = 90399 },
+ { url = "https://files.pythonhosted.org/packages/30/c7/c790513d5328a8390be8f47be5d52e141f78b66c6c48f48d241ca6bd5265/yarl-1.18.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:90adb47ad432332d4f0bc28f83a5963f426ce9a1a8809f5e584e704b82685dcb", size = 140789 },
+ { url = "https://files.pythonhosted.org/packages/30/aa/a2f84e93554a578463e2edaaf2300faa61c8701f0898725842c704ba5444/yarl-1.18.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:913829534200eb0f789d45349e55203a091f45c37a2674678744ae52fae23efa", size = 94144 },
+ { url = "https://files.pythonhosted.org/packages/c6/fc/d68d8f83714b221a85ce7866832cba36d7c04a68fa6a960b908c2c84f325/yarl-1.18.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ef9f7768395923c3039055c14334ba4d926f3baf7b776c923c93d80195624782", size = 91974 },
+ { url = "https://files.pythonhosted.org/packages/56/4e/d2563d8323a7e9a414b5b25341b3942af5902a2263d36d20fb17c40411e2/yarl-1.18.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88a19f62ff30117e706ebc9090b8ecc79aeb77d0b1f5ec10d2d27a12bc9f66d0", size = 333587 },
+ { url = "https://files.pythonhosted.org/packages/25/c9/cfec0bc0cac8d054be223e9f2c7909d3e8442a856af9dbce7e3442a8ec8d/yarl-1.18.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e17c9361d46a4d5addf777c6dd5eab0715a7684c2f11b88c67ac37edfba6c482", size = 344386 },
+ { url = "https://files.pythonhosted.org/packages/ab/5d/4c532190113b25f1364d25f4c319322e86232d69175b91f27e3ebc2caf9a/yarl-1.18.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a74a13a4c857a84a845505fd2d68e54826a2cd01935a96efb1e9d86c728e186", size = 345421 },
+ { url = "https://files.pythonhosted.org/packages/23/d1/6cdd1632da013aa6ba18cee4d750d953104a5e7aac44e249d9410a972bf5/yarl-1.18.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41f7ce59d6ee7741af71d82020346af364949314ed3d87553763a2df1829cc58", size = 339384 },
+ { url = "https://files.pythonhosted.org/packages/9a/c4/6b3c39bec352e441bd30f432cda6ba51681ab19bb8abe023f0d19777aad1/yarl-1.18.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f52a265001d830bc425f82ca9eabda94a64a4d753b07d623a9f2863fde532b53", size = 326689 },
+ { url = "https://files.pythonhosted.org/packages/23/30/07fb088f2eefdc0aa4fc1af4e3ca4eb1a3aadd1ce7d866d74c0f124e6a85/yarl-1.18.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:82123d0c954dc58db301f5021a01854a85bf1f3bb7d12ae0c01afc414a882ca2", size = 345453 },
+ { url = "https://files.pythonhosted.org/packages/63/09/d54befb48f9cd8eec43797f624ec37783a0266855f4930a91e3d5c7717f8/yarl-1.18.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2ec9bbba33b2d00999af4631a3397d1fd78290c48e2a3e52d8dd72db3a067ac8", size = 341872 },
+ { url = "https://files.pythonhosted.org/packages/91/26/fd0ef9bf29dd906a84b59f0cd1281e65b0c3e08c6aa94b57f7d11f593518/yarl-1.18.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fbd6748e8ab9b41171bb95c6142faf068f5ef1511935a0aa07025438dd9a9bc1", size = 347497 },
+ { url = "https://files.pythonhosted.org/packages/d9/b5/14ac7a256d0511b2ac168d50d4b7d744aea1c1aa20c79f620d1059aab8b2/yarl-1.18.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:877d209b6aebeb5b16c42cbb377f5f94d9e556626b1bfff66d7b0d115be88d0a", size = 359981 },
+ { url = "https://files.pythonhosted.org/packages/ca/b3/d493221ad5cbd18bc07e642894030437e405e1413c4236dd5db6e46bcec9/yarl-1.18.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b464c4ab4bfcb41e3bfd3f1c26600d038376c2de3297760dfe064d2cb7ea8e10", size = 366229 },
+ { url = "https://files.pythonhosted.org/packages/04/56/6a3e2a5d9152c56c346df9b8fb8edd2c8888b1e03f96324d457e5cf06d34/yarl-1.18.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8d39d351e7faf01483cc7ff7c0213c412e38e5a340238826be7e0e4da450fdc8", size = 360383 },
+ { url = "https://files.pythonhosted.org/packages/fd/b7/4b3c7c7913a278d445cc6284e59b2e62fa25e72758f888b7a7a39eb8423f/yarl-1.18.3-cp313-cp313-win32.whl", hash = "sha256:61ee62ead9b68b9123ec24bc866cbef297dd266175d53296e2db5e7f797f902d", size = 310152 },
+ { url = "https://files.pythonhosted.org/packages/f5/d5/688db678e987c3e0fb17867970700b92603cadf36c56e5fb08f23e822a0c/yarl-1.18.3-cp313-cp313-win_amd64.whl", hash = "sha256:578e281c393af575879990861823ef19d66e2b1d0098414855dd367e234f5b3c", size = 315723 },
+ { url = "https://files.pythonhosted.org/packages/f5/4b/a06e0ec3d155924f77835ed2d167ebd3b211a7b0853da1cf8d8414d784ef/yarl-1.18.3-py3-none-any.whl", hash = "sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b", size = 45109 },
+]
+
+[[package]]
+name = "zstandard"
+version = "0.23.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "cffi", marker = "platform_python_implementation == 'PyPy'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ed/f6/2ac0287b442160a89d726b17a9184a4c615bb5237db763791a7fd16d9df1/zstandard-0.23.0.tar.gz", hash = "sha256:b2d8c62d08e7255f68f7a740bae85b3c9b8e5466baa9cbf7f57f1cde0ac6bc09", size = 681701 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7b/83/f23338c963bd9de687d47bf32efe9fd30164e722ba27fb59df33e6b1719b/zstandard-0.23.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b4567955a6bc1b20e9c31612e615af6b53733491aeaa19a6b3b37f3b65477094", size = 788713 },
+ { url = "https://files.pythonhosted.org/packages/5b/b3/1a028f6750fd9227ee0b937a278a434ab7f7fdc3066c3173f64366fe2466/zstandard-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e172f57cd78c20f13a3415cc8dfe24bf388614324d25539146594c16d78fcc8", size = 633459 },
+ { url = "https://files.pythonhosted.org/packages/26/af/36d89aae0c1f95a0a98e50711bc5d92c144939efc1f81a2fcd3e78d7f4c1/zstandard-0.23.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0e166f698c5a3e914947388c162be2583e0c638a4703fc6a543e23a88dea3c1", size = 4945707 },
+ { url = "https://files.pythonhosted.org/packages/cd/2e/2051f5c772f4dfc0aae3741d5fc72c3dcfe3aaeb461cc231668a4db1ce14/zstandard-0.23.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12a289832e520c6bd4dcaad68e944b86da3bad0d339ef7989fb7e88f92e96072", size = 5306545 },
+ { url = "https://files.pythonhosted.org/packages/0a/9e/a11c97b087f89cab030fa71206963090d2fecd8eb83e67bb8f3ffb84c024/zstandard-0.23.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d50d31bfedd53a928fed6707b15a8dbeef011bb6366297cc435accc888b27c20", size = 5337533 },
+ { url = "https://files.pythonhosted.org/packages/fc/79/edeb217c57fe1bf16d890aa91a1c2c96b28c07b46afed54a5dcf310c3f6f/zstandard-0.23.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72c68dda124a1a138340fb62fa21b9bf4848437d9ca60bd35db36f2d3345f373", size = 5436510 },
+ { url = "https://files.pythonhosted.org/packages/81/4f/c21383d97cb7a422ddf1ae824b53ce4b51063d0eeb2afa757eb40804a8ef/zstandard-0.23.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53dd9d5e3d29f95acd5de6802e909ada8d8d8cfa37a3ac64836f3bc4bc5512db", size = 4859973 },
+ { url = "https://files.pythonhosted.org/packages/ab/15/08d22e87753304405ccac8be2493a495f529edd81d39a0870621462276ef/zstandard-0.23.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6a41c120c3dbc0d81a8e8adc73312d668cd34acd7725f036992b1b72d22c1772", size = 4936968 },
+ { url = "https://files.pythonhosted.org/packages/eb/fa/f3670a597949fe7dcf38119a39f7da49a8a84a6f0b1a2e46b2f71a0ab83f/zstandard-0.23.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:40b33d93c6eddf02d2c19f5773196068d875c41ca25730e8288e9b672897c105", size = 5467179 },
+ { url = "https://files.pythonhosted.org/packages/4e/a9/dad2ab22020211e380adc477a1dbf9f109b1f8d94c614944843e20dc2a99/zstandard-0.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9206649ec587e6b02bd124fb7799b86cddec350f6f6c14bc82a2b70183e708ba", size = 4848577 },
+ { url = "https://files.pythonhosted.org/packages/08/03/dd28b4484b0770f1e23478413e01bee476ae8227bbc81561f9c329e12564/zstandard-0.23.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76e79bc28a65f467e0409098fa2c4376931fd3207fbeb6b956c7c476d53746dd", size = 4693899 },
+ { url = "https://files.pythonhosted.org/packages/2b/64/3da7497eb635d025841e958bcd66a86117ae320c3b14b0ae86e9e8627518/zstandard-0.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:66b689c107857eceabf2cf3d3fc699c3c0fe8ccd18df2219d978c0283e4c508a", size = 5199964 },
+ { url = "https://files.pythonhosted.org/packages/43/a4/d82decbab158a0e8a6ebb7fc98bc4d903266bce85b6e9aaedea1d288338c/zstandard-0.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9c236e635582742fee16603042553d276cca506e824fa2e6489db04039521e90", size = 5655398 },
+ { url = "https://files.pythonhosted.org/packages/f2/61/ac78a1263bc83a5cf29e7458b77a568eda5a8f81980691bbc6eb6a0d45cc/zstandard-0.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a8fffdbd9d1408006baaf02f1068d7dd1f016c6bcb7538682622c556e7b68e35", size = 5191313 },
+ { url = "https://files.pythonhosted.org/packages/e7/54/967c478314e16af5baf849b6ee9d6ea724ae5b100eb506011f045d3d4e16/zstandard-0.23.0-cp312-cp312-win32.whl", hash = "sha256:dc1d33abb8a0d754ea4763bad944fd965d3d95b5baef6b121c0c9013eaf1907d", size = 430877 },
+ { url = "https://files.pythonhosted.org/packages/75/37/872d74bd7739639c4553bf94c84af7d54d8211b626b352bc57f0fd8d1e3f/zstandard-0.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:64585e1dba664dc67c7cdabd56c1e5685233fbb1fc1966cfba2a340ec0dfff7b", size = 495595 },
+ { url = "https://files.pythonhosted.org/packages/80/f1/8386f3f7c10261fe85fbc2c012fdb3d4db793b921c9abcc995d8da1b7a80/zstandard-0.23.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:576856e8594e6649aee06ddbfc738fec6a834f7c85bf7cadd1c53d4a58186ef9", size = 788975 },
+ { url = "https://files.pythonhosted.org/packages/16/e8/cbf01077550b3e5dc86089035ff8f6fbbb312bc0983757c2d1117ebba242/zstandard-0.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38302b78a850ff82656beaddeb0bb989a0322a8bbb1bf1ab10c17506681d772a", size = 633448 },
+ { url = "https://files.pythonhosted.org/packages/06/27/4a1b4c267c29a464a161aeb2589aff212b4db653a1d96bffe3598f3f0d22/zstandard-0.23.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2240ddc86b74966c34554c49d00eaafa8200a18d3a5b6ffbf7da63b11d74ee2", size = 4945269 },
+ { url = "https://files.pythonhosted.org/packages/7c/64/d99261cc57afd9ae65b707e38045ed8269fbdae73544fd2e4a4d50d0ed83/zstandard-0.23.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ef230a8fd217a2015bc91b74f6b3b7d6522ba48be29ad4ea0ca3a3775bf7dd5", size = 5306228 },
+ { url = "https://files.pythonhosted.org/packages/7a/cf/27b74c6f22541f0263016a0fd6369b1b7818941de639215c84e4e94b2a1c/zstandard-0.23.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:774d45b1fac1461f48698a9d4b5fa19a69d47ece02fa469825b442263f04021f", size = 5336891 },
+ { url = "https://files.pythonhosted.org/packages/fa/18/89ac62eac46b69948bf35fcd90d37103f38722968e2981f752d69081ec4d/zstandard-0.23.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f77fa49079891a4aab203d0b1744acc85577ed16d767b52fc089d83faf8d8ed", size = 5436310 },
+ { url = "https://files.pythonhosted.org/packages/a8/a8/5ca5328ee568a873f5118d5b5f70d1f36c6387716efe2e369010289a5738/zstandard-0.23.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac184f87ff521f4840e6ea0b10c0ec90c6b1dcd0bad2f1e4a9a1b4fa177982ea", size = 4859912 },
+ { url = "https://files.pythonhosted.org/packages/ea/ca/3781059c95fd0868658b1cf0440edd832b942f84ae60685d0cfdb808bca1/zstandard-0.23.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c363b53e257246a954ebc7c488304b5592b9c53fbe74d03bc1c64dda153fb847", size = 4936946 },
+ { url = "https://files.pythonhosted.org/packages/ce/11/41a58986f809532742c2b832c53b74ba0e0a5dae7e8ab4642bf5876f35de/zstandard-0.23.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e7792606d606c8df5277c32ccb58f29b9b8603bf83b48639b7aedf6df4fe8171", size = 5466994 },
+ { url = "https://files.pythonhosted.org/packages/83/e3/97d84fe95edd38d7053af05159465d298c8b20cebe9ccb3d26783faa9094/zstandard-0.23.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a0817825b900fcd43ac5d05b8b3079937073d2b1ff9cf89427590718b70dd840", size = 4848681 },
+ { url = "https://files.pythonhosted.org/packages/6e/99/cb1e63e931de15c88af26085e3f2d9af9ce53ccafac73b6e48418fd5a6e6/zstandard-0.23.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9da6bc32faac9a293ddfdcb9108d4b20416219461e4ec64dfea8383cac186690", size = 4694239 },
+ { url = "https://files.pythonhosted.org/packages/ab/50/b1e703016eebbc6501fc92f34db7b1c68e54e567ef39e6e59cf5fb6f2ec0/zstandard-0.23.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fd7699e8fd9969f455ef2926221e0233f81a2542921471382e77a9e2f2b57f4b", size = 5200149 },
+ { url = "https://files.pythonhosted.org/packages/aa/e0/932388630aaba70197c78bdb10cce2c91fae01a7e553b76ce85471aec690/zstandard-0.23.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d477ed829077cd945b01fc3115edd132c47e6540ddcd96ca169facff28173057", size = 5655392 },
+ { url = "https://files.pythonhosted.org/packages/02/90/2633473864f67a15526324b007a9f96c96f56d5f32ef2a56cc12f9548723/zstandard-0.23.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa6ce8b52c5987b3e34d5674b0ab529a4602b632ebab0a93b07bfb4dfc8f8a33", size = 5191299 },
+ { url = "https://files.pythonhosted.org/packages/b0/4c/315ca5c32da7e2dc3455f3b2caee5c8c2246074a61aac6ec3378a97b7136/zstandard-0.23.0-cp313-cp313-win32.whl", hash = "sha256:a9b07268d0c3ca5c170a385a0ab9fb7fdd9f5fd866be004c4ea39e44edce47dd", size = 430862 },
+ { url = "https://files.pythonhosted.org/packages/a2/bf/c6aaba098e2d04781e8f4f7c0ba3c7aa73d00e4c436bcc0cf059a66691d1/zstandard-0.23.0-cp313-cp313-win_amd64.whl", hash = "sha256:f3513916e8c645d0610815c257cbfd3242adfd5c4cfa78be514e5a3ebb42a41b", size = 495578 },
+]
diff --git a/codegen-examples/pyproject.toml b/codegen-examples/pyproject.toml
index 80fb4aff8..11ec96b52 100644
--- a/codegen-examples/pyproject.toml
+++ b/codegen-examples/pyproject.toml
@@ -31,8 +31,6 @@ dev-dependencies = [
"deptry>=0.22.0",
]
-[tool.uv.workspace]
-members = ["examples/swebench_agent_run"]
[tool.pre-commit-uv]
requirements = ["strict-requirements"]
diff --git a/codegen-examples/uv.lock b/codegen-examples/uv.lock
index 138a09454..7c30ff898 100644
--- a/codegen-examples/uv.lock
+++ b/codegen-examples/uv.lock
@@ -1,5 +1,4 @@
version = 1
-revision = 1
requires-python = ">=3.12, <3.14"
resolution-markers = [
"python_full_version >= '3.12.4'",
@@ -488,6 +487,128 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/5e/06/5ec9ae8bedb4a590e939a9064c7fee805e620a5e578f1bbf10dfd35c86d0/codegen-0.51.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_34_x86_64.whl", hash = "sha256:d3bdb1d29b4f910a041245bbc1df4c535827fe95986993991e824d172bd7f009", size = 2232761 },
]
+
+[package.metadata]
+requires-dist = [
+ { name = "astor", specifier = ">=0.8.1,<1.0.0" },
+ { name = "attrs", marker = "extra == 'lsp'", specifier = ">=25.1.0" },
+ { name = "click", specifier = ">=8.1.7" },
+ { name = "codeowners", specifier = ">=0.6.0,<1.0.0" },
+ { name = "colorlog", specifier = ">=6.9.0" },
+ { name = "dataclasses-json", specifier = ">=0.6.4,<1.0.0" },
+ { name = "datamodel-code-generator", specifier = ">=0.26.5" },
+ { name = "datasets" },
+ { name = "dicttoxml", specifier = ">=1.7.16,<2.0.0" },
+ { name = "docker", specifier = ">=6.1.3" },
+ { name = "docstring-parser", specifier = ">=0.16,<1.0" },
+ { name = "fastapi", extras = ["standard"], specifier = ">=0.115.2,<1.0.0" },
+ { name = "gitpython", specifier = "==3.1.44" },
+ { name = "giturlparse" },
+ { name = "hatch-vcs", specifier = ">=0.4.0" },
+ { name = "hatchling", specifier = ">=1.25.0" },
+ { name = "httpx", specifier = ">=0.28.1" },
+ { name = "humanize", specifier = ">=4.10.0,<5.0.0" },
+ { name = "langchain", extras = ["openai"] },
+ { name = "langchain-anthropic", specifier = ">=0.3.7" },
+ { name = "langchain-core" },
+ { name = "langchain-openai" },
+ { name = "langgraph" },
+ { name = "langgraph-prebuilt" },
+ { name = "langsmith" },
+ { name = "lazy-object-proxy", specifier = ">=0.0.0" },
+ { name = "lox", specifier = ">=0.12.0" },
+ { name = "lsprotocol", marker = "extra == 'lsp'", specifier = "==2024.0.0b1" },
+ { name = "mcp", extras = ["cli"] },
+ { name = "mini-racer", specifier = ">=0.12.4" },
+ { name = "modal", specifier = ">=0.73.45" },
+ { name = "neo4j" },
+ { name = "networkx", specifier = ">=3.4.1" },
+ { name = "numpy", specifier = ">=2.2.2" },
+ { name = "openai", specifier = "==1.65.2" },
+ { name = "packaging", specifier = ">=24.2" },
+ { name = "pip", specifier = ">=24.3.1" },
+ { name = "plotly", specifier = ">=5.24.0,<7.0.0" },
+ { name = "psutil", specifier = ">=5.8.0" },
+ { name = "pydantic", specifier = ">=2.9.2,<3.0.0" },
+ { name = "pydantic-core", specifier = ">=2.23.4" },
+ { name = "pydantic-settings", specifier = ">=2.0.0" },
+ { name = "pygit2", specifier = ">=1.16.0" },
+ { name = "pygithub", specifier = "==2.6.1" },
+ { name = "pygls", marker = "extra == 'lsp'", specifier = ">=2.0.0a2" },
+ { name = "pyinstrument", specifier = ">=5.0.0" },
+ { name = "pyjson5", specifier = "==1.6.8" },
+ { name = "pyright", specifier = ">=1.1.372,<2.0.0" },
+ { name = "pytest-snapshot", specifier = ">=0.9.0" },
+ { name = "python-dotenv", specifier = ">=1.0.1" },
+ { name = "python-levenshtein", specifier = ">=0.25.1,<1.0.0" },
+ { name = "python-semantic-release" },
+ { name = "requests", specifier = ">=2.32.3" },
+ { name = "rich", specifier = ">=13.7.1,<14.0.0" },
+ { name = "rich-click", specifier = ">=1.8.5" },
+ { name = "rustworkx", specifier = ">=0.15.1" },
+ { name = "sentry-sdk", specifier = "==2.22.0" },
+ { name = "slack-sdk" },
+ { name = "starlette", specifier = ">=0.16.0,<1.0.0" },
+ { name = "tabulate", specifier = ">=0.9.0,<1.0.0" },
+ { name = "termcolor", specifier = ">=2.4.0" },
+ { name = "tiktoken", specifier = ">=0.5.1,<1.0.0" },
+ { name = "tomlkit", specifier = ">=0.13.2" },
+ { name = "tqdm", specifier = ">=4.67.1" },
+ { name = "tree-sitter", specifier = ">=0.23.1" },
+ { name = "tree-sitter-javascript", specifier = ">=0.23.1" },
+ { name = "tree-sitter-python", specifier = ">=0.23.4" },
+ { name = "tree-sitter-typescript", specifier = ">=0.23.2" },
+ { name = "types-networkx", marker = "extra == 'types'", specifier = ">=3.2.1.20240918" },
+ { name = "types-requests", marker = "extra == 'types'", specifier = ">=2.32.0.20241016" },
+ { name = "types-tabulate", marker = "extra == 'types'", specifier = ">=0.9.0.20240106" },
+ { name = "types-toml", marker = "extra == 'types'", specifier = ">=0.10.8.20240310" },
+ { name = "typing-extensions", specifier = ">=4.12.2" },
+ { name = "unidiff", specifier = ">=0.7.5" },
+ { name = "urllib3", specifier = ">=2.0.0" },
+ { name = "uvicorn", extras = ["standard"], specifier = ">=0.30.0" },
+ { name = "watchfiles", specifier = ">=1.0.0,<1.1.0" },
+ { name = "wrapt", specifier = ">=1.16.0,<2.0.0" },
+ { name = "xmltodict", specifier = ">=0.13.0,<1.0.0" },
+]
+
+[package.metadata.requires-dev]
+dev = [
+ { name = "austin-dist", specifier = ">=3.7.0" },
+ { name = "austin-python", specifier = ">=1.7.1" },
+ { name = "autoflake", specifier = ">=2.3.1" },
+ { name = "black", specifier = ">=24.8.0" },
+ { name = "braintrust", specifier = ">=0.0.160" },
+ { name = "cibuildwheel", extras = ["uv"], specifier = ">=2.22.0" },
+ { name = "coverage", specifier = ">=7.6.1,<8.0.0" },
+ { name = "cython", specifier = ">=3.0.11" },
+ { name = "deptry", specifier = ">=0.22.0" },
+ { name = "emoji", specifier = ">=2.14.0" },
+ { name = "filelock", specifier = ">=3.15.4,<4.0.0" },
+ { name = "httpx", specifier = ">=0.28.1,<0.28.2" },
+ { name = "inflection", specifier = ">=0.5.1,<1.0.0" },
+ { name = "isort", specifier = ">=5.13.2" },
+ { name = "jsbeautifier", specifier = ">=1.15.1,<2.0.0" },
+ { name = "jupyterlab", specifier = ">=4.3.5" },
+ { name = "loguru", specifier = ">=0.7.3" },
+ { name = "modal", specifier = ">=0.73.25" },
+ { name = "mypy", extras = ["mypyc", "faster-cache"], specifier = ">=1.13.0" },
+ { name = "pre-commit", specifier = ">=4.0.1" },
+ { name = "pre-commit-uv", specifier = ">=4.1.4" },
+ { name = "pytest", specifier = ">=8.3.3" },
+ { name = "pytest-asyncio", specifier = ">=0.21.1,<1.0.0" },
+ { name = "pytest-benchmark", extras = ["histogram"], specifier = ">=5.1.0" },
+ { name = "pytest-cov", specifier = ">=6.0.0,<6.0.1" },
+ { name = "pytest-lsp", specifier = ">=1.0.0b1" },
+ { name = "pytest-mock", specifier = ">=3.14.0,<4.0.0" },
+ { name = "pytest-timeout", specifier = ">=2.3.1" },
+ { name = "pytest-xdist", specifier = ">=3.6.1,<4.0.0" },
+ { name = "ruff", specifier = ">=0.6.8" },
+ { name = "ruff-lsp", specifier = ">=0.0.55,<1.0.0" },
+ { name = "sybil", extras = ["pytest"], specifier = ">=9.0.0" },
+ { name = "typer", specifier = ">=0.12.5" },
+ { name = "uv", specifier = ">=0.4.25" },
+]
+
[[package]]
name = "codegen-examples"
version = "0.0.0"
@@ -2085,6 +2206,31 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/da/d9/f7f9379981e39b8c2511c9e0326d212accacb82f12fbfdc1aa2ce2a7b2b6/multiprocess-0.70.16-py39-none-any.whl", hash = "sha256:a0bafd3ae1b732eac64be2e72038231c1ba97724b60b09400d68f229fcc2fbf3", size = 133351 },
]
+[[package]]
+name = "mypy"
+version = "1.15.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "mypy-extensions" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ce/43/d5e49a86afa64bd3839ea0d5b9c7103487007d728e1293f52525d6d5486a/mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43", size = 3239717 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/98/3a/03c74331c5eb8bd025734e04c9840532226775c47a2c39b56a0c8d4f128d/mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd", size = 10793981 },
+ { url = "https://files.pythonhosted.org/packages/f0/1a/41759b18f2cfd568848a37c89030aeb03534411eef981df621d8fad08a1d/mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f", size = 9749175 },
+ { url = "https://files.pythonhosted.org/packages/12/7e/873481abf1ef112c582db832740f4c11b2bfa510e829d6da29b0ab8c3f9c/mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464", size = 11455675 },
+ { url = "https://files.pythonhosted.org/packages/b3/d0/92ae4cde706923a2d3f2d6c39629134063ff64b9dedca9c1388363da072d/mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee", size = 12410020 },
+ { url = "https://files.pythonhosted.org/packages/46/8b/df49974b337cce35f828ba6fda228152d6db45fed4c86ba56ffe442434fd/mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e", size = 12498582 },
+ { url = "https://files.pythonhosted.org/packages/13/50/da5203fcf6c53044a0b699939f31075c45ae8a4cadf538a9069b165c1050/mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22", size = 9366614 },
+ { url = "https://files.pythonhosted.org/packages/6a/9b/fd2e05d6ffff24d912f150b87db9e364fa8282045c875654ce7e32fffa66/mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445", size = 10788592 },
+ { url = "https://files.pythonhosted.org/packages/74/37/b246d711c28a03ead1fd906bbc7106659aed7c089d55fe40dd58db812628/mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d", size = 9753611 },
+ { url = "https://files.pythonhosted.org/packages/a6/ac/395808a92e10cfdac8003c3de9a2ab6dc7cde6c0d2a4df3df1b815ffd067/mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5", size = 11438443 },
+ { url = "https://files.pythonhosted.org/packages/d2/8b/801aa06445d2de3895f59e476f38f3f8d610ef5d6908245f07d002676cbf/mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036", size = 12402541 },
+ { url = "https://files.pythonhosted.org/packages/c7/67/5a4268782eb77344cc613a4cf23540928e41f018a9a1ec4c6882baf20ab8/mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357", size = 12494348 },
+ { url = "https://files.pythonhosted.org/packages/83/3e/57bb447f7bbbfaabf1712d96f9df142624a386d98fb026a761532526057e/mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf", size = 9373648 },
+ { url = "https://files.pythonhosted.org/packages/09/4e/a7d65c7322c510de2c409ff3828b03354a7c43f5a8ed458a7a131b41c7b9/mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e", size = 2221777 },
+]
+
[[package]]
name = "mypy-extensions"
version = "1.0.0"
@@ -2600,6 +2746,37 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/50/1b/6921afe68c74868b4c9fa424dad3be35b095e16687989ebbb50ce4fceb7c/psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553", size = 244885 },
]
+[[package]]
+name = "psycopg2-binary"
+version = "2.9.10"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/cb/0e/bdc8274dc0585090b4e3432267d7be4dfbfd8971c0fa59167c711105a6bf/psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2", size = 385764 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/49/7d/465cc9795cf76f6d329efdafca74693714556ea3891813701ac1fee87545/psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0", size = 3044771 },
+ { url = "https://files.pythonhosted.org/packages/8b/31/6d225b7b641a1a2148e3ed65e1aa74fc86ba3fee850545e27be9e1de893d/psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a", size = 3275336 },
+ { url = "https://files.pythonhosted.org/packages/30/b7/a68c2b4bff1cbb1728e3ec864b2d92327c77ad52edcd27922535a8366f68/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539", size = 2851637 },
+ { url = "https://files.pythonhosted.org/packages/0b/b1/cfedc0e0e6f9ad61f8657fd173b2f831ce261c02a08c0b09c652b127d813/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526", size = 3082097 },
+ { url = "https://files.pythonhosted.org/packages/18/ed/0a8e4153c9b769f59c02fb5e7914f20f0b2483a19dae7bf2db54b743d0d0/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1", size = 3264776 },
+ { url = "https://files.pythonhosted.org/packages/10/db/d09da68c6a0cdab41566b74e0a6068a425f077169bed0946559b7348ebe9/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e", size = 3020968 },
+ { url = "https://files.pythonhosted.org/packages/94/28/4d6f8c255f0dfffb410db2b3f9ac5218d959a66c715c34cac31081e19b95/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f", size = 2872334 },
+ { url = "https://files.pythonhosted.org/packages/05/f7/20d7bf796593c4fea95e12119d6cc384ff1f6141a24fbb7df5a668d29d29/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00", size = 2822722 },
+ { url = "https://files.pythonhosted.org/packages/4d/e4/0c407ae919ef626dbdb32835a03b6737013c3cc7240169843965cada2bdf/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5", size = 2920132 },
+ { url = "https://files.pythonhosted.org/packages/2d/70/aa69c9f69cf09a01da224909ff6ce8b68faeef476f00f7ec377e8f03be70/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47", size = 2959312 },
+ { url = "https://files.pythonhosted.org/packages/d3/bd/213e59854fafe87ba47814bf413ace0dcee33a89c8c8c814faca6bc7cf3c/psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64", size = 1025191 },
+ { url = "https://files.pythonhosted.org/packages/92/29/06261ea000e2dc1e22907dbbc483a1093665509ea586b29b8986a0e56733/psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0", size = 1164031 },
+ { url = "https://files.pythonhosted.org/packages/3e/30/d41d3ba765609c0763505d565c4d12d8f3c79793f0d0f044ff5a28bf395b/psycopg2_binary-2.9.10-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d", size = 3044699 },
+ { url = "https://files.pythonhosted.org/packages/35/44/257ddadec7ef04536ba71af6bc6a75ec05c5343004a7ec93006bee66c0bc/psycopg2_binary-2.9.10-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb", size = 3275245 },
+ { url = "https://files.pythonhosted.org/packages/1b/11/48ea1cd11de67f9efd7262085588790a95d9dfcd9b8a687d46caf7305c1a/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7", size = 2851631 },
+ { url = "https://files.pythonhosted.org/packages/62/e0/62ce5ee650e6c86719d621a761fe4bc846ab9eff8c1f12b1ed5741bf1c9b/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d", size = 3082140 },
+ { url = "https://files.pythonhosted.org/packages/27/ce/63f946c098611f7be234c0dd7cb1ad68b0b5744d34f68062bb3c5aa510c8/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73", size = 3264762 },
+ { url = "https://files.pythonhosted.org/packages/43/25/c603cd81402e69edf7daa59b1602bd41eb9859e2824b8c0855d748366ac9/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673", size = 3020967 },
+ { url = "https://files.pythonhosted.org/packages/5f/d6/8708d8c6fca531057fa170cdde8df870e8b6a9b136e82b361c65e42b841e/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f", size = 2872326 },
+ { url = "https://files.pythonhosted.org/packages/ce/ac/5b1ea50fc08a9df82de7e1771537557f07c2632231bbab652c7e22597908/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909", size = 2822712 },
+ { url = "https://files.pythonhosted.org/packages/c4/fc/504d4503b2abc4570fac3ca56eb8fed5e437bf9c9ef13f36b6621db8ef00/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1", size = 2920155 },
+ { url = "https://files.pythonhosted.org/packages/b2/d1/323581e9273ad2c0dbd1902f3fb50c441da86e894b6e25a73c3fda32c57e/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567", size = 2959356 },
+ { url = "https://files.pythonhosted.org/packages/08/50/d13ea0a054189ae1bc21af1d85b6f8bb9bbc5572991055d70ad9006fe2d6/psycopg2_binary-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:27422aa5f11fbcd9b18da48373eb67081243662f9b46e6fd07c3eb46e4535142", size = 2569224 },
+]
+
[[package]]
name = "ptyprocess"
version = "0.7.0"
@@ -3345,6 +3522,31 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/68/15/6d22d07e063ce5e9bfbd96db9ec2fbb4693591b4503e3a76996639474d02/rpds_py-0.23.1-cp313-cp313t-win_amd64.whl", hash = "sha256:d6f6512a90bd5cd9030a6237f5346f046c6f0e40af98657568fa45695d4de59d", size = 235415 },
]
+[[package]]
+name = "ruff"
+version = "0.9.9"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/6f/c3/418441a8170e8d53d05c0b9dad69760dbc7b8a12c10dbe6db1e1205d2377/ruff-0.9.9.tar.gz", hash = "sha256:0062ed13f22173e85f8f7056f9a24016e692efeea8704d1a5e8011b8aa850933", size = 3717448 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/bc/c3/2c4afa9ba467555d074b146d9aed0633a56ccdb900839fb008295d037b89/ruff-0.9.9-py3-none-linux_armv6l.whl", hash = "sha256:628abb5ea10345e53dff55b167595a159d3e174d6720bf19761f5e467e68d367", size = 10027252 },
+ { url = "https://files.pythonhosted.org/packages/33/d1/439e58487cf9eac26378332e25e7d5ade4b800ce1eec7dc2cfc9b0d7ca96/ruff-0.9.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b6cd1428e834b35d7493354723543b28cc11dc14d1ce19b685f6e68e07c05ec7", size = 10840721 },
+ { url = "https://files.pythonhosted.org/packages/50/44/fead822c38281ba0122f1b76b460488a175a9bd48b130650a6fb6dbcbcf9/ruff-0.9.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5ee162652869120ad260670706f3cd36cd3f32b0c651f02b6da142652c54941d", size = 10161439 },
+ { url = "https://files.pythonhosted.org/packages/11/ae/d404a2ab8e61ddf6342e09cc6b7f7846cce6b243e45c2007dbe0ca928a5d/ruff-0.9.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3aa0f6b75082c9be1ec5a1db78c6d4b02e2375c3068438241dc19c7c306cc61a", size = 10336264 },
+ { url = "https://files.pythonhosted.org/packages/6a/4e/7c268aa7d84cd709fb6f046b8972313142cffb40dfff1d2515c5e6288d54/ruff-0.9.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:584cc66e89fb5f80f84b05133dd677a17cdd86901d6479712c96597a3f28e7fe", size = 9908774 },
+ { url = "https://files.pythonhosted.org/packages/cc/26/c618a878367ef1b76270fd027ca93692657d3f6122b84ba48911ef5f2edc/ruff-0.9.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abf3369325761a35aba75cd5c55ba1b5eb17d772f12ab168fbfac54be85cf18c", size = 11428127 },
+ { url = "https://files.pythonhosted.org/packages/d7/9a/c5588a93d9bfed29f565baf193fe802fa676a0c837938137ea6cf0576d8c/ruff-0.9.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:3403a53a32a90ce929aa2f758542aca9234befa133e29f4933dcef28a24317be", size = 12133187 },
+ { url = "https://files.pythonhosted.org/packages/3e/ff/e7980a7704a60905ed7e156a8d73f604c846d9bd87deda9cabfa6cba073a/ruff-0.9.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:18454e7fa4e4d72cffe28a37cf6a73cb2594f81ec9f4eca31a0aaa9ccdfb1590", size = 11602937 },
+ { url = "https://files.pythonhosted.org/packages/24/78/3690444ad9e3cab5c11abe56554c35f005b51d1d118b429765249095269f/ruff-0.9.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fadfe2c88724c9617339f62319ed40dcdadadf2888d5afb88bf3adee7b35bfb", size = 13771698 },
+ { url = "https://files.pythonhosted.org/packages/6e/bf/e477c2faf86abe3988e0b5fd22a7f3520e820b2ee335131aca2e16120038/ruff-0.9.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6df104d08c442a1aabcfd254279b8cc1e2cbf41a605aa3e26610ba1ec4acf0b0", size = 11249026 },
+ { url = "https://files.pythonhosted.org/packages/f7/82/cdaffd59e5a8cb5b14c408c73d7a555a577cf6645faaf83e52fe99521715/ruff-0.9.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d7c62939daf5b2a15af48abbd23bea1efdd38c312d6e7c4cedf5a24e03207e17", size = 10220432 },
+ { url = "https://files.pythonhosted.org/packages/fe/a4/2507d0026225efa5d4412b6e294dfe54725a78652a5c7e29e6bd0fc492f3/ruff-0.9.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:9494ba82a37a4b81b6a798076e4a3251c13243fc37967e998efe4cce58c8a8d1", size = 9874602 },
+ { url = "https://files.pythonhosted.org/packages/d5/be/f3aab1813846b476c4bcffe052d232244979c3cd99d751c17afb530ca8e4/ruff-0.9.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:4efd7a96ed6d36ef011ae798bf794c5501a514be369296c672dab7921087fa57", size = 10851212 },
+ { url = "https://files.pythonhosted.org/packages/8b/45/8e5fd559bea0d2f57c4e12bf197a2fade2fac465aa518284f157dfbca92b/ruff-0.9.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ab90a7944c5a1296f3ecb08d1cbf8c2da34c7e68114b1271a431a3ad30cb660e", size = 11327490 },
+ { url = "https://files.pythonhosted.org/packages/42/55/e6c90f13880aeef327746052907e7e930681f26a164fe130ddac28b08269/ruff-0.9.9-py3-none-win32.whl", hash = "sha256:6b4c376d929c25ecd6d87e182a230fa4377b8e5125a4ff52d506ee8c087153c1", size = 10227912 },
+ { url = "https://files.pythonhosted.org/packages/35/b2/da925693cb82a1208aa34966c0f36cb222baca94e729dd22a587bc22d0f3/ruff-0.9.9-py3-none-win_amd64.whl", hash = "sha256:837982ea24091d4c1700ddb2f63b7070e5baec508e43b01de013dc7eff974ff1", size = 11355632 },
+ { url = "https://files.pythonhosted.org/packages/31/d8/de873d1c1b020d668d8ec9855d390764cb90cf8f6486c0983da52be8b7b7/ruff-0.9.9-py3-none-win_arm64.whl", hash = "sha256:3ac78f127517209fe6d96ab00f3ba97cafe38718b23b1db3e96d8b2d39e37ddf", size = 10435860 },
+]
+
[[package]]
name = "rustworkx"
version = "0.16.0"
@@ -3547,13 +3749,39 @@ wheels = [
[[package]]
name = "swebench-agent-run"
version = "0.1.0"
-source = { virtual = "examples/swebench_agent_run" }
+source = { editable = "examples/swebench_agent_run" }
dependencies = [
+ { name = "click" },
+ { name = "codegen" },
{ name = "modal" },
+ { name = "tqdm" },
+]
+
+[package.optional-dependencies]
+all = [
+ { name = "mypy" },
+ { name = "psycopg2-binary" },
+ { name = "ruff" },
+]
+dev = [
+ { name = "mypy" },
+ { name = "ruff" },
+]
+metrics = [
+ { name = "psycopg2-binary" },
]
[package.metadata]
-requires-dist = [{ name = "modal", specifier = ">=0.73.25" }]
+requires-dist = [
+ { name = "click", specifier = ">=8.1.0" },
+ { name = "codegen", directory = "../" },
+ { name = "modal", specifier = ">=0.73.25" },
+ { name = "mypy", marker = "extra == 'dev'" },
+ { name = "psycopg2-binary", marker = "extra == 'metrics'" },
+ { name = "ruff", marker = "extra == 'dev'" },
+ { name = "swebench-agent-run", extras = ["metrics", "dev"], marker = "extra == 'all'" },
+ { name = "tqdm", specifier = ">=4.66.0" },
+]
[[package]]
name = "synchronicity"
diff --git a/codegen-on-oss/.dockerignore b/codegen-on-oss/.dockerignore
new file mode 100644
index 000000000..7b435ab1e
--- /dev/null
+++ b/codegen-on-oss/.dockerignore
@@ -0,0 +1,5 @@
+.git/
+repositories/
+.venv/
+.vscode/
+output/
diff --git a/codegen-on-oss/.gitignore b/codegen-on-oss/.gitignore
new file mode 100644
index 000000000..780eabf11
--- /dev/null
+++ b/codegen-on-oss/.gitignore
@@ -0,0 +1,140 @@
+docs/source
+
+# From https://raw.githubusercontent.com/github/gitignore/main/Python.gitignore
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# Vscode config files
+.vscode/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
diff --git a/codegen-on-oss/Dockerfile b/codegen-on-oss/Dockerfile
new file mode 100644
index 000000000..458758a84
--- /dev/null
+++ b/codegen-on-oss/Dockerfile
@@ -0,0 +1,34 @@
+# Install uv
+FROM python:3.12-slim AS installer
+COPY --from=ghcr.io/astral-sh/uv:latest /uv /bin/uv
+
+# Change the working directory to the `app` directory
+WORKDIR /app
+
+# Copy the lockfile and `pyproject.toml` into the image
+COPY uv.lock /app/uv.lock
+COPY pyproject.toml /app/pyproject.toml
+
+# Install dependencies
+RUN apt-get update && apt-get install -y git \
+ && uv sync --frozen --no-install-project \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+
+FROM python:3.12-slim
+
+ENV PATH="/venv/bin:/app/scripts:$PATH"
+# Copy the project into the image
+COPY --from=installer /app/.venv/ /venv
+
+RUN apt-get update && apt-get install -y postgresql-client \
+ && rm -rf /var/lib/apt/lists/* \
+ && apt-get clean
+
+
+WORKDIR /app
+
+COPY . .
+
+CMD ["python", "modal_run.py"]
diff --git a/codegen-on-oss/Makefile b/codegen-on-oss/Makefile
new file mode 100644
index 000000000..dba86014c
--- /dev/null
+++ b/codegen-on-oss/Makefile
@@ -0,0 +1,38 @@
+.PHONY: install
+install: ## Install the virtual environment and install the pre-commit hooks
+ @echo "🚀 Creating virtual environment using uv"
+ @uv sync
+ @uv run pre-commit install
+
+.PHONY: check
+check: ## Run code quality tools.
+ @echo "🚀 Checking lock file consistency with 'pyproject.toml'"
+ @uv lock --locked
+ @echo "🚀 Linting code: Running pre-commit"
+ @uv run pre-commit run -a
+ @echo "🚀 Static type checking: Running mypy"
+ @uv run mypy
+ @echo "🚀 Checking for obsolete dependencies: Running deptry"
+ @uv run deptry .
+
+.PHONY: test
+test: ## Test the code with pytest
+ @echo "🚀 Testing code: Running pytest"
+ @uv run python -m pytest --cov --cov-config=pyproject.toml --cov-report=xml
+
+.PHONY: build
+build: clean-build ## Build wheel file
+ @echo "🚀 Creating wheel file"
+ @uvx --from build pyproject-build --installer uv
+
+.PHONY: clean-build
+clean-build: ## Clean build artifacts
+ @echo "🚀 Removing build artifacts"
+ @uv run python -c "import shutil; import os; shutil.rmtree('dist') if os.path.exists('dist') else None"
+
+.PHONY: help
+help:
+ @uv run python -c "import re; \
+ [[print(f'\033[36m{m[0]:<20}\033[0m {m[1]}') for m in re.findall(r'^([a-zA-Z_-]+):.*?## (.*)$$', open(makefile).read(), re.M)] for makefile in ('$(MAKEFILE_LIST)').strip().split()]"
+
+.DEFAULT_GOAL := help
diff --git a/codegen-on-oss/README.md b/codegen-on-oss/README.md
new file mode 100644
index 000000000..a7700eb77
--- /dev/null
+++ b/codegen-on-oss/README.md
@@ -0,0 +1,337 @@
+# Overview
+
+The **Codegen on OSS** package provides a modular pipeline that:
+
+- **Collects repository URLs** from different sources (e.g., CSV files or GitHub searches).
+- **Parses repositories** using the codegen tool.
+- **Profiles performance** and logs metrics for each parsing run.
+- **Logs errors** to help pinpoint parsing failures or performance bottlenecks.
+
+______________________________________________________________________
+
+## Package Structure
+
+The package is composed of several modules:
+
+- `sources`
+
+ - Defines the Repository source classes and settings. Settings are all configurable via environment variables
+
+ - Github Source
+
+ ```python
+ class GithubSettings(SourceSettings):
+ language: Literal["python", "typescript"] = "python"
+ heuristic: Literal[
+ "stars",
+ "forks",
+ "updated",
+ # "watchers",
+ # "contributors",
+ # "commit_activity",
+ # "issues",
+ # "dependency",
+ ] = "stars"
+ github_token: str | None = None
+ ```
+
+ - The three options available now are the three supported by the Github API.
+      - Future Work: Additional options will require different strategies.
+
+ - CSV Source
+
+ - Simply reads repo URLs from CSV
+
+- `cache`
+
+  - Currently only specifies the cache directory. It is used for caching git repositories pulled by the pipeline. `--force-pull` can be used to re-pull from the remote.
+
+- `cli`
+
+ - Built with Click, the CLI provides two main commands:
+ - `run-one`: Parses a single repository specified by URL.
+ - `run`: Iterates over repositories obtained from a selected source and parses each one.
+
+- **`metrics`**
+
+ - Provides profiling tools to measure performance during the parse:
+ - `MetricsProfiler`: A context manager that creates a profiling session.
+ - `MetricsProfile`: Represents a "span" or a "run" of a specific repository. Records step-by-step metrics (clock duration, CPU time, memory usage) and writes them to a CSV file specified by `--output-path`
+
+- **`parser`**
+
+ Contains the `CodegenParser` class that orchestrates the parsing process:
+
+ - Clones the repository (or forces a pull if specified).
+ - Initializes a `Codebase` (from the codegen tool).
+ - Runs post-initialization validation.
+ - Integrates with the `MetricsProfiler` to log measurements at key steps.
+
+______________________________________________________________________
+
+## Getting Started
+
+1. **Configure the Repository Source**
+
+ Decide whether you want to read from a CSV file or query GitHub:
+
+ - For CSV, ensure that your CSV file (default: `input.csv`) exists and contains repository URLs in its first column \[`repo_url`\] and commit hash \[`commit_hash`\] (or empty) in the second column.
+ - For GitHub, configure your desired settings (e.g., `language`, `heuristic`, and optionally a GitHub token) via environment variables (`GITHUB_` prefix)
+
+1. **Run the Parser**
+
+ Use the CLI to start parsing:
+
+ - To parse one repository:
+
+ ```bash
+ uv run cgparse run-one --help
+ ```
+
+ - To parse multiple repositories from a source:
+
+ ```bash
+ uv run cgparse run --help
+ ```
+
+1. **Review Metrics and Logs**
+
+ After parsing, check the CSV (default: `metrics.csv` ) to review performance measurements per repository. Error logs are written to the specified error output file (default: `errors.log`)
+
+______________________________________________________________________
+
+## Running on Modal
+
+```shell
+$ uv run modal run modal_run.py
+```
+
+Codegen runs this parser on Modal using the CSV source file `input.csv` tracked in this repository.
+
+### Modal Configuration
+
+- **Compute Resources**: Allocates 4 CPUs and 16GB of memory.
+- **Secrets & Volumes**: Uses secrets (for bucket credentials) and mounts a volume for caching repositories.
+- **Image Setup**: Builds on a Debian slim image with Python 3.12, installs required packages (`uv` and `git` )
+- **Environment Configuration**: Environment variables (e.g., GitHub settings) are injected at runtime.
+
+The function `parse_repo_on_modal` performs the following steps:
+
+1. **Environment Setup**: Updates environment variables and configures logging using Loguru.
+1. **Source Initialization**: Creates a repository source based on the provided type (e.g., GitHub).
+1. **Metrics Profiling**: Instantiates `MetricsProfiler` to capture and log performance data.
+1. **Repository Parsing**: Iterates over repository URLs and parses each using the `CodegenParser`.
+1. **Error Handling**: Logs any exceptions encountered during parsing.
+1. **Result Upload**: Uses the `BucketStore` class to upload the configuration, logs, and metrics to an S3 bucket.
+
+### Bucket Storage
+
+**Bucket (public):** [codegen-oss-parse](https://s3.amazonaws.com/codegen-oss-parse/)
+
+The results of each run are saved under the version of `codegen` lib that the run installed and the source type it was run with. Within this prefix:
+
+- Source Settings
+ - `https://s3.amazonaws.com/codegen-oss-parse/{version}/{source}/config.json`
+- Metrics
+ - `https://s3.amazonaws.com/codegen-oss-parse/{version}/{source}/metrics.csv`
+- Logs
+ - `https://s3.amazonaws.com/codegen-oss-parse/{version}/{source}/output.logs`
+
+______________________________________________________________________
+
+### Running it yourself
+
+You can also run `modal_run.py` yourself. It is designed to be run via Modal for cloud-based parsing. It offers additional configuration methods:
+
+```shell
+$ uv run modal run modal_run.py
+```
+
+- **CSV and Repository Volumes:**
+ The script defines two Modal volumes:
+
+ - `codegen-oss-input-volume`: For uploading and reloading CSV inputs.
+ - `codegen-oss-repo-volume`: For caching repository data during parsing.
+ The repository and input volume names are configurable via environment variables (`CODEGEN_MODAL_REPO_VOLUME` and `CODEGEN_MODAL_INPUT_VOLUME`).
+
+- **Secrets Handling:**
+ The script loads various credentials via Modal secrets. It first checks for a pre-configured Modal secret (`codegen-oss-bucket-credentials` configurable via environment variable `CODEGEN_MODAL_SECRET_NAME`) and falls back to dynamically created Modal secret from local `.env` or environment variables if not found.
+
+- **Entrypoint Parameters:**
+ The main function supports multiple source types:
+
+ - **csv:** Uploads a CSV file (`--csv-file input.csv`) for batch processing.
+ - **single:** Parses a single repository specified by its URL (`--single-url "https://github.com/codegen-sh/codegen-sdk.git"`) and an optional commit hash (`--single-commit ...`)
+ - **github:** Uses GitHub settings, language (`--github-language python`) and heuristic (`--github-heuristic stars`) to query for top repositories.
+
+- **Result Storage:**
+ Upon completion, logs and metrics are automatically uploaded to the S3 bucket specified by the environment variable `BUCKET_NAME` (default: `codegen-oss-parse`). This allows for centralized storage and easy retrieval of run outputs. The AWS Credentials provided in the secret are used for this operation.
+
+______________________________________________________________________
+
+## Extensibility
+
+**Adding New Sources:**
+
+You can define additional repository sources by subclassing `RepoSource` and providing a corresponding settings class. Make sure to set the `source_type` and register your new source by following the pattern established in `CSVInputSource` or `GithubSource`.
+
+**Improving Testing:**
+
+The detailed metrics collected can help you understand where parsing failures occur or where performance lags. Use these insights to improve error handling and optimize the codegen parsing logic.
+
+**Containerization and Automation:**
+
+There is a Dockerfile that can be used to create an image capable of running the parse tests. Runtime environment variables can be used to configure the run and output.
+
+**Input & Configuration**
+
+Explore a better CLI for providing options to the Modal run.
+
+______________________________________________________________________
+
+## Example Log Output
+
+```shell
+[codegen-on-oss*] codegen/codegen-on-oss/$ uv run cgparse run --source csv
+ 21:32:36 INFO Cloning repository https://github.com/JohnSnowLabs/spark-nlp.git
+ 21:36:57 INFO {
+ "profile_name": "https://github.com/JohnSnowLabs/spark-nlp.git",
+ "step": "codebase_init",
+ "delta_time": 7.186550649999845,
+ "cumulative_time": 7.186550649999845,
+ "cpu_time": 180.3553702,
+ "memory_usage": 567525376,
+ "memory_delta": 317095936,
+ "error": null
+}
+ 21:36:58 INFO {
+ "profile_name": "https://github.com/JohnSnowLabs/spark-nlp.git",
+ "step": "post_init_validation",
+ "delta_time": 0.5465090990001045,
+ "cumulative_time": 7.733059748999949,
+ "cpu_time": 180.9174761,
+ "memory_usage": 569249792,
+ "memory_delta": 1724416,
+ "error": null
+}
+ 21:36:58 ERROR Repository: https://github.com/JohnSnowLabs/spark-nlp.git
+Traceback (most recent call last):
+
+ File "/home/codegen/codegen/codegen-on-oss/.venv/bin/cgparse", line 10, in
+ sys.exit(cli())
+ │ │ └
+ │ └
+ └
+ File "/home/codegen/codegen/codegen-on-oss/.venv/lib/python3.12/site-packages/click/core.py", line 1161, in __call__
+ return self.main(*args, **kwargs)
+ │ │ │ └ {}
+ │ │ └ ()
+ │ └
+ └
+ File "/home/codegen/codegen/codegen-on-oss/.venv/lib/python3.12/site-packages/click/core.py", line 1082, in main
+ rv = self.invoke(ctx)
+ │ │ └
+ │ └
+ └
+ File "/home/codegen/codegen/codegen-on-oss/.venv/lib/python3.12/site-packages/click/core.py", line 1697, in invoke
+ return _process_result(sub_ctx.command.invoke(sub_ctx))
+ │ │ │ │ └
+ │ │ │ └
+ │ │ └
+ │ └
+ └ ._process_result at 0x7f466597fb00>
+ File "/home/codegen/codegen/codegen-on-oss/.venv/lib/python3.12/site-packages/click/core.py", line 1443, in invoke
+ return ctx.invoke(self.callback, **ctx.params)
+ │ │ │ │ │ └ {'source': 'csv', 'output_path': 'metrics.csv', 'error_output_path': 'errors.log', 'cache_dir': PosixPath('/home/.cache...
+ │ │ │ │ └
+ │ │ │ └
+ │ │ └
+ │ └
+ └
+ File "/home/codegen/codegen/codegen-on-oss/.venv/lib/python3.12/site-packages/click/core.py", line 788, in invoke
+ return __callback(*args, **kwargs)
+ │ └ {'source': 'csv', 'output_path': 'metrics.csv', 'error_output_path': 'errors.log', 'cache_dir': PosixPath('/home/.cache...
+ └ ()
+
+ File "/home/codegen/codegen/codegen-on-oss/codegen_on_oss/cli.py", line 121, in run
+ parser.parse(repo_url)
+ │ │ └ 'https://github.com/JohnSnowLabs/spark-nlp.git'
+ │ └
+ └
+
+ File "/home/codegen/codegen/codegen-on-oss/codegen_on_oss/parser.py", line 52, in parse
+ with self.metrics_profiler.start_profiler(
+ │ │ └
+ │ └
+ └
+
+ File "/home/.local/share/uv/python/cpython-3.12.6-linux-x86_64-gnu/lib/python3.12/contextlib.py", line 158, in __exit__
+ self.gen.throw(value)
+ │ │ │ └ ParseRunError()
+ │ │ └
+ │ └
+ └
+
+> File "/home/codegen/codegen/codegen-on-oss/codegen_on_oss/metrics.py", line 41, in start_profiler
+ yield profile
+ └
+
+ File "/home/codegen/codegen/codegen-on-oss/codegen_on_oss/parser.py", line 64, in parse
+ raise ParseRunError(validation_status)
+ │ └
+ └
+
+codegen_on_oss.parser.ParseRunError: LOW_IMPORT_RESOLUTION_RATE
+ 21:36:58 INFO {
+ "profile_name": "https://github.com/JohnSnowLabs/spark-nlp.git",
+ "step": "TOTAL",
+ "delta_time": 7.740976418000173,
+ "cumulative_time": 7.740976418000173,
+ "cpu_time": 180.9221699,
+ "memory_usage": 569249792,
+ "memory_delta": 0,
+ "error": "LOW_IMPORT_RESOLUTION_RATE"
+}
+ 21:36:58 INFO Cloning repository https://github.com/Lightning-AI/lightning.git
+ 21:37:53 INFO {
+ "profile_name": "https://github.com/Lightning-AI/lightning.git",
+ "step": "codebase_init",
+ "delta_time": 24.256577352999557,
+ "cumulative_time": 24.256577352999557,
+ "cpu_time": 211.3604081,
+ "memory_usage": 1535971328,
+ "memory_delta": 966184960,
+ "error": null
+}
+ 21:37:53 INFO {
+ "profile_name": "https://github.com/Lightning-AI/lightning.git",
+ "step": "post_init_validation",
+ "delta_time": 0.137609629000508,
+ "cumulative_time": 24.394186982000065,
+ "cpu_time": 211.5082702,
+ "memory_usage": 1536241664,
+ "memory_delta": 270336,
+ "error": null
+}
+ 21:37:53 INFO {
+ "profile_name": "https://github.com/Lightning-AI/lightning.git",
+ "step": "TOTAL",
+ "delta_time": 24.394700584999555,
+ "cumulative_time": 24.394700584999555,
+ "cpu_time": 211.5088282,
+ "memory_usage": 1536241664,
+ "memory_delta": 0,
+ "error": null
+}
+```
+
+## Example Metrics Output
+
+| profile_name | step | delta_time | cumulative_time | cpu_time | memory_usage | memory_delta | error |
+| ---------------------- | -------------------- | ------------------ | ------------------ | ----------- | ------------ | ------------ | -------------------------- |
+| JohnSnowLabs/spark-nlp | codebase_init | 7.186550649999845 | 7.186550649999845 | 180.3553702 | 567525376 | 317095936 | |
+| JohnSnowLabs/spark-nlp | post_init_validation | 0.5465090990001045 | 7.733059748999949 | 180.9174761 | 569249792 | 1724416 | |
+| JohnSnowLabs/spark-nlp | TOTAL | 7.740976418000173 | 7.740976418000173 | 180.9221699 | 569249792 | 0 | LOW_IMPORT_RESOLUTION_RATE |
+| Lightning-AI/lightning | codebase_init | 24.256577352999557 | 24.256577352999557 | 211.3604081 | 1535971328 | 966184960 | |
+| Lightning-AI/lightning | post_init_validation | 0.137609629000508 | 24.394186982000065 | 211.5082702 | 1536241664 | 270336 | |
+| Lightning-AI/lightning | TOTAL | 24.394700584999555 | 24.394700584999555 | 211.5088282 | 1536241664 | 0 | |
diff --git a/codegen-on-oss/codecov.yaml b/codegen-on-oss/codecov.yaml
new file mode 100644
index 000000000..058cfb765
--- /dev/null
+++ b/codegen-on-oss/codecov.yaml
@@ -0,0 +1,9 @@
+coverage:
+ range: 70..100
+ round: down
+ precision: 1
+ status:
+ project:
+ default:
+ target: 90%
+ threshold: 0.5%
diff --git a/codegen-on-oss/codegen_modal_deploy.py b/codegen-on-oss/codegen_modal_deploy.py
new file mode 100644
index 000000000..a0fa03539
--- /dev/null
+++ b/codegen-on-oss/codegen_modal_deploy.py
@@ -0,0 +1,76 @@
+import sys
+from pathlib import Path
+
+import modal
+from loguru import logger
+
+from codegen_on_oss.cache import cachedir
+from codegen_on_oss.metrics import MetricsProfiler
+from codegen_on_oss.outputs.sql_output import ParseMetricsSQLOutput
+from codegen_on_oss.parser import CodegenParser
+
+app = modal.App("codegen-oss-parse")
+
+
+codegen_repo_volume = modal.Volume.from_name(
+ "codegen-oss-repo-volume",
+ create_if_missing=True,
+)
+
+
+aws_secrets = modal.Secret.from_name(
+ "codegen-oss-parse-secrets",
+)
+
+
+@app.function(
+ name="parse_repo",
+ concurrency_limit=10,
+ cpu=4,
+ memory=16384,
+ timeout=3600 * 8,
+ secrets=[aws_secrets],
+ volumes={
+ str(cachedir.absolute()): codegen_repo_volume,
+ },
+ proxy=modal.Proxy.from_name("codegen-parse-proxy"),
+ image=modal.Image.debian_slim(python_version="3.13")
+ .pip_install("uv")
+ .apt_install("git") # required by codegen sdk
+ .env({"PATH": "/app/.venv/bin:$PATH"})
+ .workdir("/app")
+ .add_local_file("uv.lock", remote_path="/app/uv.lock", copy=True)
+ .add_local_file("pyproject.toml", remote_path="/app/pyproject.toml", copy=True)
+ .run_commands("uv sync --frozen --no-install-project --extra sql")
+ .add_local_python_source("codegen_on_oss", copy=True),
+ # .add_local_python_source("codegen_on_oss"),
+ # .add_local_dir("codegen_on_oss", remote_path="/app/codegen_on_oss"),
+)
+def parse_repo(
+ repo_url: str,
+ commit_hash: str | None,
+ language: str | None = None,
+):
+ """
+ Parse repositories on Modal.
+
+ Args:
+ repo_url: The URL of the repository to parse.
+ commit_hash: The commit hash of the repository to parse.
+ """
+ logger.add(sys.stdout, format="{time: HH:mm:ss} {level} {message}", level="DEBUG")
+
+ output = ParseMetricsSQLOutput(
+ modal_function_call_id=modal.current_function_call_id()
+ )
+ metrics_profiler = MetricsProfiler(output)
+ parser = CodegenParser(Path(cachedir) / "repositories", metrics_profiler)
+ # Refresh any updating repo data from other instances
+ codegen_repo_volume.reload()
+ try:
+ parser.parse(repo_url, language, commit_hash)
+ except Exception as e:
+ logger.exception(f"Error parsing repository {repo_url}: {e}")
+ finally:
+ # Commit any cache changes to the repo volume
+ codegen_repo_volume.commit()
diff --git a/codegen-on-oss/codegen_modal_run.py b/codegen-on-oss/codegen_modal_run.py
new file mode 100644
index 000000000..ab0ad8ecb
--- /dev/null
+++ b/codegen-on-oss/codegen_modal_run.py
@@ -0,0 +1,29 @@
+import modal
+
+from codegen_on_oss.sources import GithubSettings, GithubSource
+
+app = modal.App("codegen-oss-parse")
+
+
+@app.local_entrypoint()
+def main(
+ languages: str = "python,typescript",
+ heuristic: str = "stars",
+ num_repos: int = 100,
+):
+ """
+ Main entrypoint for the parse app.
+ """
+ parse_repo_on_modal_fn = modal.Function.from_name("codegen-oss-parse", "parse_repo")
+ for language in languages.split(","):
+ repo_source = GithubSource(
+ GithubSettings(
+ language=language.strip(), heuristic=heuristic, num_repos=num_repos
+ )
+ )
+ for repo_url, commit_hash in repo_source:
+ parse_repo_on_modal_fn.spawn(
+ repo_url=repo_url,
+ commit_hash=commit_hash,
+ language=language,
+ )
diff --git a/codegen-on-oss/codegen_on_oss/__init__.py b/codegen-on-oss/codegen_on_oss/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/codegen-on-oss/codegen_on_oss/analyzer.py b/codegen-on-oss/codegen_on_oss/analyzer.py
new file mode 100644
index 000000000..a38bf9de0
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/analyzer.py
@@ -0,0 +1,2183 @@
+#!/usr/bin/env python3
+"""
+Comprehensive Codebase Analyzer
+
+This module provides a complete static code analysis system using the Codegen SDK.
+It analyzes a codebase and provides extensive information about its structure,
+dependencies, code quality, and more.
+"""
+
+import argparse
+import datetime
+import json
+import logging
+import math
+import re
+import sys
+import tempfile
+from typing import Any
+
+import networkx as nx
+from rich.console import Console
+from rich.progress import (
+ BarColumn,
+ Progress,
+ SpinnerColumn,
+ TextColumn,
+ TimeElapsedColumn,
+)
+from rich.table import Table
+
+try:
+ from codegen.configs.models.codebase import CodebaseConfig
+ from codegen.configs.models.secrets import SecretsConfig
+ from codegen.sdk.core.codebase import Codebase
+ from codegen.shared.enums.programming_language import ProgrammingLanguage
+except ImportError:
+ print("Codegen SDK not found. Please install it first.")
+ sys.exit(1)
+
+# Configure logging
+logging.basicConfig(
+ level=logging.INFO,
+ format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
+ handlers=[logging.StreamHandler()],
+)
+logger = logging.getLogger(__name__)
+
+# Constants
+METRICS_CATEGORIES = {
+ "codebase_structure": [
+ "get_file_count",
+ "get_files_by_language",
+ "get_file_size_distribution",
+ "get_directory_structure",
+ "get_symbol_count",
+ "get_symbol_type_distribution",
+ "get_symbol_hierarchy",
+ "get_top_level_vs_nested_symbols",
+ "get_import_dependency_map",
+ "get_external_vs_internal_dependencies",
+ "get_circular_imports",
+ "get_unused_imports",
+ "get_module_coupling_metrics",
+ "get_module_cohesion_analysis",
+ "get_package_structure",
+ "get_module_dependency_graph",
+ ],
+ "symbol_level": [
+ "get_function_parameter_analysis",
+ "get_return_type_analysis",
+ "get_function_complexity_metrics",
+ "get_call_site_tracking",
+ "get_async_function_detection",
+ "get_function_overload_analysis",
+ "get_inheritance_hierarchy",
+ "get_method_analysis",
+ "get_attribute_analysis",
+ "get_constructor_analysis",
+ "get_interface_implementation_verification",
+ "get_access_modifier_usage",
+ "get_type_inference",
+ "get_usage_tracking",
+ "get_scope_analysis",
+ "get_constant_vs_mutable_usage",
+ "get_global_variable_detection",
+ "get_type_alias_resolution",
+ "get_generic_type_usage",
+ "get_type_consistency_checking",
+ "get_union_intersection_type_analysis",
+ ],
+ "dependency_flow": [
+ "get_function_call_relationships",
+ "get_call_hierarchy_visualization",
+ "get_entry_point_analysis",
+ "get_dead_code_detection",
+ "get_variable_usage_tracking",
+ "get_data_transformation_paths",
+ "get_input_output_parameter_analysis",
+ "get_conditional_branch_analysis",
+ "get_loop_structure_analysis",
+ "get_exception_handling_paths",
+ "get_return_statement_analysis",
+ "get_symbol_reference_tracking",
+ "get_usage_frequency_metrics",
+ "get_cross_file_symbol_usage",
+ ],
+ "code_quality": [
+ "get_unused_functions",
+ "get_unused_classes",
+ "get_unused_variables",
+ "get_unused_imports",
+ "get_similar_function_detection",
+ "get_repeated_code_patterns",
+ "get_refactoring_opportunities",
+ "get_cyclomatic_complexity",
+ "get_cognitive_complexity",
+ "get_nesting_depth_analysis",
+ "get_function_size_metrics",
+ "get_naming_convention_consistency",
+ "get_comment_coverage",
+ "get_documentation_completeness",
+ "get_code_formatting_consistency",
+ ],
+ "visualization": [
+ "get_module_dependency_visualization",
+ "get_symbol_dependency_visualization",
+ "get_import_relationship_graphs",
+ "get_function_call_visualization",
+ "get_call_hierarchy_trees",
+ "get_entry_point_flow_diagrams",
+ "get_class_hierarchy_visualization",
+ "get_symbol_relationship_diagrams",
+ "get_package_structure_visualization",
+ "get_code_complexity_heat_maps",
+ "get_usage_frequency_visualization",
+ "get_change_frequency_analysis",
+ ],
+ "language_specific": [
+ "get_decorator_usage_analysis",
+ "get_dynamic_attribute_access_detection",
+ "get_type_hint_coverage",
+ "get_magic_method_usage",
+ "get_interface_implementation_verification",
+ "get_type_definition_completeness",
+ "get_jsx_tsx_component_analysis",
+ "get_type_narrowing_pattern_detection",
+ ],
+ "code_metrics": [
+ "get_monthly_commits",
+ "calculate_cyclomatic_complexity",
+ "cc_rank",
+ "get_operators_and_operands",
+ "calculate_halstead_volume",
+ "count_lines",
+ "calculate_maintainability_index",
+ "get_maintainability_rank",
+ ],
+}
+
+
+class CodebaseAnalyzer:
+ """
+ Comprehensive codebase analyzer using Codegen SDK.
+
+ This class provides methods to analyze a codebase and extract detailed information
+ about its structure, dependencies, code quality, and more.
+ """
+
+ def __init__(
+ self,
+ repo_url: str | None = None,
+ repo_path: str | None = None,
+ language: str | None = None,
+ ):
+ """
+ Initialize the CodebaseAnalyzer.
+
+ Args:
+ repo_url: URL of the repository to analyze
+ repo_path: Local path to the repository to analyze
+ language: Programming language of the codebase (auto-detected if not provided)
+ """
+ self.repo_url = repo_url
+ self.repo_path = repo_path
+ self.language = language
+ self.codebase = None
+ self.console = Console()
+ self.results = {}
+
+ # Initialize the codebase
+ if repo_url:
+ self._init_from_url(repo_url, language)
+ elif repo_path:
+ self._init_from_path(repo_path, language)
+
+ def _init_from_url(self, repo_url: str, language: str | None = None):
+ """Initialize codebase from a repository URL."""
+ try:
+ # Extract owner and repo name from URL
+ if repo_url.endswith(".git"):
+ repo_url = repo_url[:-4]
+
+ parts = repo_url.rstrip("/").split("/")
+ repo_name = parts[-1]
+ owner = parts[-2]
+ repo_full_name = f"{owner}/{repo_name}"
+
+ # Create a temporary directory for cloning
+ tmp_dir = tempfile.mkdtemp(prefix="codebase_analyzer_")
+
+ # Configure the codebase
+ config = CodebaseConfig(
+ debug=False,
+ allow_external=True,
+ py_resolve_syspath=True,
+ )
+
+ secrets = SecretsConfig()
+
+ # Initialize the codebase
+ self.console.print(
+ f"[bold green]Initializing codebase from {repo_url}...[/bold green]"
+ )
+
+ prog_lang = None
+ if language:
+ prog_lang = ProgrammingLanguage(language.upper())
+
+ self.codebase = Codebase.from_github(
+ repo_full_name=repo_full_name,
+ tmp_dir=tmp_dir,
+ language=prog_lang,
+ config=config,
+ secrets=secrets,
+ full_history=True,
+ )
+
+ self.console.print(
+ f"[bold green]Successfully initialized codebase from {repo_url}[/bold green]"
+ )
+
+ except Exception as e:
+ self.console.print(
+ f"[bold red]Error initializing codebase from URL: {e}[/bold red]"
+ )
+ raise
+
+ def _init_from_path(self, repo_path: str, language: str | None = None):
+ """Initialize codebase from a local repository path."""
+ try:
+ # Configure the codebase
+ config = CodebaseConfig(
+ debug=False,
+ allow_external=True,
+ py_resolve_syspath=True,
+ )
+
+ secrets = SecretsConfig()
+
+ # Initialize the codebase
+ self.console.print(
+ f"[bold green]Initializing codebase from {repo_path}...[/bold green]"
+ )
+
+ prog_lang = None
+ if language:
+ prog_lang = ProgrammingLanguage(language.upper())
+
+ self.codebase = Codebase(
+ repo_path=repo_path, language=prog_lang, config=config, secrets=secrets
+ )
+
+ self.console.print(
+ f"[bold green]Successfully initialized codebase from {repo_path}[/bold green]"
+ )
+
+ except Exception as e:
+ self.console.print(
+ f"[bold red]Error initializing codebase from path: {e}[/bold red]"
+ )
+ raise
+
+ def analyze(
+ self,
+ categories: list[str] | None = None,
+ output_format: str = "json",
+ output_file: str | None = None,
+ ):
+ """
+ Perform a comprehensive analysis of the codebase.
+
+ Args:
+ categories: List of categories to analyze. If None, all categories are analyzed.
+ output_format: Format of the output (json, html, console)
+ output_file: Path to the output file
+
+ Returns:
+ Dict containing the analysis results
+ """
+ if not self.codebase:
+ raise ValueError(
+ "Codebase not initialized. Please initialize the codebase first."
+ )
+
+ # If no categories specified, analyze all
+ if not categories:
+ categories = list(METRICS_CATEGORIES.keys())
+
+ # Initialize results dictionary
+ self.results = {
+ "metadata": {
+ "repo_name": self.codebase.ctx.repo_name,
+ "analysis_time": datetime.datetime.now().isoformat(),
+ "language": str(self.codebase.ctx.programming_language),
+ },
+ "categories": {},
+ }
+
+ # Analyze each category
+ with Progress(
+ SpinnerColumn(),
+ TextColumn("[bold blue]{task.description}"),
+ BarColumn(),
+ TextColumn("[bold green]{task.completed}/{task.total}"),
+ TimeElapsedColumn(),
+ ) as progress:
+ task = progress.add_task(
+ "[bold green]Analyzing codebase...", total=len(categories)
+ )
+
+ for category in categories:
+ if category not in METRICS_CATEGORIES:
+ self.console.print(
+ f"[bold yellow]Warning: Unknown category '{category}'. Skipping.[/bold yellow]"
+ )
+ progress.update(task, advance=1)
+ continue
+
+ self.console.print(f"[bold blue]Analyzing {category}...[/bold blue]")
+
+ # Get the metrics for this category
+ metrics = METRICS_CATEGORIES[category]
+ category_results = {}
+
+ # Run each metric
+ for metric in metrics:
+ try:
+ method = getattr(self, metric, None)
+ if method and callable(method):
+ result = method()
+ category_results[metric] = result
+ else:
+ category_results[metric] = {
+ "error": f"Method {metric} not implemented"
+ }
+ except Exception as e:
+ category_results[metric] = {"error": str(e)}
+
+ # Add the results to the main results dictionary
+ self.results["categories"][category] = category_results
+
+ progress.update(task, advance=1)
+
+ # Output the results
+ if output_format == "json":
+ if output_file:
+ with open(output_file, "w") as f:
+ json.dump(self.results, f, indent=2)
+ self.console.print(
+ f"[bold green]Results saved to {output_file}[/bold green]"
+ )
+ else:
+ return self.results
+ elif output_format == "html":
+ self._generate_html_report(output_file)
+ elif output_format == "console":
+ self._print_console_report()
+
+ return self.results
+
+ #
+ # Codebase Structure Analysis Methods
+ #
+
+ def get_file_count(self) -> dict[str, int]:
+ """Get the total number of files in the codebase."""
+ files = list(self.codebase.files)
+ return {
+ "total_files": len(files),
+ "source_files": len([f for f in files if not f.is_binary]),
+ }
+
+ def get_files_by_language(self) -> dict[str, int]:
+ """Get the distribution of files by language/extension."""
+ files = list(self.codebase.files)
+ extensions = {}
+
+ for file in files:
+ if file.is_binary:
+ continue
+
+ ext = file.extension
+ if not ext:
+ ext = "(no extension)"
+
+ if ext in extensions:
+ extensions[ext] += 1
+ else:
+ extensions[ext] = 1
+
+ return extensions
+
+ def get_file_size_distribution(self) -> dict[str, int]:
+ """Get the distribution of file sizes."""
+ files = list(self.codebase.files)
+ size_ranges = {
+ "small (< 1KB)": 0,
+ "medium (1KB - 10KB)": 0,
+ "large (10KB - 100KB)": 0,
+ "very large (> 100KB)": 0,
+ }
+
+ for file in files:
+ if file.is_binary:
+ continue
+
+ size = len(file.content)
+
+ if size < 1024:
+ size_ranges["small (< 1KB)"] += 1
+ elif size < 10240:
+ size_ranges["medium (1KB - 10KB)"] += 1
+ elif size < 102400:
+ size_ranges["large (10KB - 100KB)"] += 1
+ else:
+ size_ranges["very large (> 100KB)"] += 1
+
+ return size_ranges
+
+ def get_directory_structure(self) -> dict[str, Any]:
+ """Get the directory structure of the codebase."""
+ directories = {}
+
+ for directory in self.codebase.directories:
+ path = str(directory.path)
+ parent_path = (
+ str(directory.path.parent)
+ if directory.path.parent != self.codebase.repo_path
+ else "/"
+ )
+
+ if parent_path not in directories:
+ directories[parent_path] = []
+
+ directories[parent_path].append({
+ "name": directory.path.name,
+ "path": path,
+ "files": len(directory.files),
+ "subdirectories": len(directory.subdirectories),
+ })
+
+ return directories
+
+ def get_symbol_count(self) -> dict[str, int]:
+ """Get the total count of symbols in the codebase."""
+ return {
+ "total_symbols": len(list(self.codebase.symbols)),
+ "classes": len(list(self.codebase.classes)),
+ "functions": len(list(self.codebase.functions)),
+ "global_vars": len(list(self.codebase.global_vars)),
+ "interfaces": len(list(self.codebase.interfaces)),
+ }
+
+ def get_symbol_type_distribution(self) -> dict[str, int]:
+ """Get the distribution of symbol types."""
+ symbols = list(self.codebase.symbols)
+ distribution = {}
+
+ for symbol in symbols:
+ symbol_type = str(symbol.symbol_type)
+
+ if symbol_type in distribution:
+ distribution[symbol_type] += 1
+ else:
+ distribution[symbol_type] = 1
+
+ return distribution
+
+ def get_symbol_hierarchy(self) -> dict[str, Any]:
+ """Get the hierarchy of symbols in the codebase."""
+ classes = list(self.codebase.classes)
+ hierarchy = {}
+
+ for cls in classes:
+ class_name = cls.name
+ parent_classes = []
+
+ # Get parent classes if available
+ if hasattr(cls, "parent_class_names"):
+ parent_classes = cls.parent_class_names
+
+ hierarchy[class_name] = {
+ "parent_classes": parent_classes,
+ "methods": [method.name for method in cls.methods],
+ "attributes": [attr.name for attr in cls.attributes]
+ if hasattr(cls, "attributes")
+ else [],
+ }
+
+ return hierarchy
+
+ def get_top_level_vs_nested_symbols(self) -> dict[str, int]:
+ """Get the count of top-level vs nested symbols."""
+ symbols = list(self.codebase.symbols)
+ top_level = 0
+ nested = 0
+
+ for symbol in symbols:
+ if hasattr(symbol, "is_top_level") and symbol.is_top_level:
+ top_level += 1
+ else:
+ nested += 1
+
+ return {"top_level": top_level, "nested": nested}
+
+ def get_import_dependency_map(self) -> dict[str, list[str]]:
+ """Get a map of import dependencies."""
+ files = list(self.codebase.files)
+ dependency_map = {}
+
+ for file in files:
+ if file.is_binary:
+ continue
+
+ file_path = file.file_path
+ imports = []
+
+ for imp in file.imports:
+ if hasattr(imp, "imported_symbol") and imp.imported_symbol:
+ imported_symbol = imp.imported_symbol
+ if hasattr(imported_symbol, "file") and imported_symbol.file:
+ imports.append(imported_symbol.file.file_path)
+
+ dependency_map[file_path] = imports
+
+ return dependency_map
+
+ def get_external_vs_internal_dependencies(self) -> dict[str, int]:
+ """Get the count of external vs internal dependencies."""
+ files = list(self.codebase.files)
+ internal = 0
+ external = 0
+
+ for file in files:
+ if file.is_binary:
+ continue
+
+ for imp in file.imports:
+ if hasattr(imp, "imported_symbol") and imp.imported_symbol:
+ imported_symbol = imp.imported_symbol
+ if hasattr(imported_symbol, "file") and imported_symbol.file:
+ internal += 1
+ else:
+ external += 1
+ else:
+ external += 1
+
+ return {"internal": internal, "external": external}
+
+ def get_circular_imports(self) -> list[list[str]]:
+ """Detect circular imports in the codebase."""
+ files = list(self.codebase.files)
+ dependency_map = {}
+
+ # Build dependency graph
+ for file in files:
+ if file.is_binary:
+ continue
+
+ file_path = file.file_path
+ imports = []
+
+ for imp in file.imports:
+ if hasattr(imp, "imported_symbol") and imp.imported_symbol:
+ imported_symbol = imp.imported_symbol
+ if hasattr(imported_symbol, "file") and imported_symbol.file:
+ imports.append(imported_symbol.file.file_path)
+
+ dependency_map[file_path] = imports
+
+ # Create a directed graph
+ G = nx.DiGraph()
+
+ # Add nodes and edges
+ for file_path, imports in dependency_map.items():
+ G.add_node(file_path)
+ for imp in imports:
+ G.add_edge(file_path, imp)
+
+ # Find cycles
+ cycles = list(nx.simple_cycles(G))
+
+ return cycles
+
+ def get_unused_imports(self) -> list[dict[str, str]]:
+ """Get a list of unused imports."""
+ files = list(self.codebase.files)
+ unused_imports = []
+
+ for file in files:
+ if file.is_binary:
+ continue
+
+ for imp in file.imports:
+ if hasattr(imp, "usages") and len(imp.usages) == 0:
+ unused_imports.append({
+ "file": file.file_path,
+ "import": imp.source,
+ })
+
+ return unused_imports
+
+ def get_module_coupling_metrics(self) -> dict[str, float]:
+ """Calculate module coupling metrics."""
+ files = list(self.codebase.files)
+ dependency_map = {}
+
+ # Build dependency graph
+ for file in files:
+ if file.is_binary:
+ continue
+
+ file_path = file.file_path
+ imports = []
+
+ for imp in file.imports:
+ if hasattr(imp, "imported_symbol") and imp.imported_symbol:
+ imported_symbol = imp.imported_symbol
+ if hasattr(imported_symbol, "file") and imported_symbol.file:
+ imports.append(imported_symbol.file.file_path)
+
+ dependency_map[file_path] = imports
+
+ # Calculate metrics
+ total_files = len(dependency_map)
+ total_dependencies = sum(len(deps) for deps in dependency_map.values())
+
+ if total_files == 0:
+ return {
+ "average_dependencies_per_file": 0,
+ "max_dependencies": 0,
+ "coupling_factor": 0,
+ }
+
+ max_dependencies = (
+ max(len(deps) for deps in dependency_map.values()) if dependency_map else 0
+ )
+ coupling_factor = (
+ total_dependencies / (total_files * (total_files - 1))
+ if total_files > 1
+ else 0
+ )
+
+ return {
+ "average_dependencies_per_file": total_dependencies / total_files,
+ "max_dependencies": max_dependencies,
+ "coupling_factor": coupling_factor,
+ }
+
+ def get_module_cohesion_analysis(self) -> dict[str, float]:
+ """Analyze module cohesion."""
+ files = list(self.codebase.files)
+ cohesion_metrics = {}
+
+ for file in files:
+ if file.is_binary:
+ continue
+
+ symbols = list(file.symbols)
+ total_symbols = len(symbols)
+
+ if total_symbols <= 1:
+ continue
+
+ # Count internal references
+ internal_refs = 0
+
+ for symbol in symbols:
+ if hasattr(symbol, "symbol_usages"):
+ for usage in symbol.symbol_usages:
+ if hasattr(usage, "file") and usage.file == file:
+ internal_refs += 1
+
+ max_possible_refs = total_symbols * (total_symbols - 1)
+ cohesion = internal_refs / max_possible_refs if max_possible_refs > 0 else 0
+
+ cohesion_metrics[file.file_path] = cohesion
+
+ # Calculate average cohesion
+ if cohesion_metrics:
+ avg_cohesion = sum(cohesion_metrics.values()) / len(cohesion_metrics)
+ else:
+ avg_cohesion = 0
+
+ return {"average_cohesion": avg_cohesion, "file_cohesion": cohesion_metrics}
+
+ def get_package_structure(self) -> dict[str, Any]:
+ """Get the package structure of the codebase."""
+ directories = {}
+
+ for directory in self.codebase.directories:
+ path = str(directory.path)
+ parent_path = (
+ str(directory.path.parent)
+ if directory.path.parent != self.codebase.repo_path
+ else "/"
+ )
+
+ if parent_path not in directories:
+ directories[parent_path] = []
+
+ # Check if this is a package (has __init__.py)
+ is_package = any(f.name == "__init__.py" for f in directory.files)
+
+ directories[parent_path].append({
+ "name": directory.path.name,
+ "path": path,
+ "is_package": is_package,
+ "files": len(directory.files),
+ "subdirectories": len(directory.subdirectories),
+ })
+
+ return directories
+
+ def get_module_dependency_graph(self) -> dict[str, list[str]]:
+ """Get the module dependency graph."""
+ files = list(self.codebase.files)
+ dependency_graph = {}
+
+ for file in files:
+ if file.is_binary:
+ continue
+
+ file_path = file.file_path
+ imports = []
+
+ for imp in file.imports:
+ if hasattr(imp, "imported_symbol") and imp.imported_symbol:
+ imported_symbol = imp.imported_symbol
+ if hasattr(imported_symbol, "file") and imported_symbol.file:
+ imports.append(imported_symbol.file.file_path)
+
+ dependency_graph[file_path] = imports
+
+ return dependency_graph
+
+ #
+ # Symbol-Level Analysis Methods
+ #
+
+ def get_function_parameter_analysis(self) -> dict[str, Any]:
+ """Analyze function parameters."""
+ functions = list(self.codebase.functions)
+ parameter_stats = {
+ "total_parameters": 0,
+ "avg_parameters_per_function": 0,
+ "functions_with_no_parameters": 0,
+ "functions_with_many_parameters": 0, # > 5 parameters
+ "parameter_type_coverage": 0,
+ "functions_with_default_params": 0,
+ }
+
+ if not functions:
+ return parameter_stats
+
+ total_params = 0
+ functions_with_types = 0
+ functions_with_defaults = 0
+
+ for func in functions:
+ params = func.parameters
+ param_count = len(params)
+ total_params += param_count
+
+ if param_count == 0:
+ parameter_stats["functions_with_no_parameters"] += 1
+ elif param_count > 5:
+ parameter_stats["functions_with_many_parameters"] += 1
+
+ # Check for type annotations
+ has_type_annotations = all(hasattr(p, "type") and p.type for p in params)
+ if has_type_annotations:
+ functions_with_types += 1
+
+ # Check for default values
+ has_defaults = any(hasattr(p, "default") and p.default for p in params)
+ if has_defaults:
+ functions_with_defaults += 1
+
+ parameter_stats["total_parameters"] = total_params
+ parameter_stats["avg_parameters_per_function"] = total_params / len(functions)
+ parameter_stats["parameter_type_coverage"] = (
+ functions_with_types / len(functions) if functions else 0
+ )
+ parameter_stats["functions_with_default_params"] = functions_with_defaults
+
+ return parameter_stats
+
+ def get_return_type_analysis(self) -> dict[str, Any]:
+ """Analyze function return types."""
+ functions = list(self.codebase.functions)
+ return_type_stats = {
+ "functions_with_return_type": 0,
+ "return_type_coverage": 0,
+ "common_return_types": {},
+ }
+
+ if not functions:
+ return return_type_stats
+
+ functions_with_return_type = 0
+ return_types = {}
+
+ for func in functions:
+ if hasattr(func, "return_type") and func.return_type:
+ functions_with_return_type += 1
+
+ return_type = (
+ str(func.return_type.source)
+ if hasattr(func.return_type, "source")
+ else str(func.return_type)
+ )
+
+ if return_type in return_types:
+ return_types[return_type] += 1
+ else:
+ return_types[return_type] = 1
+
+ return_type_stats["functions_with_return_type"] = functions_with_return_type
+ return_type_stats["return_type_coverage"] = functions_with_return_type / len(
+ functions
+ )
+
+ # Get the most common return types
+ sorted_types = sorted(return_types.items(), key=lambda x: x[1], reverse=True)
+ return_type_stats["common_return_types"] = dict(
+ sorted_types[:10]
+ ) # Top 10 return types
+
+ return return_type_stats
+
+ def get_function_complexity_metrics(self) -> dict[str, Any]:
+ """Calculate function complexity metrics."""
+ functions = list(self.codebase.functions)
+ complexity_metrics = {
+ "avg_function_length": 0,
+ "max_function_length": 0,
+ "functions_by_complexity": {
+ "simple": 0, # < 10 lines
+ "moderate": 0, # 10-30 lines
+ "complex": 0, # 30-100 lines
+ "very_complex": 0, # > 100 lines
+ },
+ }
+
+ if not functions:
+ return complexity_metrics
+
+ total_length = 0
+ max_length = 0
+
+ for func in functions:
+ # Calculate function length in lines
+ func_source = func.source
+ func_lines = func_source.count("\n") + 1
+
+ total_length += func_lines
+ max_length = max(max_length, func_lines)
+
+ # Categorize by complexity
+ if func_lines < 10:
+ complexity_metrics["functions_by_complexity"]["simple"] += 1
+ elif func_lines < 30:
+ complexity_metrics["functions_by_complexity"]["moderate"] += 1
+ elif func_lines < 100:
+ complexity_metrics["functions_by_complexity"]["complex"] += 1
+ else:
+ complexity_metrics["functions_by_complexity"]["very_complex"] += 1
+
+ complexity_metrics["avg_function_length"] = total_length / len(functions)
+ complexity_metrics["max_function_length"] = max_length
+
+ return complexity_metrics
+
+ def get_call_site_tracking(self) -> dict[str, Any]:
+ """Track function call sites."""
+ functions = list(self.codebase.functions)
+ call_site_stats = {
+ "functions_with_no_calls": 0,
+ "functions_with_many_calls": 0, # > 10 calls
+ "avg_call_sites_per_function": 0,
+ "most_called_functions": [],
+ }
+
+ if not functions:
+ return call_site_stats
+
+ function_calls = {}
+ total_calls = 0
+
+ for func in functions:
+ if hasattr(func, "call_sites"):
+ call_count = len(func.call_sites)
+ total_calls += call_count
+
+ if call_count == 0:
+ call_site_stats["functions_with_no_calls"] += 1
+ elif call_count > 10:
+ call_site_stats["functions_with_many_calls"] += 1
+
+ function_calls[func.name] = call_count
+
+ call_site_stats["avg_call_sites_per_function"] = total_calls / len(functions)
+
+ # Get the most called functions
+ sorted_functions = sorted(
+ function_calls.items(), key=lambda x: x[1], reverse=True
+ )
+ call_site_stats["most_called_functions"] = [
+ {"name": name, "calls": calls} for name, calls in sorted_functions[:10]
+ ]
+
+ return call_site_stats
+
+ def get_async_function_detection(self) -> dict[str, Any]:
+ """Detect async functions."""
+ functions = list(self.codebase.functions)
+ async_stats = {
+ "total_async_functions": 0,
+ "async_function_percentage": 0,
+ "async_functions": [],
+ }
+
+ if not functions:
+ return async_stats
+
+ async_functions = []
+
+ for func in functions:
+ if hasattr(func, "is_async") and func.is_async:
+ async_functions.append({
+ "name": func.name,
+ "file": func.file.file_path if hasattr(func, "file") else "Unknown",
+ })
+
+ async_stats["total_async_functions"] = len(async_functions)
+ async_stats["async_function_percentage"] = len(async_functions) / len(functions)
+ async_stats["async_functions"] = async_functions
+
+ return async_stats
+
+ def get_function_overload_analysis(self) -> dict[str, Any]:
+ """Analyze function overloads."""
+ functions = list(self.codebase.functions)
+ overload_stats = {
+ "total_overloaded_functions": 0,
+ "overloaded_function_percentage": 0,
+ "overloaded_functions": [],
+ }
+
+ if not functions:
+ return overload_stats
+
+ overloaded_functions = []
+ function_names = {}
+
+ for func in functions:
+ name = func.name
+
+ if name in function_names:
+ function_names[name].append(func)
+ else:
+ function_names[name] = [func]
+
+ for name, funcs in function_names.items():
+ if len(funcs) > 1:
+ overloaded_functions.append({
+ "name": name,
+ "overloads": len(funcs),
+ "file": funcs[0].file.file_path
+ if hasattr(funcs[0], "file")
+ else "Unknown",
+ })
+
+ overload_stats["total_overloaded_functions"] = len(overloaded_functions)
+ overload_stats["overloaded_function_percentage"] = (
+ len(overloaded_functions) / len(function_names) if function_names else 0
+ )
+ overload_stats["overloaded_functions"] = overloaded_functions
+
+ return overload_stats
+
+ def get_inheritance_hierarchy(self) -> dict[str, Any]:
+ """Get the inheritance hierarchy of classes."""
+ classes = list(self.codebase.classes)
+ hierarchy = {}
+
+ for cls in classes:
+ class_name = cls.name
+ parent_classes = []
+
+ # Get parent classes if available
+ if hasattr(cls, "parent_class_names"):
+ parent_classes = cls.parent_class_names
+
+ hierarchy[class_name] = {
+ "parent_classes": parent_classes,
+ "file": cls.file.file_path if hasattr(cls, "file") else "Unknown",
+ }
+
+ # Build inheritance tree
+ inheritance_tree = {}
+
+ for class_name, info in hierarchy.items():
+ if not info["parent_classes"]:
+ if class_name not in inheritance_tree:
+ inheritance_tree[class_name] = []
+ else:
+ for parent in info["parent_classes"]:
+ if parent not in inheritance_tree:
+ inheritance_tree[parent] = []
+ inheritance_tree[parent].append(class_name)
+
+ return {"class_hierarchy": hierarchy, "inheritance_tree": inheritance_tree}
+
+ def get_method_analysis(self) -> dict[str, Any]:
+ """Analyze class methods."""
+ classes = list(self.codebase.classes)
+ method_stats = {
+ "total_methods": 0,
+ "avg_methods_per_class": 0,
+ "classes_with_no_methods": 0,
+ "classes_with_many_methods": 0, # > 10 methods
+ "method_types": {"instance": 0, "static": 0, "class": 0, "property": 0},
+ }
+
+ if not classes:
+ return method_stats
+
+ total_methods = 0
+
+ for cls in classes:
+ methods = cls.methods if hasattr(cls, "methods") else []
+ method_count = len(methods)
+ total_methods += method_count
+
+ if method_count == 0:
+ method_stats["classes_with_no_methods"] += 1
+ elif method_count > 10:
+ method_stats["classes_with_many_methods"] += 1
+
+ # Analyze method types
+ for method in methods:
+ if hasattr(method, "is_static") and method.is_static:
+ method_stats["method_types"]["static"] += 1
+ elif hasattr(method, "is_class_method") and method.is_class_method:
+ method_stats["method_types"]["class"] += 1
+ elif hasattr(method, "is_property") and method.is_property:
+ method_stats["method_types"]["property"] += 1
+ else:
+ method_stats["method_types"]["instance"] += 1
+
+ method_stats["total_methods"] = total_methods
+ method_stats["avg_methods_per_class"] = (
+ total_methods / len(classes) if classes else 0
+ )
+
+ return method_stats
+
+ def get_attribute_analysis(self) -> dict[str, Any]:
+ """Analyze class attributes."""
+ classes = list(self.codebase.classes)
+ attribute_stats = {
+ "total_attributes": 0,
+ "avg_attributes_per_class": 0,
+ "classes_with_no_attributes": 0,
+ "classes_with_many_attributes": 0, # > 10 attributes
+ "attribute_types": {},
+ }
+
+ if not classes:
+ return attribute_stats
+
+ total_attributes = 0
+ attribute_types = {}
+
+ for cls in classes:
+ attributes = cls.attributes if hasattr(cls, "attributes") else []
+ attr_count = len(attributes)
+ total_attributes += attr_count
+
+ if attr_count == 0:
+ attribute_stats["classes_with_no_attributes"] += 1
+ elif attr_count > 10:
+ attribute_stats["classes_with_many_attributes"] += 1
+
+ # Analyze attribute types
+ for attr in attributes:
+ if hasattr(attr, "type") and attr.type:
+ attr_type = (
+ str(attr.type.source)
+ if hasattr(attr.type, "source")
+ else str(attr.type)
+ )
+
+ if attr_type in attribute_types:
+ attribute_types[attr_type] += 1
+ else:
+ attribute_types[attr_type] = 1
+
+ attribute_stats["total_attributes"] = total_attributes
+ attribute_stats["avg_attributes_per_class"] = (
+ total_attributes / len(classes) if classes else 0
+ )
+ attribute_stats["attribute_types"] = attribute_types
+
+ return attribute_stats
+
+ def get_constructor_analysis(self) -> dict[str, Any]:
+ """Analyze class constructors."""
+ classes = list(self.codebase.classes)
+ constructor_stats = {
+ "classes_with_constructor": 0,
+ "constructor_percentage": 0,
+ "avg_constructor_params": 0,
+ }
+
+ if not classes:
+ return constructor_stats
+
+ classes_with_constructor = 0
+ total_constructor_params = 0
+
+ for cls in classes:
+ constructor = None
+
+ # Find constructor
+ for method in cls.methods:
+ if hasattr(method, "is_constructor") and method.is_constructor:
+ constructor = method
+ break
+
+ if constructor:
+ classes_with_constructor += 1
+ param_count = (
+ len(constructor.parameters)
+ if hasattr(constructor, "parameters")
+ else 0
+ )
+ total_constructor_params += param_count
+
+ constructor_stats["classes_with_constructor"] = classes_with_constructor
+ constructor_stats["constructor_percentage"] = classes_with_constructor / len(
+ classes
+ )
+ constructor_stats["avg_constructor_params"] = (
+ total_constructor_params / classes_with_constructor
+ if classes_with_constructor
+ else 0
+ )
+
+ return constructor_stats
+
+ def get_interface_implementation_verification(self) -> dict[str, Any]:
+ """Verify interface implementations."""
+ classes = list(self.codebase.classes)
+ interfaces = list(self.codebase.interfaces)
+ implementation_stats = {
+ "total_interfaces": len(interfaces),
+ "classes_implementing_interfaces": 0,
+ "interface_implementations": {},
+ }
+
+ if not interfaces or not classes:
+ return implementation_stats
+
+ # Map interfaces to implementing classes
+ interface_implementations = {}
+
+ for interface in interfaces:
+ interface_name = interface.name
+ implementing_classes = []
+
+ for cls in classes:
+ if (
+ hasattr(cls, "parent_class_names")
+ and interface_name in cls.parent_class_names
+ ):
+ implementing_classes.append(cls.name)
+
+ interface_implementations[interface_name] = implementing_classes
+
+ # Count classes implementing interfaces
+ classes_implementing = set()
+ for implementers in interface_implementations.values():
+ classes_implementing.update(implementers)
+
+ implementation_stats["classes_implementing_interfaces"] = len(
+ classes_implementing
+ )
+ implementation_stats["interface_implementations"] = interface_implementations
+
+ return implementation_stats
+
+ def get_access_modifier_usage(self) -> dict[str, Any]:
+ """Analyze access modifier usage."""
+ symbols = list(self.codebase.symbols)
+ access_stats = {
+ "public": 0,
+ "private": 0,
+ "protected": 0,
+ "internal": 0,
+ "unknown": 0,
+ }
+
+ for symbol in symbols:
+ if hasattr(symbol, "is_private") and symbol.is_private:
+ access_stats["private"] += 1
+ elif hasattr(symbol, "is_protected") and symbol.is_protected:
+ access_stats["protected"] += 1
+ elif hasattr(symbol, "is_internal") and symbol.is_internal:
+ access_stats["internal"] += 1
+ elif hasattr(symbol, "is_public") and symbol.is_public:
+ access_stats["public"] += 1
+ else:
+ access_stats["unknown"] += 1
+
+ return access_stats
+
+ #
+ # Code Quality Analysis Methods
+ #
+
+ def get_unused_functions(self) -> list[dict[str, str]]:
+ """Get a list of unused functions."""
+ functions = list(self.codebase.functions)
+ unused_functions = []
+
+ for func in functions:
+ if hasattr(func, "call_sites") and len(func.call_sites) == 0:
+ # Skip special methods like __init__, __str__, etc.
+ if hasattr(func, "is_magic") and func.is_magic:
+ continue
+
+ # Skip entry points and main functions
+ if func.name in ["main", "__main__"]:
+ continue
+
+ unused_functions.append({
+ "name": func.name,
+ "file": func.file.file_path if hasattr(func, "file") else "Unknown",
+ })
+
+ return unused_functions
+
+ def get_unused_classes(self) -> list[dict[str, str]]:
+ """Get a list of unused classes."""
+ classes = list(self.codebase.classes)
+ unused_classes = []
+
+ for cls in classes:
+ if hasattr(cls, "symbol_usages") and len(cls.symbol_usages) == 0:
+ unused_classes.append({
+ "name": cls.name,
+ "file": cls.file.file_path if hasattr(cls, "file") else "Unknown",
+ })
+
+ return unused_classes
+
+ def get_unused_variables(self) -> list[dict[str, str]]:
+ """Get a list of unused variables."""
+ global_vars = list(self.codebase.global_vars)
+ unused_vars = []
+
+ for var in global_vars:
+ if hasattr(var, "symbol_usages") and len(var.symbol_usages) == 0:
+ unused_vars.append({
+ "name": var.name,
+ "file": var.file.file_path if hasattr(var, "file") else "Unknown",
+ })
+
+ return unused_vars
+
    # NOTE(review): duplicate definition — an identical `get_unused_imports`
    # appears earlier in this same class body; in Python the later definition
    # silently replaces the earlier one. One of the two should be removed.
    def get_unused_imports(self) -> list[dict[str, str]]:
        """Get a list of unused imports.

        Returns:
            One entry per import with zero recorded usages, carrying the
            containing file's path and the import's source text.
        """
        files = list(self.codebase.files)
        unused_imports = []

        for file in files:
            # Binary files have no parsed imports to inspect.
            if file.is_binary:
                continue

            for imp in file.imports:
                # An import with an empty usages list is considered unused.
                if hasattr(imp, "usages") and len(imp.usages) == 0:
                    unused_imports.append({
                        "file": file.file_path,
                        "import": imp.source,
                    })

        return unused_imports
+
+ def get_similar_function_detection(self) -> list[dict[str, Any]]:
+ """Detect similar functions."""
+ functions = list(self.codebase.functions)
+ similar_functions = []
+
+ # Group functions by name
+ function_groups = {}
+
+ for func in functions:
+ name = func.name
+
+ if name in function_groups:
+ function_groups[name].append(func)
+ else:
+ function_groups[name] = [func]
+
+ # Find similar functions
+ for name, funcs in function_groups.items():
+ if len(funcs) > 1:
+ similar_functions.append({
+ "name": name,
+ "count": len(funcs),
+ "files": [
+ func.file.file_path if hasattr(func, "file") else "Unknown"
+ for func in funcs
+ ],
+ })
+
+ return similar_functions
+
+ def get_repeated_code_patterns(self) -> dict[str, Any]:
+ """Detect repeated code patterns."""
+ functions = list(self.codebase.functions)
+
+ # This is a simplified implementation that looks for functions with similar structure
+ # A more advanced implementation would use code clone detection algorithms
+
+ # Group functions by length (in lines)
+ functions_by_length = {}
+
+ for func in functions:
+ func_source = func.source
+ func_lines = func_source.count("\n") + 1
+
+ if func_lines in functions_by_length:
+ functions_by_length[func_lines].append(func)
+ else:
+ functions_by_length[func_lines] = [func]
+
+ # Find potential code clones (functions with same length)
+ potential_clones = {}
+
+ for length, funcs in functions_by_length.items():
+ if len(funcs) > 1:
+ potential_clones[length] = [func.name for func in funcs]
+
+ return {"potential_code_clones": potential_clones}
+
+ def get_refactoring_opportunities(self) -> dict[str, Any]:
+ """Identify refactoring opportunities."""
+ refactoring_opportunities = {
+ "long_functions": [],
+ "large_classes": [],
+ "high_coupling_files": [],
+ "low_cohesion_files": [],
+ }
+
+ # Find long functions
+ functions = list(self.codebase.functions)
+ for func in functions:
+ func_source = func.source
+ func_lines = func_source.count("\n") + 1
+
+ if func_lines > 50: # Threshold for long functions
+ refactoring_opportunities["long_functions"].append({
+ "name": func.name,
+ "file": func.file.file_path if hasattr(func, "file") else "Unknown",
+ "lines": func_lines,
+ })
+
+ # Find large classes
+ classes = list(self.codebase.classes)
+ for cls in classes:
+ methods = cls.methods if hasattr(cls, "methods") else []
+ attributes = cls.attributes if hasattr(cls, "attributes") else []
+
+ if len(methods) + len(attributes) > 20: # Threshold for large classes
+ refactoring_opportunities["large_classes"].append({
+ "name": cls.name,
+ "file": cls.file.file_path if hasattr(cls, "file") else "Unknown",
+ "methods": len(methods),
+ "attributes": len(attributes),
+ })
+
+ # Find high coupling files
+ files = list(self.codebase.files)
+ for file in files:
+ if file.is_binary:
+ continue
+
+ imports = file.imports
+ if len(imports) > 15: # Threshold for high coupling
+ refactoring_opportunities["high_coupling_files"].append({
+ "file": file.file_path,
+ "imports": len(imports),
+ })
+
+ # Find low cohesion files
+ cohesion_metrics = self.get_module_cohesion_analysis()
+ file_cohesion = cohesion_metrics.get("file_cohesion", {})
+
+ for file_path, cohesion in file_cohesion.items():
+ if cohesion < 0.3: # Threshold for low cohesion
+ refactoring_opportunities["low_cohesion_files"].append({
+ "file": file_path,
+ "cohesion": cohesion,
+ })
+
+ return refactoring_opportunities
+
+ def calculate_cyclomatic_complexity(self) -> dict[str, Any]:
+ """Calculate cyclomatic complexity for functions."""
+ functions = list(self.codebase.functions)
+ complexity_results = {
+ "avg_complexity": 0,
+ "max_complexity": 0,
+ "complexity_distribution": {
+ "low": 0, # 1-5
+ "moderate": 0, # 6-10
+ "high": 0, # 11-20
+ "very_high": 0, # > 20
+ },
+ "complex_functions": [],
+ }
+
+ if not functions:
+ return complexity_results
+
+ total_complexity = 0
+ max_complexity = 0
+ complex_functions = []
+
+ for func in functions:
+ # A simple approximation of cyclomatic complexity
+ # In a real implementation, we would parse the AST and count decision points
+ source = func.source
+
+ # Count decision points
+ if_count = source.count("if ") + source.count("elif ")
+ for_count = source.count("for ")
+ while_count = source.count("while ")
+ case_count = (
+ source.count("case ") + source.count("switch ") + source.count("match ")
+ )
+ catch_count = source.count("catch ") + source.count("except ")
+ and_count = source.count(" && ") + source.count(" and ")
+ or_count = source.count(" || ") + source.count(" or ")
+
+ # Calculate complexity
+ complexity = (
+ 1
+ + if_count
+ + for_count
+ + while_count
+ + case_count
+ + catch_count
+ + and_count
+ + or_count
+ )
+
+ total_complexity += complexity
+ max_complexity = max(max_complexity, complexity)
+
+ # Categorize complexity
+ if complexity <= 5:
+ complexity_results["complexity_distribution"]["low"] += 1
+ elif complexity <= 10:
+ complexity_results["complexity_distribution"]["moderate"] += 1
+ elif complexity <= 20:
+ complexity_results["complexity_distribution"]["high"] += 1
+ else:
+ complexity_results["complexity_distribution"]["very_high"] += 1
+
+ # Track complex functions
+ if complexity > 10:
+ complex_functions.append({
+ "name": func.name,
+ "file": func.file.file_path if hasattr(func, "file") else "Unknown",
+ "complexity": complexity,
+ })
+
+ complexity_results["avg_complexity"] = total_complexity / len(functions)
+ complexity_results["max_complexity"] = max_complexity
+ complexity_results["complex_functions"] = sorted(
+ complex_functions, key=lambda x: x["complexity"], reverse=True
+ )[:10] # Top 10 most complex
+
+ return complexity_results
+
+ def cc_rank(self) -> dict[str, str]:
+ """Rank the codebase based on cyclomatic complexity."""
+ complexity_results = self.calculate_cyclomatic_complexity()
+ avg_complexity = complexity_results["avg_complexity"]
+
+ if avg_complexity < 5:
+ rank = "A"
+ description = "Excellent: Low complexity, highly maintainable code"
+ elif avg_complexity < 10:
+ rank = "B"
+ description = "Good: Moderate complexity, maintainable code"
+ elif avg_complexity < 15:
+ rank = "C"
+ description = (
+ "Fair: Moderate to high complexity, some maintenance challenges"
+ )
+ elif avg_complexity < 20:
+ rank = "D"
+ description = "Poor: High complexity, difficult to maintain"
+ else:
+ rank = "F"
+ description = (
+ "Very Poor: Very high complexity, extremely difficult to maintain"
+ )
+
+ return {
+ "rank": rank,
+ "description": description,
+ "avg_complexity": avg_complexity,
+ }
+
    def get_operators_and_operands(self) -> dict[str, Any]:
        """Get operators and operands for Halstead metrics.

        String-level approximation over raw file content (no AST).

        Known limitations visible in this implementation:
        - `content.count(op)` counts substrings, so multi-character operators
          are double-counted by their parts (e.g. every "==" also increments
          the "=" tally; every "<<=" also increments "<<" and "=").
        - Operand counting matches every identifier-like word not in the
          keyword blocklist, including words inside strings and comments.

        Returns:
            Dict with unique/total operator and operand counts plus the top-10
            most frequent of each.
        """
        files = list(self.codebase.files)

        # Define common operators
        operators = [
            "+",
            "-",
            "*",
            "/",
            "%",
            "=",
            "==",
            "!=",
            "<",
            ">",
            "<=",
            ">=",
            "&&",
            "||",
            "!",
            "&",
            "|",
            "^",
            "~",
            "<<",
            ">>",
            "++",
            "--",
            "+=",
            "-=",
            "*=",
            "/=",
            "%=",
            "&=",
            "|=",
            "^=",
            "<<=",
            ">>=",
        ]

        # Count operators and operands
        operator_count = {}
        operand_count = {}

        for file in files:
            # Binary files carry no analyzable text.
            if file.is_binary:
                continue

            content = file.content

            # Count operators (substring counts — see limitations above).
            for op in operators:
                count = content.count(op)
                if count > 0:
                    if op in operator_count:
                        operator_count[op] += count
                    else:
                        operator_count[op] = count

            # Simplified operand counting (this is a rough approximation)
            # In a real implementation, we would parse the AST and extract identifiers
            words = re.findall(r"\b[a-zA-Z_][a-zA-Z0-9_]*\b", content)
            for word in words:
                # Exclude common language keywords from the operand tally.
                if word not in [
                    "if",
                    "else",
                    "for",
                    "while",
                    "return",
                    "break",
                    "continue",
                    "class",
                    "def",
                    "function",
                    "import",
                    "from",
                    "as",
                    "try",
                    "except",
                    "finally",
                    "with",
                    "in",
                    "is",
                    "not",
                    "and",
                    "or",
                ]:
                    if word in operand_count:
                        operand_count[word] += 1
                    else:
                        operand_count[word] = 1

        return {
            "unique_operators": len(operator_count),
            "total_operators": sum(operator_count.values()),
            "unique_operands": len(operand_count),
            "total_operands": sum(operand_count.values()),
            # Top-10 by frequency, descending.
            "top_operators": dict(
                sorted(operator_count.items(), key=lambda x: x[1], reverse=True)[:10]
            ),
            "top_operands": dict(
                sorted(operand_count.items(), key=lambda x: x[1], reverse=True)[:10]
            ),
        }
+
def calculate_halstead_volume(self) -> dict[str, float]:
    """Compute Halstead size/effort estimates from operator/operand counts.

    Returns:
        Dict with ``vocabulary``, ``length``, ``volume``, ``difficulty``,
        ``effort``, estimated ``time`` (seconds) and estimated ``bugs``.
    """
    counts = self.get_operators_and_operands()

    unique_ops = counts["unique_operators"]
    unique_opnds = counts["unique_operands"]
    total_ops = counts["total_operators"]
    total_opnds = counts["total_operands"]

    vocabulary = unique_ops + unique_opnds
    length = total_ops + total_opnds

    # Guard the logarithm/division against an empty vocabulary.
    volume = 0 if vocabulary <= 0 else length * math.log2(vocabulary)
    difficulty = (
        0 if unique_opnds <= 0 else (unique_ops / 2) * (total_opnds / unique_opnds)
    )
    effort = volume * difficulty

    return {
        "vocabulary": vocabulary,
        "length": length,
        "volume": volume,
        "difficulty": difficulty,
        "effort": effort,
        # Halstead's empirical constants: 18 "Stroud moments" per second,
        # roughly one delivered bug per 3000 units of volume.
        "time": effort / 18,
        "bugs": volume / 3000,
    }
+
def count_lines(self) -> dict[str, int]:
    """Count total, code, comment and blank lines across all text files.

    Returns:
        Dict with ``total_lines``, ``code_lines``, ``comment_lines``,
        ``blank_lines`` and the comment-to-code ``comment_ratio``.
    """
    total = code = comments = blanks = 0

    # Prefix-based, language-agnostic comment detection: Python '#',
    # C-style '//', and block-comment lines '/*' or '*'.
    comment_prefixes = ("#", "//", "/*", "*")

    for file in self.codebase.files:
        if file.is_binary:
            continue

        file_lines = file.content.split("\n")
        total += len(file_lines)

        for raw in file_lines:
            stripped = raw.strip()
            if not stripped:
                blanks += 1
            elif stripped.startswith(comment_prefixes):
                comments += 1
            else:
                code += 1

    return {
        "total_lines": total,
        "code_lines": code,
        "comment_lines": comments,
        "blank_lines": blanks,
        # Avoid division by zero on a codebase with no code lines.
        "comment_ratio": comments / code if code else 0,
    }
+
def calculate_maintainability_index(self) -> dict[str, float]:
    """Compute the maintainability index (MI) for the codebase.

    Uses the classic formula
    ``MI = 171 - 5.2*ln(V) - 0.23*CC - 16.2*ln(LOC)`` where V is the
    Halstead volume, CC the average cyclomatic complexity and LOC the
    number of code lines.

    Returns:
        Dict with the raw ``maintainability_index`` and a clamped 0-100
        ``normalized_maintainability_index``.
    """
    volume = self.calculate_halstead_volume()["volume"]
    avg_complexity = self.calculate_cyclomatic_complexity()["avg_complexity"]
    loc = self.count_lines()["code_lines"]

    # ln() is undefined at zero, so an empty codebase scores 0.
    if volume > 0 and loc > 0:
        mi = (
            171
            - 5.2 * math.log(volume)
            - 0.23 * avg_complexity
            - 16.2 * math.log(loc)
        )
    else:
        mi = 0

    # Project the 171-point scale onto 0-100 and clamp.
    normalized = max(0, min(100, mi * 100 / 171))

    return {
        "maintainability_index": mi,
        "normalized_maintainability_index": normalized,
    }
+
def get_maintainability_rank(self) -> dict[str, Any]:
    """Rank the codebase based on its normalized maintainability index.

    Returns:
        Dict with the letter ``rank`` (A-F), a ``description`` and the
        numeric ``maintainability_index`` the rank was derived from.
        The mixed value types (str and float) are why the return type is
        ``dict[str, Any]`` rather than ``dict[str, str]``.
    """
    mi = self.calculate_maintainability_index()["normalized_maintainability_index"]

    # Thresholds on the 0-100 normalized scale.
    if mi >= 85:
        rank, description = "A", "Highly maintainable"
    elif mi >= 65:
        rank, description = "B", "Maintainable"
    elif mi >= 40:
        rank, description = "C", "Moderately maintainable"
    elif mi >= 20:
        rank, description = "D", "Difficult to maintain"
    else:
        rank, description = "F", "Very difficult to maintain"

    return {"rank": rank, "description": description, "maintainability_index": mi}
+
    def get_cognitive_complexity(self) -> dict[str, Any]:
        """Calculate cognitive complexity for functions.

        Uses a line-based heuristic rather than an AST walk: each
        control-flow keyword adds 1 plus the current nesting level,
        boolean operators and jump statements add 1 each.

        Returns:
            Dict with average/max complexity, a distribution bucketed as
            low (<=5), moderate (6-10), high (11-20) and very_high (>20),
            and the ten most complex functions.
        """
        functions = list(self.codebase.functions)
        complexity_results = {
            "avg_complexity": 0,
            "max_complexity": 0,
            "complexity_distribution": {
                "low": 0,  # 0-5
                "moderate": 0,  # 6-10
                "high": 0,  # 11-20
                "very_high": 0,  # > 20
            },
            "complex_functions": [],
        }

        # Empty codebase: return the zeroed skeleton unchanged.
        if not functions:
            return complexity_results

        total_complexity = 0
        max_complexity = 0
        complex_functions = []

        for func in functions:
            # A simple approximation of cognitive complexity
            # In a real implementation, we would parse the AST and analyze control flow
            source = func.source

            # Count decision points with nesting
            nesting_level = 0
            cognitive_complexity = 0

            lines = source.split("\n")
            for line in lines:
                line = line.strip()

                # Increase nesting level
                if re.search(r"\b(if|for|while|switch|case|catch|try)\b", line):
                    cognitive_complexity += 1 + nesting_level
                    nesting_level += 1

                # Decrease nesting level
                # NOTE(review): treating a trailing ":" as a dedent means a
                # Python "if x:" line increments and then immediately
                # decrements nesting — confirm this is the intended
                # approximation for colon-delimited languages.
                if line.startswith("}") or line.endswith(":"):
                    nesting_level = max(0, nesting_level - 1)

                # Add complexity for boolean operators
                cognitive_complexity += line.count(" && ") + line.count(" and ")
                cognitive_complexity += line.count(" || ") + line.count(" or ")

                # Add complexity for jumps
                if re.search(r"\b(break|continue|goto|return)\b", line):
                    cognitive_complexity += 1

            total_complexity += cognitive_complexity
            max_complexity = max(max_complexity, cognitive_complexity)

            # Categorize complexity
            if cognitive_complexity <= 5:
                complexity_results["complexity_distribution"]["low"] += 1
            elif cognitive_complexity <= 10:
                complexity_results["complexity_distribution"]["moderate"] += 1
            elif cognitive_complexity <= 20:
                complexity_results["complexity_distribution"]["high"] += 1
            else:
                complexity_results["complexity_distribution"]["very_high"] += 1

            # Track complex functions
            if cognitive_complexity > 10:
                complex_functions.append({
                    "name": func.name,
                    "file": func.file.file_path if hasattr(func, "file") else "Unknown",
                    "complexity": cognitive_complexity,
                })

        complexity_results["avg_complexity"] = total_complexity / len(functions)
        complexity_results["max_complexity"] = max_complexity
        complexity_results["complex_functions"] = sorted(
            complex_functions, key=lambda x: x["complexity"], reverse=True
        )[:10]  # Top 10 most complex

        return complexity_results
+
def get_nesting_depth_analysis(self) -> dict[str, Any]:
    """Analyze how deeply control structures are nested in each function.

    Returns:
        Dict with average/overall maximum nesting, a distribution bucketed
        as low (<=2), moderate (3-4), high (5-6) and very_high (>6), and
        the ten most deeply nested functions.
    """
    functions = list(self.codebase.functions)
    results = {
        "avg_max_nesting": 0,
        "max_nesting": 0,
        "nesting_distribution": {
            "low": 0,  # 0-2
            "moderate": 0,  # 3-4
            "high": 0,  # 5-6
            "very_high": 0,  # > 6
        },
        "deeply_nested_functions": [],
    }

    if not functions:
        return results

    opener = re.compile(r"\b(if|for|while|switch|case|catch|try)\b")
    nesting_sum = 0
    deepest_overall = 0
    flagged = []

    for func in functions:
        depth = 0
        deepest = 0

        for raw in func.source.split("\n"):
            stripped = raw.strip()

            # A control keyword opens a nesting level, unless the line is
            # a closing brace.
            if opener.search(stripped) and not stripped.startswith("}"):
                depth += 1
                deepest = max(deepest, depth)

            # A closing brace ends the innermost level.
            if stripped.startswith("}"):
                depth = max(0, depth - 1)

        nesting_sum += deepest
        deepest_overall = max(deepest_overall, deepest)

        if deepest <= 2:
            results["nesting_distribution"]["low"] += 1
        elif deepest <= 4:
            results["nesting_distribution"]["moderate"] += 1
        elif deepest <= 6:
            results["nesting_distribution"]["high"] += 1
        else:
            results["nesting_distribution"]["very_high"] += 1

        # Anything deeper than four levels is worth surfacing.
        if deepest > 4:
            flagged.append({
                "name": func.name,
                "file": func.file.file_path if hasattr(func, "file") else "Unknown",
                "max_nesting": deepest,
            })

    results["avg_max_nesting"] = nesting_sum / len(functions)
    results["max_nesting"] = deepest_overall
    results["deeply_nested_functions"] = sorted(
        flagged, key=lambda x: x["max_nesting"], reverse=True
    )[:10]  # ten deepest

    return results
+
def get_function_size_metrics(self) -> dict[str, Any]:
    """Collect line-count statistics for every function in the codebase.

    Returns:
        Dict with average/max function length, a size distribution
        (small <10, medium 10-29, large 30-99, very_large >=100 lines)
        and the ten largest functions.
    """
    functions = list(self.codebase.functions)
    size_metrics = {
        "avg_function_length": 0,
        "max_function_length": 0,
        "function_size_distribution": {
            "small": 0,  # < 10 lines
            "medium": 0,  # 10-30 lines
            "large": 0,  # 30-100 lines
            "very_large": 0,  # > 100 lines
        },
        "largest_functions": [],
    }

    if not functions:
        return size_metrics

    distribution = size_metrics["function_size_distribution"]
    lengths = []
    oversized = []

    for func in functions:
        # Line count = newline count + 1.
        line_count = func.source.count("\n") + 1
        lengths.append(line_count)

        if line_count < 10:
            distribution["small"] += 1
        elif line_count < 30:
            distribution["medium"] += 1
        elif line_count < 100:
            distribution["large"] += 1
        else:
            distribution["very_large"] += 1

        # Functions over 30 lines are surfaced as refactoring candidates.
        if line_count > 30:
            oversized.append({
                "name": func.name,
                "file": func.file.file_path if hasattr(func, "file") else "Unknown",
                "lines": line_count,
            })

    size_metrics["avg_function_length"] = sum(lengths) / len(functions)
    size_metrics["max_function_length"] = max(lengths)
    size_metrics["largest_functions"] = sorted(
        oversized, key=lambda x: x["lines"], reverse=True
    )[:10]  # ten largest

    return size_metrics
+
+ #
+ # Visualization and Output Methods
+ #
+
+ def _generate_html_report(self, output_file: str) -> None:
+ """Generate an HTML report of the analysis results."""
+ if not output_file:
+ output_file = "codebase_analysis_report.html"
+
+ # Simple HTML template
+ html = f"""
+
+
+
+ Codebase Analysis Report
+
+
+
+ Codebase Analysis Report
+
+
Metadata
+
Repository: {self.results["metadata"]["repo_name"]}
+
Analysis Time: {self.results["metadata"]["analysis_time"]}
+
Language: {self.results["metadata"]["language"]}
+
+ """
+
+ # Add each category
+ for category, metrics in self.results["categories"].items():
+ html += f"""
+
+
{category.replace("_", " ").title()}
+ """
+
+ for metric_name, metric_value in metrics.items():
+ html += f"""
+
+
{metric_name.replace("_", " ").title()}
+
{json.dumps(metric_value, indent=2)}
+
+ """
+
+ html += "
"
+
+ html += """
+
+
+ """
+
+ with open(output_file, "w") as f:
+ f.write(html)
+
+ self.console.print(
+ f"[bold green]HTML report saved to {output_file}[/bold green]"
+ )
+
def _print_console_report(self) -> None:
    """Print a formatted summary of the analysis results to the console.

    Renders metadata first, then one section per analysis category:
    dict metrics become two-column tables, lists of dicts become tables
    capped at ten rows, and everything else is printed verbatim.

    Fixes over the previous version: the redundant nested
    ``len(metric_value) > 0`` check is removed, and table keys are
    stringified consistently in the dict branch.
    """
    metadata = self.results["metadata"]
    self.console.print(
        f"[bold blue]Codebase Analysis Report for {metadata['repo_name']}[/bold blue]"
    )
    self.console.print(f"[bold]Analysis Time:[/bold] {metadata['analysis_time']}")
    self.console.print(f"[bold]Language:[/bold] {metadata['language']}")

    for category, metrics in self.results["categories"].items():
        self.console.print(
            f"\n[bold green]{category.replace('_', ' ').title()}[/bold green]"
        )

        for metric_name, metric_value in metrics.items():
            self.console.print(
                f"[bold]{metric_name.replace('_', ' ').title()}:[/bold]"
            )

            if isinstance(metric_value, dict):
                table = Table(show_header=True)
                table.add_column("Key")
                table.add_column("Value")
                for k, v in metric_value.items():
                    # Nested values are flattened to their repr.
                    table.add_row(str(k), str(v))
                self.console.print(table)
            elif isinstance(metric_value, list):
                if metric_value and isinstance(metric_value[0], dict):
                    # Columns come from the first item's keys.
                    table = Table(show_header=True)
                    for key in metric_value[0]:
                        table.add_column(key)
                    for item in metric_value[:10]:  # show at most 10 rows
                        table.add_row(*[str(v) for v in item.values()])
                    self.console.print(table)
                    if len(metric_value) > 10:
                        self.console.print(
                            f"... and {len(metric_value) - 10} more items"
                        )
                else:
                    self.console.print(str(metric_value))
            else:
                self.console.print(str(metric_value))
+
def get_monthly_commits(self) -> dict[str, int]:
    """Count commits per calendar month, oldest month first.

    Returns:
        Mapping of "YYYY-MM" to commit count, or ``{"error": message}``
        if the commit history cannot be fetched.
    """
    try:
        counts: dict[str, int] = {}

        for commit in self.codebase.github.repo.get_commits():
            date = commit.commit.author.date
            key = f"{date.year}-{date.month:02d}"
            counts[key] = counts.get(key, 0) + 1

        # Return months in chronological order.
        return dict(sorted(counts.items()))
    except Exception as e:
        # Fetch failures are reported in-band rather than raised.
        return {"error": str(e)}
+
+
def main():
    """Command-line entry point for the codebase analyzer.

    Parses CLI arguments, runs the requested analysis, and reports where
    the results were written. Exits with status 1 (after printing the
    error and a traceback) if the analysis fails.
    """
    parser = argparse.ArgumentParser(description="Comprehensive Codebase Analyzer")

    # Repository source: exactly one of --repo-url / --repo-path is required.
    source_group = parser.add_mutually_exclusive_group(required=True)
    source_group.add_argument("--repo-url", help="URL of the repository to analyze")
    source_group.add_argument(
        "--repo-path", help="Local path to the repository to analyze"
    )

    # Analysis options
    parser.add_argument(
        "--language",
        help="Programming language of the codebase (auto-detected if not provided)",
    )
    parser.add_argument(
        "--categories", nargs="+", help="Categories to analyze (default: all)"
    )

    # Output options
    parser.add_argument(
        "--output-format",
        choices=["json", "html", "console"],
        default="console",
        help="Output format",
    )
    parser.add_argument("--output-file", help="Path to the output file")

    args = parser.parse_args()

    try:
        # Initialize the analyzer
        analyzer = CodebaseAnalyzer(
            repo_url=args.repo_url, repo_path=args.repo_path, language=args.language
        )

        # Perform the analysis
        analyzer.analyze(
            categories=args.categories,
            output_format=args.output_format,
            output_file=args.output_file,
        )

        # Print success message
        if args.output_format == "json" and args.output_file:
            print(f"Analysis results saved to {args.output_file}")
        elif args.output_format == "html":
            print(
                f"HTML report saved to {args.output_file or 'codebase_analysis_report.html'}"
            )

    except Exception as e:
        # Surface the failure with a traceback, then exit non-zero.
        print(f"Error: {e}")
        import traceback

        traceback.print_exc()
        sys.exit(1)


if __name__ == "__main__":
    main()
diff --git a/codegen-on-oss/codegen_on_oss/analyzers/__init__.py b/codegen-on-oss/codegen_on_oss/analyzers/__init__.py
new file mode 100644
index 000000000..467d14b0f
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/analyzers/__init__.py
@@ -0,0 +1,132 @@
+"""
+Codebase Analysis Module
+
+This package provides comprehensive codebase analysis tools for static code analysis,
+quality checking, dependency analysis, and PR validation. It's designed to be used
+as an API backend for frontend applications.
+"""
+
+# Modern analyzer architecture
+from codegen_on_oss.analyzers.analyzer import (
+ AnalyzerManager,
+ AnalyzerPlugin,
+ AnalyzerRegistry,
+ CodeQualityPlugin,
+ DependencyPlugin,
+)
+# Main API interface
+from codegen_on_oss.analyzers.api import (
+ CodegenAnalyzerAPI,
+ api_analyze_codebase,
+ api_analyze_pr,
+ api_get_static_errors,
+ api_get_visualization,
+ create_api,
+)
+
+# Legacy analyzer interfaces (for backward compatibility)
+from codegen_on_oss.analyzers.base_analyzer import BaseCodeAnalyzer
+
+# Core analysis modules
+from codegen_on_oss.analyzers.code_quality import CodeQualityAnalyzer
+from codegen_on_oss.analyzers.codebase_analysis import (
+ get_class_summary,
+ get_codebase_summary,
+ get_dependency_graph,
+ get_file_complexity_metrics,
+ get_file_summary,
+ get_function_summary,
+ get_symbol_references,
+ get_symbol_summary,
+)
+from codegen_on_oss.analyzers.codebase_analyzer import CodebaseAnalyzer
+from codegen_on_oss.analyzers.dependencies import DependencyAnalyzer
+# Diff tracking
+from codegen_on_oss.analyzers.diff_lite import ChangeType, DiffLite
+from codegen_on_oss.analyzers.error_analyzer import CodebaseAnalyzer as ErrorAnalyzer
+
+# Issue tracking system
+from codegen_on_oss.analyzers.issues import (
+ AnalysisType,
+ CodeLocation,
+ Issue,
+ IssueCategory,
+ IssueCollection,
+ IssueSeverity,
+)
+# Analysis result models
+from codegen_on_oss.analyzers.models.analysis_result import (
+ AnalysisResult,
+ CodeQualityResult,
+ DependencyResult,
+ PrAnalysisResult,
+)
+# Parser module
+from codegen_on_oss.analyzers.parser import (
+ ASTNode,
+ BaseParser,
+ CodegenParser,
+ JavaScriptParser,
+ PythonParser,
+ TypeScriptParser,
+ create_parser,
+ parse_code,
+ parse_file,
+)
+
+__all__ = [
+ # Main API
+ "CodegenAnalyzerAPI",
+ "create_api",
+ "api_analyze_codebase",
+ "api_analyze_pr",
+ "api_get_visualization",
+ "api_get_static_errors",
+ # Modern architecture
+ "AnalyzerManager",
+ "AnalyzerPlugin",
+ "AnalyzerRegistry",
+ "CodeQualityPlugin",
+ "DependencyPlugin",
+ # Issue tracking
+ "Issue",
+ "IssueCollection",
+ "IssueSeverity",
+ "AnalysisType",
+ "IssueCategory",
+ "CodeLocation",
+ # Analysis results
+ "AnalysisResult",
+ "CodeQualityResult",
+ "DependencyResult",
+ "PrAnalysisResult",
+ # Core analyzers
+ "CodeQualityAnalyzer",
+ "DependencyAnalyzer",
+ # Codebase analysis utilities
+ "get_codebase_summary",
+ "get_file_summary",
+ "get_class_summary",
+ "get_function_summary",
+ "get_symbol_summary",
+ "get_dependency_graph",
+ "get_symbol_references",
+ "get_file_complexity_metrics",
+ # Diff tracking
+ "ChangeType",
+ "DiffLite",
+ # Legacy interfaces (for backward compatibility)
+ "BaseCodeAnalyzer",
+ "CodebaseAnalyzer",
+ "ErrorAnalyzer",
+ # Parser module
+ "ASTNode",
+ "BaseParser",
+ "CodegenParser",
+ "JavaScriptParser",
+ "PythonParser",
+ "TypeScriptParser",
+ "create_parser",
+ "parse_code",
+ "parse_file",
+]
diff --git a/codegen-on-oss/codegen_on_oss/analyzers/analysis_result.py b/codegen-on-oss/codegen_on_oss/analyzers/analysis_result.py
new file mode 100644
index 000000000..140bb73ae
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/analyzers/analysis_result.py
@@ -0,0 +1,348 @@
+#!/usr/bin/env python3
+"""
+Analysis Result Model
+
+This module defines data models for analysis results, providing a standardized
+way to represent and serialize analysis outcomes.
+"""
+
+import json
+from dataclasses import asdict, dataclass, field
+from datetime import datetime
+from typing import Any
+
+from codegen_on_oss.analyzers.issues import AnalysisType, IssueCollection
+
+
+@dataclass
+class AnalysisSummary:
+ """Summary statistics for an analysis."""
+
+ total_files: int = 0
+ total_functions: int = 0
+ total_classes: int = 0
+ total_issues: int = 0
+ analysis_time: str = field(default_factory=lambda: datetime.now().isoformat())
+ analysis_duration_ms: int | None = None
+
+ def to_dict(self) -> dict[str, Any]:
+ """Convert to dictionary representation."""
+ return {k: v for k, v in asdict(self).items() if v is not None}
+
+
@dataclass
class CodeQualityResult:
    """Results of code quality analysis."""

    dead_code: dict[str, Any] = field(default_factory=dict)
    complexity: dict[str, Any] = field(default_factory=dict)
    parameter_issues: dict[str, Any] = field(default_factory=dict)
    style_issues: dict[str, Any] = field(default_factory=dict)
    implementation_issues: dict[str, Any] = field(default_factory=dict)
    maintainability: dict[str, Any] = field(default_factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Convert to dictionary representation.

        ``asdict`` already returns a fresh dict, so no extra copy
        through ``dict(...items())`` is needed.
        """
        return asdict(self)
+
+
@dataclass
class DependencyResult:
    """Results of dependency analysis."""

    import_dependencies: dict[str, Any] = field(default_factory=dict)
    circular_dependencies: dict[str, Any] = field(default_factory=dict)
    module_coupling: dict[str, Any] = field(default_factory=dict)
    external_dependencies: dict[str, Any] = field(default_factory=dict)
    call_graph: dict[str, Any] = field(default_factory=dict)
    class_hierarchy: dict[str, Any] = field(default_factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Convert to dictionary representation.

        ``asdict`` already returns a fresh dict, so no extra copy
        through ``dict(...items())`` is needed.
        """
        return asdict(self)
+
+
@dataclass
class PrAnalysisResult:
    """Results of PR analysis."""

    modified_symbols: list[dict[str, Any]] = field(default_factory=list)
    added_symbols: list[dict[str, Any]] = field(default_factory=list)
    removed_symbols: list[dict[str, Any]] = field(default_factory=list)
    signature_changes: list[dict[str, Any]] = field(default_factory=list)
    impact: dict[str, Any] = field(default_factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Convert to dictionary representation.

        ``asdict`` already returns a fresh dict, so no extra copy
        through ``dict(...items())`` is needed.
        """
        return asdict(self)
+
+
@dataclass
class SecurityResult:
    """Results of security analysis."""

    vulnerabilities: list[dict[str, Any]] = field(default_factory=list)
    secrets: list[dict[str, Any]] = field(default_factory=list)
    injection_risks: list[dict[str, Any]] = field(default_factory=list)

    def to_dict(self) -> dict[str, Any]:
        """Convert to dictionary representation.

        ``asdict`` already returns a fresh dict, so no extra copy
        through ``dict(...items())`` is needed.
        """
        return asdict(self)
+
+
@dataclass
class PerformanceResult:
    """Results of performance analysis."""

    bottlenecks: list[dict[str, Any]] = field(default_factory=list)
    optimization_opportunities: list[dict[str, Any]] = field(default_factory=list)
    memory_issues: list[dict[str, Any]] = field(default_factory=list)

    def to_dict(self) -> dict[str, Any]:
        """Convert to dictionary representation.

        ``asdict`` already returns a fresh dict, so no extra copy
        through ``dict(...items())`` is needed.
        """
        return asdict(self)
+
+
@dataclass
class MetadataEntry:
    """A single key/value piece of metadata attached to an analysis."""

    key: str
    value: Any

    def to_dict(self) -> dict[str, Any]:
        """Return the entry as a two-field dictionary."""
        return dict(key=self.key, value=self.value)
+
+
@dataclass
class AnalysisResult:
    """Comprehensive analysis result.

    Aggregates the outcome of one or more analysis passes: summary
    statistics, the collected issues, and optional per-category result
    objects (code quality, dependencies, PR, security, performance),
    plus repository metadata. Serializable to/from plain dicts and
    JSON files, and mergeable with other results.
    """

    # Core data
    analysis_types: list[AnalysisType]
    summary: AnalysisSummary = field(default_factory=AnalysisSummary)
    issues: IssueCollection = field(default_factory=IssueCollection)

    # Analysis results (None means that category was not analyzed)
    code_quality: CodeQualityResult | None = None
    dependencies: DependencyResult | None = None
    pr_analysis: PrAnalysisResult | None = None
    security: SecurityResult | None = None
    performance: PerformanceResult | None = None

    # Metadata about the analyzed repository
    metadata: dict[str, Any] = field(default_factory=dict)
    repo_name: str | None = None
    repo_path: str | None = None
    language: str | None = None

    def to_dict(self) -> dict[str, Any]:
        """Convert to dictionary representation.

        Always includes analysis types (as their string values), summary,
        issues and metadata; repo fields and per-category results are
        included only when set.
        """
        result = {
            "analysis_types": [at.value for at in self.analysis_types],
            "summary": self.summary.to_dict(),
            "issues": self.issues.to_dict(),
            "metadata": self.metadata,
        }

        # Add optional sections if present
        if self.repo_name:
            result["repo_name"] = self.repo_name

        if self.repo_path:
            result["repo_path"] = self.repo_path

        if self.language:
            result["language"] = self.language

        # Add analysis results if present
        if self.code_quality:
            result["code_quality"] = self.code_quality.to_dict()

        if self.dependencies:
            result["dependencies"] = self.dependencies.to_dict()

        if self.pr_analysis:
            result["pr_analysis"] = self.pr_analysis.to_dict()

        if self.security:
            result["security"] = self.security.to_dict()

        if self.performance:
            result["performance"] = self.performance.to_dict()

        return result

    def save_to_file(self, file_path: str, indent: int = 2):
        """
        Save analysis result to a file as JSON.

        Args:
            file_path: Path to save to
            indent: JSON indentation level
        """
        with open(file_path, "w") as f:
            json.dump(self.to_dict(), f, indent=indent)

    @classmethod
    def from_dict(cls, data: dict[str, Any]) -> "AnalysisResult":
        """
        Create analysis result from dictionary.

        Note: per-category sub-dicts are expanded as keyword arguments
        into their dataclasses, so unexpected keys raise TypeError.

        Args:
            data: Dictionary representation

        Returns:
            Analysis result object
        """
        # Convert analysis types (accept both strings and enum members)
        analysis_types = [
            AnalysisType(at) if isinstance(at, str) else at
            for at in data.get("analysis_types", [])
        ]

        # Create summary
        summary = (
            AnalysisSummary(**data.get("summary", {}))
            if "summary" in data
            else AnalysisSummary()
        )

        # Create issues collection
        issues = (
            IssueCollection.from_dict(data.get("issues", {}))
            if "issues" in data
            else IssueCollection()
        )

        # Create result object
        result = cls(
            analysis_types=analysis_types,
            summary=summary,
            issues=issues,
            repo_name=data.get("repo_name"),
            repo_path=data.get("repo_path"),
            language=data.get("language"),
            metadata=data.get("metadata", {}),
        )

        # Add analysis results if present
        if "code_quality" in data:
            result.code_quality = CodeQualityResult(**data["code_quality"])

        if "dependencies" in data:
            result.dependencies = DependencyResult(**data["dependencies"])

        if "pr_analysis" in data:
            result.pr_analysis = PrAnalysisResult(**data["pr_analysis"])

        if "security" in data:
            result.security = SecurityResult(**data["security"])

        if "performance" in data:
            result.performance = PerformanceResult(**data["performance"])

        return result

    @classmethod
    def load_from_file(cls, file_path: str) -> "AnalysisResult":
        """
        Load analysis result from a JSON file.

        Args:
            file_path: Path to load from

        Returns:
            Analysis result object
        """
        with open(file_path) as f:
            data = json.load(f)

        return cls.from_dict(data)

    def get_issue_count(
        self, severity: str | None = None, category: str | None = None
    ) -> int:
        """
        Get count of issues matching criteria.

        Counts come from the serialized issue collection: when both
        filters are given the raw issue list is scanned, otherwise the
        precomputed per-severity/per-category statistics are used.

        Args:
            severity: Optional severity to filter by
            category: Optional category to filter by

        Returns:
            Count of matching issues
        """
        issues_dict = self.issues.to_dict()

        if severity and category:
            # Count issues with specific severity and category
            return sum(
                1
                for issue in issues_dict.get("issues", [])
                if issue.get("severity") == severity
                and issue.get("category") == category
            )
        elif severity:
            # Count issues with specific severity
            return (
                issues_dict.get("statistics", {})
                .get("by_severity", {})
                .get(severity, 0)
            )
        elif category:
            # Count issues with specific category
            return (
                issues_dict.get("statistics", {})
                .get("by_category", {})
                .get(category, 0)
            )
        else:
            # Total issues
            return issues_dict.get("statistics", {}).get("total", 0)

    def merge(self, other: "AnalysisResult") -> "AnalysisResult":
        """
        Merge with another analysis result.

        Shallow merge semantics: issues from both are concatenated,
        ``other``'s metadata wins on key conflicts, per-category results
        prefer ``self`` when both are set, repo fields prefer ``self``,
        and summary counts take the max of the two sides.

        Args:
            other: Analysis result to merge with

        Returns:
            New merged analysis result
        """
        # Create new result with combined analysis types
        merged = AnalysisResult(
            analysis_types=list(set(self.analysis_types + other.analysis_types)),
            repo_name=self.repo_name or other.repo_name,
            repo_path=self.repo_path or other.repo_path,
            language=self.language or other.language,
        )

        # Merge issues
        merged.issues.add_issues(self.issues.issues)
        merged.issues.add_issues(other.issues.issues)

        # Merge metadata (other overrides self on duplicate keys)
        merged.metadata = {**self.metadata, **other.metadata}

        # Merge analysis results (take non-None values, self first)
        merged.code_quality = self.code_quality or other.code_quality
        merged.dependencies = self.dependencies or other.dependencies
        merged.pr_analysis = self.pr_analysis or other.pr_analysis
        merged.security = self.security or other.security
        merged.performance = self.performance or other.performance

        # Update summary with a fresh timestamp
        merged.summary = AnalysisSummary(
            total_files=max(self.summary.total_files, other.summary.total_files),
            total_functions=max(
                self.summary.total_functions, other.summary.total_functions
            ),
            total_classes=max(self.summary.total_classes, other.summary.total_classes),
            total_issues=len(merged.issues.issues),
            analysis_time=datetime.now().isoformat(),
        )

        return merged
diff --git a/codegen-on-oss/codegen_on_oss/analyzers/api.py b/codegen-on-oss/codegen_on_oss/analyzers/api.py
new file mode 100644
index 000000000..c2fa25779
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/analyzers/api.py
@@ -0,0 +1,783 @@
+#!/usr/bin/env python3
+"""
+Analyzer API Module
+
+This module provides the API interface for the codegit-on-git frontend to interact
+with the codebase analysis backend. It handles requests for analysis, visualization,
+and data export.
+"""
+
+import logging
+from typing import Any
+
+# Import analyzer components
+from codegen_on_oss.analyzers.analyzer import AnalyzerManager
+from codegen_on_oss.analyzers.issues import (
+ AnalysisType,
+ IssueCategory,
+ IssueSeverity,
+)
+from codegen_on_oss.analyzers.visualization import (
+ Visualizer,
+)
+
+# Configure logging
+logging.basicConfig(
+ level=logging.INFO,
+ format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
+ handlers=[logging.StreamHandler()],
+)
+logger = logging.getLogger(__name__)
+
+
+class CodegenAnalyzerAPI:
+    """
+    Backend API for codegit-on-git.
+
+    This class provides a unified interface for the frontend to interact with
+    the codebase analysis backend, including analysis, visualization, and data export.
+    """
+
+    def __init__(self, repo_path: str | None = None, repo_url: str | None = None):
+        """
+        Initialize the API with a repository.
+
+        Args:
+            repo_path: Local path to the repository
+            repo_url: URL of the repository
+        """
+        # Initialize analyzer
+        self.analyzer = AnalyzerManager(repo_path=repo_path, repo_url=repo_url)
+
+        # Initialize visualizer when needed (lazily, via the property below)
+        self._visualizer = None
+
+        # Cache for analysis results, keyed by the stringified list of
+        # analysis types (see analyze_codebase / analyze_pr)
+        self._analysis_cache = {}
+
+    @property
+    def visualizer(self) -> Visualizer:
+        """Get or initialize visualizer."""
+        if self._visualizer is None:
+            self._visualizer = Visualizer()
+        return self._visualizer
+
+    def analyze_codebase(
+        self,
+        analysis_types: list[str | AnalysisType] | None = None,
+        force_refresh: bool = False,
+    ) -> dict[str, Any]:
+        """
+        Analyze the entire codebase.
+
+        Args:
+            analysis_types: Types of analysis to perform
+            force_refresh: Whether to force a refresh of the analysis
+
+        Returns:
+            Analysis results
+        """
+        # Cache key is order-sensitive: ["a", "b"] and ["b", "a"] cache
+        # separately even though they request the same analyses.
+        cache_key = str(analysis_types) if analysis_types else "default"
+
+        # Check cache first
+        if not force_refresh and cache_key in self._analysis_cache:
+            return self._analysis_cache[cache_key]
+
+        # Run analysis
+        results = self.analyzer.analyze(analysis_types=analysis_types)
+
+        # Cache results
+        self._analysis_cache[cache_key] = results
+
+        return results
+
+    def analyze_pr(
+        self,
+        pr_number: int,
+        analysis_types: list[str | AnalysisType] | None = None,
+        force_refresh: bool = False,
+    ) -> dict[str, Any]:
+        """
+        Analyze a specific PR.
+
+        Args:
+            pr_number: PR number to analyze
+            analysis_types: Types of analysis to perform
+            force_refresh: Whether to force a refresh of the analysis
+
+        Returns:
+            Analysis results
+        """
+        cache_key = f"pr_{pr_number}_{analysis_types!s}"
+
+        # Check cache first
+        if not force_refresh and cache_key in self._analysis_cache:
+            return self._analysis_cache[cache_key]
+
+        # Set PR number
+        # NOTE(review): this mutates the shared analyzer, so the PR number
+        # persists for later calls on this instance.
+        self.analyzer.pr_number = pr_number
+
+        # Use default analysis types if none provided
+        if analysis_types is None:
+            analysis_types = ["pr", "code_quality"]
+
+        # Run analysis
+        results = self.analyzer.analyze(analysis_types=analysis_types)
+
+        # Cache results
+        self._analysis_cache[cache_key] = results
+
+        return results
+
+    def get_issues(
+        self,
+        severity: str | IssueSeverity | None = None,
+        category: str | IssueCategory | None = None,
+    ) -> list[dict[str, Any]]:
+        """
+        Get issues matching criteria.
+
+        Args:
+            severity: Issue severity to filter by
+            category: Issue category to filter by
+
+        Returns:
+            List of matching issues
+        """
+        # Run analysis if not already done
+        if not self._analysis_cache:
+            self.analyze_codebase()
+
+        # Convert string severity to enum if needed
+        # NOTE(review): assumes IssueSeverity/IssueCategory are Enums, so an
+        # unknown name would raise ValueError — confirm against issues module.
+        if isinstance(severity, str):
+            severity = IssueSeverity(severity)
+
+        # Convert string category to enum if needed
+        if isinstance(category, str):
+            category = IssueCategory(category)
+
+        # Get issues
+        issues = self.analyzer.get_issues(severity=severity, category=category)
+
+        # Convert to dictionaries
+        return [issue.to_dict() for issue in issues]
+
+    def find_symbol(self, symbol_name: str) -> dict[str, Any] | None:
+        """
+        Find a specific symbol in the codebase.
+
+        Args:
+            symbol_name: Name of the symbol to find
+
+        Returns:
+            Symbol information if found, None otherwise
+        """
+        # Run analysis if not already done
+        if not self._analysis_cache:
+            self.analyze_codebase()
+
+        # Get symbol
+        symbol = self.analyzer.base_codebase.get_symbol(symbol_name)
+
+        if symbol:
+            # Convert to dictionary
+            return self._symbol_to_dict(symbol)
+
+        return None
+
+    def get_module_dependencies(
+        self,
+        module_path: str | None = None,
+        layout: str = "hierarchical",
+        output_format: str = "json",
+    ) -> dict[str, Any]:
+        """
+        Get module dependencies.
+
+        Args:
+            module_path: Path to the module to analyze
+            layout: Layout algorithm to use
+            output_format: Output format
+
+        Returns:
+            Module dependency visualization
+        """
+        # Run analysis if not already done
+        # (any cached result suppresses re-analysis, even of a different type)
+        if not self._analysis_cache:
+            self.analyze_codebase(analysis_types=["dependency"])
+
+        # Generate visualization
+        viz = self.visualizer.generate_module_dependency_graph(
+            codebase_context=self.analyzer.base_context,
+            module_path=module_path,
+            layout=layout,
+        )
+
+        # Export if needed
+        if output_format != "json":
+            return self.visualizer.export(viz, format=output_format)
+
+        return viz
+
+    def generate_dependency_graph(
+        self,
+        repo_path: str | None = None,
+        module_path: str | None = None,
+        layout: str = "hierarchical",
+        output_format: str = "json",
+    ) -> dict[str, Any]:
+        """
+        Generate a dependency graph for the codebase.
+
+        Alias for get_module_dependencies(); both produce the same module
+        dependency visualization.
+
+        Args:
+            repo_path: Accepted for API compatibility but currently unused —
+                this instance is already bound to a repository.
+            module_path: Path to the specific module to analyze (optional)
+            layout: Graph layout algorithm (hierarchical, force, circular)
+            output_format: Output format (json, dot, graphml)
+
+        Returns:
+            Dictionary containing the dependency graph data
+        """
+        # Delegate to get_module_dependencies, whose implementation is
+        # identical (runs a "dependency" analysis on first use, then builds
+        # and optionally exports the visualization).
+        return self.get_module_dependencies(
+            module_path=module_path,
+            layout=layout,
+            output_format=output_format,
+        )
+
+    def get_function_call_graph(
+        self,
+        function_name: str | list[str],
+        depth: int = 2,
+        layout: str = "hierarchical",
+        output_format: str = "json",
+    ) -> dict[str, Any]:
+        """
+        Get function call graph.
+
+        Args:
+            function_name: Name of the function(s) to analyze — a single name
+                or a list of names
+            depth: Maximum depth of the call graph
+            layout: Layout algorithm to use
+            output_format: Output format
+
+        Returns:
+            Function call graph visualization
+        """
+        # Run analysis if not already done
+        # (any cached result suppresses re-analysis, even of a different type)
+        if not self._analysis_cache:
+            self.analyze_codebase(analysis_types=["code_quality"])
+
+        # Generate visualization
+        viz = self.visualizer.generate_function_call_graph(
+            functions=function_name,
+            codebase_context=self.analyzer.base_context,
+            depth=depth,
+            layout=layout,
+        )
+
+        # Export if needed
+        if output_format != "json":
+            return self.visualizer.export(viz, format=output_format)
+
+        return viz
+
+    def generate_call_graph(
+        self,
+        function_name: str | None = None,
+        file_path: str | None = None,
+        depth: int = 2,
+        layout: str = "hierarchical",
+        output_format: str = "json",
+    ) -> dict[str, Any]:
+        """
+        Generate a call graph for a specific function or file.
+
+        Alias for get_function_call_graph(); both produce the same function
+        call visualization.
+
+        Args:
+            function_name: Name of the function to analyze
+            file_path: Accepted for API compatibility but currently unused —
+                the function is resolved by name only.
+            depth: Maximum depth of the call graph
+            layout: Graph layout algorithm (hierarchical, force, circular)
+            output_format: Output format (json, dot, graphml)
+
+        Returns:
+            Dictionary containing the call graph data
+        """
+        # Delegate to get_function_call_graph, whose implementation is
+        # identical (runs a "code_quality" analysis on first use, then builds
+        # and optionally exports the visualization).
+        return self.get_function_call_graph(
+            function_name=function_name,
+            depth=depth,
+            layout=layout,
+            output_format=output_format,
+        )
+
+    def get_pr_impact(
+        self,
+        pr_number: int | None = None,
+        layout: str = "force",
+        output_format: str = "json",
+    ) -> dict[str, Any]:
+        """
+        Get PR impact visualization.
+
+        Args:
+            pr_number: PR number to analyze
+            layout: Layout algorithm to use
+            output_format: Output format
+
+        Returns:
+            PR impact visualization
+
+        Raises:
+            ValueError: If no PR number is given and none was set previously
+        """
+        # Analyze PR if needed
+        if pr_number is not None:
+            self.analyze_pr(pr_number, analysis_types=["pr"])
+        elif self.analyzer.pr_number is None:
+            msg = "No PR number specified"
+            raise ValueError(msg)
+
+        # Generate visualization
+        # NOTE(review): raises KeyError if the "pr" results are missing —
+        # confirm analyze_pr always populates analyzer.results["results"]["pr"].
+        viz = self.visualizer.generate_pr_diff_visualization(
+            pr_analysis=self.analyzer.results["results"]["pr"], layout=layout
+        )
+
+        # Export if needed
+        if output_format != "json":
+            return self.visualizer.export(viz, format=output_format)
+
+        return viz
+
+    def export_visualization(
+        self,
+        visualization: dict[str, Any],
+        output_format: str = "json",
+        filename: str | None = None,
+    ) -> str | dict[str, Any]:
+        """
+        Export visualization in specified format.
+
+        Args:
+            visualization: Visualization to export
+            output_format: Output format
+            filename: Output filename
+
+        Returns:
+            Exported visualization or path to saved file
+        """
+        return self.visualizer.export(
+            visualization, format=output_format, filename=filename
+        )
+
+    def get_static_errors(self) -> list[dict[str, Any]]:
+        """
+        Get static errors in the codebase.
+
+        Returns:
+            List of static errors
+        """
+        # Run analysis if not already done
+        if not self._analysis_cache:
+            self.analyze_codebase(analysis_types=["code_quality"])
+
+        # Get errors
+        errors = self.analyzer.get_issues(severity=IssueSeverity.ERROR)
+
+        # Convert to dictionaries
+        return [error.to_dict() for error in errors]
+
+    def get_parameter_issues(self) -> list[dict[str, Any]]:
+        """
+        Get parameter-related issues.
+
+        Returns:
+            List of parameter issues
+        """
+        # Run analysis if not already done
+        # (any cached result suppresses re-analysis, even of a different type)
+        if not self._analysis_cache:
+            self.analyze_codebase(analysis_types=["code_quality"])
+
+        # Get parameter issues
+        issues = self.analyzer.get_issues(category=IssueCategory.PARAMETER_MISMATCH)
+
+        # Convert to dictionaries
+        return [issue.to_dict() for issue in issues]
+
+    def get_unimplemented_functions(self) -> list[dict[str, Any]]:
+        """
+        Get unimplemented functions.
+
+        Returns:
+            List of unimplemented functions
+        """
+        # Run analysis if not already done
+        if not self._analysis_cache:
+            self.analyze_codebase(analysis_types=["code_quality"])
+
+        # Get implementation issues
+        issues = self.analyzer.get_issues(category=IssueCategory.IMPLEMENTATION_ERROR)
+
+        # Convert to dictionaries
+        return [issue.to_dict() for issue in issues]
+
+    def get_circular_dependencies(self) -> list[dict[str, Any]]:
+        """
+        Get circular dependencies.
+
+        Returns:
+            List of circular dependencies
+        """
+        # Run analysis if not already done
+        if not self._analysis_cache:
+            self.analyze_codebase(analysis_types=["dependency"])
+
+        # Get circular dependencies
+        # The chained .get() calls fall back to [] when any level is missing.
+        if "dependency" in self.analyzer.results.get("results", {}):
+            return (
+                self.analyzer.results["results"]["dependency"]
+                .get("circular_dependencies", {})
+                .get("circular_imports", [])
+            )
+
+        return []
+
+    def get_module_coupling(self) -> list[dict[str, Any]]:
+        """
+        Get module coupling metrics.
+
+        Returns:
+            Module coupling metrics
+        """
+        # Run analysis if not already done
+        if not self._analysis_cache:
+            self.analyze_codebase(analysis_types=["dependency"])
+
+        # Get module coupling
+        if "dependency" in self.analyzer.results.get("results", {}):
+            return (
+                self.analyzer.results["results"]["dependency"]
+                .get("module_coupling", {})
+                .get("high_coupling_modules", [])
+            )
+
+        return []
+
+    def get_diff_analysis(self, pr_number: int) -> dict[str, Any]:
+        """
+        Get diff analysis for a PR.
+
+        Args:
+            pr_number: PR number to analyze
+
+        Returns:
+            Diff analysis results
+        """
+        # Analyze PR
+        self.analyze_pr(pr_number, analysis_types=["pr"])
+
+        # Get diff analysis
+        if "pr" in self.analyzer.results.get("results", {}):
+            return self.analyzer.results["results"]["pr"]
+
+        return {}
+
+    def clear_cache(self):
+        """Clear the analysis cache."""
+        self._analysis_cache = {}
+
+    def _symbol_to_dict(self, symbol) -> dict[str, Any]:
+        """Convert symbol to dictionary."""
+        # Duck-typed: every attribute access is hasattr-guarded, so any
+        # symbol-like object works; missing fields degrade to defaults.
+        symbol_dict = {
+            "name": symbol.name if hasattr(symbol, "name") else str(symbol),
+            "type": str(symbol.symbol_type)
+            if hasattr(symbol, "symbol_type")
+            else "unknown",
+            "file": symbol.file.file_path
+            if hasattr(symbol, "file") and hasattr(symbol.file, "file_path")
+            else "unknown",
+            "line": symbol.line if hasattr(symbol, "line") else None,
+        }
+
+        # Add function-specific info
+        if hasattr(symbol, "parameters"):
+            symbol_dict["parameters"] = [
+                {
+                    "name": p.name if hasattr(p, "name") else str(p),
+                    "type": str(p.type) if hasattr(p, "type") and p.type else None,
+                    "has_default": p.has_default
+                    if hasattr(p, "has_default")
+                    else False,
+                }
+                for p in symbol.parameters
+            ]
+
+            symbol_dict["return_type"] = (
+                str(symbol.return_type)
+                if hasattr(symbol, "return_type") and symbol.return_type
+                else None
+            )
+            symbol_dict["is_async"] = (
+                symbol.is_async if hasattr(symbol, "is_async") else False
+            )
+
+        # Add class-specific info
+        if hasattr(symbol, "superclasses"):
+            symbol_dict["superclasses"] = [
+                sc.name if hasattr(sc, "name") else str(sc)
+                for sc in symbol.superclasses
+            ]
+
+        return symbol_dict
+
+    def generate_class_diagram(
+        self,
+        class_name: str | None = None,
+        module_name: str | None = None,
+        include_methods: bool = True,
+        include_attributes: bool = True,
+        output_format: str = "json",
+    ) -> dict[str, Any]:
+        """
+        Generate a class diagram for the codebase.
+
+        Args:
+            class_name: Name of the class to analyze (optional)
+            module_name: Name of the module containing the class (optional)
+            include_methods: Whether to include methods in the diagram
+            include_attributes: Whether to include attributes in the diagram
+            output_format: Output format (json, dot, graphml, plantuml)
+
+        Returns:
+            Dictionary containing the class diagram data
+        """
+        # Run analysis if not already done
+        # (any cached result suppresses re-analysis, even of a different type)
+        if not self._analysis_cache:
+            self.analyze_codebase(analysis_types=["dependency"])
+
+        # Generate visualization
+        viz = self.visualizer.generate_class_diagram(
+            codebase_context=self.analyzer.base_context,
+            class_name=class_name,
+            module_name=module_name,
+            include_methods=include_methods,
+            include_attributes=include_attributes,
+        )
+
+        # Export if needed
+        if output_format != "json":
+            return self.visualizer.export(viz, format=output_format)
+
+        return viz
+
+    def generate_sequence_diagram(
+        self,
+        function_name: str,
+        file_path: str | None = None,
+        max_depth: int = 3,
+        output_format: str = "json",
+    ) -> dict[str, Any]:
+        """
+        Generate a sequence diagram for a specific function.
+
+        Args:
+            function_name: Name of the function to analyze
+            file_path: Path to the file containing the function (optional)
+            max_depth: Maximum depth of the sequence diagram
+            output_format: Output format (json, plantuml)
+
+        Returns:
+            Dictionary containing the sequence diagram data
+        """
+        # Run analysis if not already done
+        if not self._analysis_cache:
+            self.analyze_codebase(analysis_types=["code_quality"])
+
+        # Generate visualization
+        viz = self.visualizer.generate_sequence_diagram(
+            codebase_context=self.analyzer.base_context,
+            function_name=function_name,
+            file_path=file_path,
+            max_depth=max_depth,
+        )
+
+        # Export if needed
+        if output_format != "json":
+            return self.visualizer.export(viz, format=output_format)
+
+        return viz
+
+
+def create_api(
+    repo_path: str | None = None, repo_url: str | None = None
+) -> CodegenAnalyzerAPI:
+    """
+    Create an API instance.
+
+    Args:
+        repo_path: Local path to the repository
+        repo_url: URL of the repository
+
+    Returns:
+        API instance
+    """
+    return CodegenAnalyzerAPI(repo_path=repo_path, repo_url=repo_url)
+
+
+# API endpoints for Flask or FastAPI integration
+def api_analyze_codebase(
+    repo_path: str, analysis_types: list[str] | None = None
+) -> dict[str, Any]:
+    """
+    API endpoint for codebase analysis.
+
+    Args:
+        repo_path: Path to the repository
+        analysis_types: Types of analysis to perform
+
+    Returns:
+        Analysis results
+    """
+    # Each endpoint builds a fresh CodegenAnalyzerAPI, so no analysis cache
+    # is shared between requests.
+    api = create_api(repo_path=repo_path)
+    return api.analyze_codebase(analysis_types=analysis_types)
+
+
+def api_analyze_pr(repo_path: str, pr_number: int) -> dict[str, Any]:
+    """
+    API endpoint for PR analysis.
+
+    Args:
+        repo_path: Path to the repository
+        pr_number: PR number to analyze
+
+    Returns:
+        Analysis results
+    """
+    api = create_api(repo_path=repo_path)
+    return api.analyze_pr(pr_number=pr_number)
+
+
+def api_get_visualization(
+    repo_path: str, viz_type: str, params: dict[str, Any]
+) -> dict[str, Any]:
+    """
+    API endpoint for visualizations.
+
+    Args:
+        repo_path: Path to the repository
+        viz_type: Type of visualization ("module_dependencies",
+            "function_calls", "code_quality", or "pr_impact")
+        params: Visualization parameters
+
+    Returns:
+        Visualization data
+
+    Raises:
+        ValueError: If viz_type is not a known visualization type
+    """
+    api = create_api(repo_path=repo_path)
+
+    # Run appropriate analysis based on visualization type
+    if viz_type == "module_dependencies":
+        api.analyze_codebase(analysis_types=["dependency"])
+    elif viz_type in ["function_calls", "code_quality"]:
+        api.analyze_codebase(analysis_types=["code_quality"])
+    elif viz_type == "pr_impact":
+        api.analyze_pr(pr_number=params["pr_number"])
+
+    # Generate visualization.  The keyword must be output_format: the
+    # CodegenAnalyzerAPI methods have no "format" parameter, so passing
+    # format=... would raise TypeError.
+    if viz_type == "module_dependencies":
+        return api.get_module_dependencies(
+            module_path=params.get("module_path"),
+            layout=params.get("layout", "hierarchical"),
+            output_format=params.get("format", "json"),
+        )
+    elif viz_type == "function_calls":
+        return api.get_function_call_graph(
+            function_name=params["function_name"],
+            depth=params.get("depth", 2),
+            layout=params.get("layout", "hierarchical"),
+            output_format=params.get("format", "json"),
+        )
+    elif viz_type == "pr_impact":
+        return api.get_pr_impact(
+            pr_number=params.get("pr_number"),
+            layout=params.get("layout", "force"),
+            output_format=params.get("format", "json"),
+        )
+    else:
+        msg = f"Unknown visualization type: {viz_type}"
+        raise ValueError(msg)
+
+
+def api_get_static_errors(repo_path: str) -> list[dict[str, Any]]:
+    """
+    API endpoint for static errors.
+
+    Args:
+        repo_path: Path to the repository
+
+    Returns:
+        List of static errors
+    """
+    api = create_api(repo_path=repo_path)
+    return api.get_static_errors()
+
+
+def api_get_function_issues(repo_path: str, function_name: str) -> list[dict[str, Any]]:
+    """
+    API endpoint for function issues.
+
+    Args:
+        repo_path: Path to the repository
+        function_name: Name of the function
+
+    Returns:
+        List of function issues; empty if the symbol or its file cannot be
+        resolved
+    """
+    api = create_api(repo_path=repo_path)
+    api.analyze_codebase(analysis_types=["code_quality"])
+
+    # Get symbol
+    symbol = api.analyzer.base_codebase.get_symbol(function_name)
+
+    if not symbol:
+        return []
+
+    # Get file path
+    file_path = (
+        symbol.file.file_path
+        if hasattr(symbol, "file") and hasattr(symbol.file, "file_path")
+        else None
+    )
+
+    if not file_path:
+        return []
+
+    # Get issues for this file and symbol
+    # NOTE(review): assumes Issue exposes flat .file and .symbol attributes —
+    # confirm against the issues model (elsewhere issues use .location.file).
+    issues = api.analyzer.get_issues()
+    return [
+        issue.to_dict()
+        for issue in issues
+        if issue.file == file_path
+        and (
+            issue.symbol == function_name
+            or (
+                hasattr(issue, "related_symbols")
+                and function_name in issue.related_symbols
+            )
+        )
+    ]
diff --git a/codegen-on-oss/codegen_on_oss/analyzers/base_analyzer.py b/codegen-on-oss/codegen_on_oss/analyzers/base_analyzer.py
new file mode 100644
index 000000000..20d75e9f1
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/analyzers/base_analyzer.py
@@ -0,0 +1,361 @@
+#!/usr/bin/env python3
+"""
+Base Analyzer Module
+
+This module provides the foundation for all code analyzers in the system.
+It defines a common interface and shared functionality for codebase analysis.
+"""
+
+import json
+import logging
+import sys
+import tempfile
+from abc import ABC, abstractmethod
+from typing import Any
+
+try:
+ from codegen.configs.models.codebase import CodebaseConfig
+ from codegen.configs.models.secrets import SecretsConfig
+ from codegen.git.repo_operator.repo_operator import RepoOperator
+ from codegen.git.schemas.repo_config import RepoConfig
+ from codegen.sdk.codebase.config import ProjectConfig
+ from codegen.sdk.core.codebase import Codebase
+ from codegen.shared.enums.programming_language import ProgrammingLanguage
+
+ from codegen_on_oss.analyzers.issue_types import (
+ AnalysisType,
+ Issue,
+ IssueCategory,
+ IssueSeverity,
+ )
+
+ # Import from our own modules
+ from codegen_on_oss.context_codebase import (
+ GLOBAL_FILE_IGNORE_LIST,
+ CodebaseContext,
+ get_node_classes,
+ )
+ from codegen_on_oss.current_code_codebase import get_selected_codebase
+except ImportError:
+ print("Codegen SDK or required modules not found.")
+ sys.exit(1)
+
+# Configure logging
+logging.basicConfig(
+ level=logging.INFO,
+ format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
+ handlers=[logging.StreamHandler()],
+)
+logger = logging.getLogger(__name__)
+
+
+class BaseCodeAnalyzer(ABC):
+    """
+    Base class for all code analyzers.
+
+    This abstract class defines the common interface and shared functionality
+    for all code analyzers in the system. Specific analyzers should inherit
+    from this class and implement the abstract methods.
+    """
+
+    def __init__(
+        self,
+        repo_url: str | None = None,
+        repo_path: str | None = None,
+        base_branch: str = "main",
+        pr_number: int | None = None,
+        language: str | None = None,
+        file_ignore_list: list[str] | None = None,
+        config: dict[str, Any] | None = None,
+    ):
+        """
+        Initialize the base analyzer.
+
+        Args:
+            repo_url: URL of the repository to analyze (takes precedence over
+                repo_path when both are given)
+            repo_path: Local path to the repository to analyze
+            base_branch: Base branch for comparison
+            pr_number: PR number to analyze
+            language: Programming language of the codebase
+            file_ignore_list: List of file patterns to ignore
+            config: Additional configuration options
+        """
+        self.repo_url = repo_url
+        self.repo_path = repo_path
+        self.base_branch = base_branch
+        self.pr_number = pr_number
+        self.language = language
+
+        # Use custom ignore list or default global list
+        self.file_ignore_list = file_ignore_list or GLOBAL_FILE_IGNORE_LIST
+
+        # Configuration options
+        self.config = config or {}
+
+        # Codebase and context objects
+        self.base_codebase = None
+        self.pr_codebase = None
+        self.base_context = None
+        self.pr_context = None
+
+        # Analysis results
+        self.issues: list[Issue] = []
+        self.results: dict[str, Any] = {}
+
+        # PR comparison data
+        self.pr_diff = None
+        self.commit_shas = None
+        self.modified_symbols = None
+        self.pr_branch = None
+
+        # Initialize codebase(s) based on provided parameters
+        if repo_url:
+            self._init_from_url(repo_url, language)
+        elif repo_path:
+            self._init_from_path(repo_path, language)
+
+        # If PR number is provided, initialize PR-specific data
+        if self.pr_number is not None and self.base_codebase is not None:
+            self._init_pr_data(self.pr_number)
+
+        # Initialize contexts (no-op for whichever codebase was not set up)
+        self._init_contexts()
+
+    def _init_from_url(self, repo_url: str, language: str | None = None):
+        """
+        Initialize codebase from a repository URL.
+
+        Args:
+            repo_url: URL of the repository
+            language: Programming language of the codebase
+
+        Raises:
+            Exception: Re-raises any error from codebase initialization
+        """
+        try:
+            # Extract repository information
+            if repo_url.endswith(".git"):
+                repo_url = repo_url[:-4]
+
+            # Assumes the URL ends with /<owner>/<repo>; shorter URLs would
+            # mis-parse here.
+            parts = repo_url.rstrip("/").split("/")
+            repo_name = parts[-1]
+            owner = parts[-2]
+            repo_full_name = f"{owner}/{repo_name}"
+
+            # Create temporary directory for cloning
+            # NOTE(review): nothing in this class removes this temp dir —
+            # confirm cleanup happens elsewhere.
+            tmp_dir = tempfile.mkdtemp(prefix="analyzer_")
+
+            # Set up configuration
+            config = CodebaseConfig(
+                debug=False,
+                allow_external=True,
+                py_resolve_syspath=True,
+            )
+
+            secrets = SecretsConfig()
+
+            # Determine programming language
+            prog_lang = None
+            if language:
+                prog_lang = ProgrammingLanguage(language.upper())
+
+            # Initialize the codebase
+            logger.info(f"Initializing codebase from {repo_url}")
+
+            self.base_codebase = Codebase.from_github(
+                repo_full_name=repo_full_name,
+                tmp_dir=tmp_dir,
+                language=prog_lang,
+                config=config,
+                secrets=secrets,
+            )
+
+            logger.info(f"Successfully initialized codebase from {repo_url}")
+
+        except Exception as e:
+            logger.exception(f"Error initializing codebase from URL: {e}")
+            raise
+
+    def _init_from_path(self, repo_path: str, language: str | None = None):
+        """
+        Initialize codebase from a local repository path.
+
+        Args:
+            repo_path: Path to the repository
+            language: Programming language of the codebase
+
+        Raises:
+            Exception: Re-raises any error from codebase initialization
+        """
+        try:
+            # Set up configuration
+            config = CodebaseConfig(
+                debug=False,
+                allow_external=True,
+                py_resolve_syspath=True,
+            )
+
+            secrets = SecretsConfig()
+
+            # Initialize the codebase
+            logger.info(f"Initializing codebase from {repo_path}")
+
+            # Determine programming language
+            prog_lang = None
+            if language:
+                prog_lang = ProgrammingLanguage(language.upper())
+
+            # Set up repository configuration
+            repo_config = RepoConfig.from_repo_path(repo_path)
+            repo_config.respect_gitignore = False
+            repo_operator = RepoOperator(repo_config=repo_config, bot_commit=False)
+
+            # Create project configuration
+            project_config = ProjectConfig(
+                repo_operator=repo_operator,
+                programming_language=prog_lang if prog_lang else None,
+            )
+
+            # Initialize codebase
+            self.base_codebase = Codebase(
+                projects=[project_config], config=config, secrets=secrets
+            )
+
+            logger.info(f"Successfully initialized codebase from {repo_path}")
+
+        except Exception as e:
+            logger.exception(f"Error initializing codebase from path: {e}")
+            raise
+
+    def _init_pr_data(self, pr_number: int):
+        """
+        Initialize PR-specific data.
+
+        Args:
+            pr_number: PR number to analyze
+
+        Raises:
+            Exception: Re-raises any error from PR data retrieval
+        """
+        try:
+            logger.info(f"Fetching PR #{pr_number} data")
+            result = self.base_codebase.get_modified_symbols_in_pr(pr_number)
+
+            # Unpack the result tuple
+            if len(result) >= 3:
+                self.pr_diff, self.commit_shas, self.modified_symbols = result[:3]
+                if len(result) >= 4:
+                    self.pr_branch = result[3]
+
+            # NOTE(review): if fewer than 3 items were returned above,
+            # modified_symbols is still None and len() here raises TypeError.
+            logger.info(f"Found {len(self.modified_symbols)} modified symbols in PR")
+
+            # Initialize PR codebase
+            self._init_pr_codebase()
+
+        except Exception as e:
+            logger.exception(f"Error initializing PR data: {e}")
+            raise
+
+    def _init_pr_codebase(self):
+        """Initialize PR codebase by checking out the PR branch."""
+        if not self.base_codebase or not self.pr_number:
+            logger.error("Base codebase or PR number not initialized")
+            return
+
+        try:
+            # Get PR data if not already fetched
+            if not self.pr_branch:
+                self._init_pr_data(self.pr_number)
+
+            if not self.pr_branch:
+                logger.error("Failed to get PR branch")
+                return
+
+            # Alias, not a copy: pr_codebase and base_codebase are the same
+            # object, so the checkout below also switches the base codebase
+            # onto the PR branch.
+            self.pr_codebase = self.base_codebase
+
+            # Checkout PR branch
+            logger.info(f"Checking out PR branch: {self.pr_branch}")
+            self.pr_codebase.checkout(self.pr_branch)
+
+            logger.info("Successfully initialized PR codebase")
+
+        except Exception as e:
+            logger.exception(f"Error initializing PR codebase: {e}")
+            raise
+
+    def _init_contexts(self):
+        """Initialize CodebaseContext objects for both base and PR codebases."""
+        # Context construction failures are logged but not raised, so the
+        # corresponding context attribute may remain None.
+        if self.base_codebase:
+            try:
+                self.base_context = CodebaseContext(
+                    codebase=self.base_codebase,
+                    base_path=self.repo_path,
+                    pr_branch=None,
+                    base_branch=self.base_branch,
+                )
+                logger.info("Successfully initialized base context")
+            except Exception as e:
+                logger.exception(f"Error initializing base context: {e}")
+
+        if self.pr_codebase:
+            try:
+                self.pr_context = CodebaseContext(
+                    codebase=self.pr_codebase,
+                    base_path=self.repo_path,
+                    pr_branch=self.pr_branch,
+                    base_branch=self.base_branch,
+                )
+                logger.info("Successfully initialized PR context")
+            except Exception as e:
+                logger.exception(f"Error initializing PR context: {e}")
+
+    def add_issue(self, issue: Issue):
+        """
+        Add an issue to the list of detected issues.
+
+        Args:
+            issue: Issue to add
+        """
+        self.issues.append(issue)
+
+    def get_issues(
+        self,
+        severity: IssueSeverity | None = None,
+        category: IssueCategory | None = None,
+    ) -> list[Issue]:
+        """
+        Get all issues matching the specified criteria.
+
+        Args:
+            severity: Optional severity level to filter by
+            category: Optional category to filter by
+
+        Returns:
+            List of matching issues
+        """
+        filtered_issues = self.issues
+
+        if severity:
+            filtered_issues = [i for i in filtered_issues if i.severity == severity]
+
+        if category:
+            filtered_issues = [i for i in filtered_issues if i.category == category]
+
+        return filtered_issues
+
+    def save_results(self, output_file: str):
+        """
+        Save analysis results to a file.
+
+        Args:
+            output_file: Path to the output file
+        """
+        # NOTE(review): assumes self.results is JSON-serializable.
+        with open(output_file, "w") as f:
+            json.dump(self.results, f, indent=2)
+
+        logger.info(f"Results saved to {output_file}")
+
+    @abstractmethod
+    def analyze(self, analysis_type: AnalysisType) -> dict[str, Any]:
+        """
+        Perform analysis on the codebase.
+
+        Args:
+            analysis_type: Type of analysis to perform
+
+        Returns:
+            Dictionary containing analysis results
+        """
+        pass
diff --git a/codegen-on-oss/codegen_on_oss/analyzers/code_quality.py b/codegen-on-oss/codegen_on_oss/analyzers/code_quality.py
new file mode 100644
index 000000000..1c93c6e8e
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/analyzers/code_quality.py
@@ -0,0 +1,1309 @@
+#!/usr/bin/env python3
+"""
+Code Quality Analyzer Module
+
+This module provides analysis of code quality issues such as dead code,
+complexity, style, and maintainability. It identifies issues like unused variables,
+functions with excessive complexity, parameter errors, and implementation problems.
+"""
+
+import logging
+import math
+import re
+from typing import Any
+
+from codegen_on_oss.analyzers.codebase_context import CodebaseContext
+
+# Import from our own modules
+from codegen_on_oss.analyzers.issues import (
+ IssueCategory,
+ IssueCollection,
+ IssueSeverity,
+ create_issue,
+)
+
# Configure logging
# NOTE(review): basicConfig at import time configures the root logger for the
# whole process, so merely importing this module changes global logging
# behavior. Consider moving this into an entry point.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
    handlers=[logging.StreamHandler()],
)
logger = logging.getLogger(__name__)
+
+
+class CodeQualityAnalyzer:
+ """
+ Analyzer for code quality issues.
+
+ This class analyzes code quality issues in a codebase, including dead code,
+ complexity, style, and maintainability issues.
+ """
+
    def __init__(
        self,
        codebase_context: CodebaseContext,
        issue_collection: IssueCollection | None = None,
    ):
        """
        Initialize the analyzer.

        Args:
            codebase_context: Context for the codebase to analyze
            issue_collection: Collection for storing issues; a fresh
                IssueCollection is created when omitted.
        """
        self.context = codebase_context
        self.issues = issue_collection or IssueCollection()

        # Register default issue filters
        self._register_default_filters()
+
    def _register_default_filters(self):
        """Register default issue filters.

        Installs predicates that exclude issues located in test or generated
        files, matched by substring on the lowercased file path.
        NOTE(review): assumes IssueCollection.add_filter KEEPS issues for
        which the predicate returns True -- confirm in the issues module.
        """
        # Filter out issues in test files
        self.issues.add_filter(
            lambda issue: "test" not in issue.location.file.lower(),
            "Skip issues in test files",
        )

        # Filter out issues in generated files
        self.issues.add_filter(
            lambda issue: "generated" not in issue.location.file.lower(),
            "Skip issues in generated files",
        )
+
    def analyze(self) -> dict[str, Any]:
        """
        Perform code quality analysis.

        Resets the issue collection, then runs each sub-analysis (dead code,
        complexity, parameters, style, implementations, maintainability) and
        combines their results.

        Returns:
            Dictionary with keys "summary", "dead_code", "complexity",
            "parameter_issues", "style_issues", "implementation_issues",
            "maintainability", and "issues".
        """
        logger.info("Starting code quality analysis")

        # Clear existing issues so repeated calls do not accumulate duplicates
        self.issues = IssueCollection()
        self._register_default_filters()

        # Analyze dead code
        dead_code = self._find_dead_code()

        # Analyze complexity
        complexity = self._analyze_complexity()

        # Analyze parameters
        parameter_issues = self._check_function_parameters()

        # Analyze style issues
        style_issues = self._check_style_issues()

        # Analyze implementations
        implementation_issues = self._check_implementations()

        # Analyze maintainability
        maintainability = self._calculate_maintainability()

        # Combine results
        results = {
            "summary": {
                "issue_count": len(self.issues.issues),
                "analyzed_functions": len(self.context.get_functions()),
                "analyzed_classes": len(self.context.get_classes()),
                "analyzed_files": len(self.context.get_files()),
            },
            "dead_code": dead_code,
            "complexity": complexity,
            "parameter_issues": parameter_issues,
            "style_issues": style_issues,
            "implementation_issues": implementation_issues,
            "maintainability": maintainability,
            "issues": self.issues.to_dict(),
        }

        logger.info(
            f"Code quality analysis complete. Found {len(self.issues.issues)} issues."
        )

        return results
+
    def _find_dead_code(self) -> dict[str, Any]:
        """
        Find unused code (dead code) in the codebase.

        Detects unused functions, classes, local variables, and imports by
        inspecting call-site/usage attributes on the context's symbols. All
        checks are attribute-based (hasattr) so symbols lacking usage data
        are simply never reported. Each finding is also recorded as an issue.

        Returns:
            Dictionary with "unused_functions", "unused_classes",
            "unused_variables", "unused_imports", and a "summary" of counts.
        """
        logger.info("Analyzing dead code")

        dead_code = {
            "unused_functions": [],
            "unused_classes": [],
            "unused_variables": [],
            "unused_imports": [],
        }

        # Find unused functions
        for function in self.context.get_functions():
            # Skip if function should be excluded
            if self._should_skip_symbol(function):
                continue

            # Skip decorated functions (as they might be used indirectly)
            if hasattr(function, "decorators") and function.decorators:
                continue

            # Check if function has no call sites or usages
            has_call_sites = (
                hasattr(function, "call_sites") and len(function.call_sites) > 0
            )
            has_usages = hasattr(function, "usages") and len(function.usages) > 0

            if not has_call_sites and not has_usages:
                # Skip magic methods and main functions
                if (hasattr(function, "is_magic") and function.is_magic) or (
                    hasattr(function, "name") and function.name in ["main", "__main__"]
                ):
                    continue

                # Get file path and name safely
                file_path = (
                    function.file.file_path
                    if hasattr(function, "file") and hasattr(function.file, "file_path")
                    else "unknown"
                )
                func_name = (
                    function.name if hasattr(function, "name") else str(function)
                )

                # Add to dead code list
                dead_code["unused_functions"].append({
                    "name": func_name,
                    "file": file_path,
                    "line": function.line if hasattr(function, "line") else None,
                })

                # Add issue
                self.issues.add_issue(
                    create_issue(
                        message=f"Unused function: {func_name}",
                        severity=IssueSeverity.WARNING,
                        file=file_path,
                        line=function.line if hasattr(function, "line") else None,
                        category=IssueCategory.DEAD_CODE,
                        symbol=func_name,
                        suggestion="Consider removing this unused function or documenting why it's needed",
                    )
                )

        # Find unused classes
        for cls in self.context.get_classes():
            # Skip if class should be excluded
            if self._should_skip_symbol(cls):
                continue

            # Check if class has no usages
            has_usages = hasattr(cls, "usages") and len(cls.usages) > 0

            if not has_usages:
                # Get file path and name safely
                file_path = (
                    cls.file.file_path
                    if hasattr(cls, "file") and hasattr(cls.file, "file_path")
                    else "unknown"
                )
                cls_name = cls.name if hasattr(cls, "name") else str(cls)

                # Add to dead code list
                dead_code["unused_classes"].append({
                    "name": cls_name,
                    "file": file_path,
                    "line": cls.line if hasattr(cls, "line") else None,
                })

                # Add issue
                self.issues.add_issue(
                    create_issue(
                        message=f"Unused class: {cls_name}",
                        severity=IssueSeverity.WARNING,
                        file=file_path,
                        line=cls.line if hasattr(cls, "line") else None,
                        category=IssueCategory.DEAD_CODE,
                        symbol=cls_name,
                        suggestion="Consider removing this unused class or documenting why it's needed",
                    )
                )

        # Find unused variables (local assignments with no recorded local usages)
        for function in self.context.get_functions():
            if not hasattr(function, "code_block") or not hasattr(
                function.code_block, "local_var_assignments"
            ):
                continue

            for var_assignment in function.code_block.local_var_assignments:
                # Check if variable has no usages
                has_usages = (
                    hasattr(var_assignment, "local_usages")
                    and len(var_assignment.local_usages) > 0
                )

                if not has_usages:
                    # Skip if variable name indicates it's intentionally unused (e.g., _)
                    var_name = (
                        var_assignment.name
                        if hasattr(var_assignment, "name")
                        else str(var_assignment)
                    )
                    if var_name == "_" or var_name.startswith("_unused"):
                        continue

                    # Get file path
                    file_path = (
                        function.file.file_path
                        if hasattr(function, "file")
                        and hasattr(function.file, "file_path")
                        else "unknown"
                    )

                    # Add to dead code list
                    dead_code["unused_variables"].append({
                        "name": var_name,
                        "file": file_path,
                        "line": var_assignment.line
                        if hasattr(var_assignment, "line")
                        else None,
                        "function": function.name
                        if hasattr(function, "name")
                        else str(function),
                    })

                    # Add issue
                    self.issues.add_issue(
                        create_issue(
                            message=f"Unused variable '{var_name}' in function '{function.name if hasattr(function, 'name') else 'unknown'}'",
                            severity=IssueSeverity.INFO,
                            file=file_path,
                            line=var_assignment.line
                            if hasattr(var_assignment, "line")
                            else None,
                            category=IssueCategory.DEAD_CODE,
                            symbol=var_name,
                            suggestion="Consider removing this unused variable",
                        )
                    )

        # Find unused imports
        for file in self.context.get_files():
            if hasattr(file, "is_binary") and file.is_binary:
                continue

            if not hasattr(file, "imports"):
                continue

            file_path = file.file_path if hasattr(file, "file_path") else str(file)

            for imp in file.imports:
                if not hasattr(imp, "usages"):
                    continue

                if len(imp.usages) == 0:
                    # Get import source safely
                    import_source = imp.source if hasattr(imp, "source") else str(imp)

                    # Add to dead code list
                    dead_code["unused_imports"].append({
                        "import": import_source,
                        "file": file_path,
                        "line": imp.line if hasattr(imp, "line") else None,
                    })

                    # Add issue
                    self.issues.add_issue(
                        create_issue(
                            message=f"Unused import: {import_source}",
                            severity=IssueSeverity.INFO,
                            file=file_path,
                            line=imp.line if hasattr(imp, "line") else None,
                            category=IssueCategory.DEAD_CODE,
                            code=import_source,
                            suggestion="Remove this unused import",
                        )
                    )

        # Add summary statistics
        dead_code["summary"] = {
            "unused_functions_count": len(dead_code["unused_functions"]),
            "unused_classes_count": len(dead_code["unused_classes"]),
            "unused_variables_count": len(dead_code["unused_variables"]),
            "unused_imports_count": len(dead_code["unused_imports"]),
            "total_dead_code_count": (
                len(dead_code["unused_functions"])
                + len(dead_code["unused_classes"])
                + len(dead_code["unused_variables"])
                + len(dead_code["unused_imports"])
            ),
        }

        return dead_code
+
    def _analyze_complexity(self) -> dict[str, Any]:
        """
        Analyze code complexity.

        Computes cyclomatic complexity per function, buckets the scores into
        low/medium/high/very_high bands, and flags functions above 10 as
        issues (ERROR above 15, WARNING otherwise).

        Returns:
            Dictionary with "function_complexity", a descending
            "high_complexity_functions" list, "average_complexity", and a
            "complexity_distribution" histogram.
        """
        logger.info("Analyzing code complexity")

        complexity_result = {
            "function_complexity": [],
            "high_complexity_functions": [],
            "average_complexity": 0.0,
            "complexity_distribution": {
                "low": 0,
                "medium": 0,
                "high": 0,
                "very_high": 0,
            },
        }

        # Process all functions to calculate complexity
        total_complexity = 0
        function_count = 0

        for function in self.context.get_functions():
            # Skip if function should be excluded
            if self._should_skip_symbol(function):
                continue

            # Skip if no code block
            if not hasattr(function, "code_block"):
                continue

            # Calculate cyclomatic complexity
            complexity = self._calculate_cyclomatic_complexity(function)

            # Get file path and name safely
            file_path = (
                function.file.file_path
                if hasattr(function, "file") and hasattr(function.file, "file_path")
                else "unknown"
            )
            func_name = function.name if hasattr(function, "name") else str(function)

            # Add to complexity list
            complexity_result["function_complexity"].append({
                "name": func_name,
                "file": file_path,
                "line": function.line if hasattr(function, "line") else None,
                "complexity": complexity,
            })

            # Track total complexity
            total_complexity += complexity
            function_count += 1

            # Categorize complexity: <=5 low, <=10 medium, <=15 high, else very high
            if complexity <= 5:
                complexity_result["complexity_distribution"]["low"] += 1
            elif complexity <= 10:
                complexity_result["complexity_distribution"]["medium"] += 1
            elif complexity <= 15:
                complexity_result["complexity_distribution"]["high"] += 1
            else:
                complexity_result["complexity_distribution"]["very_high"] += 1

            # Flag high complexity functions
            if complexity > 10:
                complexity_result["high_complexity_functions"].append({
                    "name": func_name,
                    "file": file_path,
                    "line": function.line if hasattr(function, "line") else None,
                    "complexity": complexity,
                })

                # Add issue
                severity = (
                    IssueSeverity.WARNING if complexity <= 15 else IssueSeverity.ERROR
                )
                self.issues.add_issue(
                    create_issue(
                        message=f"Function '{func_name}' has high cyclomatic complexity ({complexity})",
                        severity=severity,
                        file=file_path,
                        line=function.line if hasattr(function, "line") else None,
                        category=IssueCategory.COMPLEXITY,
                        symbol=func_name,
                        suggestion="Consider refactoring this function to reduce complexity",
                    )
                )

        # Calculate average complexity (0.0 when nothing was analyzed)
        complexity_result["average_complexity"] = (
            total_complexity / function_count if function_count > 0 else 0.0
        )

        # Sort high complexity functions by complexity
        complexity_result["high_complexity_functions"].sort(
            key=lambda x: x["complexity"], reverse=True
        )

        return complexity_result
+
+ def _calculate_cyclomatic_complexity(self, function) -> int:
+ """
+ Calculate cyclomatic complexity for a function.
+
+ Args:
+ function: Function to analyze
+
+ Returns:
+ Cyclomatic complexity score
+ """
+ complexity = 1 # Base complexity
+
+ def analyze_statement(statement):
+ nonlocal complexity
+
+ # Check for if statements (including elif branches)
+ if hasattr(statement, "if_clause"):
+ complexity += 1
+
+ # Count elif branches
+ if hasattr(statement, "elif_statements"):
+ complexity += len(statement.elif_statements)
+
+ # Count else branches
+ if hasattr(statement, "else_clause") and statement.else_clause:
+ complexity += 1
+
+ # Count for loops
+ if hasattr(statement, "is_for_loop") and statement.is_for_loop:
+ complexity += 1
+
+ # Count while loops
+ if hasattr(statement, "is_while_loop") and statement.is_while_loop:
+ complexity += 1
+
+ # Count try/except blocks (each except adds a path)
+ if hasattr(statement, "is_try_block") and statement.is_try_block:
+ if hasattr(statement, "except_clauses"):
+ complexity += len(statement.except_clauses)
+
+ # Recursively process nested statements
+ if hasattr(statement, "statements"):
+ for nested_stmt in statement.statements:
+ analyze_statement(nested_stmt)
+
+ # Process all statements in the function's code block
+ if hasattr(function, "code_block") and hasattr(
+ function.code_block, "statements"
+ ):
+ for statement in function.code_block.statements:
+ analyze_statement(statement)
+
+ # If we can't analyze the AST, fall back to simple pattern matching
+ elif hasattr(function, "source"):
+ source = function.source
+ # Count branch points
+ complexity += source.count("if ")
+ complexity += source.count("elif ")
+ complexity += source.count("for ")
+ complexity += source.count("while ")
+ complexity += source.count("except:")
+ complexity += source.count("except ")
+ complexity += source.count("case ")
+
+ return complexity
+
    def _check_function_parameters(self) -> dict[str, Any]:
        """
        Check for function parameter issues.

        Detects four kinds of problems: parameters missing type annotations,
        parameters never referenced in the function body (regex heuristic),
        call sites passing fewer arguments than required, and same-named
        functions whose shared parameters have conflicting annotated types.

        Returns:
            Dictionary with "missing_types", "inconsistent_types",
            "unused_parameters", "incorrect_usage", and a "summary" of counts.
        """
        logger.info("Analyzing function parameters")

        parameter_issues = {
            "missing_types": [],
            "inconsistent_types": [],
            "unused_parameters": [],
            "incorrect_usage": [],
        }

        for function in self.context.get_functions():
            # Skip if function should be excluded
            if self._should_skip_symbol(function):
                continue

            # Skip if no parameters
            if not hasattr(function, "parameters"):
                continue

            file_path = (
                function.file.file_path
                if hasattr(function, "file") and hasattr(function.file, "file_path")
                else "unknown"
            )
            func_name = function.name if hasattr(function, "name") else str(function)

            # Check for missing type annotations
            missing_types = []
            for param in function.parameters:
                if not hasattr(param, "name"):
                    continue

                if not hasattr(param, "type") or not param.type:
                    missing_types.append(param.name)

            if missing_types:
                parameter_issues["missing_types"].append({
                    "function": func_name,
                    "file": file_path,
                    "line": function.line if hasattr(function, "line") else None,
                    "parameters": missing_types,
                })

                self.issues.add_issue(
                    create_issue(
                        message=f"Function '{func_name}' has parameters without type annotations: {', '.join(missing_types)}",
                        severity=IssueSeverity.WARNING,
                        file=file_path,
                        line=function.line if hasattr(function, "line") else None,
                        category=IssueCategory.TYPE_ERROR,
                        symbol=func_name,
                        suggestion="Add type annotations to all parameters",
                    )
                )

            # Check for unused parameters
            if hasattr(function, "source"):
                # This is a simple check that looks for parameter names in the function body
                # A more sophisticated check would analyze the AST
                unused_params = []
                for param in function.parameters:
                    if not hasattr(param, "name"):
                        continue

                    # Skip self/cls parameter in methods
                    if (
                        param.name in ["self", "cls"]
                        and hasattr(function, "parent")
                        and function.parent
                    ):
                        continue

                    # Check if parameter name appears in function body
                    # This is a simple heuristic and may produce false positives
                    # (e.g. multi-line signatures keep params on "body" lines)
                    param_regex = r"\b" + re.escape(param.name) + r"\b"
                    body_lines = (
                        function.source.split("\n")[1:]
                        if function.source.count("\n") > 0
                        else []
                    )
                    body_text = "\n".join(body_lines)

                    if not re.search(param_regex, body_text):
                        unused_params.append(param.name)

                if unused_params:
                    parameter_issues["unused_parameters"].append({
                        "function": func_name,
                        "file": file_path,
                        "line": function.line if hasattr(function, "line") else None,
                        "parameters": unused_params,
                    })

                    self.issues.add_issue(
                        create_issue(
                            message=f"Function '{func_name}' has unused parameters: {', '.join(unused_params)}",
                            severity=IssueSeverity.INFO,
                            file=file_path,
                            line=function.line if hasattr(function, "line") else None,
                            category=IssueCategory.DEAD_CODE,
                            symbol=func_name,
                            suggestion="Remove unused parameters or use them in the function body",
                        )
                    )

            # Check for incorrect parameter usage at call sites
            if hasattr(function, "call_sites"):
                for call_site in function.call_sites:
                    # Skip if call site has no arguments
                    if not hasattr(call_site, "args"):
                        continue

                    # Get required parameter count (excluding those with defaults)
                    required_count = 0
                    if hasattr(function, "parameters"):
                        required_count = sum(
                            1
                            for p in function.parameters
                            if not hasattr(p, "has_default") or not p.has_default
                        )

                    # Get call site file info
                    call_file = (
                        call_site.file.file_path
                        if hasattr(call_site, "file")
                        and hasattr(call_site.file, "file_path")
                        else "unknown"
                    )
                    call_line = call_site.line if hasattr(call_site, "line") else None

                    # Check parameter count
                    # NOTE(review): keyword arguments are not distinguished
                    # here, so calls using keywords may be flagged incorrectly.
                    arg_count = len(call_site.args)
                    if arg_count < required_count:
                        parameter_issues["incorrect_usage"].append({
                            "function": func_name,
                            "caller_file": call_file,
                            "caller_line": call_line,
                            "required_count": required_count,
                            "provided_count": arg_count,
                        })

                        self.issues.add_issue(
                            create_issue(
                                message=f"Call to '{func_name}' has too few arguments ({arg_count} provided, {required_count} required)",
                                severity=IssueSeverity.ERROR,
                                file=call_file,
                                line=call_line,
                                category=IssueCategory.PARAMETER_MISMATCH,
                                symbol=func_name,
                                suggestion=f"Provide all required arguments to '{func_name}'",
                            )
                        )

        # Check for inconsistent parameter types across overloaded functions
        functions_by_name = {}
        for function in self.context.get_functions():
            if hasattr(function, "name"):
                if function.name not in functions_by_name:
                    functions_by_name[function.name] = []
                functions_by_name[function.name].append(function)

        for func_name, overloads in functions_by_name.items():
            if len(overloads) > 1:
                # Check for inconsistent parameter types across each pair
                for i, func1 in enumerate(overloads):
                    for func2 in overloads[i + 1 :]:
                        inconsistent_types = []

                        # Skip if either function has no parameters
                        if not hasattr(func1, "parameters") or not hasattr(
                            func2, "parameters"
                        ):
                            continue

                        # Get common parameter names
                        func1_param_names = {
                            p.name for p in func1.parameters if hasattr(p, "name")
                        }
                        func2_param_names = {
                            p.name for p in func2.parameters if hasattr(p, "name")
                        }
                        common_params = func1_param_names.intersection(
                            func2_param_names
                        )

                        # Check parameter types
                        for param_name in common_params:
                            # Get parameter objects
                            param1 = next(
                                (
                                    p
                                    for p in func1.parameters
                                    if hasattr(p, "name") and p.name == param_name
                                ),
                                None,
                            )
                            param2 = next(
                                (
                                    p
                                    for p in func2.parameters
                                    if hasattr(p, "name") and p.name == param_name
                                ),
                                None,
                            )

                            if (
                                param1
                                and param2
                                and hasattr(param1, "type")
                                and hasattr(param2, "type")
                            ) and (
                                param1.type
                                and param2.type
                                and str(param1.type) != str(param2.type)
                            ):
                                inconsistent_types.append({
                                    "parameter": param_name,
                                    "type1": str(param1.type),
                                    "type2": str(param2.type),
                                    "function1": f"{func1.file.file_path}:{func1.line}"
                                    if hasattr(func1, "file")
                                    and hasattr(func1.file, "file_path")
                                    and hasattr(func1, "line")
                                    else str(func1),
                                    "function2": f"{func2.file.file_path}:{func2.line}"
                                    if hasattr(func2, "file")
                                    and hasattr(func2.file, "file_path")
                                    and hasattr(func2, "line")
                                    else str(func2),
                                })

                        if inconsistent_types:
                            parameter_issues["inconsistent_types"].extend(
                                inconsistent_types
                            )

                            for issue in inconsistent_types:
                                func1_file = (
                                    func1.file.file_path
                                    if hasattr(func1, "file")
                                    and hasattr(func1.file, "file_path")
                                    else "unknown"
                                )
                                func1_line = (
                                    func1.line if hasattr(func1, "line") else None
                                )

                                self.issues.add_issue(
                                    create_issue(
                                        message=f"Inconsistent types for parameter '{issue['parameter']}': {issue['type1']} vs {issue['type2']}",
                                        severity=IssueSeverity.ERROR,
                                        file=func1_file,
                                        line=func1_line,
                                        category=IssueCategory.TYPE_ERROR,
                                        symbol=func_name,
                                        suggestion="Use consistent parameter types across function overloads",
                                    )
                                )

        # Add summary statistics
        parameter_issues["summary"] = {
            "missing_types_count": len(parameter_issues["missing_types"]),
            "inconsistent_types_count": len(parameter_issues["inconsistent_types"]),
            "unused_parameters_count": len(parameter_issues["unused_parameters"]),
            "incorrect_usage_count": len(parameter_issues["incorrect_usage"]),
            "total_issues": (
                len(parameter_issues["missing_types"])
                + len(parameter_issues["inconsistent_types"])
                + len(parameter_issues["unused_parameters"])
                + len(parameter_issues["incorrect_usage"])
            ),
        }

        return parameter_issues
+
    def _check_style_issues(self) -> dict[str, Any]:
        """
        Check for code style issues.

        Flags functions longer than 50 lines and source lines longer than
        100 characters (issues are only filed for lines over 120). Lines that
        are comments, or that contain two or more quote characters (assumed
        to be mostly string literals), are skipped by heuristic.

        Returns:
            Dictionary with "long_functions", "long_lines",
            "inconsistent_naming" (currently never populated), and "summary".
        """
        logger.info("Analyzing code style")

        style_result = {
            "long_functions": [],
            "long_lines": [],
            "inconsistent_naming": [],
            "summary": {
                "long_functions_count": 0,
                "long_lines_count": 0,
                "inconsistent_naming_count": 0,
            },
        }

        # Check for long functions (too many lines)
        for function in self.context.get_functions():
            # Skip if function should be excluded
            if self._should_skip_symbol(function):
                continue

            # Get function code
            if hasattr(function, "source"):
                code = function.source
                lines = code.split("\n")

                # Check function length
                if len(lines) > 50:  # Threshold for "too long"
                    # Get file path and name safely
                    file_path = (
                        function.file.file_path
                        if hasattr(function, "file")
                        and hasattr(function.file, "file_path")
                        else "unknown"
                    )
                    func_name = (
                        function.name if hasattr(function, "name") else str(function)
                    )

                    # Add to long functions list
                    style_result["long_functions"].append({
                        "name": func_name,
                        "file": file_path,
                        "line": function.line if hasattr(function, "line") else None,
                        "line_count": len(lines),
                    })

                    # Add issue
                    self.issues.add_issue(
                        create_issue(
                            message=f"Function '{func_name}' is too long ({len(lines)} lines)",
                            severity=IssueSeverity.INFO,
                            file=file_path,
                            line=function.line if hasattr(function, "line") else None,
                            category=IssueCategory.STYLE_ISSUE,
                            symbol=func_name,
                            suggestion="Consider breaking this function into smaller, more focused functions",
                        )
                    )

        # Check for long lines
        for file in self.context.get_files():
            # Skip binary files
            if hasattr(file, "is_binary") and file.is_binary:
                continue

            # Get file content
            if hasattr(file, "content"):
                lines = file.content.split("\n")
                file_path = file.file_path if hasattr(file, "file_path") else str(file)

                # Find long lines
                for i, line in enumerate(lines):
                    if len(line) > 100:  # Threshold for "too long"
                        # Skip comment lines
                        if line.lstrip().startswith("#") or line.lstrip().startswith(
                            "//"
                        ):
                            continue

                        # Skip lines with strings that can't be easily broken
                        # NOTE(review): this skips ANY line with two quote
                        # characters, so most long string-bearing lines are
                        # never reported -- intentional, but quite coarse.
                        if '"' in line or "'" in line:
                            # If the line is mostly a string, skip it
                            if line.count('"') >= 2 or line.count("'") >= 2:
                                continue

                        # Add to long lines list
                        style_result["long_lines"].append({
                            "file": file_path,
                            "line_number": i + 1,
                            "line_length": len(line),
                            "line_content": line[:50] + "..."
                            if len(line) > 50
                            else line,
                        })

                        # Add issue (only for very long lines)
                        if len(line) > 120:
                            self.issues.add_issue(
                                create_issue(
                                    message=f"Line is too long ({len(line)} characters)",
                                    severity=IssueSeverity.INFO,
                                    file=file_path,
                                    line=i + 1,
                                    category=IssueCategory.STYLE_ISSUE,
                                    suggestion="Consider breaking this line into multiple lines",
                                )
                            )

        # Update summary
        style_result["summary"]["long_functions_count"] = len(
            style_result["long_functions"]
        )
        style_result["summary"]["long_lines_count"] = len(style_result["long_lines"])
        style_result["summary"]["inconsistent_naming_count"] = len(
            style_result["inconsistent_naming"]
        )

        return style_result
+
    def _check_implementations(self) -> dict[str, Any]:
        """
        Check for implementation issues.

        Finds functions whose bodies are empty (only whitespace, comments,
        docstring openers, or ``pass``) and abstract methods that subclasses
        fail to implement. The "unimplemented_functions" and
        "interface_methods_not_implemented" buckets exist in the result but
        are never populated by this version.

        Returns:
            Dictionary of findings plus a "summary" of counts.
        """
        logger.info("Analyzing implementations")

        implementation_issues = {
            "unimplemented_functions": [],
            "empty_functions": [],
            "abstract_methods_without_implementation": [],
            "interface_methods_not_implemented": [],
            "summary": {
                "unimplemented_functions_count": 0,
                "empty_functions_count": 0,
                "abstract_methods_without_implementation_count": 0,
                "interface_methods_not_implemented_count": 0,
            },
        }

        # Check for empty functions
        for function in self.context.get_functions():
            # Skip if function should be excluded
            if self._should_skip_symbol(function):
                continue

            # Get function source
            if hasattr(function, "source"):
                source = function.source

                # Check if function is empty or just has 'pass'
                is_empty = False

                if not source or source.strip() == "":
                    is_empty = True
                else:
                    # Extract function body (skip the first line with the def)
                    body_lines = source.split("\n")[1:] if "\n" in source else []

                    # Check if body is empty or just has whitespace, docstring, or pass
                    # NOTE(review): only lines STARTING with quotes are treated
                    # as docstring lines, so multi-line docstring interiors
                    # count as real code here -- confirm this is acceptable.
                    non_empty_lines = [
                        line
                        for line in body_lines
                        if line.strip()
                        and not line.strip().startswith("#")
                        and not (
                            line.strip().startswith('"""')
                            or line.strip().startswith("'''")
                        )
                        and line.strip() != "pass"
                    ]

                    if not non_empty_lines:
                        is_empty = True

                if is_empty:
                    # Get file path and name safely
                    file_path = (
                        function.file.file_path
                        if hasattr(function, "file")
                        and hasattr(function.file, "file_path")
                        else "unknown"
                    )
                    func_name = (
                        function.name if hasattr(function, "name") else str(function)
                    )

                    # Skip interface/abstract methods that are supposed to be empty
                    is_abstract = (
                        hasattr(function, "is_abstract") and function.is_abstract
                    ) or (
                        hasattr(function, "parent")
                        and hasattr(function.parent, "is_interface")
                        and function.parent.is_interface
                    )

                    if not is_abstract:
                        # Add to empty functions list
                        implementation_issues["empty_functions"].append({
                            "name": func_name,
                            "file": file_path,
                            "line": function.line
                            if hasattr(function, "line")
                            else None,
                        })

                        # Add issue
                        self.issues.add_issue(
                            create_issue(
                                message=f"Function '{func_name}' is empty",
                                severity=IssueSeverity.WARNING,
                                file=file_path,
                                line=function.line
                                if hasattr(function, "line")
                                else None,
                                category=IssueCategory.MISSING_IMPLEMENTATION,
                                symbol=func_name,
                                suggestion="Implement this function or remove it if not needed",
                            )
                        )

        # Check for abstract methods without implementations
        abstract_methods = []
        for function in self.context.get_functions():
            # Skip if function should be excluded
            if self._should_skip_symbol(function):
                continue

            # Check if function is abstract (flag or @abstractmethod decorator)
            is_abstract = (
                hasattr(function, "is_abstract") and function.is_abstract
            ) or (
                hasattr(function, "decorators")
                and any(
                    hasattr(d, "name")
                    and d.name in ["abstractmethod", "abc.abstractmethod"]
                    for d in function.decorators
                )
            )

            if (
                is_abstract
                and hasattr(function, "parent")
                and hasattr(function, "name")
            ):
                abstract_methods.append((function.parent, function.name))

        # For each abstract method, check if it has implementations in subclasses
        for parent, method_name in abstract_methods:
            if not hasattr(parent, "name"):
                continue

            parent_name = parent.name

            # Find all subclasses (matched by superclass NAME, not identity)
            subclasses = []
            for cls in self.context.get_classes():
                if hasattr(cls, "superclasses"):
                    for superclass in cls.superclasses:
                        if (
                            hasattr(superclass, "name")
                            and superclass.name == parent_name
                        ):
                            subclasses.append(cls)

            # Check if method is implemented in all subclasses
            for subclass in subclasses:
                if not hasattr(subclass, "methods"):
                    continue

                # Check if method is implemented
                implemented = any(
                    hasattr(m, "name") and m.name == method_name
                    for m in subclass.methods
                )

                if not implemented:
                    # Get file path and name safely
                    file_path = (
                        subclass.file.file_path
                        if hasattr(subclass, "file")
                        and hasattr(subclass.file, "file_path")
                        else "unknown"
                    )
                    cls_name = (
                        subclass.name if hasattr(subclass, "name") else str(subclass)
                    )

                    # Add to unimplemented list
                    implementation_issues[
                        "abstract_methods_without_implementation"
                    ].append({
                        "method": method_name,
                        "parent_class": parent_name,
                        "subclass": cls_name,
                        "file": file_path,
                        "line": subclass.line if hasattr(subclass, "line") else None,
                    })

                    # Add issue
                    self.issues.add_issue(
                        create_issue(
                            message=f"Class '{cls_name}' does not implement abstract method '{method_name}' from '{parent_name}'",
                            severity=IssueSeverity.ERROR,
                            file=file_path,
                            line=subclass.line if hasattr(subclass, "line") else None,
                            category=IssueCategory.MISSING_IMPLEMENTATION,
                            symbol=cls_name,
                            suggestion=f"Implement the '{method_name}' method in '{cls_name}'",
                        )
                    )

        # Update summary
        implementation_issues["summary"]["unimplemented_functions_count"] = len(
            implementation_issues["unimplemented_functions"]
        )
        implementation_issues["summary"]["empty_functions_count"] = len(
            implementation_issues["empty_functions"]
        )
        implementation_issues["summary"][
            "abstract_methods_without_implementation_count"
        ] = len(implementation_issues["abstract_methods_without_implementation"])
        implementation_issues["summary"]["interface_methods_not_implemented_count"] = (
            len(implementation_issues["interface_methods_not_implemented"])
        )

        return implementation_issues
+
+ def _calculate_maintainability(self) -> dict[str, Any]:
+ """
+ Calculate maintainability metrics.
+
+ Returns:
+ Dictionary containing maintainability analysis results
+ """
+ logger.info("Analyzing maintainability")
+
+ maintainability_result = {
+ "function_maintainability": [],
+ "low_maintainability_functions": [],
+ "average_maintainability": 0.0,
+ "maintainability_distribution": {"high": 0, "medium": 0, "low": 0},
+ }
+
+ # Process all functions to calculate maintainability
+ total_maintainability = 0
+ function_count = 0
+
+ for function in self.context.get_functions():
+ # Skip if function should be excluded
+ if self._should_skip_symbol(function):
+ continue
+
+ # Skip if no code block
+ if not hasattr(function, "code_block"):
+ continue
+
+ # Calculate metrics
+ complexity = self._calculate_cyclomatic_complexity(function)
+
+ # Calculate Halstead volume (approximation)
+ operators = 0
+ operands = 0
+
+ if hasattr(function, "source"):
+ code = function.source
+ # Simple approximation of operators and operands
+ operators = len([c for c in code if c in "+-*/=<>!&|^~%"])
+ # Counting words as potential operands
+ operands = len(re.findall(r"\b[a-zA-Z_][a-zA-Z0-9_]*\b", code))
+
+ halstead_volume = (
+ operators * operands * math.log2(operators + operands)
+ if operators + operands > 0
+ else 0
+ )
+
+ # Count lines of code
+ loc = len(function.source.split("\n")) if hasattr(function, "source") else 0
+
+ # Calculate maintainability index
+ # Formula: 171 - 5.2 * ln(Halstead Volume) - 0.23 * (Cyclomatic Complexity) - 16.2 * ln(LOC)
+ halstead_term = (
+ 5.2 * math.log(max(1, halstead_volume)) if halstead_volume > 0 else 0
+ )
+ complexity_term = 0.23 * complexity
+ loc_term = 16.2 * math.log(max(1, loc)) if loc > 0 else 0
+
+ maintainability = 171 - halstead_term - complexity_term - loc_term
+
+ # Normalize to 0-100 scale
+ maintainability = max(0, min(100, maintainability * 100 / 171))
+
+ # Get file path and name safely
+ file_path = (
+ function.file.file_path
+ if hasattr(function, "file") and hasattr(function.file, "file_path")
+ else "unknown"
+ )
+ func_name = function.name if hasattr(function, "name") else str(function)
+
+ # Add to maintainability list
+ maintainability_result["function_maintainability"].append({
+ "name": func_name,
+ "file": file_path,
+ "line": function.line if hasattr(function, "line") else None,
+ "maintainability": maintainability,
+ "complexity": complexity,
+ "halstead_volume": halstead_volume,
+ "loc": loc,
+ })
+
+ # Track total maintainability
+ total_maintainability += maintainability
+ function_count += 1
+
+ # Categorize maintainability
+ if maintainability >= 70:
+ maintainability_result["maintainability_distribution"]["high"] += 1
+ elif maintainability >= 50:
+ maintainability_result["maintainability_distribution"]["medium"] += 1
+ else:
+ maintainability_result["maintainability_distribution"]["low"] += 1
+
+ # Flag low maintainability functions
+ maintainability_result["low_maintainability_functions"].append({
+ "name": func_name,
+ "file": file_path,
+ "line": function.line if hasattr(function, "line") else None,
+ "maintainability": maintainability,
+ "complexity": complexity,
+ "halstead_volume": halstead_volume,
+ "loc": loc,
+ })
+
+ # Add issue
+ self.issues.add_issue(
+ create_issue(
+ message=f"Function '{func_name}' has low maintainability index ({maintainability:.1f})",
+ severity=IssueSeverity.WARNING,
+ file=file_path,
+ line=function.line if hasattr(function, "line") else None,
+ category=IssueCategory.COMPLEXITY,
+ symbol=func_name,
+ suggestion="Consider refactoring this function to improve maintainability",
+ )
+ )
+
+ # Calculate average maintainability
+ maintainability_result["average_maintainability"] = (
+ total_maintainability / function_count if function_count > 0 else 0.0
+ )
+
+ # Sort low maintainability functions
+ maintainability_result["low_maintainability_functions"].sort(
+ key=lambda x: x["maintainability"]
+ )
+
+ return maintainability_result
+
+ def _should_skip_symbol(self, symbol) -> bool:
+ """
+ Check if a symbol should be skipped during analysis.
+
+ Args:
+ symbol: Symbol to check
+
+ Returns:
+ True if the symbol should be skipped, False otherwise
+ """
+ # Skip if no file
+ if not hasattr(symbol, "file"):
+ return True
+
+ # Skip if file should be skipped
+ return bool(self._should_skip_file(symbol.file))
+
+ def _should_skip_file(self, file) -> bool:
+ """
+ Check if a file should be skipped during analysis.
+
+ Args:
+ file: File to check
+
+ Returns:
+ True if the file should be skipped, False otherwise
+ """
+ # Skip binary files
+ if hasattr(file, "is_binary") and file.is_binary:
+ return True
+
+ # Get file path
+ file_path = file.file_path if hasattr(file, "file_path") else str(file)
+
+ # Skip test files
+ if "test" in file_path.lower():
+ return True
+
+ # Skip generated files
+ if "generated" in file_path.lower():
+ return True
+
+ # Skip files in ignore list
+ return any(pattern in file_path for pattern in self.context.file_ignore_list)
diff --git a/codegen-on-oss/codegen_on_oss/analyzers/codebase_analysis.py b/codegen-on-oss/codegen_on_oss/analyzers/codebase_analysis.py
new file mode 100644
index 000000000..5bb8db053
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/analyzers/codebase_analysis.py
@@ -0,0 +1,322 @@
+#!/usr/bin/env python3
+"""
+Codebase Analysis Module
+
+This module provides basic code analysis functionality for codebases, including:
+- Functions for getting codebase summaries
+- Functions for getting file summaries
+- Basic code analysis utilities
+
+This is a dedicated implementation of the SDK's codebase_analysis.py module
+for the analyzers directory, ensuring consistent analysis results.
+"""
+
+from typing import Any, Dict, List, Optional, Set, Tuple, Union
+
+from codegen.sdk.core.class_definition import Class
+from codegen.sdk.core.codebase import Codebase
+from codegen.sdk.core.external_module import ExternalModule
+from codegen.sdk.core.file import SourceFile
+from codegen.sdk.core.function import Function
+from codegen.sdk.core.import_resolution import Import
+from codegen.sdk.core.symbol import Symbol
+from codegen.sdk.enums import EdgeType, SymbolType
+
+
def get_codebase_summary(codebase: Codebase) -> str:  # type: ignore
    """
    Generate a comprehensive summary of a codebase.

    Args:
        codebase: The Codebase object to summarize

    Returns:
        A formatted string containing a summary of the codebase's nodes and edges
    """
    # Node counts come straight off the codebase graph context and the
    # iterable accessors (files, imports, symbols, ...).
    node_summary = f"""Contains {len(codebase.ctx.get_nodes())} nodes
- {len(list(codebase.files))} files
- {len(list(codebase.imports))} imports
- {len(list(codebase.external_modules))} external_modules
- {len(list(codebase.symbols))} symbols
\t- {len(list(codebase.classes))} classes
\t- {len(list(codebase.functions))} functions
\t- {len(list(codebase.global_vars))} global_vars
\t- {len(list(codebase.interfaces))} interfaces
"""
    # Edge counts are bucketed by edge type (usage / import resolution / export).
    edge_summary = f"""Contains {len(codebase.ctx.edges)} edges
- {len([x for x in codebase.ctx.edges if x[2].type == EdgeType.SYMBOL_USAGE])} symbol -> used symbol
- {len([x for x in codebase.ctx.edges if x[2].type == EdgeType.IMPORT_SYMBOL_RESOLUTION])} import -> used symbol
- {len([x for x in codebase.ctx.edges if x[2].type == EdgeType.EXPORT])} export -> exported symbol
    """

    return f"{node_summary}\n{edge_summary}"
+
+
def get_file_summary(file: SourceFile) -> str:  # type: ignore
    """
    Generate a summary of a source file.

    Args:
        file: The SourceFile object to summarize

    Returns:
        A formatted string containing a summary of the file's dependencies and usage
    """
    # NOTE(review): the "importers" line below reuses len(file.imports); if the
    # SDK exposes an inbound-usage accessor (e.g. file.importers), that is
    # probably what was intended -- confirm.
    return f"""==== [ `{file.name}` (SourceFile) Dependency Summary ] ====
- {len(file.imports)} imports
- {len(file.symbols)} symbol references
\t- {len(file.classes)} classes
\t- {len(file.functions)} functions
\t- {len(file.global_vars)} global variables
\t- {len(file.interfaces)} interfaces

==== [ `{file.name}` Usage Summary ] ====
- {len(file.imports)} importers
"""
+
+
def get_class_summary(cls: Class) -> str:  # type: ignore
    """
    Generate a summary of a class.

    Args:
        cls: The Class object to summarize

    Returns:
        A formatted string containing a summary of the class's dependencies and usage
    """
    # Class-specific dependency details first, then the generic symbol usage summary.
    return f"""==== [ `{cls.name}` (Class) Dependency Summary ] ====
- parent classes: {cls.parent_class_names}
- {len(cls.methods)} methods
- {len(cls.attributes)} attributes
- {len(cls.decorators)} decorators
- {len(cls.dependencies)} dependencies

{get_symbol_summary(cls)}
    """
+
+
def get_function_summary(func: Function) -> str:  # type: ignore
    """
    Generate a summary of a function.

    Args:
        func: The Function object to summarize

    Returns:
        A formatted string containing a summary of the function's dependencies and usage
    """
    # Function-specific dependency details first, then the generic symbol usage summary.
    return f"""==== [ `{func.name}` (Function) Dependency Summary ] ====
- {len(func.return_statements)} return statements
- {len(func.parameters)} parameters
- {len(func.function_calls)} function calls
- {len(func.call_sites)} call sites
- {len(func.decorators)} decorators
- {len(func.dependencies)} dependencies

{get_symbol_summary(func)}
    """
+
+
def get_symbol_summary(symbol: Symbol) -> str:  # type: ignore
    """
    Generate a summary of a symbol.

    Args:
        symbol: The Symbol object to summarize

    Returns:
        A formatted string containing a summary of the symbol's usage
    """
    usages = symbol.symbol_usages
    # Usages that are imports are broken out further below by what they import.
    imported_symbols = [x.imported_symbol for x in usages if isinstance(x, Import)]

    return f"""==== [ `{symbol.name}` ({type(symbol).__name__}) Usage Summary ] ====
- {len(usages)} usages
\t- {len([x for x in usages if isinstance(x, Symbol) and x.symbol_type == SymbolType.Function])} functions
\t- {len([x for x in usages if isinstance(x, Symbol) and x.symbol_type == SymbolType.Class])} classes
\t- {len([x for x in usages if isinstance(x, Symbol) and x.symbol_type == SymbolType.GlobalVar])} global variables
\t- {len([x for x in usages if isinstance(x, Symbol) and x.symbol_type == SymbolType.Interface])} interfaces
\t- {len(imported_symbols)} imports
\t\t- {len([x for x in imported_symbols if isinstance(x, Symbol) and x.symbol_type == SymbolType.Function])} functions
\t\t- {len([x for x in imported_symbols if isinstance(x, Symbol) and x.symbol_type == SymbolType.Class])} classes
\t\t- {len([x for x in imported_symbols if isinstance(x, Symbol) and x.symbol_type == SymbolType.GlobalVar])} global variables
\t\t- {len([x for x in imported_symbols if isinstance(x, Symbol) and x.symbol_type == SymbolType.Interface])} interfaces
\t\t- {len([x for x in imported_symbols if isinstance(x, ExternalModule)])} external modules
\t\t- {len([x for x in imported_symbols if isinstance(x, SourceFile)])} files
    """
+
+
def get_dependency_graph(codebase: Codebase, file_path: Optional[str] = None) -> Dict[str, List[str]]:  # type: ignore
    """
    Generate a dependency graph for a codebase or specific file.

    Args:
        codebase: The Codebase object to analyze
        file_path: Optional path to a specific file to analyze

    Returns:
        A dictionary mapping file paths to lists of dependency file paths
        (deduplicated, self-references removed; order is unspecified)
    """
    graph: Dict[str, List[str]] = {}

    for source_file in codebase.files:
        # Honor the optional single-file filter.
        if file_path and source_file.file_path != file_path:
            continue

        found = []

        # Direct imports contribute the file that defines the imported symbol.
        for imp in source_file.imports:
            if hasattr(imp, "imported_symbol") and hasattr(imp.imported_symbol, "file"):
                if hasattr(imp.imported_symbol.file, "file_path"):
                    found.append(imp.imported_symbol.file.file_path)

        # Symbol-level dependencies contribute their defining files as well.
        for sym in source_file.symbols:
            for dep in sym.dependencies:
                if hasattr(dep, "file") and hasattr(dep.file, "file_path"):
                    found.append(dep.file.file_path)

        # De-duplicate and drop self-references before recording.
        graph[source_file.file_path] = list({p for p in found if p != source_file.file_path})

    return graph
+
+
def get_symbol_references(codebase: Codebase, symbol_name: str) -> List[Dict[str, Any]]:  # type: ignore
    """
    Find all references to a symbol in the codebase.

    Args:
        codebase: The Codebase object to search
        symbol_name: The name of the symbol to find references for

    Returns:
        A list of dictionaries containing reference information
        (file_path, line, source_type, source_name, edge_type)
    """
    references = []

    # Find all symbols with the given name (a name may have several definitions)
    target_symbols = [s for s in codebase.symbols if s.name == symbol_name]

    for symbol in target_symbols:
        # Scan all edges for ones pointing at this symbol; edges are
        # (source_id, target_id, edge_data) tuples.
        for edge in codebase.ctx.edges:
            if edge[1] == symbol.id:  # If the edge points to our symbol
                source_node = codebase.ctx.get_node(edge[0])
                if source_node:
                    # Get file and line information if available
                    file_path = None
                    line_number = None

                    if hasattr(source_node, "file") and hasattr(source_node.file, "file_path"):
                        file_path = source_node.file.file_path

                    if hasattr(source_node, "line"):
                        line_number = source_node.line

                    references.append(
                        {
                            "file_path": file_path,
                            "line": line_number,
                            "source_type": type(source_node).__name__,
                            "source_name": getattr(source_node, "name", str(source_node)),
                            "edge_type": edge[2].type.name
                            if hasattr(edge[2], "type")
                            else "Unknown",
                        }
                    )

    return references
+
+
def get_file_complexity_metrics(file: SourceFile) -> Dict[str, Any]:  # type: ignore
    """
    Calculate complexity metrics for a source file.

    Cyclomatic complexity here is the textual heuristic implemented by
    _calculate_function_complexity, not a parse-based measure.

    Args:
        file: The SourceFile object to analyze

    Returns:
        A dictionary containing complexity metrics (counts, total and
        per-function/per-class maxima)
    """
    metrics = {
        "file_path": file.file_path,
        "name": file.name,
        "num_lines": 0,
        "num_imports": len(file.imports),
        "num_classes": len(file.classes),
        "num_functions": len(file.functions),
        "num_global_vars": len(file.global_vars),
        "cyclomatic_complexity": 0,
        "max_function_complexity": 0,
        "max_class_complexity": 0,
    }

    # Calculate lines of code if source is available
    if hasattr(file, "source") and file.source:
        metrics["num_lines"] = len(file.source.split("\n"))

    # Calculate function complexities.
    # NOTE(review): assumes file.functions excludes class methods; if it does
    # not, method complexity is double-counted by the class loop below -- confirm.
    function_complexities = []
    for func in file.functions:
        complexity = _calculate_function_complexity(func)
        function_complexities.append(complexity)
        metrics["cyclomatic_complexity"] += complexity

    if function_complexities:
        metrics["max_function_complexity"] = max(function_complexities)

    # Calculate class complexities (a class's complexity is the sum of its
    # method complexities)
    class_complexities = []
    for cls in file.classes:
        complexity = 0
        for method in cls.methods:
            method_complexity = _calculate_function_complexity(method)
            complexity += method_complexity
        class_complexities.append(complexity)
        metrics["cyclomatic_complexity"] += complexity

    if class_complexities:
        metrics["max_class_complexity"] = max(class_complexities)

    return metrics
+
+
+def _calculate_function_complexity(func: Function) -> int: # type: ignore
+ """
+ Calculate the cyclomatic complexity of a function.
+
+ Args:
+ func: The Function object to analyze
+
+ Returns:
+ An integer representing the cyclomatic complexity
+ """
+ complexity = 1 # Base complexity
+
+ if not hasattr(func, "source") or not func.source:
+ return complexity
+
+ # Simple heuristic: count control flow statements
+ source = func.source.lower()
+
+ # Count if statements
+ complexity += source.count(" if ") + source.count("\nif ")
+
+ # Count else if / elif statements
+ complexity += source.count("elif ") + source.count("else if ")
+
+ # Count loops
+ complexity += source.count(" for ") + source.count("\nfor ")
+ complexity += source.count(" while ") + source.count("\nwhile ")
+
+ # Count exception handlers
+ complexity += source.count("except ") + source.count("catch ")
+
+ # Count logical operators (each one creates a new path)
+ complexity += source.count(" and ") + source.count(" && ")
+ complexity += source.count(" or ") + source.count(" || ")
+
+ return complexity
diff --git a/codegen-on-oss/codegen_on_oss/analyzers/current_code_codebase.py b/codegen-on-oss/codegen_on_oss/analyzers/current_code_codebase.py
new file mode 100644
index 000000000..07eb5795f
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/analyzers/current_code_codebase.py
@@ -0,0 +1,261 @@
+import importlib
+import os
+from pathlib import Path
+from typing import TypedDict
+
+from codegen.configs.models.codebase import CodebaseConfig
+from codegen.configs.models.secrets import SecretsConfig
+from codegen.git.repo_operator.repo_operator import RepoOperator
+from codegen.git.schemas.repo_config import RepoConfig
+from codegen.sdk.codebase.config import ProjectConfig
+from codegen.sdk.core.codebase import Codebase, CodebaseType
+from codegen.shared.decorators.docs import (
+ DocumentedObject,
+ apidoc_objects,
+ no_apidoc_objects,
+ py_apidoc_objects,
+ ts_apidoc_objects,
+)
+from codegen.shared.enums.programming_language import ProgrammingLanguage
+from codegen.shared.logging.get_logger import get_logger
+
+logger = get_logger(__name__)
+
+
def get_repo_path() -> str:
    """Returns the base directory path of the repository being analyzed.

    If not explicitly provided, defaults to the current working directory.
    """
    # Default to current directory if not specified
    return os.getcwd()
+
+
def get_base_path(repo_path: str) -> str:
    """Determines the base path within the repository.

    For monorepos this might be a subdirectory; for simple repos it is
    the root (returned as an empty string).
    """
    # A top-level "src" directory is a common layout convention; treat it
    # as the code root when present, otherwise use the repository root.
    src_dir = os.path.join(repo_path, "src")
    return "src" if os.path.isdir(src_dir) else ""
+
+
def get_selected_codebase(
    repo_path: str | None = None,
    base_path: str | None = None,
    config: CodebaseConfig | None = None,
    secrets: SecretsConfig | None = None,
    subdirectories: list[str] | None = None,
    programming_language: ProgrammingLanguage | None = None,
) -> CodebaseType:
    """Returns a Codebase instance for the selected repository.

    Parameters:
        repo_path: Path to the repository (defaults to the current directory)
        base_path: Base directory within the repository where code is located
        config: CodebaseConfig instance for customizing codebase behavior
        secrets: SecretsConfig for any credentials needed
        subdirectories: List of subdirectories to include in the analysis
        programming_language: Primary programming language of the codebase

    Returns:
        A Codebase instance initialized with the provided parameters
    """
    if not repo_path:
        repo_path = get_repo_path()

    if not base_path:
        base_path = get_base_path(repo_path)

    logger.info(
        f"Creating codebase from repo at: {repo_path} with base_path {base_path}"
    )

    # Set up repository config
    repo_config = RepoConfig.from_repo_path(repo_path)
    repo_config.respect_gitignore = True  # Respect gitignore by default
    op = RepoOperator(repo_config=repo_config, bot_commit=False)

    # Use provided config or create a new one; model_copy produces a new
    # object, so the caller's config is not mutated.
    config = (config or CodebaseConfig()).model_copy(update={"base_path": base_path})

    # Determine the programming language if not provided
    if not programming_language:
        # Default to Python, but try to detect from files
        programming_language = ProgrammingLanguage.PYTHON
        # TODO: Add language detection logic if needed

    # Create project config
    projects = [
        ProjectConfig(
            repo_operator=op,
            programming_language=programming_language,
            subdirectories=subdirectories,
            base_path=base_path,
        )
    ]

    # Create and return codebase
    codebase = Codebase(projects=projects, config=config, secrets=secrets)
    return codebase
+
+
def import_modules_from_path(directory_path: str, package_prefix: str = ""):
    """Imports all Python modules from the given directory path.

    This is used to collect all documented objects from the modules.
    Modules whose import fails are logged and skipped.

    Parameters:
        directory_path: Path to the directory containing Python modules
        package_prefix: Prefix to use for module imports (e.g., 'mypackage.')
    """
    directory = Path(directory_path)
    if not directory.exists() or not directory.is_dir():
        logger.warning(f"Directory does not exist: {directory_path}")
        return

    for file in directory.rglob("*.py"):
        if "__init__" in file.name or "braintrust_evaluator" in file.name:
            continue

        # Build the dotted module name *before* the try block so the except
        # handler can always reference it (previously a failure while
        # computing the name raised NameError inside the handler). Joining
        # Path.parts instead of replacing "/" also keeps this correct on
        # platforms that use "\\" as the path separator.
        relative_path = file.relative_to(directory)
        module_name = package_prefix + ".".join(relative_path.with_suffix("").parts)

        try:
            # Import the module
            importlib.import_module(module_name)
            logger.debug(f"Successfully imported module: {module_name}")
        except Exception as e:
            logger.exception(f"Error importing {module_name}: {e}")
+
+
class DocumentedObjects(TypedDict):
    """Type definition for the documented objects collection.

    Keys mirror the four registry lists exported by
    codegen.shared.decorators.docs.
    """

    apidoc: list[DocumentedObject]
    ts_apidoc: list[DocumentedObject]
    py_apidoc: list[DocumentedObject]
    no_apidoc: list[DocumentedObject]
+
+
def get_documented_objects(
    repo_path: str | None = None,
    package_prefix: str = "",
    import_paths: list[str] | None = None,
) -> DocumentedObjects:
    """Get all objects decorated with API documentation decorators.

    This function imports modules from the specified paths and collects
    objects decorated with apidoc, py_apidoc, ts_apidoc, and no_apidoc.

    Parameters:
        repo_path: Path to the repository root
        package_prefix: Prefix to use for importing modules
        import_paths: List of paths to import from

    Returns:
        A dictionary containing the collected documented objects
    """
    if not repo_path:
        repo_path = get_repo_path()

    if not import_paths:
        # Default to importing from common directories
        base_path = get_base_path(repo_path)
        import_paths = [
            os.path.join(repo_path, base_path),
            os.path.join(repo_path, base_path, "codegen")
            if base_path
            else os.path.join(repo_path, "codegen"),
            os.path.join(repo_path, base_path, "sdk")
            if base_path
            else os.path.join(repo_path, "sdk"),
        ]

    # Import all modules to populate the documented objects lists
    for path in import_paths:
        if os.path.exists(path) and os.path.isdir(path):
            import_modules_from_path(path, package_prefix)

    from codegen.sdk.core.codebase import CodebaseType, PyCodebaseType, TSCodebaseType

    # Add core types if they aren't already registered. The registry holds
    # DocumentedObject wrappers, so membership must be checked against the
    # wrapped .object attribute -- the previous `X not in apidoc_objects`
    # compared the raw type against the wrappers, never matched, and appended
    # a duplicate entry on every call.
    registered = {id(entry.object) for entry in apidoc_objects}
    for type_name, core_type in (
        ("CodebaseType", CodebaseType),
        ("PyCodebaseType", PyCodebaseType),
        ("TSCodebaseType", TSCodebaseType),
    ):
        if id(core_type) not in registered:
            apidoc_objects.append(
                DocumentedObject(
                    name=type_name,
                    module="codegen.sdk.core.codebase",
                    object=core_type,
                )
            )

    # Return the collected objects
    return {
        "apidoc": apidoc_objects,
        "py_apidoc": py_apidoc_objects,
        "ts_apidoc": ts_apidoc_objects,
        "no_apidoc": no_apidoc_objects,
    }
+
+
def get_codebase_with_docs(
    repo_path: str | None = None,
    base_path: str | None = None,
    config: CodebaseConfig | None = None,
    secrets: SecretsConfig | None = None,
    subdirectories: list[str] | None = None,
    programming_language: ProgrammingLanguage | None = None,
    package_prefix: str = "",
    import_paths: list[str] | None = None,
) -> tuple[CodebaseType, DocumentedObjects]:
    """Convenience function to get both a codebase and its documented objects.

    Parameters:
        repo_path: Path to the repository
        base_path: Base directory within the repository
        config: CodebaseConfig instance
        secrets: SecretsConfig instance
        subdirectories: List of subdirectories to include
        programming_language: Primary programming language of the codebase
        package_prefix: Prefix for importing modules
        import_paths: List of paths to import from

    Returns:
        A tuple containing the Codebase instance and the documented objects
    """
    # Resolve the repo path once so both helpers see the same root.
    if not repo_path:
        repo_path = get_repo_path()

    codebase = get_selected_codebase(
        repo_path=repo_path,
        base_path=base_path,
        config=config,
        secrets=secrets,
        subdirectories=subdirectories,
        programming_language=programming_language,
    )

    documented_objects = get_documented_objects(
        repo_path=repo_path, package_prefix=package_prefix, import_paths=import_paths
    )

    return codebase, documented_objects
diff --git a/codegen-on-oss/codegen_on_oss/analyzers/issues.py b/codegen-on-oss/codegen_on_oss/analyzers/issues.py
new file mode 100644
index 000000000..c20ddc3ea
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/analyzers/issues.py
@@ -0,0 +1,512 @@
+#!/usr/bin/env python3
+"""
+Issues Module
+
+This module defines issue models, categories, and severities for code analysis.
+It provides a standardized way to represent and manage issues across different analyzers.
+"""
+
+import json
+import logging
+from collections.abc import Callable
+from dataclasses import asdict, dataclass, field
+from enum import Enum
+from typing import Any
+
# Configure logging.
# NOTE(review): logging.basicConfig at import time configures the process-wide
# root logger; confirm this is intended for library code (callers cannot
# easily override it afterwards).
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
    handlers=[logging.StreamHandler()],
)
logger = logging.getLogger(__name__)
+
+
class AnalysisType(str, Enum):
    """Types of analysis that can be performed.

    Inherits from str so members serialize naturally (e.g. in JSON output).
    """

    CODEBASE = "codebase"
    PR = "pr"
    COMPARISON = "comparison"
    CODE_QUALITY = "code_quality"
    DEPENDENCY = "dependency"
    SECURITY = "security"
    PERFORMANCE = "performance"
    TYPE_CHECKING = "type_checking"
+
+
class IssueSeverity(str, Enum):
    """Severity levels for issues, from most to least urgent."""

    CRITICAL = "critical"  # Must be fixed immediately, blocks functionality
    ERROR = "error"  # Must be fixed, causes errors or undefined behavior
    WARNING = "warning"  # Should be fixed, may cause problems in future
    INFO = "info"  # Informational, could be improved but not critical
+
+
class IssueCategory(str, Enum):
    """Categories of issues that can be detected, grouped by concern."""

    # Code Quality Issues
    DEAD_CODE = "dead_code"  # Unused variables, functions, etc.
    COMPLEXITY = "complexity"  # Code too complex, needs refactoring
    STYLE_ISSUE = "style_issue"  # Code style issues (line length, etc.)
    DOCUMENTATION = "documentation"  # Missing or incomplete documentation

    # Type and Parameter Issues
    TYPE_ERROR = "type_error"  # Type errors or inconsistencies
    PARAMETER_MISMATCH = "parameter_mismatch"  # Parameter type or count mismatch
    RETURN_TYPE_ERROR = "return_type_error"  # Return type error or mismatch

    # Implementation Issues
    IMPLEMENTATION_ERROR = "implementation_error"  # Incorrect implementation
    MISSING_IMPLEMENTATION = "missing_implementation"  # Missing implementation

    # Dependency Issues
    IMPORT_ERROR = "import_error"  # Import errors or issues
    DEPENDENCY_CYCLE = "dependency_cycle"  # Circular dependency
    MODULE_COUPLING = "module_coupling"  # High coupling between modules

    # API Issues
    API_CHANGE = "api_change"  # API has changed in a breaking way
    API_USAGE_ERROR = "api_usage_error"  # Incorrect API usage

    # Security Issues
    SECURITY_VULNERABILITY = "security_vulnerability"  # Security vulnerability

    # Performance Issues
    PERFORMANCE_ISSUE = "performance_issue"  # Performance issue
+
+
class IssueStatus(str, Enum):
    """Lifecycle status of an issue (defaults to OPEN on creation)."""

    OPEN = "open"  # Issue is open and needs to be fixed
    FIXED = "fixed"  # Issue has been fixed
    WONTFIX = "wontfix"  # Issue will not be fixed
    INVALID = "invalid"  # Issue is invalid or not applicable
    DUPLICATE = "duplicate"  # Issue is a duplicate of another
+
+
@dataclass
class CodeLocation:
    """Location of an issue in code: file plus optional line/column span."""

    file: str
    line: int | None = None
    column: int | None = None
    end_line: int | None = None
    end_column: int | None = None

    def to_dict(self) -> dict[str, Any]:
        """Convert to dictionary representation, omitting unset (None) fields."""
        return {k: v for k, v in asdict(self).items() if v is not None}

    @classmethod
    def from_dict(cls, data: dict[str, Any]) -> "CodeLocation":
        """Create from dictionary representation, ignoring unknown keys."""
        return cls(**{k: v for k, v in data.items() if k in cls.__annotations__})

    def __str__(self) -> str:
        """Render as file[:line[:column]] -- the common compiler-style format."""
        if self.line is not None:
            if self.column is not None:
                return f"{self.file}:{self.line}:{self.column}"
            return f"{self.file}:{self.line}"
        return self.file
+
+
@dataclass
class Issue:
    """Represents an issue found during analysis."""

    # Core fields
    message: str
    severity: IssueSeverity
    location: CodeLocation

    # Classification fields
    category: IssueCategory | None = None
    analysis_type: AnalysisType | None = None
    status: IssueStatus = IssueStatus.OPEN

    # Context fields
    symbol: str | None = None
    code: str | None = None
    suggestion: str | None = None
    related_symbols: list[str] = field(default_factory=list)
    related_locations: list[CodeLocation] = field(default_factory=list)

    # Metadata fields
    id: str | None = None
    # NOTE(review): `hash` is never assigned in this module -- confirm it is
    # set by external callers, otherwise it is dead weight.
    hash: str | None = None
    metadata: dict[str, Any] = field(default_factory=dict)

    def __post_init__(self):
        """Initialize derived fields."""
        # Generate an ID if not provided
        if self.id is None:
            import hashlib

            # Create a hash based on location and message. MD5 is used here
            # only as a fast fingerprint, not for security.
            hash_input = f"{self.location.file}:{self.location.line}:{self.message}"
            self.id = hashlib.md5(hash_input.encode()).hexdigest()[:12]

    @property
    def file(self) -> str:
        """Get the file path (shortcut for location.file)."""
        return self.location.file

    @property
    def line(self) -> int | None:
        """Get the line number (shortcut for location.line)."""
        return self.location.line

    def to_dict(self) -> dict[str, Any]:
        """Convert to dictionary representation."""
        result = {
            "id": self.id,
            "message": self.message,
            "severity": self.severity.value,
            "location": self.location.to_dict(),
            "status": self.status.value,
        }

        # Add optional fields only when set, to keep the payload compact
        if self.category:
            result["category"] = self.category.value

        if self.analysis_type:
            result["analysis_type"] = self.analysis_type.value

        if self.symbol:
            result["symbol"] = self.symbol

        if self.code:
            result["code"] = self.code

        if self.suggestion:
            result["suggestion"] = self.suggestion

        if self.related_symbols:
            result["related_symbols"] = self.related_symbols

        if self.related_locations:
            result["related_locations"] = [
                loc.to_dict() for loc in self.related_locations
            ]

        if self.metadata:
            result["metadata"] = self.metadata

        return result

    @classmethod
    def from_dict(cls, data: dict[str, Any]) -> "Issue":
        """Create from dictionary representation.

        String enum values are coerced back to their enum types, nested
        location dicts back to CodeLocation, and unknown keys are dropped.
        """
        # Convert string enums to actual enum values
        if "severity" in data and isinstance(data["severity"], str):
            data["severity"] = IssueSeverity(data["severity"])

        if "category" in data and isinstance(data["category"], str):
            data["category"] = IssueCategory(data["category"])

        if "analysis_type" in data and isinstance(data["analysis_type"], str):
            data["analysis_type"] = AnalysisType(data["analysis_type"])

        if "status" in data and isinstance(data["status"], str):
            data["status"] = IssueStatus(data["status"])

        # Convert location dict to CodeLocation
        if "location" in data and isinstance(data["location"], dict):
            data["location"] = CodeLocation.from_dict(data["location"])

        # Convert related_locations dicts to CodeLocation objects
        if "related_locations" in data and isinstance(data["related_locations"], list):
            data["related_locations"] = [
                CodeLocation.from_dict(loc) if isinstance(loc, dict) else loc
                for loc in data["related_locations"]
            ]

        # NOTE(review): the conversions above mutate the caller's dict in
        # place -- confirm callers do not reuse `data` afterwards.
        return cls(**{k: v for k, v in data.items() if k in cls.__annotations__})
+
+
class IssueCollection:
    """Collection of issues with filtering and grouping capabilities."""

    def __init__(self, issues: list[Issue] | None = None):
        """
        Initialize the issue collection.

        Args:
            issues: Initial list of issues
        """
        self.issues = issues or []
        # Custom predicates registered via add_filter; applied by get_issues()
        self._filters = []

    def add_issue(self, issue: Issue):
        """
        Add an issue to the collection.

        Args:
            issue: Issue to add
        """
        self.issues.append(issue)

    def add_issues(self, issues: list[Issue]):
        """
        Add multiple issues to the collection.

        Args:
            issues: Issues to add
        """
        self.issues.extend(issues)

    def add_filter(self, filter_func: Callable[[Issue], bool], description: str = ""):
        """
        Add a filter function.

        Args:
            filter_func: Function that returns True if issue should be included
            description: Description of the filter (reported by to_dict)
        """
        self._filters.append((filter_func, description))

    def get_issues(
        self,
        severity: IssueSeverity | None = None,
        category: IssueCategory | None = None,
        status: IssueStatus | None = None,
        file_path: str | None = None,
        symbol: str | None = None,
    ) -> list[Issue]:
        """
        Get issues matching the specified criteria.

        All provided criteria are combined with AND; registered custom
        filters are applied first.

        Args:
            severity: Severity to filter by
            category: Category to filter by
            status: Status to filter by
            file_path: File path to filter by
            symbol: Symbol name to filter by (matches symbol or related_symbols)

        Returns:
            List of matching issues
        """
        filtered_issues = self.issues

        # Apply custom filters
        for filter_func, _ in self._filters:
            filtered_issues = [i for i in filtered_issues if filter_func(i)]

        # Apply standard filters
        if severity:
            filtered_issues = [i for i in filtered_issues if i.severity == severity]

        if category:
            filtered_issues = [i for i in filtered_issues if i.category == category]

        if status:
            filtered_issues = [i for i in filtered_issues if i.status == status]

        if file_path:
            filtered_issues = [
                i for i in filtered_issues if i.location.file == file_path
            ]

        if symbol:
            filtered_issues = [
                i
                for i in filtered_issues
                if (
                    i.symbol == symbol
                    or (i.related_symbols and symbol in i.related_symbols)
                )
            ]

        return filtered_issues

    def group_by_severity(self) -> dict[IssueSeverity, list[Issue]]:
        """
        Group issues by severity.

        Returns:
            Dictionary mapping every severity (including empty ones) to its issues
        """
        result = {severity: [] for severity in IssueSeverity}

        for issue in self.issues:
            result[issue.severity].append(issue)

        return result

    def group_by_category(self) -> dict[IssueCategory, list[Issue]]:
        """
        Group issues by category. Issues without a category are omitted.

        Returns:
            Dictionary mapping categories to lists of issues
        """
        result = {category: [] for category in IssueCategory}

        for issue in self.issues:
            if issue.category:
                result[issue.category].append(issue)

        return result

    def group_by_file(self) -> dict[str, list[Issue]]:
        """
        Group issues by file.

        Returns:
            Dictionary mapping file paths to lists of issues
        """
        result = {}

        for issue in self.issues:
            if issue.location.file not in result:
                result[issue.location.file] = []

            result[issue.location.file].append(issue)

        return result

    def statistics(self) -> dict[str, Any]:
        """
        Get statistics about the issues.

        Returns:
            Dictionary with the total plus per-severity/category/status counts
            and the number of distinct files
        """
        by_severity = self.group_by_severity()
        by_category = self.group_by_category()
        by_status = {status: [] for status in IssueStatus}
        for issue in self.issues:
            by_status[issue.status].append(issue)

        return {
            "total": len(self.issues),
            "by_severity": {
                severity.value: len(issues) for severity, issues in by_severity.items()
            },
            "by_category": {
                category.value: len(issues)
                for category, issues in by_category.items()
                if len(issues) > 0  # Only include non-empty categories
            },
            "by_status": {
                status.value: len(issues) for status, issues in by_status.items()
            },
            "file_count": len(self.group_by_file()),
        }

    def to_dict(self) -> dict[str, Any]:
        """
        Convert to dictionary representation.

        Returns:
            Dictionary with issues, statistics, and filter descriptions
        """
        return {
            "issues": [issue.to_dict() for issue in self.issues],
            "statistics": self.statistics(),
            "filters": [desc for _, desc in self._filters if desc],
        }

    @classmethod
    def from_dict(cls, data: dict[str, Any]) -> "IssueCollection":
        """
        Create from dictionary representation.

        Note: filter functions are not serialized by to_dict, so custom
        filters are not restored here.

        Args:
            data: Dictionary representation

        Returns:
            Issue collection
        """
        collection = cls()

        if "issues" in data and isinstance(data["issues"], list):
            collection.add_issues([
                Issue.from_dict(issue) if isinstance(issue, dict) else issue
                for issue in data["issues"]
            ])

        return collection

    def save_to_file(self, file_path: str, format: str = "json"):
        """
        Save to file.

        Args:
            file_path: Path to save to
            format: Format to save in (only "json" is supported)

        Raises:
            ValueError: If the format is not supported
        """
        if format == "json":
            with open(file_path, "w") as f:
                json.dump(self.to_dict(), f, indent=2)
        else:
            raise ValueError(f"Unsupported format: {format}")

    @classmethod
    def load_from_file(cls, file_path: str) -> "IssueCollection":
        """
        Load from file.

        Args:
            file_path: Path to load from (JSON as written by save_to_file)

        Returns:
            Issue collection
        """
        with open(file_path) as f:
            data = json.load(f)

        return cls.from_dict(data)
+
+
def create_issue(
    message: str,
    severity: str | IssueSeverity,
    file: str,
    line: int | None = None,
    category: str | IssueCategory | None = None,
    symbol: str | None = None,
    suggestion: str | None = None,
) -> Issue:
    """
    Create an issue with simplified parameters.

    String values for severity and category are coerced to their enum types.

    Args:
        message: Issue message
        severity: Issue severity (string or IssueSeverity)
        file: File path
        line: Line number
        category: Issue category (string or IssueCategory)
        symbol: Symbol name
        suggestion: Suggested fix

    Returns:
        Issue object
    """
    # Coerce string enums; non-string values pass through unchanged.
    resolved_severity = IssueSeverity(severity) if isinstance(severity, str) else severity

    resolved_category = category
    if isinstance(resolved_category, str) and resolved_category:
        resolved_category = IssueCategory(resolved_category)

    return Issue(
        message=message,
        severity=resolved_severity,
        location=CodeLocation(file=file, line=line),
        category=resolved_category,
        symbol=symbol,
        suggestion=suggestion,
    )
diff --git a/codegen-on-oss/codegen_on_oss/analyzers/snapshot_manager.py b/codegen-on-oss/codegen_on_oss/analyzers/snapshot_manager.py
new file mode 100644
index 000000000..a09a54a81
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/analyzers/snapshot_manager.py
@@ -0,0 +1,816 @@
+#!/usr/bin/env python3
+"""
+Snapshot Manager Module
+
+This module provides functionality for creating, storing, and comparing
+codebase snapshots. It allows tracking changes over time and validating
+consistency between versions.
+"""
+
+import hashlib
+import json
+import logging
+import os
+import tempfile
+from dataclasses import dataclass, field
+from datetime import datetime
+from typing import Any
+
+# Configure logging
+logging.basicConfig(
+ level=logging.INFO,
+ format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
+ handlers=[logging.StreamHandler()],
+)
+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class SnapshotMetadata:
+    """Metadata for a codebase snapshot.
+
+    Summary record kept in the SnapshotManager index; it mirrors the
+    fields serialized under the "metadata" key of a snapshot JSON file.
+    """
+
+    snapshot_id: str  # Unique identifier (see CodebaseSnapshot._generate_id)
+    timestamp: str  # ISO-8601 creation time
+    description: str  # Free-form description supplied at creation
+    creator: str  # Name of the creating tool or user
+    base_path: str  # Absolute base path of the snapshotted codebase
+    commit_hash: str | None = None  # Git HEAD commit, if available
+    branch: str | None = None  # Git branch name, if available
+    tag: str | None = None  # Git tag pointing exactly at HEAD, if any
+    file_count: int = 0  # Number of files captured in the snapshot
+    total_lines: int = 0  # Sum of line counts across captured files
+    language_stats: dict[str, int] = field(default_factory=dict)  # language -> file count
+    extra: dict[str, Any] = field(default_factory=dict)  # Arbitrary extra metadata
+
+
+@dataclass
+class FileSnapshot:
+    """Snapshot of a single file in the codebase."""
+
+    path: str  # Absolute path of the file at snapshot time
+    relative_path: str  # Path relative to the snapshot's base path
+    hash: str  # MD5 hex digest of the file's raw bytes
+    size: int  # File size in bytes
+    lines: int  # Line count (0 for unreadable/binary files)
+    language: str | None = None  # Language detected from the file extension
+    content_hash: str | None = None  # Optional content hash; not set by create()
+    ast_hash: str | None = None  # Optional AST hash; not set by create()
+    last_modified: str | None = None  # ISO-8601 modification time (st_mtime)
+    metadata: dict[str, Any] = field(default_factory=dict)  # Extra per-file metadata
+
+
+class CodebaseSnapshot:
+ """
+ Codebase snapshot representation.
+
+ This class stores a complete snapshot of a codebase at a point in time,
+ including all files and their metadata.
+ """
+
+    def __init__(
+        self,
+        base_path: str,
+        description: str = "",
+        creator: str = "snapshot_manager",
+        include_patterns: list[str] | None = None,
+        exclude_patterns: list[str] | None = None,
+        snapshot_id: str | None = None,
+        store_content: bool = False,
+    ):
+        """
+        Initialize a codebase snapshot.
+
+        Args:
+            base_path: Base path of the codebase (made absolute)
+            description: Description of the snapshot
+            creator: Creator of the snapshot
+            include_patterns: fnmatch patterns of files to include (default: all)
+            exclude_patterns: fnmatch patterns of files to exclude (default: none)
+            snapshot_id: Optional ID for the snapshot; generated when omitted
+            store_content: Whether to store full file content alongside metadata
+        """
+        self.base_path = os.path.abspath(base_path)
+        self.description = description
+        self.creator = creator
+        self.include_patterns = include_patterns or ["*"]
+        self.exclude_patterns = exclude_patterns or []
+        self.snapshot_id = snapshot_id or self._generate_id()
+        self.store_content = store_content
+        self.timestamp = datetime.now().isoformat()
+
+        # Initialize data structures; populated by create() or load().
+        self.files: dict[str, FileSnapshot] = {}
+        self.content: dict[str, str] = {}
+        self.language_stats: dict[str, int] = {}
+
+        # Get git information if available (each is None outside a git repo).
+        self.commit_hash = self._get_git_commit_hash()
+        self.branch = self._get_git_branch()
+        self.tag = self._get_git_tag()
+
+ def _generate_id(self) -> str:
+ """
+ Generate a unique ID for the snapshot.
+
+ Returns:
+ Generated ID
+ """
+ timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
+ random_suffix = hashlib.md5(os.urandom(16)).hexdigest()[:8]
+ return f"snapshot_{timestamp}_{random_suffix}"
+
+ def _get_git_commit_hash(self) -> str | None:
+ """
+ Get the current Git commit hash.
+
+ Returns:
+ Commit hash if available, None otherwise
+ """
+ try:
+ import subprocess
+
+ result = subprocess.run(
+ ["git", "rev-parse", "HEAD"],
+ cwd=self.base_path,
+ capture_output=True,
+ text=True,
+ check=False,
+ )
+
+ if result.returncode == 0:
+ return result.stdout.strip()
+ return None
+ except Exception:
+ return None
+
+ def _get_git_branch(self) -> str | None:
+ """
+ Get the current Git branch.
+
+ Returns:
+ Branch name if available, None otherwise
+ """
+ try:
+ import subprocess
+
+ result = subprocess.run(
+ ["git", "rev-parse", "--abbrev-ref", "HEAD"],
+ cwd=self.base_path,
+ capture_output=True,
+ text=True,
+ check=False,
+ )
+
+ if result.returncode == 0:
+ return result.stdout.strip()
+ return None
+ except Exception:
+ return None
+
+ def _get_git_tag(self) -> str | None:
+ """
+ Get the current Git tag.
+
+ Returns:
+ Tag name if available, None otherwise
+ """
+ try:
+ import subprocess
+
+ result = subprocess.run(
+ ["git", "describe", "--tags", "--exact-match"],
+ cwd=self.base_path,
+ capture_output=True,
+ text=True,
+ check=False,
+ )
+
+ if result.returncode == 0:
+ return result.stdout.strip()
+ return None
+ except Exception:
+ return None
+
+    def _get_file_language(self, file_path: str) -> str | None:
+        """
+        Determine the programming language of a file based on its extension.
+
+        Args:
+            file_path: Path to the file
+
+        Returns:
+            Language name if recognized, None otherwise
+        """
+        extension = os.path.splitext(file_path)[1].lower()
+
+        # Extension -> language table; unknown extensions map to None.
+        language_map = {
+            ".py": "Python",
+            ".js": "JavaScript",
+            ".jsx": "JavaScript",
+            ".ts": "TypeScript",
+            ".tsx": "TypeScript",
+            ".java": "Java",
+            ".c": "C",
+            ".cpp": "C++",
+            ".h": "C/C++",
+            ".hpp": "C++",
+            ".cs": "C#",
+            ".go": "Go",
+            ".rb": "Ruby",
+            ".php": "PHP",
+            ".swift": "Swift",
+            ".kt": "Kotlin",
+            ".rs": "Rust",
+            ".scala": "Scala",
+            ".html": "HTML",
+            ".css": "CSS",
+            ".scss": "SCSS",
+            ".less": "LESS",
+            ".json": "JSON",
+            ".xml": "XML",
+            ".yaml": "YAML",
+            ".yml": "YAML",
+            ".md": "Markdown",
+            ".sql": "SQL",
+            ".sh": "Shell",
+            ".bat": "Batch",
+            ".ps1": "PowerShell",
+        }
+
+        return language_map.get(extension)
+
+ def _should_include_file(self, file_path: str) -> bool:
+ """
+ Check if a file should be included in the snapshot.
+
+ Args:
+ file_path: Path to the file
+
+ Returns:
+ True if the file should be included, False otherwise
+ """
+ import fnmatch
+
+ # Convert to relative path
+ rel_path = os.path.relpath(file_path, self.base_path)
+
+ # Check exclude patterns first
+ for pattern in self.exclude_patterns:
+ if fnmatch.fnmatch(rel_path, pattern):
+ return False
+
+ # Then check include patterns
+ for pattern in self.include_patterns:
+ if fnmatch.fnmatch(rel_path, pattern):
+ return True
+
+ return False
+
+ def _compute_file_hash(self, file_path: str) -> str:
+ """
+ Compute a hash of a file's content.
+
+ Args:
+ file_path: Path to the file
+
+ Returns:
+ Hash of the file content
+ """
+ hash_md5 = hashlib.md5()
+ with open(file_path, "rb") as f:
+ for chunk in iter(lambda: f.read(4096), b""):
+ hash_md5.update(chunk)
+ return hash_md5.hexdigest()
+
+ def _count_lines(self, file_path: str) -> int:
+ """
+ Count the number of lines in a file.
+
+ Args:
+ file_path: Path to the file
+
+ Returns:
+ Number of lines in the file
+ """
+ try:
+ with open(file_path, encoding="utf-8", errors="ignore") as f:
+ return sum(1 for _ in f)
+ except Exception:
+ # Fallback for binary files
+ return 0
+
+    def create(self):
+        """
+        Create a snapshot of the codebase.
+
+        Scans the directory tree under base_path, records per-file metadata
+        (hash, size, lines, language, mtime) and, when store_content is set,
+        the full file text. Per-file errors are logged and the file skipped.
+        """
+        if not os.path.isdir(self.base_path):
+            logger.error(f"Base path not found: {self.base_path}")
+            return
+
+        # Reset data structures so create() can be re-run on the same object.
+        self.files = {}
+        self.content = {}
+        self.language_stats = {}
+
+        total_files = 0
+        total_lines = 0
+
+        # Walk the directory tree
+        for root, _, files in os.walk(self.base_path):
+            for file in files:
+                file_path = os.path.join(root, file)
+
+                # Skip if file should not be included
+                if not self._should_include_file(file_path):
+                    continue
+
+                try:
+                    # Get file stats
+                    file_stats = os.stat(file_path)
+                    file_size = file_stats.st_size
+                    file_modified = datetime.fromtimestamp(
+                        file_stats.st_mtime
+                    ).isoformat()
+
+                    # Get file language
+                    language = self._get_file_language(file_path)
+
+                    # Count lines
+                    line_count = self._count_lines(file_path)
+
+                    # Compute hash
+                    file_hash = self._compute_file_hash(file_path)
+
+                    # Get relative path
+                    rel_path = os.path.relpath(file_path, self.base_path)
+
+                    # Create file snapshot
+                    file_snapshot = FileSnapshot(
+                        path=file_path,
+                        relative_path=rel_path,
+                        hash=file_hash,
+                        size=file_size,
+                        lines=line_count,
+                        language=language,
+                        last_modified=file_modified,
+                    )
+
+                    # Store file content if requested (best-effort: a read
+                    # failure keeps the metadata but drops the content).
+                    if self.store_content:
+                        try:
+                            with open(
+                                file_path, encoding="utf-8", errors="ignore"
+                            ) as f:
+                                file_content = f.read()
+                            self.content[rel_path] = file_content
+                        except Exception as e:
+                            logger.warning(
+                                f"Could not read content of {file_path}: {e!s}"
+                            )
+
+                    # Store file snapshot
+                    self.files[rel_path] = file_snapshot
+
+                    # Update language stats
+                    if language:
+                        self.language_stats[language] = (
+                            self.language_stats.get(language, 0) + 1
+                        )
+
+                    # Update totals
+                    total_files += 1
+                    total_lines += line_count
+                except Exception as e:
+                    logger.warning(f"Error processing file {file_path}: {e!s}")
+
+        logger.info(
+            f"Created snapshot with {total_files} files and {total_lines} lines"
+        )
+
+ def get_metadata(self) -> SnapshotMetadata:
+ """
+ Get metadata for the snapshot.
+
+ Returns:
+ Snapshot metadata
+ """
+ return SnapshotMetadata(
+ snapshot_id=self.snapshot_id,
+ timestamp=self.timestamp,
+ description=self.description,
+ creator=self.creator,
+ base_path=self.base_path,
+ commit_hash=self.commit_hash,
+ branch=self.branch,
+ tag=self.tag,
+ file_count=len(self.files),
+ total_lines=sum(file.lines for file in self.files.values()),
+ language_stats=self.language_stats,
+ )
+
+ def save(self, output_path: str | None = None) -> str:
+ """
+ Save the snapshot to disk.
+
+ Args:
+ output_path: Optional path to save the snapshot to
+
+ Returns:
+ Path to the saved snapshot
+ """
+ # Create a temporary directory if output_path is not provided
+ if not output_path:
+ output_dir = tempfile.mkdtemp(prefix="codebase_snapshot_")
+ output_path = os.path.join(output_dir, f"{self.snapshot_id}.json")
+
+ # Create output directory if it doesn't exist
+ os.makedirs(os.path.dirname(output_path), exist_ok=True)
+
+ # Convert snapshot to JSON
+ snapshot_data = {
+ "metadata": self.get_metadata().__dict__,
+ "files": {rel_path: file.__dict__ for rel_path, file in self.files.items()},
+ "content": self.content if self.store_content else {},
+ }
+
+ # Save to disk
+ with open(output_path, "w") as f:
+ json.dump(snapshot_data, f, indent=2)
+
+ logger.info(f"Saved snapshot to {output_path}")
+ return output_path
+
+ @classmethod
+ def load(cls, snapshot_path: str) -> "CodebaseSnapshot":
+ """
+ Load a snapshot from disk.
+
+ Args:
+ snapshot_path: Path to the snapshot file
+
+ Returns:
+ Loaded snapshot
+ """
+ with open(snapshot_path) as f:
+ snapshot_data = json.load(f)
+
+ # Extract metadata
+ metadata = snapshot_data["metadata"]
+
+ # Create snapshot instance
+ snapshot = cls(
+ base_path=metadata["base_path"],
+ description=metadata["description"],
+ creator=metadata["creator"],
+ snapshot_id=metadata["snapshot_id"],
+ )
+
+ # Set timestamp
+ snapshot.timestamp = metadata["timestamp"]
+
+ # Set Git information
+ snapshot.commit_hash = metadata.get("commit_hash")
+ snapshot.branch = metadata.get("branch")
+ snapshot.tag = metadata.get("tag")
+
+ # Load files
+ snapshot.files = {}
+ for rel_path, file_data in snapshot_data["files"].items():
+ snapshot.files[rel_path] = FileSnapshot(
+ path=file_data["path"],
+ relative_path=file_data["relative_path"],
+ hash=file_data["hash"],
+ size=file_data["size"],
+ lines=file_data["lines"],
+ language=file_data.get("language"),
+ last_modified=file_data.get("last_modified"),
+ metadata=file_data.get("metadata", {}),
+ )
+
+ # Load content if available
+ snapshot.content = snapshot_data.get("content", {})
+ snapshot.store_content = bool(snapshot.content)
+
+ # Load language stats
+ snapshot.language_stats = metadata.get("language_stats", {})
+
+ logger.info(f"Loaded snapshot from {snapshot_path}")
+ return snapshot
+
+    def diff(self, other: "CodebaseSnapshot") -> dict[str, Any]:
+        """
+        Compare this snapshot with another snapshot.
+
+        Files are matched by relative path; a file counts as modified when
+        its content hash differs. Unified content diffs are produced only
+        when both snapshots stored file content.
+
+        Args:
+            other: Snapshot to compare with (treated as the "after" side)
+
+        Returns:
+            Diff between the snapshots: added/deleted/modified file lists,
+            line and language statistics, and optional per-file content diffs
+        """
+        # Get sets of file paths
+        self_files = set(self.files.keys())
+        other_files = set(other.files.keys())
+
+        # Find added, deleted, and common files
+        added_files = other_files - self_files
+        deleted_files = self_files - other_files
+        common_files = self_files & other_files
+
+        # Find modified files (content hash changed)
+        modified_files = []
+        for file_path in common_files:
+            self_file = self.files[file_path]
+            other_file = other.files[file_path]
+
+            if self_file.hash != other_file.hash:
+                modified_files.append(file_path)
+
+        # Calculate content diff for modified files if content is available
+        content_diff = {}
+        if self.store_content and other.store_content:
+            for file_path in modified_files:
+                if file_path in self.content and file_path in other.content:
+                    try:
+                        # Use difflib to generate unified diff
+                        import difflib
+
+                        diff = difflib.unified_diff(
+                            self.content[file_path].splitlines(keepends=True),
+                            other.content[file_path].splitlines(keepends=True),
+                            fromfile=f"a/{file_path}",
+                            tofile=f"b/{file_path}",
+                        )
+                        content_diff[file_path] = "".join(diff)
+                    except Exception as e:
+                        logger.warning(f"Error generating diff for {file_path}: {e!s}")
+
+        # Calculate statistics. Note: "lines_modified" is a net line-count
+        # delta across modified files and can be negative.
+        diff_stats = {
+            "files_added": len(added_files),
+            "files_deleted": len(deleted_files),
+            "files_modified": len(modified_files),
+            "files_unchanged": len(common_files) - len(modified_files),
+            "lines_added": sum(
+                other.files[file_path].lines for file_path in added_files
+            ),
+            "lines_deleted": sum(
+                self.files[file_path].lines for file_path in deleted_files
+            ),
+            "lines_modified": sum(
+                other.files[file_path].lines - self.files[file_path].lines
+                for file_path in modified_files
+                if file_path in other.files and file_path in self.files
+            ),
+        }
+
+        # Calculate language stats diff (net change in file count per language)
+        language_diff = {}
+        for language in set(self.language_stats.keys()) | set(
+            other.language_stats.keys()
+        ):
+            self_count = self.language_stats.get(language, 0)
+            other_count = other.language_stats.get(language, 0)
+
+            if self_count != other_count:
+                language_diff[language] = other_count - self_count
+
+        return {
+            "added_files": list(added_files),
+            "deleted_files": list(deleted_files),
+            "modified_files": modified_files,
+            "stats": diff_stats,
+            "language_diff": language_diff,
+            "content_diff": content_diff,
+            "from_snapshot": self.snapshot_id,
+            "to_snapshot": other.snapshot_id,
+            "timestamp": datetime.now().isoformat(),
+        }
+
+
+class SnapshotManager:
+ """
+ Manager for codebase snapshots.
+
+ This class provides functionality to create, store, load, and
+ compare codebase snapshots.
+ """
+
+    def __init__(self, storage_dir: str | None = None):
+        """
+        Initialize the snapshot manager.
+
+        Args:
+            storage_dir: Directory to store snapshots in; defaults to a
+                "codebase_snapshots" folder under the system temp directory
+        """
+        self.storage_dir = storage_dir or os.path.join(
+            tempfile.gettempdir(), "codebase_snapshots"
+        )
+        os.makedirs(self.storage_dir, exist_ok=True)
+
+        # Index of snapshot_id -> SnapshotMetadata, persisted as index.json.
+        self.snapshots: dict[str, SnapshotMetadata] = {}
+        self.load_index()
+
+ def load_index(self):
+ """Load the snapshot index."""
+ index_path = os.path.join(self.storage_dir, "index.json")
+
+ if os.path.isfile(index_path):
+ try:
+ with open(index_path) as f:
+ data = json.load(f)
+
+ self.snapshots = {}
+ for snapshot_id, metadata in data.items():
+ self.snapshots[snapshot_id] = SnapshotMetadata(**metadata)
+ except Exception as e:
+ logger.exception(f"Error loading snapshot index: {e!s}")
+ self.snapshots = {}
+
+ def save_index(self):
+ """Save the snapshot index."""
+ index_path = os.path.join(self.storage_dir, "index.json")
+
+ try:
+ with open(index_path, "w") as f:
+ json.dump(
+ {id: metadata.__dict__ for id, metadata in self.snapshots.items()},
+ f,
+ indent=2,
+ )
+ except Exception as e:
+ logger.exception(f"Error saving snapshot index: {e!s}")
+
+    def create_snapshot(
+        self,
+        base_path: str,
+        description: str = "",
+        creator: str = "snapshot_manager",
+        include_patterns: list[str] | None = None,
+        exclude_patterns: list[str] | None = None,
+        snapshot_id: str | None = None,
+        store_content: bool = False,
+    ) -> str:
+        """
+        Create a new snapshot of a codebase and persist it.
+
+        The snapshot is scanned, written to the storage directory, and
+        registered in the index.
+
+        Args:
+            base_path: Base path of the codebase
+            description: Description of the snapshot
+            creator: Creator of the snapshot
+            include_patterns: Patterns of files to include
+            exclude_patterns: Patterns of files to exclude
+            snapshot_id: Optional ID for the snapshot
+            store_content: Whether to store file content
+
+        Returns:
+            ID of the created snapshot
+        """
+        # Create the snapshot
+        snapshot = CodebaseSnapshot(
+            base_path=base_path,
+            description=description,
+            creator=creator,
+            include_patterns=include_patterns,
+            exclude_patterns=exclude_patterns,
+            snapshot_id=snapshot_id,
+            store_content=store_content,
+        )
+
+        # Generate the snapshot
+        snapshot.create()
+
+        # Save the snapshot
+        snapshot_path = os.path.join(self.storage_dir, f"{snapshot.snapshot_id}.json")
+        snapshot.save(snapshot_path)
+
+        # Register the snapshot in the index so it can be found later.
+        self.snapshots[snapshot.snapshot_id] = snapshot.get_metadata()
+        self.save_index()
+
+        return snapshot.snapshot_id
+
+ def get_snapshot(self, snapshot_id: str) -> CodebaseSnapshot | None:
+ """
+ Get a snapshot by ID.
+
+ Args:
+ snapshot_id: ID of the snapshot
+
+ Returns:
+ Snapshot if found, None otherwise
+ """
+ if snapshot_id not in self.snapshots:
+ logger.error(f"Snapshot not found: {snapshot_id}")
+ return None
+
+ snapshot_path = os.path.join(self.storage_dir, f"{snapshot_id}.json")
+
+ if not os.path.isfile(snapshot_path):
+ logger.error(f"Snapshot file not found: {snapshot_path}")
+ return None
+
+ return CodebaseSnapshot.load(snapshot_path)
+
+ def delete_snapshot(self, snapshot_id: str) -> bool:
+ """
+ Delete a snapshot.
+
+ Args:
+ snapshot_id: ID of the snapshot
+
+ Returns:
+ True if the snapshot was deleted, False otherwise
+ """
+ if snapshot_id not in self.snapshots:
+ logger.error(f"Snapshot not found: {snapshot_id}")
+ return False
+
+ snapshot_path = os.path.join(self.storage_dir, f"{snapshot_id}.json")
+
+ if os.path.isfile(snapshot_path):
+ try:
+ os.remove(snapshot_path)
+ except Exception as e:
+ logger.exception(f"Error deleting snapshot file: {e!s}")
+ return False
+
+ # Update the index
+ del self.snapshots[snapshot_id]
+ self.save_index()
+
+ return True
+
+ def compare_snapshots(
+ self, snapshot_id1: str, snapshot_id2: str
+ ) -> dict[str, Any] | None:
+ """
+ Compare two snapshots.
+
+ Args:
+ snapshot_id1: ID of the first snapshot
+ snapshot_id2: ID of the second snapshot
+
+ Returns:
+ Diff between the snapshots if both exist, None otherwise
+ """
+ snapshot1 = self.get_snapshot(snapshot_id1)
+ snapshot2 = self.get_snapshot(snapshot_id2)
+
+ if not snapshot1 or not snapshot2:
+ return None
+
+ return snapshot1.diff(snapshot2)
+
+ def get_latest_snapshot(self, base_path: str | None = None) -> str | None:
+ """
+ Get the latest snapshot ID.
+
+ Args:
+ base_path: Optional base path to filter snapshots
+
+ Returns:
+ ID of the latest snapshot if any exist, None otherwise
+ """
+ if not self.snapshots:
+ return None
+
+ filtered_snapshots = self.snapshots
+
+ if base_path:
+ filtered_snapshots = {
+ id: metadata
+ for id, metadata in self.snapshots.items()
+ if metadata.base_path == base_path
+ }
+
+ if not filtered_snapshots:
+ return None
+
+ # Sort by timestamp and get the latest
+ latest_id = max(
+ filtered_snapshots.keys(), key=lambda id: filtered_snapshots[id].timestamp
+ )
+ return latest_id
+
+ def list_snapshots(self, base_path: str | None = None) -> list[SnapshotMetadata]:
+ """
+ List all snapshots.
+
+ Args:
+ base_path: Optional base path to filter snapshots
+
+ Returns:
+ List of snapshot metadata
+ """
+ if base_path:
+ return [
+ metadata
+ for metadata in self.snapshots.values()
+ if metadata.base_path == base_path
+ ]
+ else:
+ return list(self.snapshots.values())
diff --git a/codegen-on-oss/codegen_on_oss/analyzers/transaction_manager.py b/codegen-on-oss/codegen_on_oss/analyzers/transaction_manager.py
new file mode 100644
index 000000000..7efd254bd
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/analyzers/transaction_manager.py
@@ -0,0 +1,589 @@
+#!/usr/bin/env python3
+"""
+Transaction Manager Module for Analyzers
+
+This module provides a transaction manager for handling code modifications during analysis.
+It's responsible for queuing, sorting, and committing transactions in a controlled manner.
+"""
+
+import logging
+import math
+import time
+from collections.abc import Callable
+from pathlib import Path
+from typing import Any
+
+from codegen_on_oss.analyzers.transactions import (
+ ChangeType,
+ DiffLite,
+ EditTransaction,
+ FileAddTransaction,
+ FileRemoveTransaction,
+ FileRenameTransaction,
+ RemoveTransaction,
+ Transaction,
+ TransactionPriority,
+)
+
+# Configure logging
+logging.basicConfig(
+ level=logging.INFO,
+ format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
+ handlers=[logging.StreamHandler()],
+)
+logger = logging.getLogger(__name__)
+
+
+class MaxTransactionsExceeded(Exception):
+    """Raised when the number of transactions exceeds the max_transactions limit."""
+
+    def __init__(self, message: str, threshold: int | None = None):
+        super().__init__(message)
+        # The transaction-count limit that was exceeded, for reporting.
+        self.threshold = threshold
+
+
+class MaxPreviewTimeExceeded(Exception):
+    """Raised when more than the allotted time has passed for previewing transactions."""
+
+    def __init__(self, message: str, threshold: int | None = None):
+        super().__init__(message)
+        # The time budget in seconds that was exceeded, for reporting.
+        self.threshold = threshold
+
+
+class TransactionError(Exception):
+    """Exception raised for transaction-related errors (e.g. unresolvable conflicts)."""
+
+    pass
+
+
+class TransactionManager:
+ """Responsible for handling `Transaction` objects - basically an atomic modification of a codebase.
+
+ This is used to queue up transactions and then commit them in bulk.
+ """
+
+    def __init__(self) -> None:
+        """Initialize the transaction manager."""
+        # Queued transactions, grouped by target file path.
+        self.queued_transactions: dict[Path, list[Transaction]] = {}
+        # Undo callbacks executed by clear_transactions()/revert_all().
+        self.pending_undos: set[Callable[[], None]] = set()
+        self._commiting: bool = False  # Re-entrancy guard for commit()
+        self.max_transactions: int | None = None  # None = no limit
+        self.stopwatch_start: float | None = None  # Set by reset_stopwatch()
+        self.stopwatch_max_seconds: int | None = None  # None = no limit
+        self.session: dict[str, Any] = {}  # Session data for tracking state
+
+ def sort_transactions(self) -> None:
+ """Sort transactions by priority and position."""
+ for _file_path, file_transactions in self.queued_transactions.items():
+ file_transactions.sort(key=Transaction._to_sort_key)
+
+ def clear_transactions(self) -> None:
+ """Clear all transactions and reset limits.
+
+ Should be called between analysis runs to remove any potential extraneous transactions.
+ """
+ if len(self.queued_transactions) > 0:
+ logger.warning("Not all transactions have been committed")
+ self.queued_transactions.clear()
+ for undo in self.pending_undos:
+ undo()
+ self.pending_undos.clear()
+ self.set_max_transactions(None)
+ self.reset_stopwatch()
+
+ def _format_transactions(self, transactions: list[Transaction]) -> str:
+ """Format transactions for display."""
+ return "\\n".join([
+ ">" * 100 + f"\\n[ID: {t.transaction_id}]: {t.diff_str()}" + "<" * 100
+ for t in transactions
+ ])
+
+ def get_transactions_str(self) -> str:
+ """Returns a human-readable string representation of the transactions."""
+ return "\\n\\n\\n".join([
+ f"{file_path}:\\n{self._format_transactions(transactions)}"
+ for file_path, transactions in self.queued_transactions.items()
+ ])
+
+ ####################################################################################################################
+ # Transaction Limits
+ ####################################################################################################################
+
+ def get_num_transactions(self) -> int:
+ """Returns total number of transactions created to date."""
+ return sum([
+ len(transactions) for transactions in self.queued_transactions.values()
+ ])
+
+ def set_max_transactions(self, max_transactions: int | None = None) -> None:
+ """Set the maximum number of transactions allowed."""
+ self.max_transactions = max_transactions
+
+ def max_transactions_exceeded(self) -> bool:
+ """Util method to check if the max transactions limit has been exceeded."""
+ if self.max_transactions is None:
+ return False
+ return self.get_num_transactions() >= self.max_transactions
+
+ ####################################################################################################################
+ # Stopwatch
+ ####################################################################################################################
+
+ def reset_stopwatch(self, max_seconds: int | None = None) -> None:
+ """Reset the stopwatch with an optional time limit."""
+ self.stopwatch_start = time.time()
+ self.stopwatch_max_seconds = max_seconds
+
+ def is_time_exceeded(self) -> bool:
+ """Check if the stopwatch time limit has been exceeded."""
+ if self.stopwatch_max_seconds is None or self.stopwatch_start is None:
+ return False
+ else:
+ num_seconds = time.time() - self.stopwatch_start
+ return num_seconds > self.stopwatch_max_seconds
+
+ ####################################################################################################################
+ # Transaction Creation
+ ####################################################################################################################
+
+ def add_file_add_transaction(self, filepath: Path) -> None:
+ """Add a transaction to create a new file."""
+ t = FileAddTransaction(filepath)
+ self.add_transaction(t)
+
+ def add_file_rename_transaction(self, file: Any, new_filepath: str) -> None:
+ """Add a transaction to rename a file."""
+ t = FileRenameTransaction(file, new_filepath)
+ self.add_transaction(t)
+
+ def add_file_remove_transaction(self, file: Any) -> None:
+ """Add a transaction to remove a file."""
+ t = FileRemoveTransaction(file)
+ self.add_transaction(t)
+
+    def add_transaction(
+        self,
+        transaction: Transaction,
+        dedupe: bool = True,
+        solve_conflicts: bool = True,
+    ) -> bool:
+        """Add a transaction to the queue.
+
+        Args:
+            transaction: The transaction to add
+            dedupe: Whether to check for duplicate transactions
+            solve_conflicts: Whether to resolve conflicts with existing transactions
+
+        Returns:
+            True if the transaction was added, False otherwise
+        """
+        # Get (or create) the list of transactions for the file
+        file_path = transaction.file_path
+        if file_path not in self.queued_transactions:
+            self.queued_transactions[file_path] = []
+        file_queue = self.queued_transactions[file_path]
+
+        # Dedupe transactions
+        if dedupe and transaction in file_queue:
+            logger.debug(f"Transaction already exists in queue: {transaction}")
+            return False
+
+        # Solve conflicts; _resolve_conflicts may return a replacement
+        # transaction, or None when the transaction was absorbed/discarded.
+        if new_transaction := self._resolve_conflicts(
+            transaction, file_queue, solve_conflicts=solve_conflicts
+        ):
+            file_queue.append(new_transaction)
+
+        self.check_limits()
+        # NOTE(review): True is returned even when _resolve_conflicts
+        # discarded the transaction (returned None) — confirm callers
+        # expect this rather than a False return in that case.
+        return True
+
+ def add(self, transaction: Transaction) -> bool:
+ """Alias for add_transaction."""
+ return self.add_transaction(transaction)
+
+ def check_limits(self) -> None:
+ """Check if any limits have been exceeded."""
+ self.check_max_transactions()
+ self.check_max_preview_time()
+
+ def check_max_transactions(self) -> None:
+ """Check if the maximum number of transactions has been exceeded."""
+ if self.max_transactions_exceeded():
+ logger.info(
+ f"Max transactions reached: {self.max_transactions}. Stopping analysis."
+ )
+ msg = f"Max transactions reached: {self.max_transactions}"
+ raise MaxTransactionsExceeded(msg, threshold=self.max_transactions)
+
+ def check_max_preview_time(self) -> None:
+ """Check if the maximum preview time has been exceeded."""
+ if self.is_time_exceeded():
+ logger.info(
+ f"Max preview time exceeded: {self.stopwatch_max_seconds}. Stopping analysis."
+ )
+ msg = f"Max preview time exceeded: {self.stopwatch_max_seconds}"
+ raise MaxPreviewTimeExceeded(msg, threshold=self.stopwatch_max_seconds)
+
+ ####################################################################################################################
+ # Commit
+ ####################################################################################################################
+
+ def to_commit(self, files: set[Path] | None = None) -> set[Path]:
+ """Get paths of files to commit.
+
+ Args:
+ files: Optional set of files to filter by
+
+ Returns:
+ Set of file paths to commit
+ """
+ if files is None:
+ return set(self.queued_transactions.keys())
+ return files.intersection(self.queued_transactions)
+
+ def commit(self, files: set[Path]) -> list[DiffLite]:
+ """Execute transactions in bulk for each file, in reverse order of start_byte.
+
+ Args:
+ files: Set of file paths to commit
+
+ Returns:
+ List of diffs that were committed
+ """
+ if self._commiting:
+ logger.warning("Skipping commit, already committing")
+ return []
+
+ self._commiting = True
+ try:
+ diffs: list[DiffLite] = []
+ if not self.queued_transactions or len(self.queued_transactions) == 0:
+ return diffs
+
+ self.sort_transactions()
+
+ # Log information about the commit
+ if len(files) > 3:
+ num_transactions = sum([
+ len(self.queued_transactions[file_path]) for file_path in files
+ ])
+ logger.info(
+ f"Committing {num_transactions} transactions for {len(files)} files"
+ )
+ else:
+ for file in files:
+ logger.info(
+ f"Committing {len(self.queued_transactions[file])} transactions for {file}"
+ )
+
+ # Execute transactions for each file
+ for file_path in files:
+ file_transactions = self.queued_transactions.pop(file_path, [])
+ modified = False
+ for transaction in file_transactions:
+ # Add diff IF the file is a source file
+ diff = transaction.get_diff()
+ if diff.change_type == ChangeType.Modified:
+ if not modified:
+ modified = True
+ diffs.append(diff)
+ else:
+ diffs.append(diff)
+ transaction.execute()
+
+ return diffs
+ finally:
+ self._commiting = False
+
+ def apply(self, transaction: Transaction) -> None:
+ """Apply a single transaction immediately.
+
+ Args:
+ transaction: The transaction to apply
+ """
+ self.add_transaction(transaction)
+ self.commit({transaction.file_path})
+
+ def apply_all(self) -> list[DiffLite]:
+ """Apply all queued transactions.
+
+ Returns:
+ List of diffs that were committed
+ """
+ files = self.to_commit()
+ return self.commit(files)
+
+ def revert_all(self) -> None:
+ """Revert all pending transactions."""
+ self.queued_transactions.clear()
+ for undo in self.pending_undos:
+ undo()
+ self.pending_undos.clear()
+
+ ####################################################################################################################
+ # Conflict Resolution
+ ####################################################################################################################
+
+    def _resolve_conflicts(
+        self,
+        transaction: Transaction,
+        file_queue: list[Transaction],
+        solve_conflicts: bool = True,
+    ) -> Transaction | None:
+        """Resolve conflicts between the new transaction and existing transactions.
+
+        Args:
+            transaction: The new transaction
+            file_queue: List of existing transactions for the file
+            solve_conflicts: Whether to attempt to resolve conflicts
+
+        Returns:
+            The transaction to add, or None if it should be discarded
+
+        Raises:
+            TransactionError: When a conflict cannot be resolved
+        """
+        # Extract the conflict resolution logic to reduce complexity
+        try:
+            conflicts = self._get_conflicts(transaction)
+            if solve_conflicts and conflicts:
+                return self._handle_conflicts(transaction, file_queue, conflicts)
+            else:
+                # No conflicts (or resolution disabled): queue as-is.
+                return transaction
+        except TransactionError:
+            logger.exception("Transaction conflict detected")
+            # NOTE(review): _get_conflicts is called a second time here just
+            # for logging — confirm it is side-effect free.
+            self._log_conflict_error(transaction, self._get_conflicts(transaction))
+            raise
+
+    def _handle_conflicts(
+        self,
+        transaction: Transaction,
+        file_queue: list[Transaction],
+        conflicts: list[Transaction],
+    ) -> Transaction | None:
+        """Handle conflicts between transactions.
+
+        Args:
+            transaction: The new transaction
+            file_queue: List of existing transactions for the file
+            conflicts: List of conflicting transactions
+
+        Returns:
+            The transaction to add, or None if it should be discarded
+
+        Raises:
+            TransactionError: When neither side of the conflict can be
+                broken down into non-conflicting pieces
+        """
+        # Check if the current transaction completely overlaps with any existing transaction
+        completely_overlapping = self._get_overlapping_conflicts(transaction)
+        if completely_overlapping is not None:
+            # If it does, check the overlapping transaction's type
+            # If the overlapping transaction is a remove, remove the current transaction
+            if isinstance(completely_overlapping, RemoveTransaction):
+                return None
+            # If the overlapping transaction is an edit, try to break it down
+            elif isinstance(completely_overlapping, EditTransaction):
+                if self._break_down_transaction(completely_overlapping, file_queue):
+                    return transaction
+
+                raise TransactionError()
+        else:
+            # If current transaction is deleted, remove all conflicting transactions
+            if isinstance(transaction, RemoveTransaction):
+                for t in conflicts:
+                    file_queue.remove(t)
+            # If current transaction is edit, try to break it down
+            elif isinstance(transaction, EditTransaction):
+                # A successful break-down queues the pieces itself, so the
+                # original transaction is discarded (None).
+                if self._break_down_transaction(transaction, file_queue):
+                    return None
+                raise TransactionError()
+
+        return transaction
+
def _break_down_transaction(
    self, to_break: EditTransaction, file_queue: list[Transaction]
) -> bool:
    """Break down an edit transaction into smaller transactions.

    Args:
        to_break: The transaction to break down
        file_queue: List of existing transactions for the file

    Returns:
        True if the transaction was broken down, False otherwise
    """
    new_transactions = to_break.break_down()
    if not new_transactions:
        return False

    try:
        # Replace the original transaction in the queue with its pieces.
        insert_idx = file_queue.index(to_break)
        file_queue.pop(insert_idx)
    except ValueError:
        # to_break is not queued (it is the incoming transaction itself);
        # its pieces go at the end of the queue instead.
        insert_idx = len(file_queue)

    for new_transaction in new_transactions:
        # Each piece is itself conflict-checked recursively.
        broken_down = self._resolve_conflicts(
            new_transaction, file_queue, solve_conflicts=True
        )
        if broken_down:
            # NOTE(review): inserting every accepted piece at the same index
            # leaves the pieces in reverse acceptance order — confirm that
            # queue order is irrelevant here (the queue appears to be
            # re-sorted before commit).
            file_queue.insert(insert_idx, broken_down)

    return True
+
def _log_conflict_error(
    self, transaction: Transaction, conflicts: list[Transaction]
) -> None:
    """Build and raise a detailed conflict report.

    NOTE(review): despite its name (and the declared None return), this
    method does not log — it always raises a TransactionError carrying the
    formatted message; _resolve_conflicts relies on that.

    Args:
        transaction: The transaction that caused the conflict
        conflicts: List of conflicting transactions

    Raises:
        TransactionError: Always, with the formatted conflict report.
    """
    msg = (
        f"Potential conflict detected in file {transaction.file_path}!\n"
        "Attempted to perform code modification:\n"
        "\n"
        f"{self._format_transactions([transaction])}\n"
        "\n"
        "That potentially conflicts with the following other modifications:\n"
        "\n"
        f"{self._format_transactions(conflicts)}\n"
        "\n"
        "Aborting!\n"
        "\n"
        f"[Conflict Detected] Potential Modification Conflict in File {transaction.file_path}!"
    )
    raise TransactionError(msg)
+
def get_transactions_at_range(
    self,
    file_path: Path,
    start_byte: int,
    end_byte: int,
    transaction_order: TransactionPriority | None = None,
    *,
    combined: bool = False,
) -> list[Transaction]:
    """Returns list of queued transactions that matches the given filtering criteria.

    Args:
        file_path: Path to the file
        start_byte: Start byte position
        end_byte: End byte position
        transaction_order: Optional filter by transaction order
        combined: Return a list of transactions which collectively apply to
            the given range, by recursively chaining transactions that start
            where the previous one ends

    Returns:
        List of matching transactions
    """
    matching_transactions: list[Transaction] = []
    if file_path not in self.queued_transactions:
        return matching_transactions

    for t in self.queued_transactions[file_path]:
        if t.start_byte == start_byte:
            # Exact span match (optionally restricted to one transaction order).
            if t.end_byte == end_byte and (
                transaction_order is None
                or t.transaction_order == transaction_order
            ):
                matching_transactions.append(t)
            # Combined mode: a non-empty transaction that starts here may
            # cover the head of the range; recurse for the remainder and,
            # if the rest of the range is covered too, return the chain.
            elif combined and t.start_byte != t.end_byte:
                other = self.get_transactions_at_range(
                    t.file_path,
                    t.end_byte,
                    end_byte,
                    transaction_order,
                    combined=combined,
                )
                if other:
                    return [t, *other]

    return matching_transactions
+
def get_transaction_containing_range(
    self,
    file_path: Path,
    start_byte: int,
    end_byte: int,
    transaction_order: "TransactionPriority | None" = None,
) -> "Transaction | None":
    """Returns the nearest transaction that includes the range specified given the filtering criteria.

    Among all queued transactions whose span contains [start_byte, end_byte],
    the one whose boundaries are closest to the requested range wins.

    Args:
        file_path: Path to the file
        start_byte: Start byte position
        end_byte: End byte position
        transaction_order: Optional filter by transaction order

    Returns:
        The transaction containing the range, or None if not found
    """
    if file_path not in self.queued_transactions:
        return None

    smallest_difference = math.inf
    best_fit_transaction = None
    for t in self.queued_transactions[file_path]:
        if (
            t.start_byte <= start_byte
            and t.end_byte >= end_byte
            and (
                transaction_order is None
                or t.transaction_order == transaction_order
            )
        ):
            difference = abs(t.start_byte - start_byte) + abs(t.end_byte - end_byte)
            # An exact span match cannot be beaten; return immediately.
            if difference == 0:
                return t
            # Fixed: only remember t when it is strictly closer than the best
            # candidate so far. Previously best_fit_transaction was assigned
            # on EVERY match, so the last containing transaction won instead
            # of the closest one.
            if difference < smallest_difference:
                smallest_difference = difference
                best_fit_transaction = t
    return best_fit_transaction
+
def _get_conflicts(self, transaction: Transaction) -> list[Transaction]:
    """Returns all transactions that overlap with the given transaction.

    Overlap is tested in both directions with half-open bounds, so a
    zero-width (insert) transaction sitting exactly on the other
    transaction's start or end boundary is also reported as a conflict.

    Args:
        transaction: The transaction to check for conflicts

    Returns:
        List of conflicting transactions
    """
    conflicts: list[Transaction] = []
    if transaction.file_path not in self.queued_transactions:
        return conflicts

    for t in self.queued_transactions[transaction.file_path]:
        # Skip if the transaction is the same
        if t == transaction:
            continue

        # Check if the transaction overlaps with the given transaction:
        # either transaction's start or end falls inside the other's span.
        if (
            (t.start_byte <= transaction.start_byte < t.end_byte)
            or (t.start_byte < transaction.end_byte <= t.end_byte)
            or (transaction.start_byte <= t.start_byte < transaction.end_byte)
            or (transaction.start_byte < t.end_byte <= transaction.end_byte)
        ):
            conflicts.append(t)

    return conflicts
+
def _get_overlapping_conflicts(
    self, transaction: Transaction
) -> Transaction | None:
    """Return the first queued transaction whose span fully contains `transaction`.

    Args:
        transaction: The transaction to check for overlaps

    Returns:
        The overlapping transaction, or None if not found
    """
    queue = self.queued_transactions.get(transaction.file_path, [])
    return next(
        (
            candidate
            for candidate in queue
            if candidate.start_byte <= transaction.start_byte
            and transaction.end_byte <= candidate.end_byte
        ),
        None,
    )
diff --git a/codegen-on-oss/codegen_on_oss/analyzers/transactions.py b/codegen-on-oss/codegen_on_oss/analyzers/transactions.py
new file mode 100644
index 000000000..b3ead5446
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/analyzers/transactions.py
@@ -0,0 +1,369 @@
+#!/usr/bin/env python3
+"""
+Transactions Module for Analyzers
+
+This module defines transaction classes for code modifications during analysis.
+It provides a structured way to represent and execute code changes.
+"""
+
+from collections.abc import Callable
+from difflib import unified_diff
+from enum import IntEnum
+from functools import cached_property
+from pathlib import Path
+from typing import Protocol, runtime_checkable, Optional, Union, Any, TYPE_CHECKING
+
# Define change types for diffs
class ChangeType(IntEnum):
    """Types of changes that can be made to files.

    Used by DiffLite to tag what kind of modification a diff represents.
    """
    Modified = 1  # File content changed in place
    Removed = 2   # File deleted
    Renamed = 3   # File moved/renamed
    Added = 4     # New file created
+
# Simple diff class for tracking changes
class DiffLite:
    """Simple diff for tracking code changes.

    Records what changed (change_type), where (path), the old/new paths for
    renames, and optionally the file content as it was before the change.
    """

    def __init__(
        self,
        change_type: ChangeType,
        path: Path,
        rename_from: Optional[Path] = None,
        rename_to: Optional[Path] = None,
        old_content: Optional[bytes] = None
    ):
        # Kind of change this diff represents.
        self.change_type = change_type
        # Path of the affected file (the original path for renames).
        self.path = path
        # Source/destination paths; only populated for renames.
        self.rename_from = rename_from
        self.rename_to = rename_to
        # File content before the change, when available.
        self.old_content = old_content
+
class TransactionPriority(IntEnum):
    """Priority levels for different types of transactions.

    Lower values sort first when transactions are ordered by type
    (see Transaction._to_sort_key).
    """
    Remove = 0  # Remove always has highest priority
    Edit = 1  # Edit comes next
    Insert = 2  # Insert is always the last of the edit operations
    # File operations happen last, since they will mess up all other transactions
    FileAdd = 10
    FileRename = 11
    FileRemove = 12
+
@runtime_checkable
class ContentFunc(Protocol):
    """A function executed to generate a content block dynamically.

    This is the lazy form of a transaction's new_content; runtime-checkable
    so instances can be detected with isinstance().
    """
    def __call__(self) -> str: ...
+
class Transaction:
    """Base class for all transactions.

    A transaction represents an atomic modification to a file in the codebase.
    Subclasses implement execute()/get_diff()/diff_str() for their specific
    kind of change.
    """
    start_byte: int
    end_byte: int
    file_path: Path
    priority: Union[int, tuple]
    # Set by each subclass; string annotation so the class body does not
    # depend on evaluation order within the module.
    transaction_order: "TransactionPriority"
    # Monotonically increasing id source shared by all transactions.
    transaction_counter: int = 0

    def __init__(
        self,
        start_byte: int,
        end_byte: int,
        file_path: Path,
        priority: Union[int, tuple] = 0,
        new_content: Optional[Union[str, Callable[[], str]]] = None,
    ) -> None:
        """Args:
            start_byte: Start of the affected byte span (must be >= 0).
            end_byte: End of the affected byte span.
            file_path: Path of the file this transaction modifies.
            priority: Tie-break priority; an int or a tuple of ints.
            new_content: Replacement text, or a zero-argument callable that
                produces it lazily.
        """
        self.start_byte = start_byte
        assert self.start_byte >= 0
        self.end_byte = end_byte
        self.file_path = file_path
        self.priority = priority
        self._new_content = new_content
        # Creation-ordered id, used to break sorting ties (newest first).
        self.transaction_id = Transaction.transaction_counter

        Transaction.transaction_counter += 1

    def __repr__(self) -> str:
        # Fixed: the original returned an empty f-string.
        return f"<{type(self).__name__} at bytes [{self.start_byte}:{self.end_byte}] in {self.file_path}>"

    def __hash__(self):
        # Hashes the *resolved* content (new_content), so a content function
        # is evaluated (and cached) the first time the transaction is hashed.
        return hash((self.start_byte, self.end_byte, self.file_path, self.priority, self.new_content))

    def __eq__(self, other):
        """Equality on span/path/priority/raw content; transaction_id is ignored."""
        if not isinstance(other, type(self)):
            return False

        # Check for everything EXCEPT transaction_id
        return (
            self.start_byte == other.start_byte
            and self.end_byte == other.end_byte
            and self.file_path == other.file_path
            and self.priority == other.priority
            and self._new_content == other._new_content
        )

    @property
    def length(self):
        """Length of the transaction in bytes."""
        return self.end_byte - self.start_byte

    def execute(self):
        """Execute the transaction to modify the file."""
        msg = "Transaction.execute() must be implemented by subclasses"
        raise NotImplementedError(msg)

    def get_diff(self) -> "DiffLite":
        """Gets the diff produced by this transaction."""
        msg = "Transaction.get_diff() must be implemented by subclasses"
        raise NotImplementedError(msg)

    def diff_str(self):
        """Human-readable string representation of the change."""
        msg = "Transaction.diff_str() must be implemented by subclasses"
        raise NotImplementedError(msg)

    def _to_sort_key(transaction: "Transaction"):
        """Key function for sorting transactions.

        Sorts by:
          1. Descending start_byte
          2. Ascending transaction type (TransactionPriority)
          3. Ascending priority
          4. Descending creation time (newest first)

        NOTE(review): deliberately takes the transaction as its only argument
        (no self); call it via the class (Transaction._to_sort_key). Consider
        marking it @staticmethod.
        """
        # Normalize int priorities to 1-tuples so int and tuple priorities
        # compare against each other without a TypeError.
        priority = (transaction.priority,) if isinstance(transaction.priority, int) else transaction.priority

        return -transaction.start_byte, transaction.transaction_order.value, priority, -transaction.transaction_id

    @cached_property
    def new_content(self) -> Optional[str]:
        """Get the new content, evaluating the content function if necessary."""
        # callable() is equivalent to the original isinstance(..., ContentFunc)
        # check: ContentFunc is a runtime-checkable Protocol whose only member
        # is __call__, and plain strings are not callable.
        return self._new_content() if callable(self._new_content) else self._new_content

    @staticmethod
    def create_new_file(filepath: Union[str, Path], content: str) -> "FileAddTransaction":
        """Create a transaction to add a new file.

        NOTE(review): `content` is currently unused — FileAddTransaction does
        not carry file content. Confirm whether the content is meant to be
        written by a separate insert/edit transaction.
        """
        return FileAddTransaction(Path(filepath))

    @staticmethod
    def delete_file(filepath: Union[str, Path]) -> "FileRemoveTransaction":
        """Create a transaction to delete a file."""
        # FileRemoveTransaction expects an object with a .path attribute;
        # wrap the raw path in a minimal placeholder.
        class FilePlaceholder:
            def __init__(self, path):
                self.path = Path(path)

        return FileRemoveTransaction(FilePlaceholder(filepath))
+
class RemoveTransaction(Transaction):
    """Transaction to remove content from a file."""
    transaction_order = TransactionPriority.Remove

    # Optional callback invoked after the removal is written.
    exec_func: Optional[Callable[[], None]] = None

    def __init__(self, start_byte: int, end_byte: int, file: Any, priority: int = 0, exec_func: Optional[Callable[[], None]] = None) -> None:
        """Args:
            start_byte: First byte to remove (inclusive).
            end_byte: End of the removed span (exclusive).
            file: File-like object exposing .path, .content_bytes and .write_bytes().
            priority: Tie-break priority among transactions of the same type.
            exec_func: Optional callback to run after execute().
        """
        super().__init__(start_byte, end_byte, file.path, priority=priority)
        self.file = file
        self.exec_func = exec_func

    def _generate_new_content_bytes(self) -> bytes:
        """Generate the new content bytes after removal."""
        content_bytes = self.file.content_bytes
        new_content_bytes = content_bytes[: self.start_byte] + content_bytes[self.end_byte :]
        return new_content_bytes

    def execute(self) -> None:
        """Removes the content between start_byte and end_byte."""
        self.file.write_bytes(self._generate_new_content_bytes())
        if self.exec_func:
            self.exec_func()

    def get_diff(self) -> DiffLite:
        """Gets the diff produced by this transaction."""
        # old_content snapshots the file as it is when the diff is requested.
        return DiffLite(ChangeType.Modified, self.file_path, old_content=self.file.content_bytes)

    def diff_str(self) -> str:
        """Human-readable string representation of the change."""
        diff = "".join(unified_diff(self.file.content.splitlines(True), self._generate_new_content_bytes().decode("utf-8").splitlines(True)))
        return f"Remove {self.length} bytes at bytes ({self.start_byte}, {self.end_byte})\n{diff}"
+
class InsertTransaction(Transaction):
    """Transaction to insert content into a file."""
    transaction_order = TransactionPriority.Insert

    # Optional callback invoked after the insertion is written.
    exec_func: Optional[Callable[[], None]] = None

    def __init__(
        self,
        insert_byte: int,
        file: Any,
        new_content: Union[str, Callable[[], str]],
        *,
        priority: Union[int, tuple] = 0,
        exec_func: Optional[Callable[[], None]] = None,
    ) -> None:
        """Args:
            insert_byte: Byte offset at which the content is inserted.
            file: File-like object exposing .path, .content_bytes and .write_bytes().
            new_content: Text to insert, or a zero-argument callable producing it.
            priority: Tie-break priority among transactions of the same type.
            exec_func: Optional callback to run after execute().
        """
        # A zero-width span (start == end == insert_byte) marks an insertion point.
        super().__init__(insert_byte, insert_byte, file.path, priority=priority, new_content=new_content)
        self.insert_byte = insert_byte
        self.file = file
        self.exec_func = exec_func

    def _generate_new_content_bytes(self) -> bytes:
        """Generate the new content bytes after insertion."""
        if self.new_content is None:
            raise ValueError("Cannot generate content bytes: new_content is None")
        new_bytes = bytes(self.new_content, encoding="utf-8")
        content_bytes = self.file.content_bytes
        head = content_bytes[: self.insert_byte]
        tail = content_bytes[self.insert_byte :]
        new_content_bytes = head + new_bytes + tail
        return new_content_bytes

    def execute(self) -> None:
        """Inserts new_src at the specified byte_index."""
        self.file.write_bytes(self._generate_new_content_bytes())
        if self.exec_func:
            self.exec_func()

    def get_diff(self) -> DiffLite:
        """Gets the diff produced by this transaction."""
        return DiffLite(ChangeType.Modified, self.file_path, old_content=self.file.content_bytes)

    def diff_str(self) -> str:
        """Human-readable string representation of the change."""
        diff = "".join(unified_diff(self.file.content.splitlines(True), self._generate_new_content_bytes().decode("utf-8").splitlines(True)))
        content_length = len(self.new_content) if self.new_content is not None else 0
        return f"Insert {content_length} bytes at bytes ({self.start_byte}, {self.end_byte})\n{diff}"
+
class EditTransaction(Transaction):
    """Transaction to edit content in a file."""
    transaction_order = TransactionPriority.Edit
    # Narrows the inherited new_content for type-checkers: an edit always
    # carries a concrete string, never a content function.
    new_content: str

    def __init__(
        self,
        start_byte: int,
        end_byte: int,
        file: Any,
        new_content: str,
        priority: int = 0,
    ) -> None:
        """Args:
            start_byte: Start of the replaced span (inclusive).
            end_byte: End of the replaced span (exclusive).
            file: File-like object exposing .path, .content_bytes and .write_bytes().
            new_content: Replacement text.
            priority: Tie-break priority among transactions of the same type.
        """
        super().__init__(start_byte, end_byte, file.path, priority=priority, new_content=new_content)
        self.file = file

    def _generate_new_content_bytes(self) -> bytes:
        """Generate the new content bytes after editing."""
        new_bytes = bytes(self.new_content, "utf-8")
        content_bytes = self.file.content_bytes
        new_content_bytes = content_bytes[: self.start_byte] + new_bytes + content_bytes[self.end_byte :]
        return new_content_bytes

    def execute(self) -> None:
        """Edits the entirety of this node's source to new_src."""
        self.file.write_bytes(self._generate_new_content_bytes())

    def get_diff(self) -> DiffLite:
        """Gets the diff produced by this transaction."""
        return DiffLite(ChangeType.Modified, self.file_path, old_content=self.file.content_bytes)

    def diff_str(self) -> str:
        """Human-readable string representation of the change."""
        diff = "".join(unified_diff(self.file.content.splitlines(True), self._generate_new_content_bytes().decode("utf-8").splitlines(True)))
        return f"Edit {self.length} bytes at bytes ({self.start_byte}, {self.end_byte}), src: ({self.new_content[:50]})\n{diff}"

    def break_down(self) -> Optional[list[InsertTransaction]]:
        """Break down an edit transaction into insert transactions.

        When the edited span's old text survives verbatim inside the new
        text, the edit is equivalent to inserting the new text's prefix
        (before the old text) and its suffix (after it).

        Returns:
            The equivalent insert transactions (suffix first, then prefix),
            or None when the old text is empty or not contained in the new.
        """
        old = self.file.content_bytes[self.start_byte : self.end_byte]
        new = bytes(self.new_content, "utf-8")
        if old and old in new:
            # Split once around the first occurrence of the old text.
            prefix, suffix = new.split(old, maxsplit=1)
            ret = []
            if suffix:
                ret.append(InsertTransaction(self.end_byte, self.file, suffix.decode("utf-8"), priority=self.priority))
            if prefix:
                ret.append(InsertTransaction(self.start_byte, self.file, prefix.decode("utf-8"), priority=self.priority))
            return ret
        return None
+
class FileAddTransaction(Transaction):
    """Transaction to add a new file."""
    transaction_order = TransactionPriority.FileAdd

    def __init__(
        self,
        file_path: Path,
        priority: int = 0,
    ) -> None:
        """Args:
            file_path: Path of the file to add.
            priority: Tie-break priority among transactions of the same type.
        """
        # Zero-width span: file-level transactions have no byte range.
        super().__init__(0, 0, file_path, priority=priority)

    def execute(self) -> None:
        """Adds a new file."""
        pass  # execute is a no-op as the file is immediately added

    def get_diff(self) -> DiffLite:
        """Gets the diff produced by this transaction."""
        return DiffLite(ChangeType.Added, self.file_path)

    def diff_str(self) -> str:
        """Human-readable string representation of the change."""
        return f"Add file at {self.file_path}"
+
class FileRenameTransaction(Transaction):
    """Transaction to rename a file."""
    transaction_order = TransactionPriority.FileRename

    def __init__(
        self,
        file: Any,
        new_file_path: str,
        priority: int = 0,
    ) -> None:
        """Args:
            file: File-like object exposing .path (and optionally .ctx).
            new_file_path: Destination path; resolved through file.ctx when available.
            priority: Tie-break priority among transactions of the same type.
        """
        # The destination path doubles as the transaction's new_content.
        super().__init__(0, 0, file.path, priority=priority, new_content=new_file_path)
        # Resolve to an absolute path through the file's context when available.
        self.new_file_path = file.ctx.to_absolute(new_file_path) if hasattr(file, 'ctx') else Path(new_file_path)
        self.file = file

    def execute(self) -> None:
        """Renames the file."""
        # Flush pending content through the context's IO layer (when present)
        # before touching the filesystem.
        if hasattr(self.file, 'ctx') and hasattr(self.file.ctx, 'io'):
            self.file.ctx.io.save_files({self.file.path})
        self.file_path.rename(self.new_file_path)

    def get_diff(self) -> DiffLite:
        """Gets the diff produced by this transaction."""
        return DiffLite(ChangeType.Renamed, self.file_path, self.file_path, self.new_file_path,
                       old_content=self.file.content_bytes if hasattr(self.file, 'content_bytes') else None)

    def diff_str(self) -> str:
        """Human-readable string representation of the change."""
        return f"Rename file from {self.file_path} to {self.new_file_path}"
+
class FileRemoveTransaction(Transaction):
    """Transaction to remove a file."""
    transaction_order = TransactionPriority.FileRemove

    def __init__(
        self,
        file: Any,
        priority: int = 0,
    ) -> None:
        """Args:
            file: File-like object exposing .path (and optionally .ctx).
            priority: Tie-break priority among transactions of the same type.
        """
        super().__init__(0, 0, file.path, priority=priority)
        self.file = file

    def execute(self) -> None:
        """Removes the file."""
        # Prefer the context's IO layer when available; otherwise fall back
        # to deleting directly from the filesystem.
        if hasattr(self.file, 'ctx') and hasattr(self.file.ctx, 'io'):
            self.file.ctx.io.delete_file(self.file.path)
        else:
            # Fallback for when ctx.io is not available
            import os
            if os.path.exists(self.file_path):
                os.remove(self.file_path)

    def get_diff(self) -> DiffLite:
        """Gets the diff produced by this transaction."""
        return DiffLite(ChangeType.Removed, self.file_path,
                       old_content=self.file.content_bytes if hasattr(self.file, 'content_bytes') else None)

    def diff_str(self) -> str:
        """Human-readable string representation of the change."""
        return f"Remove file at {self.file_path}"
diff --git a/codegen-on-oss/codegen_on_oss/analyzers/utils.py b/codegen-on-oss/codegen_on_oss/analyzers/utils.py
new file mode 100644
index 000000000..b04da2f70
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/analyzers/utils.py
@@ -0,0 +1,80 @@
+"""Utility functions for documentation generation."""
+
+import re
+import textwrap
+
+
+def sanitize_docstring_for_markdown(docstring: str | None) -> str:
+ """Sanitize the docstring for MDX.
+
+ Args:
+ docstring: The docstring to sanitize.
+
+ Returns:
+ The sanitized docstring.
+ """
+ if docstring is None:
+ return ""
+ docstring_lines = docstring.splitlines()
+ if len(docstring_lines) > 1:
+ docstring_lines[1:] = [textwrap.dedent(line) for line in docstring_lines[1:]]
+ docstring = "\n".join(docstring_lines)
+ if docstring.startswith('"""'):
+ docstring = docstring[3:]
+ if docstring.endswith('"""'):
+ docstring = docstring[:-3]
+ return docstring
+
+
def sanitize_mdx_mintlify_description(content: str) -> str:
    """Mintlify description field needs to have string escaped, which content doesn't need.

    Args:
        content: The content to sanitize.

    Returns:
        The sanitized content.
    """
    content = sanitize_docstring_for_markdown(content)
    # Wrap bare `<... />` component tags in backticks so MDX treats them as
    # inline code; the lookarounds make this a no-op when the tag is already
    # backtick-quoted. (Fixed: the pattern had lost its backtick lookarounds
    # and was not a valid regex.)
    content = re.sub(r"(?<!`)(<[^>]+>)(?!`)", r"`\1`", content)

    # Escape double-quote characters, unless the string already contains
    # escaped quotes (then this is a no-op).
    if re.search(r'\\"', content):
        return content  # No-op if already escaped
    return re.sub(r'(")', r"\\\1", content)
+
+
def sanitize_html_for_mdx(html_string: str) -> str:
    """Sanitize an HTML string for MDX by escaping double quotes.

    Every `"` character in the input is replaced with the `&quot;` HTML
    entity. (Fixed: the replacement string literal was garbled/unterminated;
    note this escapes ALL double quotes, not only those inside attributes.)

    Args:
        html_string: The input HTML string to sanitize

    Returns:
        The sanitized HTML string with escaped quotes
    """
    return re.sub(r'"', "&quot;", html_string)
+
+
def extract_class_description(docstring: str) -> str:
    """Extract the class description from a docstring, excluding the attributes section.

    Args:
        docstring: The class docstring to parse

    Returns:
        The description text with whitespace collapsed to single spaces
    """
    if not docstring:
        return ""

    # Everything before the first "Attributes:" marker is the description.
    description, _, _ = docstring.partition("Attributes:")

    # Strip each line and join the non-empty ones with single spaces.
    stripped_lines = (line.strip() for line in description.strip().splitlines())
    return " ".join(line for line in stripped_lines if line)
diff --git a/codegen-on-oss/codegen_on_oss/analyzers/visualization/__init__.py b/codegen-on-oss/codegen_on_oss/analyzers/visualization/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/codegen-on-oss/codegen_on_oss/analyzers/visualization/analysis_visualizer.py b/codegen-on-oss/codegen_on_oss/analyzers/visualization/analysis_visualizer.py
new file mode 100644
index 000000000..a7c2a3f77
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/analyzers/visualization/analysis_visualizer.py
@@ -0,0 +1,558 @@
+#!/usr/bin/env python3
+"""
+Analysis Visualizer Module
+
+This module provides visualization capabilities for code analysis results
+including dead code detection, cyclomatic complexity, and issue heatmaps.
+"""
+
+import logging
+
+from .visualizer import BaseVisualizer, OutputFormat, VisualizationType
+
+try:
+ import matplotlib.pyplot as plt
+ import networkx as nx
+ from matplotlib.colors import LinearSegmentedColormap
+except ImportError:
+ logging.warning(
+ "Visualization dependencies not found. Please install them with: pip install networkx matplotlib"
+ )
+
+logger = logging.getLogger(__name__)
+
+
+class AnalysisVisualizer(BaseVisualizer):
+ """
+ Visualizer for code analysis results.
+
+ This class provides methods to visualize analysis results such as
+ dead code detection, cyclomatic complexity, and issue heatmaps.
+ """
+
def __init__(self, analyzer=None, codebase=None, context=None, **kwargs):
    """
    Initialize the AnalysisVisualizer.

    Args:
        analyzer: Analyzer with analysis results; when given, also supplies
            default codebase/context via its base_codebase/base_context.
        codebase: Codebase instance to visualize (overrides the analyzer's).
        context: Context providing graph representation (overrides the analyzer's).
        **kwargs: Additional configuration options forwarded to BaseVisualizer.
    """
    super().__init__(**kwargs)
    self.analyzer = analyzer
    # Fall back to the analyzer's codebase/context when not given explicitly.
    self.codebase = codebase or (analyzer.base_codebase if analyzer else None)
    self.context = context or (analyzer.base_context if analyzer else None)
+
def visualize_dead_code(self, path_filter: str | None = None):
    """
    Generate a visualization of dead (unused) code in the codebase.

    Builds a graph of files containing dead symbols (unused functions and
    variables) with "contains_dead" edges, then renders it either as JSON
    or as a plotted figure depending on the configured output format.

    Args:
        path_filter: Optional path prefix used to restrict which files are shown

    Returns:
        Visualization data or path to saved file, or None when no dead code
        information is available
    """
    entity_name = path_filter or "codebase"

    # Initialize graph
    self._initialize_graph()

    # Check for analyzer
    if not self.analyzer:
        logger.error("Analyzer required for dead code visualization")
        return None

    # Check for analysis results
    if not hasattr(self.analyzer, "results") or not self.analyzer.results:
        logger.error("Analysis results not available")
        return None

    # Extract dead code information from analysis results
    dead_code = {}
    if (
        "static_analysis" in self.analyzer.results
        and "dead_code" in self.analyzer.results["static_analysis"]
    ):
        dead_code = self.analyzer.results["static_analysis"]["dead_code"]

    if not dead_code:
        logger.warning("No dead code detected in analysis results")
        return None

    # Cache of file nodes already added, keyed by file path.
    file_nodes = {}

    # The unused-functions and unused-variables entries have the same shape;
    # process both categories with one shared helper (previously duplicated).
    self._add_dead_symbol_nodes(
        dead_code.get("unused_functions", []), "Function", file_nodes, path_filter
    )
    self._add_dead_symbol_nodes(
        dead_code.get("unused_variables", []), "Variable", file_nodes, path_filter
    )

    # Generate visualization data
    if self.config.output_format == OutputFormat.JSON:
        data = self._convert_graph_to_json()
        return self._save_visualization(
            VisualizationType.DEAD_CODE, entity_name, data
        )
    else:
        fig = self._plot_graph()
        return self._save_visualization(
            VisualizationType.DEAD_CODE, entity_name, fig
        )

def _lookup_file(self, file_path: str):
    """Return the codebase file object whose path equals file_path, or None."""
    for file in self.codebase.files:
        if hasattr(file, "path") and str(file.path) == file_path:
            return file
    return None

def _add_dead_symbol_nodes(
    self, entries, symbol_type: str, file_nodes: dict, path_filter: str | None
) -> None:
    """
    Add graph nodes and edges for one category of dead symbols.

    Args:
        entries: Dead-code entries (dicts with "name"/"file"/"line" keys)
        symbol_type: Label stored on the placeholder node ("Function"/"Variable")
        file_nodes: Cache of file nodes already added, keyed by file path;
            updated in place
        path_filter: Optional path prefix filter
    """
    for entry in entries:
        file_path = entry.get("file", "")

        # Skip if path filter is specified and doesn't match
        if path_filter and not file_path.startswith(path_filter):
            continue

        # Add the containing file node once; only cache it when the file was
        # actually found in the codebase.
        if file_path not in file_nodes:
            file_obj = self._lookup_file(file_path)
            if file_obj:
                self._add_node(
                    file_obj,
                    name=file_path.split("/")[-1],
                    color=self.config.color_palette.get("File"),
                    file_path=file_path,
                )
                file_nodes[file_path] = file_obj

        # Create a placeholder for the dead symbol (we don't have the actual object)
        symbol_name = entry.get("name", "")
        symbol_line = entry.get("line", None)
        symbol_obj = {
            "name": symbol_name,
            "file_path": file_path,
            "line": symbol_line,
            "type": symbol_type,
        }

        self._add_node(
            symbol_obj,
            name=symbol_name,
            color=self.config.color_palette.get("Dead"),
            file_path=file_path,
            line=symbol_line,
            is_dead=True,
        )

        # Link the symbol to its containing file when that file is known.
        if file_path in file_nodes:
            self._add_edge(file_nodes[file_path], symbol_obj, type="contains_dead")
+
def visualize_cyclomatic_complexity(self, path_filter: str | None = None):
    """
    Generate a heatmap visualization of cyclomatic complexity.

    Renders a horizontal bar chart of the 30 most complex functions, colored
    on a yellow-to-red scale.

    Args:
        path_filter: Optional path prefix used to restrict which files are included

    Returns:
        Path to the saved visualization, or None when no complexity data
        matching the criteria is available
    """
    entity_name = path_filter or "codebase"

    # Check for analyzer
    if not self.analyzer:
        logger.error("Analyzer required for complexity visualization")
        return None

    # Check for analysis results
    if not hasattr(self.analyzer, "results") or not self.analyzer.results:
        logger.error("Analysis results not available")
        return None

    # Extract complexity information from analysis results
    complexity_data = {}
    if (
        "static_analysis" in self.analyzer.results
        and "code_complexity" in self.analyzer.results["static_analysis"]
    ):
        complexity_data = self.analyzer.results["static_analysis"][
            "code_complexity"
        ]

    if not complexity_data:
        logger.warning("No complexity data found in analysis results")
        return None

    # Collect per-function complexity records, honoring the path filter.
    functions = []
    if "function_complexity" in complexity_data:
        for func_data in complexity_data["function_complexity"]:
            # Skip if path filter is specified and doesn't match
            if path_filter and not func_data.get("file", "").startswith(
                path_filter
            ):
                continue

            functions.append({
                "name": func_data.get("name", ""),
                "file": func_data.get("file", ""),
                "complexity": func_data.get("complexity", 1),
                "line": func_data.get("line", None),
            })

    # Fixed: bail out early when nothing matched; previously an empty list
    # crashed on max() of an empty sequence below.
    if not functions:
        logger.warning("No function complexity data matched the criteria")
        return None

    # Sort functions by complexity (descending)
    functions.sort(key=lambda x: x.get("complexity", 0), reverse=True)

    # Generate heatmap visualization
    plt.figure(figsize=(12, 10))

    # Extract data for the top 30 functions
    func_names = [
        f"{func['name']} ({func['file'].split('/')[-1]})" for func in functions[:30]
    ]
    complexities = [func.get("complexity", 0) for func in functions[:30]]

    # Create horizontal bar chart
    bars = plt.barh(func_names, complexities)

    # Color bars by complexity on a yellow->red scale anchored at >= 10.
    norm = plt.Normalize(1, max(10, max(complexities)))
    # NOTE(review): plt.cm.get_cmap is deprecated in recent matplotlib;
    # consider matplotlib.colormaps["YlOrRd"] once the minimum supported
    # version allows.
    cmap = plt.cm.get_cmap("YlOrRd")

    for bar, complexity in zip(bars, complexities):
        bar.set_color(cmap(norm(complexity)))

    # Add labels and title
    plt.xlabel("Cyclomatic Complexity")
    plt.title("Top Functions by Cyclomatic Complexity")
    plt.grid(axis="x", linestyle="--", alpha=0.6)

    # Add colorbar
    plt.colorbar(plt.cm.ScalarMappable(norm=norm, cmap=cmap), label="Complexity")

    # Save and return visualization
    return self._save_visualization(
        VisualizationType.CYCLOMATIC_COMPLEXITY, entity_name, plt.gcf()
    )
+
def visualize_issues_heatmap(self, severity=None, path_filter: str | None = None):
    """
    Generate a heatmap visualization of issues in the codebase.

    Renders a horizontal bar chart of the 20 files with the most issues,
    colored by issue count.

    Args:
        severity: Optional severity level to filter issues; must expose a
            .value attribute (used in the output name and chart title).
            NOTE(review): issues are compared with `issue.get("severity") ==
            severity`, which assumes stored severities are the same enum
            type — confirm against the analyzer's issue format.
        path_filter: Optional path to filter files

    Returns:
        Visualization data or path to saved file, or None when no issues
        match the criteria
    """
    entity_name = f"{severity.value if severity else 'all'}_issues"

    # Check for analyzer
    if not self.analyzer:
        logger.error("Analyzer required for issues visualization")
        return None

    # Check for analysis results
    if (
        not hasattr(self.analyzer, "results")
        or "issues" not in self.analyzer.results
    ):
        logger.error("Issues not available in analysis results")
        return None

    issues = self.analyzer.results["issues"]

    # Filter issues by severity if specified
    if severity:
        issues = [issue for issue in issues if issue.get("severity") == severity]

    # Filter issues by path if specified
    if path_filter:
        issues = [
            issue
            for issue in issues
            if issue.get("file", "").startswith(path_filter)
        ]

    if not issues:
        logger.warning("No issues found matching the criteria")
        return None

    # Group issues by file
    file_issues = {}
    for issue in issues:
        file_path = issue.get("file", "")
        if file_path not in file_issues:
            file_issues[file_path] = []

        file_issues[file_path].append(issue)

    # Generate heatmap visualization
    plt.figure(figsize=(12, 10))

    # Extract data for heatmap
    files = list(file_issues.keys())
    file_names = [file_path.split("/")[-1] for file_path in files]
    issue_counts = [len(file_issues[file_path]) for file_path in files]

    # Sort all three lists together by issue count (descending)
    sorted_data = sorted(
        zip(file_names, issue_counts, files, strict=False),
        key=lambda x: x[1],
        reverse=True,
    )
    file_names, issue_counts, files = zip(*sorted_data, strict=False)

    # Create horizontal bar chart for the 20 most affected files
    bars = plt.barh(file_names[:20], issue_counts[:20])

    # Color bars by issue count on an orange->red scale anchored at >= 5.
    norm = plt.Normalize(1, max(5, max(issue_counts[:20])))
    cmap = plt.cm.get_cmap("OrRd")

    for i, bar in enumerate(bars):
        count = issue_counts[i]
        bar.set_color(cmap(norm(count)))

    # Add labels and title
    plt.xlabel("Number of Issues")
    severity_text = f" ({severity.value})" if severity else ""
    plt.title(f"Files with the Most Issues{severity_text}")
    plt.grid(axis="x", linestyle="--", alpha=0.6)

    # Add colorbar
    plt.colorbar(plt.cm.ScalarMappable(norm=norm, cmap=cmap), label="Issue Count")

    # Save and return visualization
    return self._save_visualization(
        VisualizationType.ISSUES_HEATMAP, entity_name, plt.gcf()
    )
+
+ def visualize_pr_comparison(self):
+ """
+ Generate a visualization comparing base branch with PR.
+
+ Returns:
+ Visualization data or path to saved file
+ """
+ # Check for analyzer with PR data
+ if (
+ not self.analyzer
+ or not hasattr(self.analyzer, "pr_codebase")
+ or not self.analyzer.pr_codebase
+ or not self.analyzer.base_codebase
+ ):
+ logger.error("PR comparison requires analyzer with PR data")
+ return None
+
+ entity_name = (
+ f"pr_{self.analyzer.pr_number}"
+ if hasattr(self.analyzer, "pr_number") and self.analyzer.pr_number
+ else "pr_comparison"
+ )
+
+ # Check for analysis results
+ if (
+ not hasattr(self.analyzer, "results")
+ or "comparison" not in self.analyzer.results
+ ):
+ logger.error("Comparison data not available in analysis results")
+ return None
+
+ comparison = self.analyzer.results["comparison"]
+
+ # Initialize graph
+ self._initialize_graph()
+
+ # Process symbol comparison data
+ if "symbol_comparison" in comparison:
+ for symbol_data in comparison["symbol_comparison"]:
+ symbol_name = symbol_data.get("name", "")
+ in_base = symbol_data.get("in_base", False)
+ in_pr = symbol_data.get("in_pr", False)
+
+ # Create a placeholder for the symbol
+ symbol_obj = {
+ "name": symbol_name,
+ "in_base": in_base,
+ "in_pr": in_pr,
+ "type": "Symbol",
+ }
+
+ # Determine node color based on presence in base and PR
+ if in_base and in_pr:
+ color = "#A5D6A7" # Light green (modified)
+ elif in_base:
+ color = "#EF9A9A" # Light red (removed)
+ else:
+ color = "#90CAF9" # Light blue (added)
+
+ # Add node for symbol
+ self._add_node(
+ symbol_obj,
+ name=symbol_name,
+ color=color,
+ in_base=in_base,
+ in_pr=in_pr,
+ )
+
+ # Process parameter changes if available
+ if "parameter_changes" in symbol_data:
+ param_changes = symbol_data["parameter_changes"]
+
+ # Process removed parameters
+ for param in param_changes.get("removed", []):
+ param_obj = {
+ "name": param,
+ "change_type": "removed",
+ "type": "Parameter",
+ }
+
+ self._add_node(
+ param_obj,
+ name=param,
+ color="#EF9A9A", # Light red (removed)
+ change_type="removed",
+ )
+
+ self._add_edge(symbol_obj, param_obj, type="removed_parameter")
+
+ # Process added parameters
+ for param in param_changes.get("added", []):
+ param_obj = {
+ "name": param,
+ "change_type": "added",
+ "type": "Parameter",
+ }
+
+ self._add_node(
+ param_obj,
+ name=param,
+ color="#90CAF9", # Light blue (added)
+ change_type="added",
+ )
+
+ self._add_edge(symbol_obj, param_obj, type="added_parameter")
+
+ # Process return type changes if available
+ if "return_type_change" in symbol_data:
+ return_type_change = symbol_data["return_type_change"]
+ old_type = return_type_change.get("old", "None")
+ new_type = return_type_change.get("new", "None")
+
+ return_obj = {
+ "name": f"{old_type} -> {new_type}",
+ "old_type": old_type,
+ "new_type": new_type,
+ "type": "ReturnType",
+ }
+
+ self._add_node(
+ return_obj,
+ name=f"{old_type} -> {new_type}",
+ color="#FFD54F", # Amber (changed)
+ old_type=old_type,
+ new_type=new_type,
+ )
+
+ self._add_edge(symbol_obj, return_obj, type="return_type_change")
+
+ # Process call site issues if available
+ if "call_site_issues" in symbol_data:
+ for issue in symbol_data["call_site_issues"]:
+ issue_file = issue.get("file", "")
+ issue_line = issue.get("line", None)
+ issue_text = issue.get("issue", "")
+
+ # Create a placeholder for the issue
+ issue_obj = {
+ "name": issue_text,
+ "file": issue_file,
+ "line": issue_line,
+ "type": "Issue",
+ }
+
+ self._add_node(
+ issue_obj,
+ name=f"{issue_file.split('/')[-1]}:{issue_line}",
+ color="#EF5350", # Red (error)
+ file_path=issue_file,
+ line=issue_line,
+ issue_text=issue_text,
+ )
+
+ self._add_edge(symbol_obj, issue_obj, type="call_site_issue")
+
+ # Generate visualization data
+ if self.config.output_format == OutputFormat.JSON:
+ data = self._convert_graph_to_json()
+ return self._save_visualization(
+ VisualizationType.PR_COMPARISON, entity_name, data
+ )
+ else:
+ fig = self._plot_graph()
+ return self._save_visualization(
+ VisualizationType.PR_COMPARISON, entity_name, fig
+ )
diff --git a/codegen-on-oss/codegen_on_oss/analyzers/visualization/call_graph/__init__.py b/codegen-on-oss/codegen_on_oss/analyzers/visualization/call_graph/__init__.py
new file mode 100644
index 000000000..e9e9da182
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/analyzers/visualization/call_graph/__init__.py
@@ -0,0 +1,6 @@
+"""
+Call Graph Visualization Module
+
+This module provides tools for visualizing call graphs and function relationships in a codebase.
+"""
+
diff --git a/codegen-on-oss/codegen_on_oss/analyzers/visualization/call_graph/call_trace.py b/codegen-on-oss/codegen_on_oss/analyzers/visualization/call_graph/call_trace.py
new file mode 100644
index 000000000..85448ac4f
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/analyzers/visualization/call_graph/call_trace.py
@@ -0,0 +1,83 @@
+import codegen
+import networkx as nx
+from codegen import Codebase
+from codegen.sdk.core.class_definition import Class
+from codegen.sdk.core.import_resolution import Import
+from codegen.sdk.core.symbol import Symbol
+
+G = nx.DiGraph()
+
+IGNORE_EXTERNAL_MODULE_CALLS = True
+IGNORE_CLASS_CALLS = False
+MAX_DEPTH = 10
+
+COLOR_PALETTE = {
+ "StartFunction": "#9cdcfe", # Light blue for the starting function
+ "PyFunction": "#a277ff", # Purple for Python functions
+ "PyClass": "#ffca85", # Orange for Python classes
+ "ExternalModule": "#f694ff", # Pink for external module references
+}
+
+# Dictionary to track visited nodes and prevent cycles
+visited = {}
+
+
+def create_dependencies_visualization(symbol: Symbol, depth: int = 0):
+ """Creates a visualization of symbol dependencies in the codebase
+
+ Recursively traverses the dependency tree of a symbol (function, class, etc.)
+ and creates a directed graph representation. Dependencies can be either direct
+ symbol references or imports.
+
+ Args:
+ symbol (Symbol): The starting symbol whose dependencies will be mapped
+ depth (int): Current depth in the recursive traversal
+ """
+ if depth >= MAX_DEPTH:
+ return
+
+ for dep in symbol.dependencies:
+ dep_symbol = None
+
+ if isinstance(dep, Symbol):
+ dep_symbol = dep
+ elif isinstance(dep, Import):
+ dep_symbol = dep.resolved_symbol if dep.resolved_symbol else None
+
+ if dep_symbol:
+ G.add_node(dep_symbol, color=COLOR_PALETTE.get(dep_symbol.__class__.__name__, "#f694ff"))
+ G.add_edge(symbol, dep_symbol)
+
+ if not isinstance(dep_symbol, Class):
+ create_dependencies_visualization(dep_symbol, depth + 1)
+
+
+@codegen.function("visualize-symbol-dependencies")
+def run(codebase: Codebase):
+ """Generate a visualization of symbol dependencies in a codebase.
+
+ This codemod:
+ 1. Creates a directed graph of symbol dependencies starting from a target function
+ 2. Tracks relationships between functions, classes, and imports
+ 3. Generates a visual representation of the dependency hierarchy
+ """
+ global G
+ G = nx.DiGraph()
+
+ target_func = codebase.get_function("get_query_runner")
+ G.add_node(target_func, color=COLOR_PALETTE.get("StartFunction"))
+
+ create_dependencies_visualization(target_func)
+
+ print(G)
+ print("Use codegen.sh to visualize the graph!")
+
+
+if __name__ == "__main__":
+ print("Initializing codebase...")
+ codebase = Codebase.from_repo("codegen-oss/posthog", commit="b174f2221ea4ae50e715eb6a7e70e9a2b0760800", language="python")
+ print(f"Codebase with {len(codebase.files)} files and {len(codebase.functions)} functions.")
+ print("Creating graph...")
+
+ run(codebase)
+
diff --git a/codegen-on-oss/codegen_on_oss/analyzers/visualization/call_graph/graph_viz_call_graph.py b/codegen-on-oss/codegen_on_oss/analyzers/visualization/call_graph/graph_viz_call_graph.py
new file mode 100644
index 000000000..9fd770841
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/analyzers/visualization/call_graph/graph_viz_call_graph.py
@@ -0,0 +1,358 @@
+from abc import ABC
+
+import networkx as nx
+
+from codegen.sdk.core.class_definition import Class
+from codegen.sdk.core.codebase import CodebaseType
+from codegen.sdk.core.detached_symbols.function_call import FunctionCall
+from codegen.sdk.core.external_module import ExternalModule
+from codegen.sdk.core.function import Function
+from codegen.sdk.core.interfaces.callable import Callable
+from codegen.shared.enums.programming_language import ProgrammingLanguage
+from tests.shared.skills.decorators import skill, skill_impl
+from tests.shared.skills.skill import Skill
+from tests.shared.skills.skill_test import SkillTestCase, SkillTestCasePyFile
+
+CallGraphFromNodeTest = SkillTestCase(
+ [
+ SkillTestCasePyFile(
+ input="""
+def function_to_trace():
+ Y()
+ Z()
+
+def Y():
+ A()
+
+def Z():
+ B()
+
+def A():
+ pass
+
+def B():
+ C()
+
+def C():
+ pass
+""",
+ filepath="example.py",
+ )
+ ],
+ graph=True,
+)
+
+
+@skill(eval_skill=False, prompt="Show me a visualization of the call graph from X", uid="81e8fbb7-a00a-4e74-b9c2-24f79d24d389")
+class CallGraphFromNode(Skill, ABC):
+ """This skill creates a directed call graph for a given function. Starting from the specified function, it recursively iterates
+ through its function calls and the functions called by them, building a graph of the call paths to a maximum depth. The root of the directed graph
+is the starting function, each node represents a function call, and an edge from node A to node B indicates that function A calls function B. In its current form,
+ it ignores recursive calls and external modules but can be modified trivially to include them. Furthermore, this skill can easily be adapted to support
+ creating a call graph for a class method. In order to do this one simply needs to replace
+
+ `function_to_trace = codebase.get_function("function_to_trace")`
+
+ with
+
+ `function_to_trace = codebase.get_class("class_of_method_to_trace").get_method("method_to_trace")`
+ """
+
+ @staticmethod
+ @skill_impl(test_cases=[CallGraphFromNodeTest], language=ProgrammingLanguage.PYTHON)
+ @skill_impl(test_cases=[], skip_test=True, language=ProgrammingLanguage.TYPESCRIPT)
+ def skill_func(codebase: CodebaseType):
+ # Create a directed graph
+ G = nx.DiGraph()
+
+ # ===== [ Whether to Graph External Modules] =====
+ GRAPH_EXERNAL_MODULE_CALLS = False
+
+ # ===== [ Maximum Recursive Depth ] =====
+ MAX_DEPTH = 5
+
+ def create_downstream_call_trace(parent: FunctionCall | Function | None = None, depth: int = 0):
+ """Creates call graph for parent
+
+ This function recurses through the call graph of a function and creates a visualization
+
+ Args:
+ parent (FunctionCall | Function): The function for which a call graph will be created.
+ depth (int): The current depth of the recursive stack.
+
+ """
+ # stop recursing once the maximum depth has been reached
+ if MAX_DEPTH <= depth:
+ return
+ if isinstance(parent, FunctionCall):
+ src_call, src_func = parent, parent.function_definition
+ else:
+ src_call, src_func = parent, parent
+ # Iterate over all call paths of the symbol
+ for call in src_func.function_calls:
+ # the symbol being called
+ func = call.function_definition
+
+ # ignore direct recursive calls
+ if func.name == src_func.name:
+ continue
+
+ # if the function being called is not from an external module
+ if not isinstance(func, ExternalModule):
+ # add `call` to the graph and an edge from `src_call` to `call`
+ G.add_node(call)
+ G.add_edge(src_call, call)
+
+ # recursive call to function call
+ create_downstream_call_trace(call, depth + 1)
+ elif GRAPH_EXERNAL_MODULE_CALLS:
+ # add `call` to the graph and an edge from `src_call` to `call`
+ G.add_node(call)
+ G.add_edge(src_call, call)
+
+ # ===== [ Function To Be Traced] =====
+ function_to_trace = codebase.get_function("function_to_trace")
+
+ # Set starting node
+ G.add_node(function_to_trace, color="yellow")
+
+ # Add all the children (and sub-children) to the graph
+ create_downstream_call_trace(function_to_trace)
+
+ # Visualize the graph
+ codebase.visualize(G)
+
+
+CallGraphFilterTest = SkillTestCase(
+ [
+ SkillTestCasePyFile(
+ input="""
+class MyClass:
+ def get(self):
+ self.helper_method()
+ return "GET request"
+
+ def post(self):
+ self.helper_method()
+ return "POST request"
+
+ def patch(self):
+ return "PATCH request"
+
+ def delete(self):
+ return "DELETE request"
+
+ def helper_method(self):
+ pass
+
+ def other_method(self):
+ self.helper_method()
+ return "This method should not be included"
+
+def external_function():
+ instance = MyClass()
+ instance.get()
+ instance.post()
+ instance.other_method()
+""",
+ filepath="path/to/file.py",
+ ),
+ SkillTestCasePyFile(
+ input="""
+from path.to.file import MyClass
+
+def function_to_trace():
+ instance = MyClass()
+ assert instance.get() == "GET request"
+ assert instance.post() == "POST request"
+ assert instance.patch() == "PATCH request"
+ assert instance.delete() == "DELETE request"
+""",
+ filepath="path/to/file1.py",
+ ),
+ ],
+ graph=True,
+)
+
+
+@skill(
+ eval_skill=False,
+ prompt="Show me a visualization of the call graph from MyClass and filter out test files and include only the methods that have the name post, get, patch, delete",
+ uid="fc1f3ea0-46e7-460a-88ad-5312d4ca1a12",
+)
+class CallGraphFilter(Skill, ABC):
+ """This skill shows a visualization of the call graph from a given function or symbol.
+ It iterates through the usages of the starting function and its subsequent calls,
+ creating a directed graph of function calls. The skill filters out test files and class declarations
+ and includes only methods with specific names (post, get, patch, delete).
+ The call graph uses red for the starting node, yellow for class methods,
+ and can be customized based on user requests. The graph is limited to a specified depth
+ to manage complexity. In its current form, it ignores recursive calls and external modules
+ but can be modified trivially to include them
+ """
+
+ @staticmethod
+ @skill_impl(test_cases=[CallGraphFilterTest], language=ProgrammingLanguage.PYTHON)
+ @skill_impl(test_cases=[], skip_test=True, language=ProgrammingLanguage.TYPESCRIPT)
+ def skill_func(codebase: CodebaseType):
+ # Create a directed graph
+ G = nx.DiGraph()
+
+ # Get the starting function whose call graph will be traced
+ func_to_trace = codebase.get_function("function_to_trace")
+
+ # Add the main symbol as a node
+ G.add_node(func_to_trace, color="red")
+
+ # ===== [ Maximum Recursive Depth ] =====
+ MAX_DEPTH = 5
+
+ SKIP_CLASS_DECLARATIONS = True
+
+ cls = codebase.get_class("MyClass")
+
+ # Define a recursive function to traverse function calls
+ def create_filtered_downstream_call_trace(parent: FunctionCall | Function, current_depth, max_depth):
+ if current_depth > max_depth:
+ return
+
+ # if parent is of type Function
+ if isinstance(parent, Function):
+ # set both src_call, src_func to parent
+ src_call, src_func = parent, parent
+ else:
+ # parent is a FunctionCall: resolve it to its function definition
+ src_call, src_func = parent, parent.function_definition
+
+ # Iterate over all call paths of the symbol
+ for call in src_func.function_calls:
+ # the symbol being called
+ func = call.function_definition
+
+ if SKIP_CLASS_DECLARATIONS and isinstance(func, Class):
+ continue
+
+ # if the function being called is not from an external module and is not defined in a test file
+ if not isinstance(func, ExternalModule) and not func.file.filepath.startswith("test"):
+ # add `call` to the graph and an edge from `src_call` to `call`
+ metadata = {}
+ if isinstance(func, Function) and func.is_method and func.name in ["post", "get", "patch", "delete"]:
+ name = f"{func.parent_class.name}.{func.name}"
+ metadata = {"color": "yellow", "name": name}
+ G.add_node(call, **metadata)
+ G.add_edge(src_call, call, symbol=cls) # Add edge from current to successor
+
+ # Recursively add successors of the current symbol
+ create_filtered_downstream_call_trace(call, current_depth + 1, max_depth)
+
+ # Start the recursive traversal
+ create_filtered_downstream_call_trace(func_to_trace, 1, MAX_DEPTH)
+
+ # Visualize the graph
+ codebase.visualize(G)
+
+
+CallPathsBetweenNodesTest = SkillTestCase(
+ [
+ SkillTestCasePyFile(
+ input="""
+def start_func():
+ intermediate_func()
+def intermediate_func():
+ end_func()
+
+def end_func():
+ pass
+""",
+ filepath="example.py",
+ )
+ ],
+ graph=True,
+)
+
+
+@skill(eval_skill=False, prompt="Show me a visualization of the call paths between start_class and end_class", uid="aa3f70c3-ac1c-4737-a8b8-7ba89e3c5671")
+class CallPathsBetweenNodes(Skill, ABC):
+ """This skill generates and visualizes a call graph between two specified functions.
+ It starts from a given function and iteratively traverses through its function calls,
+ building a directed graph of the call paths. The skill then identifies all simple paths between the
+ start and end functions, creating a subgraph that includes only the nodes in these paths.
+
+ By default, the call graph uses blue for the starting node and red for the ending node, but these
+ colors can be customized based on user preferences. The visualization provides a clear representation
+ of how functions are interconnected, helping developers understand the flow of execution and
+ dependencies between different parts of the codebase.
+
+ In its current form, it ignores recursive calls and external modules but can be modified trivially to include them
+ """
+
+ @staticmethod
+ @skill_impl(test_cases=[CallPathsBetweenNodesTest], language=ProgrammingLanguage.PYTHON)
+ @skill_impl(test_cases=[], skip_test=True, language=ProgrammingLanguage.TYPESCRIPT)
+ def skill_func(codebase: CodebaseType):
+ # Create a directed graph
+ G = nx.DiGraph()
+
+ # ===== [ Maximum Recursive Depth ] =====
+ MAX_DEPTH = 5
+
+ # Define a recursive function to traverse usages
+ def create_downstream_call_trace(parent: FunctionCall | Function, end: Callable, current_depth, max_depth):
+ if current_depth > max_depth:
+ return
+
+ # if parent is of type Function
+ if isinstance(parent, Function):
+ # set both src_call, src_func to parent
+ src_call, src_func = parent, parent
+ else:
+ # parent is a FunctionCall: resolve it to its function definition
+ src_call, src_func = parent, parent.function_definition
+
+ # Iterate over all call paths of the symbol
+ for call in src_func.function_calls:
+ # the symbol being called
+ func = call.function_definition
+
+ # ignore direct recursive calls
+ if func.name == src_func.name:
+ continue
+
+ # if the function being called is not from an external module
+ if not isinstance(func, ExternalModule):
+ # add `call` to the graph and an edge from `src_call` to `call`
+ G.add_node(call)
+ G.add_edge(src_call, call)
+
+ if func == end:
+ G.add_edge(call, end)
+ return
+ # recursive call to function call
+ create_downstream_call_trace(call, end, current_depth + 1, max_depth)
+
+ # Get the start and end function
+ start = codebase.get_function("start_func")
+ end = codebase.get_function("end_func")
+
+ # Set starting node as blue
+ G.add_node(start, color="blue")
+ # Set ending node as red
+ G.add_node(end, color="red")
+
+ # Start the recursive traversal
+ create_downstream_call_trace(start, end, 1, MAX_DEPTH)
+
+ # Find all the simple paths between start and end
+ all_paths = nx.all_simple_paths(G, source=start, target=end)
+
+ # Collect all nodes that are part of these paths
+ nodes_in_paths = set()
+ for path in all_paths:
+ nodes_in_paths.update(path)
+
+ # Create a new subgraph with only the nodes in the paths
+ G = G.subgraph(nodes_in_paths)
+
+ # Visualize the graph
+ codebase.visualize(G)
+
diff --git a/codegen-on-oss/codegen_on_oss/analyzers/visualization/call_graph/method_relationships.py b/codegen-on-oss/codegen_on_oss/analyzers/visualization/call_graph/method_relationships.py
new file mode 100644
index 000000000..b45e1e3fd
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/analyzers/visualization/call_graph/method_relationships.py
@@ -0,0 +1,107 @@
+import codegen
+import networkx as nx
+from codegen import Codebase
+from codegen.sdk.core.class_definition import Class
+from codegen.sdk.core.detached_symbols.function_call import FunctionCall
+from codegen.sdk.core.external_module import ExternalModule
+from codegen.sdk.core.function import Function
+
+G = nx.DiGraph()
+
+# Configuration Settings
+IGNORE_EXTERNAL_MODULE_CALLS = False
+IGNORE_CLASS_CALLS = True
+MAX_DEPTH = 100
+
+# Track visited nodes to prevent duplicate processing
+visited = set()
+
+COLOR_PALETTE = {
+ "StartMethod": "#9cdcfe", # Light blue for root/entry point methods
+ "PyFunction": "#a277ff", # Purple for regular Python functions
+ "PyClass": "#ffca85", # Warm peach for class definitions
+ "ExternalModule": "#f694ff", # Pink for external module calls
+ "StartClass": "#FFE082", # Yellow for the starting class
+}
+
+
+def graph_class_methods(target_class: Class):
+ """Creates a graph visualization of all methods in a class and their call relationships"""
+ G.add_node(target_class, color=COLOR_PALETTE["StartClass"])
+
+ for method in target_class.methods:
+ method_name = f"{target_class.name}.{method.name}"
+ G.add_node(method, name=method_name, color=COLOR_PALETTE["StartMethod"])
+ visited.add(method)
+ G.add_edge(target_class, method)
+
+ for method in target_class.methods:
+ create_downstream_call_trace(method)
+
+
+def generate_edge_meta(call: FunctionCall) -> dict:
+ """Generate metadata for graph edges representing function calls"""
+ return {"name": call.name, "file_path": call.filepath, "start_point": call.start_point, "end_point": call.end_point, "symbol_name": "FunctionCall"}
+
+
+def create_downstream_call_trace(src_func: Function, depth: int = 0):
+ """Creates call graph for parent function by recursively traversing all function calls"""
+ if MAX_DEPTH <= depth or isinstance(src_func, ExternalModule):
+ return
+
+ for call in src_func.function_calls:
+ if call.name == src_func.name:
+ continue
+
+ func = call.function_definition
+ if not func:
+ continue
+
+ if isinstance(func, ExternalModule) and IGNORE_EXTERNAL_MODULE_CALLS:
+ continue
+ if isinstance(func, Class) and IGNORE_CLASS_CALLS:
+ continue
+
+ if isinstance(func, (Class, ExternalModule)):
+ func_name = func.name
+ elif isinstance(func, Function):
+ func_name = f"{func.parent_class.name}.{func.name}" if func.is_method else func.name
+
+ if func not in visited:
+ G.add_node(func, name=func_name, color=COLOR_PALETTE.get(func.__class__.__name__, None))
+ visited.add(func)
+
+ G.add_edge(src_func, func, **generate_edge_meta(call))
+
+ if isinstance(func, Function):
+ create_downstream_call_trace(func, depth + 1)
+
+
+@codegen.function("visualize-class-method-relationships")
+def run(codebase: Codebase):
+ """Generate a visualization of method call relationships within a class.
+
+ This codemod:
+ 1. Creates a directed graph with the target class as the root node
+ 2. Adds all class methods and their downstream function calls
+ 3. Generates a visual representation of the call hierarchy
+ """
+ global G, visited
+ G = nx.DiGraph()
+ visited = set()
+
+ target_class = codebase.get_class("_Client")
+ graph_class_methods(target_class)
+
+ print(G)
+ print("Use codegen.sh to visualize the graph!")
+
+
+if __name__ == "__main__":
+ print("Initializing codebase...")
+ codebase = Codebase.from_repo("codegen-oss/modal-client", commit="00bf226a1526f9d775d2d70fc7711406aaf42958", language="python")
+ print(f"Codebase with {len(codebase.files)} files and {len(codebase.functions)} functions.")
+ print("Creating graph...")
+
+ run(codebase)
+
diff --git a/codegen-on-oss/codegen_on_oss/analyzers/visualization/call_graph/viz_cal_graph.py b/codegen-on-oss/codegen_on_oss/analyzers/visualization/call_graph/viz_cal_graph.py
new file mode 100644
index 000000000..095e5f92b
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/analyzers/visualization/call_graph/viz_cal_graph.py
@@ -0,0 +1,121 @@
+import codegen
+import networkx as nx
+from codegen import Codebase
+from codegen.sdk.core.class_definition import Class
+from codegen.sdk.core.detached_symbols.function_call import FunctionCall
+from codegen.sdk.core.external_module import ExternalModule
+from codegen.sdk.core.function import Function
+
+G = nx.DiGraph()
+
+IGNORE_EXTERNAL_MODULE_CALLS = True
+IGNORE_CLASS_CALLS = False
+MAX_DEPTH = 10
+
+# Color scheme for different types of nodes in the visualization
+# Each node type has a distinct color for better visual differentiation
+COLOR_PALETTE = {
+ "StartFunction": "#9cdcfe", # Base purple - draws attention to the root node
+ "PyFunction": "#a277ff", # Mint green - complementary to purple
+ "PyClass": "#ffca85", # Warm peach - provides contrast
+ "ExternalModule": "#f694ff", # Light pink - analogous to base purple
+}
+
+
+def generate_edge_meta(call: FunctionCall) -> dict:
+ """Generate metadata for graph edges representing function calls
+
+ Args:
+ call (FunctionCall): Object containing information about the function call
+
+ Returns:
+ dict: Metadata including name, file path, and location information
+ """
+ return {"name": call.name, "file_path": call.filepath, "start_point": call.start_point, "end_point": call.end_point, "symbol_name": "FunctionCall"}
+
+
+def create_downstream_call_trace(src_func: Function, depth: int = 0):
+ """Creates call graph for parent function by recursively traversing all function calls
+
+ This function builds a directed graph showing all downstream function calls,
+ up to MAX_DEPTH levels deep. Each node represents a function and edges
+ represent calls between functions.
+
+ Args:
+ src_func (Function): The function for which a call graph will be created
+ depth (int): Current depth in the recursive traversal
+ """
+ # Stop recursion if max depth reached
+ if MAX_DEPTH <= depth:
+ return
+ # Stop if the source is an external module
+ if isinstance(src_func, ExternalModule):
+ return
+
+ # Examine each function call made by the source function
+ for call in src_func.function_calls:
+ # Skip recursive calls
+ if call.name == src_func.name:
+ continue
+
+ # Get the function definition being called
+ func = call.function_definition
+
+ # Skip if function definition not found
+ if not func:
+ continue
+ # Apply filtering based on configuration flags
+ if isinstance(func, ExternalModule) and IGNORE_EXTERNAL_MODULE_CALLS:
+ continue
+ if isinstance(func, Class) and IGNORE_CLASS_CALLS:
+ continue
+
+ # Generate the display name for the function
+ # For methods, include the class name
+ if isinstance(func, (Class, ExternalModule)):
+ func_name = func.name
+ elif isinstance(func, Function):
+ func_name = f"{func.parent_class.name}.{func.name}" if func.is_method else func.name
+
+ # Add node and edge to the graph with appropriate metadata
+ G.add_node(func, name=func_name, color=COLOR_PALETTE.get(func.__class__.__name__))
+ G.add_edge(src_func, func, **generate_edge_meta(call))
+
+ # Recursively process called function if it's a regular function
+ if isinstance(func, Function):
+ create_downstream_call_trace(func, depth + 1)
+
+
+@codegen.function("visualize-function-call-relationships")
+def run(codebase: Codebase):
+ """Generate a visualization of function call relationships in a codebase.
+
+ This codemod:
+ 1. Creates a directed graph of function calls starting from a target method
+ 2. Tracks relationships between functions, classes, and external modules
+ 3. Generates a visual representation of the call hierarchy
+ """
+ global G
+ G = nx.DiGraph()
+
+ target_class = codebase.get_class("SharingConfigurationViewSet")
+ target_method = target_class.get_method("patch")
+
+ # Generate the call graph starting from the target method
+ create_downstream_call_trace(target_method)
+
+ # Add the root node (target method) to the graph
+ G.add_node(target_method, name=f"{target_class.name}.{target_method.name}", color=COLOR_PALETTE.get("StartFunction"))
+
+ print(G)
+ print("Use codegen.sh to visualize the graph!")
+
+
+if __name__ == "__main__":
+ print("Initializing codebase...")
+ codebase = Codebase.from_repo("codegen-oss/posthog", commit="b174f2221ea4ae50e715eb6a7e70e9a2b0760800", language="python")
+ print(f"Codebase with {len(codebase.files)} files and {len(codebase.functions)} functions.")
+ print("Creating graph...")
+
+ run(codebase)
+
diff --git a/codegen-on-oss/codegen_on_oss/analyzers/visualization/code_visualizer.py b/codegen-on-oss/codegen_on_oss/analyzers/visualization/code_visualizer.py
new file mode 100644
index 000000000..98c462643
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/analyzers/visualization/code_visualizer.py
@@ -0,0 +1,568 @@
+#!/usr/bin/env python3
+"""
+Code Structure Visualizer
+
+This module provides visualization capabilities for code structures such as
+call graphs, dependency graphs, class methods, and blast radius.
+"""
+
+import logging
+
+from .visualizer import BaseVisualizer, OutputFormat, VisualizationType
+
+try:
+ import matplotlib.pyplot as plt
+ import networkx as nx
+except ImportError:
+ logging.warning(
+ "Visualization dependencies not found. Please install them with: pip install networkx matplotlib"
+ )
+
+logger = logging.getLogger(__name__)
+
+
+class CodeVisualizer(BaseVisualizer):
+    """
+    Visualizer for code structures such as call graphs and dependencies.
+
+    This class provides methods to visualize relationships between code entities
+    including functions, classes, and modules.
+    """
+
+    def __init__(self, codebase=None, context=None, **kwargs):
+        """
+        Initialize the CodeVisualizer.
+
+        Args:
+            codebase: Codebase instance to visualize
+            context: Context providing graph representation
+            **kwargs: Additional configuration options
+        """
+        # All remaining kwargs (including "analyzer") are forwarded to the base.
+        super().__init__(**kwargs)
+        self.codebase = codebase
+        self.context = context
+
+        # Initialize codebase if needed: when neither was supplied explicitly,
+        # fall back to the analyzer's codebase/context if one was passed.
+        if not self.codebase and not self.context and "analyzer" in kwargs:
+            self.codebase = kwargs["analyzer"].base_codebase
+            self.context = kwargs["analyzer"].base_context
+
+ def visualize_call_graph(self, function_name: str, max_depth: int | None = None):
+ """
+ Generate a call graph visualization for a function.
+
+ Args:
+ function_name: Name of the function to visualize
+ max_depth: Maximum depth of the call graph (overrides config)
+
+ Returns:
+ Visualization data or path to saved file
+ """
+ # Set max depth
+ current_max_depth = (
+ max_depth if max_depth is not None else self.config.max_depth
+ )
+
+ # Initialize graph
+ self._initialize_graph()
+
+ # Find the function in the codebase
+ function = None
+ for func in self.codebase.functions:
+ if func.name == function_name:
+ function = func
+ break
+
+ if not function:
+ logger.error(f"Function {function_name} not found in codebase")
+ return None
+
+ # Add root node
+ self._add_node(
+ function,
+ name=function_name,
+ color=self.config.color_palette.get("Root"),
+ is_root=True,
+ )
+
+ # Recursively add call relationships
+ visited = {function}
+
+ def add_calls(func, depth=0):
+ if depth >= current_max_depth:
+ return
+
+ # Skip if no function calls attribute
+ if not hasattr(func, "function_calls"):
+ return
+
+ for call in func.function_calls:
+ # Skip recursive calls
+ if call.name == func.name:
+ continue
+
+ # Get the called function
+ called_func = call.function_definition
+ if not called_func:
+ continue
+
+ # Skip external modules if configured
+ if (
+ self.config.ignore_external
+ and hasattr(called_func, "is_external")
+ and called_func.is_external
+ ):
+ continue
+
+ # Generate name for display
+ if (
+ hasattr(called_func, "is_method")
+ and called_func.is_method
+ and hasattr(called_func, "parent_class")
+ ):
+ called_name = f"{called_func.parent_class.name}.{called_func.name}"
+ else:
+ called_name = called_func.name
+
+ # Add node for called function
+ self._add_node(
+ called_func,
+ name=called_name,
+ color=self.config.color_palette.get("Function"),
+ file_path=called_func.file.path
+ if hasattr(called_func, "file")
+ and hasattr(called_func.file, "path")
+ else None,
+ )
+
+ # Add edge for call relationship
+ self._add_edge(
+ function,
+ called_func,
+ type="call",
+ file_path=call.filepath if hasattr(call, "filepath") else None,
+ line=call.line if hasattr(call, "line") else None,
+ )
+
+ # Recursively process called function
+ if called_func not in visited:
+ visited.add(called_func)
+ add_calls(called_func, depth + 1)
+
+ # Start from the root function
+ add_calls(function)
+
+ # Generate visualization data
+ if self.config.output_format == OutputFormat.JSON:
+ data = self._convert_graph_to_json()
+ return self._save_visualization(
+ VisualizationType.CALL_GRAPH, function_name, data
+ )
+ else:
+ fig = self._plot_graph()
+ return self._save_visualization(
+ VisualizationType.CALL_GRAPH, function_name, fig
+ )
+
+    def visualize_dependency_graph(
+        self, symbol_name: str, max_depth: int | None = None
+    ):
+        """
+        Generate a dependency graph visualization for a symbol.
+
+        Args:
+            symbol_name: Name of the symbol to visualize
+            max_depth: Maximum depth of the dependency graph (overrides config)
+
+        Returns:
+            Visualization data or path to saved file
+        """
+        # Set max depth
+        current_max_depth = (
+            max_depth if max_depth is not None else self.config.max_depth
+        )
+
+        # Initialize graph
+        self._initialize_graph()
+
+        # Find the symbol in the codebase (first match by name wins)
+        symbol = None
+        for sym in self.codebase.symbols:
+            if hasattr(sym, "name") and sym.name == symbol_name:
+                symbol = sym
+                break
+
+        if not symbol:
+            logger.error(f"Symbol {symbol_name} not found in codebase")
+            return None
+
+        # Add root node
+        self._add_node(
+            symbol,
+            name=symbol_name,
+            color=self.config.color_palette.get("Root"),
+            is_root=True,
+        )
+
+        # Recursively add dependencies
+        visited = {symbol}
+
+        def add_dependencies(sym, depth=0):
+            if depth >= current_max_depth:
+                return
+
+            # Skip if no dependencies attribute
+            if not hasattr(sym, "dependencies"):
+                return
+
+            for dep in sym.dependencies:
+                dep_symbol = None
+
+                # NOTE(review): this exact-class-name check excludes Symbol
+                # subclasses; the sibling codebase_visualizer uses
+                # isinstance(dep, Symbol) instead — confirm which is intended.
+                if hasattr(dep, "__class__") and dep.__class__.__name__ == "Symbol":
+                    dep_symbol = dep
+                elif hasattr(dep, "resolved_symbol"):
+                    dep_symbol = dep.resolved_symbol
+
+                if not dep_symbol:
+                    continue
+
+                # Skip external modules if configured
+                if (
+                    self.config.ignore_external
+                    and hasattr(dep_symbol, "is_external")
+                    and dep_symbol.is_external
+                ):
+                    continue
+
+                # Add node for dependency
+                self._add_node(
+                    dep_symbol,
+                    name=dep_symbol.name
+                    if hasattr(dep_symbol, "name")
+                    else str(dep_symbol),
+                    color=self.config.color_palette.get(
+                        dep_symbol.__class__.__name__, "#BBBBBB"
+                    ),
+                    file_path=dep_symbol.file.path
+                    if hasattr(dep_symbol, "file") and hasattr(dep_symbol.file, "path")
+                    else None,
+                )
+
+                # Add edge for dependency relationship
+                self._add_edge(sym, dep_symbol, type="depends_on")
+
+                # Recursively process dependency
+                if dep_symbol not in visited:
+                    visited.add(dep_symbol)
+                    add_dependencies(dep_symbol, depth + 1)
+
+        # Start from the root symbol
+        add_dependencies(symbol)
+
+        # Generate visualization data
+        if self.config.output_format == OutputFormat.JSON:
+            data = self._convert_graph_to_json()
+            return self._save_visualization(
+                VisualizationType.DEPENDENCY_GRAPH, symbol_name, data
+            )
+        else:
+            fig = self._plot_graph()
+            return self._save_visualization(
+                VisualizationType.DEPENDENCY_GRAPH, symbol_name, fig
+            )
+
+    def visualize_blast_radius(self, symbol_name: str, max_depth: int | None = None):
+        """
+        Generate a blast radius visualization for a symbol.
+
+        Args:
+            symbol_name: Name of the symbol to visualize
+            max_depth: Maximum depth of the blast radius (overrides config)
+
+        Returns:
+            Visualization data or path to saved file
+        """
+        # Set max depth
+        current_max_depth = (
+            max_depth if max_depth is not None else self.config.max_depth
+        )
+
+        # Initialize graph
+        self._initialize_graph()
+
+        # Find the symbol in the codebase (first match by name wins)
+        symbol = None
+        for sym in self.codebase.symbols:
+            if hasattr(sym, "name") and sym.name == symbol_name:
+                symbol = sym
+                break
+
+        if not symbol:
+            logger.error(f"Symbol {symbol_name} not found in codebase")
+            return None
+
+        # Add root node
+        self._add_node(
+            symbol,
+            name=symbol_name,
+            color=self.config.color_palette.get("Root"),
+            is_root=True,
+        )
+
+        # Recursively add usages (reverse dependencies)
+        visited = {symbol}
+
+        def add_usages(sym, depth=0):
+            if depth >= current_max_depth:
+                return
+
+            # Skip if no usages attribute
+            if not hasattr(sym, "usages"):
+                return
+
+            for usage in sym.usages:
+                # Skip if no usage symbol
+                if not hasattr(usage, "usage_symbol"):
+                    continue
+
+                usage_symbol = usage.usage_symbol
+
+                # Skip external modules if configured
+                if (
+                    self.config.ignore_external
+                    and hasattr(usage_symbol, "is_external")
+                    and usage_symbol.is_external
+                ):
+                    continue
+
+                # Add node for usage
+                self._add_node(
+                    usage_symbol,
+                    name=usage_symbol.name
+                    if hasattr(usage_symbol, "name")
+                    else str(usage_symbol),
+                    color=self.config.color_palette.get(
+                        usage_symbol.__class__.__name__, "#BBBBBB"
+                    ),
+                    file_path=usage_symbol.file.path
+                    if hasattr(usage_symbol, "file")
+                    and hasattr(usage_symbol.file, "path")
+                    else None,
+                )
+
+                # Add edge for usage relationship; the edge points from the
+                # used symbol toward its user, so arrows fan out from the root.
+                self._add_edge(sym, usage_symbol, type="used_by")
+
+                # Recursively process usage
+                if usage_symbol not in visited:
+                    visited.add(usage_symbol)
+                    add_usages(usage_symbol, depth + 1)
+
+        # Start from the root symbol
+        add_usages(symbol)
+
+        # Generate visualization data
+        if self.config.output_format == OutputFormat.JSON:
+            data = self._convert_graph_to_json()
+            return self._save_visualization(
+                VisualizationType.BLAST_RADIUS, symbol_name, data
+            )
+        else:
+            fig = self._plot_graph()
+            return self._save_visualization(
+                VisualizationType.BLAST_RADIUS, symbol_name, fig
+            )
+
+    def visualize_class_methods(self, class_name: str):
+        """
+        Generate a class methods visualization.
+
+        Args:
+            class_name: Name of the class to visualize
+
+        Returns:
+            Visualization data or path to saved file
+        """
+        # Initialize graph
+        self._initialize_graph()
+
+        # Find the class in the codebase (first match by name wins)
+        class_obj = None
+        for cls in self.codebase.classes:
+            if cls.name == class_name:
+                class_obj = cls
+                break
+
+        if not class_obj:
+            logger.error(f"Class {class_name} not found in codebase")
+            return None
+
+        # Add class node
+        self._add_node(
+            class_obj,
+            name=class_name,
+            color=self.config.color_palette.get("Class"),
+            is_root=True,
+        )
+
+        # Skip if no methods attribute
+        if not hasattr(class_obj, "methods"):
+            logger.error(f"Class {class_name} has no methods attribute")
+            return None
+
+        # Add method nodes and connections
+        # NOTE(review): method_ids is populated below but never read afterward
+        # in this method — candidate for removal.
+        method_ids = {}
+        for method in class_obj.methods:
+            method_name = f"{class_name}.{method.name}"
+
+            # Add method node
+            method_id = self._add_node(
+                method,
+                name=method_name,
+                color=self.config.color_palette.get("Function"),
+                file_path=method.file.path
+                if hasattr(method, "file") and hasattr(method.file, "path")
+                else None,
+            )
+
+            method_ids[method.name] = method_id
+
+            # Add edge from class to method
+            self._add_edge(class_obj, method, type="contains")
+
+        # Add call relationships between methods
+        for method in class_obj.methods:
+            # Skip if no function calls attribute
+            if not hasattr(method, "function_calls"):
+                continue
+
+            for call in method.function_calls:
+                # Get the called function
+                called_func = call.function_definition
+                if not called_func:
+                    continue
+
+                # Only add edges between methods of this class
+                if (
+                    hasattr(called_func, "is_method")
+                    and called_func.is_method
+                    and hasattr(called_func, "parent_class")
+                    and called_func.parent_class == class_obj
+                ):
+                    self._add_edge(
+                        method,
+                        called_func,
+                        type="calls",
+                        line=call.line if hasattr(call, "line") else None,
+                    )
+
+        # Generate visualization data
+        if self.config.output_format == OutputFormat.JSON:
+            data = self._convert_graph_to_json()
+            return self._save_visualization(
+                VisualizationType.CLASS_METHODS, class_name, data
+            )
+        else:
+            fig = self._plot_graph()
+            return self._save_visualization(
+                VisualizationType.CLASS_METHODS, class_name, fig
+            )
+
+ def visualize_module_dependencies(self, module_path: str):
+ """
+ Generate a module dependencies visualization.
+
+ Args:
+ module_path: Path to the module to visualize
+
+ Returns:
+ Visualization data or path to saved file
+ """
+ # Initialize graph
+ self._initialize_graph()
+
+ # Get all files in the module
+ module_files = []
+ for file in self.codebase.files:
+ if hasattr(file, "path") and str(file.path).startswith(module_path):
+ module_files.append(file)
+
+ if not module_files:
+ logger.error(f"No files found in module {module_path}")
+ return None
+
+ # Add file nodes
+ module_node_ids = {}
+ for file in module_files:
+ file_name = str(file.path).split("/")[-1]
+ file_module = "/".join(str(file.path).split("/")[:-1])
+
+ # Add file node
+ file_id = self._add_node(
+ file,
+ name=file_name,
+ module=file_module,
+ color=self.config.color_palette.get("File"),
+ file_path=str(file.path),
+ )
+
+ module_node_ids[str(file.path)] = file_id
+
+ # Add import relationships
+ for file in module_files:
+ # Skip if no imports attribute
+ if not hasattr(file, "imports"):
+ continue
+
+ for imp in file.imports:
+ imported_file = None
+
+ # Try to get imported file
+ if hasattr(imp, "resolved_file"):
+ imported_file = imp.resolved_file
+ elif hasattr(imp, "resolved_symbol") and hasattr(
+ imp.resolved_symbol, "file"
+ ):
+ imported_file = imp.resolved_symbol.file
+
+ if not imported_file:
+ continue
+
+ # Skip external modules if configured
+ if (
+ self.config.ignore_external
+ and hasattr(imported_file, "is_external")
+ and imported_file.is_external
+ ):
+ continue
+
+ # Add node for imported file if not already added
+ imported_path = (
+ str(imported_file.path) if hasattr(imported_file, "path") else ""
+ )
+
+ if imported_path not in module_node_ids:
+ imported_name = imported_path.split("/")[-1]
+ imported_module = "/".join(imported_path.split("/")[:-1])
+
+ imported_id = self._add_node(
+ imported_file,
+ name=imported_name,
+ module=imported_module,
+ color=self.config.color_palette.get(
+ "External"
+ if imported_path.startswith(module_path)
+ else "File"
+ ),
+ file_path=imported_path,
+ )
+
+ module_node_ids[imported_path] = imported_id
+
+ # Add edge for import relationship
+ self._add_edge(
+ file,
+ imported_file,
+ type="imports",
+ import_name=imp.name if hasattr(imp, "name") else "",
+ )
diff --git a/codegen-on-oss/codegen_on_oss/analyzers/visualization/codebase_visualizer.py b/codegen-on-oss/codegen_on_oss/analyzers/visualization/codebase_visualizer.py
new file mode 100644
index 000000000..2cea2331b
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/analyzers/visualization/codebase_visualizer.py
@@ -0,0 +1,1690 @@
+#!/usr/bin/env python3
+"""
+Codebase Visualizer Module
+
+This module provides comprehensive visualization capabilities for codebases and PR analyses.
+It integrates with codebase_analyzer.py and context_codebase.py to provide visual representations
+of code structure, dependencies, and issues. It supports multiple visualization types to help
+developers understand codebase architecture and identify potential problems.
+"""
+
+import json
+import logging
+import os
+import sys
+from dataclasses import dataclass, field
+from datetime import datetime
+from enum import Enum
+from typing import Any
+
+try:
+ import matplotlib.pyplot as plt
+ import networkx as nx
+ from matplotlib.colors import LinearSegmentedColormap
+except ImportError:
+ print(
+ "Visualization dependencies not found. Please install them with: pip install networkx matplotlib"
+ )
+ sys.exit(1)
+
+try:
+ from codegen.sdk.core.class_definition import Class
+ from codegen.sdk.core.codebase import Codebase
+ from codegen.sdk.core.detached_symbols.function_call import FunctionCall
+ from codegen.sdk.core.file import SourceFile
+ from codegen.sdk.core.function import Function
+ from codegen.sdk.core.import_resolution import Import
+ from codegen.sdk.core.symbol import Symbol
+ from codegen.sdk.enums import EdgeType, SymbolType
+
+ from codegen_on_oss.codebase_analyzer import (
+ AnalysisType,
+ CodebaseAnalyzer,
+ Issue,
+ IssueSeverity,
+ )
+
+ # Import custom modules
+ from codegen_on_oss.context_codebase import (
+ GLOBAL_FILE_IGNORE_LIST,
+ CodebaseContext,
+ get_node_classes,
+ )
+ from codegen_on_oss.current_code_codebase import get_selected_codebase
+except ImportError:
+ print(
+ "Codegen SDK or custom modules not found. Please ensure all dependencies are installed."
+ )
+ sys.exit(1)
+
+# Configure logging
+logging.basicConfig(
+ level=logging.INFO,
+ format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
+ handlers=[logging.StreamHandler()],
+)
+logger = logging.getLogger(__name__)
+
+
+class VisualizationType(str, Enum):
+    """Types of visualizations supported by this module."""
+
+    # str mixin lets .value be used directly in generated filenames and JSON.
+    CALL_GRAPH = "call_graph"
+    DEPENDENCY_GRAPH = "dependency_graph"
+    BLAST_RADIUS = "blast_radius"
+    CLASS_METHODS = "class_methods"
+    MODULE_DEPENDENCIES = "module_dependencies"
+    DEAD_CODE = "dead_code"
+    CYCLOMATIC_COMPLEXITY = "cyclomatic_complexity"
+    ISSUES_HEATMAP = "issues_heatmap"
+    PR_COMPARISON = "pr_comparison"
+
+class OutputFormat(str, Enum):
+    """Output formats for visualizations."""
+
+    # .value doubles as the file extension in _generate_filename.
+    JSON = "json"
+    PNG = "png"
+    SVG = "svg"
+    HTML = "html"
+    DOT = "dot"
+
+
+@dataclass
+class VisualizationConfig:
+    """Configuration for visualization generation."""
+
+    # Traversal limits and filtering
+    max_depth: int = 5
+    ignore_external: bool = True
+    ignore_tests: bool = True
+    # Rendering geometry
+    node_size_base: int = 300
+    edge_width_base: float = 1.0
+    filename_filter: list[str] | None = None
+    symbol_filter: list[str] | None = None
+    # Output handling
+    output_format: OutputFormat = OutputFormat.JSON
+    output_directory: str | None = None
+    layout_algorithm: str = "spring"
+    # Optional highlighting of specific nodes
+    highlight_nodes: list[str] = field(default_factory=list)
+    highlight_color: str = "#ff5555"
+    color_palette: dict[str, str] = field(
+        default_factory=lambda: {
+            "Function": "#a277ff",  # Purple
+            "Class": "#ffca85",  # Orange
+            "File": "#80CBC4",  # Teal
+            "Module": "#81D4FA",  # Light Blue
+            "Variable": "#B39DDB",  # Light Purple
+            "Root": "#ef5350",  # Red
+            "Warning": "#FFCA28",  # Amber
+            "Error": "#EF5350",  # Red
+            "Dead": "#78909C",  # Gray
+            "External": "#B0BEC5",  # Light Gray
+        }
+    )
+
+
+class CodebaseVisualizer:
+    """
+    Visualizer for codebase structures and analytics.
+
+    This class provides methods to generate various visualizations of a codebase,
+    including call graphs, dependency graphs, complexity heatmaps, and more.
+    It integrates with CodebaseAnalyzer to visualize analysis results.
+    """
+
+    def __init__(
+        self,
+        analyzer: CodebaseAnalyzer | None = None,
+        codebase: Codebase | None = None,
+        context: CodebaseContext | None = None,
+        config: VisualizationConfig | None = None,
+    ):
+        """
+        Initialize the CodebaseVisualizer.
+
+        Args:
+            analyzer: Optional CodebaseAnalyzer instance with analysis results
+            codebase: Optional Codebase instance to visualize
+            context: Optional CodebaseContext providing graph representation
+            config: Visualization configuration options
+        """
+        # Explicit arguments win; otherwise fall back to the analyzer's state.
+        self.analyzer = analyzer
+        self.codebase = codebase or (analyzer.base_codebase if analyzer else None)
+        self.context = context or (analyzer.base_context if analyzer else None)
+        self.config = config or VisualizationConfig()
+
+        # Create visualization directory if specified
+        if self.config.output_directory:
+            os.makedirs(self.config.output_directory, exist_ok=True)
+
+        # Initialize graph for visualization
+        self.graph = nx.DiGraph()
+
+        # Initialize codebase if needed: with neither codebase nor context,
+        # bootstrap both from the current working directory.
+        if not self.codebase and not self.context:
+            logger.info(
+                "No codebase or context provided, initializing from current directory"
+            )
+            self.codebase = get_selected_codebase()
+            self.context = CodebaseContext(
+                codebase=self.codebase, base_path=os.getcwd()
+            )
+        elif self.codebase and not self.context:
+            logger.info("Creating context from provided codebase")
+            self.context = CodebaseContext(
+                codebase=self.codebase,
+                base_path=os.getcwd()
+                if not hasattr(self.codebase, "base_path")
+                else self.codebase.base_path,
+            )
+
+    def _initialize_graph(self):
+        """Initialize a fresh graph for visualization."""
+        # Replaces (rather than clears) the previous graph object.
+        self.graph = nx.DiGraph()
+
+ def _add_node(self, node: Any, **attrs):
+ """
+ Add a node to the visualization graph with attributes.
+
+ Args:
+ node: Node object to add
+ **attrs: Node attributes
+ """
+ # Skip if node already exists
+ if self.graph.has_node(node):
+ return
+
+ # Generate node ID (memory address for unique identification)
+ node_id = id(node)
+
+ # Get node name
+ if "name" in attrs:
+ node_name = attrs["name"]
+ elif hasattr(node, "name"):
+ node_name = node.name
+ elif hasattr(node, "path"):
+ node_name = str(node.path).split("/")[-1]
+ else:
+ node_name = str(node)
+
+ # Determine node type and color
+ node_type = node.__class__.__name__
+ color = attrs.get("color", self.config.color_palette.get(node_type, "#BBBBBB"))
+
+ # Add node with attributes
+ self.graph.add_node(
+ node_id,
+ original_node=node,
+ name=node_name,
+ type=node_type,
+ color=color,
+ **attrs,
+ )
+
+ return node_id
+
+    def _add_edge(self, source: Any, target: Any, **attrs):
+        """
+        Add an edge to the visualization graph with attributes.
+
+        Args:
+            source: Source node
+            target: Target node
+            **attrs: Edge attributes
+        """
+        # Get node IDs
+        source_id = id(source)
+        target_id = id(target)
+
+        # NOTE(review): networkx silently creates missing endpoints as bare
+        # nodes, so callers should _add_node both ends first or the endpoints
+        # will lack name/color attributes.
+        # Add edge with attributes
+        self.graph.add_edge(source_id, target_id, **attrs)
+
+    def _generate_filename(
+        self, visualization_type: VisualizationType, entity_name: str
+    ):
+        """
+        Generate a filename for the visualization.
+
+        Args:
+            visualization_type: Type of visualization
+            entity_name: Name of the entity being visualized
+
+        Returns:
+            Generated filename
+        """
+        # Timestamp keeps repeated runs from overwriting earlier output.
+        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+        sanitized_name = (
+            entity_name.replace("/", "_").replace("\\", "_").replace(".", "_")
+        )
+        return f"{visualization_type.value}_{sanitized_name}_{timestamp}.{self.config.output_format.value}"
+
+    def _save_visualization(
+        self, visualization_type: VisualizationType, entity_name: str, data: Any
+    ):
+        """
+        Save a visualization to file or return it.
+
+        Args:
+            visualization_type: Type of visualization
+            entity_name: Name of the entity being visualized
+            data: Visualization data to save
+
+        Returns:
+            Path to saved file or visualization data
+        """
+        filename = self._generate_filename(visualization_type, entity_name)
+
+        if self.config.output_directory:
+            filepath = os.path.join(self.config.output_directory, filename)
+        else:
+            filepath = filename
+
+        if self.config.output_format == OutputFormat.JSON:
+            with open(filepath, "w") as f:
+                json.dump(data, f, indent=2)
+        elif self.config.output_format in [OutputFormat.PNG, OutputFormat.SVG]:
+            # Save matplotlib figure (savefig writes the current figure)
+            plt.savefig(
+                filepath, format=self.config.output_format.value, bbox_inches="tight"
+            )
+            plt.close()
+        elif self.config.output_format == OutputFormat.DOT:
+            # Save as DOT file for Graphviz
+            try:
+                from networkx.drawing.nx_agraph import write_dot
+
+                write_dot(self.graph, filepath)
+            except ImportError:
+                logger.exception(
+                    "networkx.drawing.nx_agraph not available. Install pygraphviz for DOT format."
+                )
+                return None
+
+        # NOTE(review): OutputFormat.HTML has no handler above — for HTML the
+        # method logs success and returns a path without writing any file.
+        logger.info(f"Visualization saved to {filepath}")
+        return filepath
+
+    def _convert_graph_to_json(self):
+        """
+        Convert the networkx graph to a JSON-serializable dictionary.
+
+        Returns:
+            Dictionary representation of the graph
+        """
+        nodes = []
+        for node, attrs in self.graph.nodes(data=True):
+            # Create a serializable node
+            node_data = {
+                "id": node,
+                "name": attrs.get("name", ""),
+                "type": attrs.get("type", ""),
+                "color": attrs.get("color", "#BBBBBB"),
+            }
+
+            # Add file path if available
+            if "file_path" in attrs:
+                node_data["file_path"] = attrs["file_path"]
+
+            # Add other attributes, keeping only JSON-serializable values
+            # (original_node is intentionally dropped — it is a live object).
+            for key, value in attrs.items():
+                if key not in ["name", "type", "color", "file_path", "original_node"]:
+                    if (
+                        isinstance(value, str | int | float | bool | list | dict)
+                        or value is None
+                    ):
+                        node_data[key] = value
+
+            nodes.append(node_data)
+
+        edges = []
+        for source, target, attrs in self.graph.edges(data=True):
+            # Create a serializable edge
+            edge_data = {
+                "source": source,
+                "target": target,
+            }
+
+            # Add other attributes
+            for key, value in attrs.items():
+                if (
+                    isinstance(value, str | int | float | bool | list | dict)
+                    or value is None
+                ):
+                    edge_data[key] = value
+
+            edges.append(edge_data)
+
+        # Metadata relies on current_visualization_type / current_entity_name,
+        # which are set by the visualize_* entry points before this is called.
+        return {
+            "nodes": nodes,
+            "edges": edges,
+            "metadata": {
+                "visualization_type": self.current_visualization_type,
+                "entity_name": self.current_entity_name,
+                "timestamp": datetime.now().isoformat(),
+                "node_count": len(nodes),
+                "edge_count": len(edges),
+            },
+        }
+
+    def _plot_graph(self):
+        """
+        Plot the graph using matplotlib.
+
+        Returns:
+            Matplotlib figure
+        """
+        plt.figure(figsize=(12, 10))
+
+        # Extract node positions using specified layout algorithm.
+        # Fixed seed makes spring layouts reproducible across runs.
+        if self.config.layout_algorithm == "spring":
+            pos = nx.spring_layout(self.graph, seed=42)
+        elif self.config.layout_algorithm == "kamada_kawai":
+            pos = nx.kamada_kawai_layout(self.graph)
+        elif self.config.layout_algorithm == "spectral":
+            pos = nx.spectral_layout(self.graph)
+        else:
+            # Default to spring layout
+            pos = nx.spring_layout(self.graph, seed=42)
+
+        # Extract node colors
+        node_colors = [
+            attrs.get("color", "#BBBBBB") for _, attrs in self.graph.nodes(data=True)
+        ]
+
+        # Extract node sizes (can be based on some metric)
+        node_sizes = [self.config.node_size_base for _ in self.graph.nodes()]
+
+        # Draw nodes
+        nx.draw_networkx_nodes(
+            self.graph, pos, node_color=node_colors, node_size=node_sizes, alpha=0.8
+        )
+
+        # Draw edges
+        nx.draw_networkx_edges(
+            self.graph,
+            pos,
+            width=self.config.edge_width_base,
+            alpha=0.6,
+            arrows=True,
+            arrowsize=10,
+        )
+
+        # Draw labels
+        nx.draw_networkx_labels(
+            self.graph,
+            pos,
+            labels={
+                node: attrs.get("name", "")
+                for node, attrs in self.graph.nodes(data=True)
+            },
+            font_size=8,
+            font_weight="bold",
+        )
+
+        # Title reads state set by the calling visualize_* method.
+        plt.title(f"{self.current_visualization_type} - {self.current_entity_name}")
+        plt.axis("off")
+
+        return plt.gcf()
+
+ def visualize_call_graph(self, function_name: str, max_depth: int | None = None):
+ """
+ Generate a call graph visualization for a function.
+
+ Args:
+ function_name: Name of the function to visualize
+ max_depth: Maximum depth of the call graph (overrides config)
+
+ Returns:
+ Visualization data or path to saved file
+ """
+ self.current_visualization_type = VisualizationType.CALL_GRAPH
+ self.current_entity_name = function_name
+
+ # Set max depth
+ current_max_depth = (
+ max_depth if max_depth is not None else self.config.max_depth
+ )
+
+ # Initialize graph
+ self._initialize_graph()
+
+ # Find the function in the codebase
+ function = None
+ for func in self.codebase.functions:
+ if func.name == function_name:
+ function = func
+ break
+
+ if not function:
+ logger.error(f"Function {function_name} not found in codebase")
+ return None
+
+ # Add root node
+ self._add_node(
+ function,
+ name=function_name,
+ color=self.config.color_palette.get("Root"),
+ is_root=True,
+ )
+
+ # Recursively add call relationships
+ visited = {function}
+
+ def add_calls(func, depth=0):
+ if depth >= current_max_depth:
+ return
+
+ # Skip if no function calls attribute
+ if not hasattr(func, "function_calls"):
+ return
+
+ for call in func.function_calls:
+ # Skip recursive calls
+ if call.name == func.name:
+ continue
+
+ # Get the called function
+ called_func = call.function_definition
+ if not called_func:
+ continue
+
+ # Skip external modules if configured
+ if (
+ self.config.ignore_external
+ and hasattr(called_func, "is_external")
+ and called_func.is_external
+ ):
+ continue
+
+ # Generate name for display
+ if (
+ hasattr(called_func, "is_method")
+ and called_func.is_method
+ and hasattr(called_func, "parent_class")
+ ):
+ called_name = f"{called_func.parent_class.name}.{called_func.name}"
+ else:
+ called_name = called_func.name
+
+ # Add node for called function
+ self._add_node(
+ called_func,
+ name=called_name,
+ color=self.config.color_palette.get("Function"),
+ file_path=called_func.file.path
+ if hasattr(called_func, "file")
+ and hasattr(called_func.file, "path")
+ else None,
+ )
+
+ # Add edge for call relationship
+ self._add_edge(
+ function,
+ called_func,
+ type="call",
+ file_path=call.filepath if hasattr(call, "filepath") else None,
+ line=call.line if hasattr(call, "line") else None,
+ )
+
+ # Recursively process called function
+ if isinstance(called_func, Function) and called_func not in visited:
+ visited.add(called_func)
+ add_calls(called_func, depth + 1)
+
+ # Start from the root function
+ add_calls(function)
+
+ # Generate visualization data
+ if self.config.output_format == OutputFormat.JSON:
+ data = self._convert_graph_to_json()
+ return self._save_visualization(
+ VisualizationType.CALL_GRAPH, function_name, data
+ )
+ else:
+ fig = self._plot_graph()
+ return self._save_visualization(
+ VisualizationType.CALL_GRAPH, function_name, fig
+ )
+
+    def visualize_dependency_graph(
+        self, symbol_name: str, max_depth: int | None = None
+    ):
+        """
+        Generate a dependency graph visualization for a symbol.
+
+        Args:
+            symbol_name: Name of the symbol to visualize
+            max_depth: Maximum depth of the dependency graph (overrides config)
+
+        Returns:
+            Visualization data or path to saved file
+        """
+        self.current_visualization_type = VisualizationType.DEPENDENCY_GRAPH
+        self.current_entity_name = symbol_name
+
+        # Set max depth
+        current_max_depth = (
+            max_depth if max_depth is not None else self.config.max_depth
+        )
+
+        # Initialize graph
+        self._initialize_graph()
+
+        # Find the symbol in the codebase (first match by name wins)
+        symbol = None
+        for sym in self.codebase.symbols:
+            if hasattr(sym, "name") and sym.name == symbol_name:
+                symbol = sym
+                break
+
+        if not symbol:
+            logger.error(f"Symbol {symbol_name} not found in codebase")
+            return None
+
+        # Add root node
+        self._add_node(
+            symbol,
+            name=symbol_name,
+            color=self.config.color_palette.get("Root"),
+            is_root=True,
+        )
+
+        # Recursively add dependencies
+        visited = {symbol}
+
+        def add_dependencies(sym, depth=0):
+            if depth >= current_max_depth:
+                return
+
+            # Skip if no dependencies attribute
+            if not hasattr(sym, "dependencies"):
+                return
+
+            for dep in sym.dependencies:
+                dep_symbol = None
+
+                # Direct symbols are used as-is; imports are followed to the
+                # symbol they resolve to.
+                if isinstance(dep, Symbol):
+                    dep_symbol = dep
+                elif isinstance(dep, Import) and hasattr(dep, "resolved_symbol"):
+                    dep_symbol = dep.resolved_symbol
+
+                if not dep_symbol:
+                    continue
+
+                # Skip external modules if configured
+                if (
+                    self.config.ignore_external
+                    and hasattr(dep_symbol, "is_external")
+                    and dep_symbol.is_external
+                ):
+                    continue
+
+                # Add node for dependency
+                self._add_node(
+                    dep_symbol,
+                    name=dep_symbol.name
+                    if hasattr(dep_symbol, "name")
+                    else str(dep_symbol),
+                    color=self.config.color_palette.get(
+                        dep_symbol.__class__.__name__, "#BBBBBB"
+                    ),
+                    file_path=dep_symbol.file.path
+                    if hasattr(dep_symbol, "file") and hasattr(dep_symbol.file, "path")
+                    else None,
+                )
+
+                # Add edge for dependency relationship
+                self._add_edge(sym, dep_symbol, type="depends_on")
+
+                # Recursively process dependency
+                if dep_symbol not in visited:
+                    visited.add(dep_symbol)
+                    add_dependencies(dep_symbol, depth + 1)
+
+        # Start from the root symbol
+        add_dependencies(symbol)
+
+        # Generate visualization data
+        if self.config.output_format == OutputFormat.JSON:
+            data = self._convert_graph_to_json()
+            return self._save_visualization(
+                VisualizationType.DEPENDENCY_GRAPH, symbol_name, data
+            )
+        else:
+            fig = self._plot_graph()
+            return self._save_visualization(
+                VisualizationType.DEPENDENCY_GRAPH, symbol_name, fig
+            )
+
+    def visualize_blast_radius(self, symbol_name: str, max_depth: int | None = None):
+        """
+        Generate a blast radius visualization for a symbol.
+
+        Args:
+            symbol_name: Name of the symbol to visualize
+            max_depth: Maximum depth of the blast radius (overrides config)
+
+        Returns:
+            Visualization data or path to saved file
+        """
+        self.current_visualization_type = VisualizationType.BLAST_RADIUS
+        self.current_entity_name = symbol_name
+
+        # Set max depth
+        current_max_depth = (
+            max_depth if max_depth is not None else self.config.max_depth
+        )
+
+        # Initialize graph
+        self._initialize_graph()
+
+        # Find the symbol in the codebase (first match by name wins)
+        symbol = None
+        for sym in self.codebase.symbols:
+            if hasattr(sym, "name") and sym.name == symbol_name:
+                symbol = sym
+                break
+
+        if not symbol:
+            logger.error(f"Symbol {symbol_name} not found in codebase")
+            return None
+
+        # Add root node
+        self._add_node(
+            symbol,
+            name=symbol_name,
+            color=self.config.color_palette.get("Root"),
+            is_root=True,
+        )
+
+        # Recursively add usages (reverse dependencies)
+        visited = {symbol}
+
+        def add_usages(sym, depth=0):
+            if depth >= current_max_depth:
+                return
+
+            # Skip if no usages attribute
+            if not hasattr(sym, "usages"):
+                return
+
+            for usage in sym.usages:
+                # Skip if no usage symbol
+                if not hasattr(usage, "usage_symbol"):
+                    continue
+
+                usage_symbol = usage.usage_symbol
+
+                # Skip external modules if configured
+                if (
+                    self.config.ignore_external
+                    and hasattr(usage_symbol, "is_external")
+                    and usage_symbol.is_external
+                ):
+                    continue
+
+                # Add node for usage
+                self._add_node(
+                    usage_symbol,
+                    name=usage_symbol.name
+                    if hasattr(usage_symbol, "name")
+                    else str(usage_symbol),
+                    color=self.config.color_palette.get(
+                        usage_symbol.__class__.__name__, "#BBBBBB"
+                    ),
+                    file_path=usage_symbol.file.path
+                    if hasattr(usage_symbol, "file")
+                    and hasattr(usage_symbol.file, "path")
+                    else None,
+                )
+
+                # Add edge for usage relationship; the edge points from the
+                # used symbol toward its user, so arrows fan out from the root.
+                self._add_edge(sym, usage_symbol, type="used_by")
+
+                # Recursively process usage
+                if usage_symbol not in visited:
+                    visited.add(usage_symbol)
+                    add_usages(usage_symbol, depth + 1)
+
+        # Start from the root symbol
+        add_usages(symbol)
+
+        # Generate visualization data
+        if self.config.output_format == OutputFormat.JSON:
+            data = self._convert_graph_to_json()
+            return self._save_visualization(
+                VisualizationType.BLAST_RADIUS, symbol_name, data
+            )
+        else:
+            fig = self._plot_graph()
+            return self._save_visualization(
+                VisualizationType.BLAST_RADIUS, symbol_name, fig
+            )
+
+ def visualize_class_methods(self, class_name: str):
+ """
+ Generate a class methods visualization.
+
+ Args:
+ class_name: Name of the class to visualize
+
+ Returns:
+ Visualization data or path to saved file
+ """
+ self.current_visualization_type = VisualizationType.CLASS_METHODS
+ self.current_entity_name = class_name
+
+ # Initialize graph
+ self._initialize_graph()
+
+ # Find the class in the codebase
+ class_obj = None
+ for cls in self.codebase.classes:
+ if cls.name == class_name:
+ class_obj = cls
+ break
+
+ if not class_obj:
+ logger.error(f"Class {class_name} not found in codebase")
+ return None
+
+ # Add class node
+ self._add_node(
+ class_obj,
+ name=class_name,
+ color=self.config.color_palette.get("Class"),
+ is_root=True,
+ )
+
+ # Skip if no methods attribute
+ if not hasattr(class_obj, "methods"):
+ logger.error(f"Class {class_name} has no methods attribute")
+ return None
+
+ # Add method nodes and connections
+ method_ids = {}
+ for method in class_obj.methods:
+ method_name = f"{class_name}.{method.name}"
+
+ # Add method node
+ method_id = self._add_node(
+ method,
+ name=method_name,
+ color=self.config.color_palette.get("Function"),
+ file_path=method.file.path
+ if hasattr(method, "file") and hasattr(method.file, "path")
+ else None,
+ )
+
+ method_ids[method.name] = method_id
+
+ # Add edge from class to method
+ self._add_edge(class_obj, method, type="contains")
+
+ # Add call relationships between methods
+ for method in class_obj.methods:
+ # Skip if no function calls attribute
+ if not hasattr(method, "function_calls"):
+ continue
+
+ for call in method.function_calls:
+ # Get the called function
+ called_func = call.function_definition
+ if not called_func:
+ continue
+
+ # Only add edges between methods of this class
+ if (
+ hasattr(called_func, "is_method")
+ and called_func.is_method
+ and hasattr(called_func, "parent_class")
+ and called_func.parent_class == class_obj
+ ):
+ self._add_edge(
+ method,
+ called_func,
+ type="calls",
+ line=call.line if hasattr(call, "line") else None,
+ )
+
+ # Generate visualization data
+ if self.config.output_format == OutputFormat.JSON:
+ data = self._convert_graph_to_json()
+ return self._save_visualization(
+ VisualizationType.CLASS_METHODS, class_name, data
+ )
+ else:
+ fig = self._plot_graph()
+ return self._save_visualization(
+ VisualizationType.CLASS_METHODS, class_name, fig
+ )
+
+ def visualize_module_dependencies(self, module_path: str):
+ """
+ Generate a module dependencies visualization.
+
+ Args:
+ module_path: Path to the module to visualize
+
+ Returns:
+ Visualization data or path to saved file
+ """
+ self.current_visualization_type = VisualizationType.MODULE_DEPENDENCIES
+ self.current_entity_name = module_path
+
+ # Initialize graph
+ self._initialize_graph()
+
+ # Get all files in the module
+ module_files = []
+ for file in self.codebase.files:
+ if hasattr(file, "path") and str(file.path).startswith(module_path):
+ module_files.append(file)
+
+ if not module_files:
+ logger.error(f"No files found in module {module_path}")
+ return None
+
+ # Add file nodes
+ module_node_ids = {}
+ for file in module_files:
+ file_name = str(file.path).split("/")[-1]
+ file_module = "/".join(str(file.path).split("/")[:-1])
+
+ # Add file node
+ file_id = self._add_node(
+ file,
+ name=file_name,
+ module=file_module,
+ color=self.config.color_palette.get("File"),
+ file_path=str(file.path),
+ )
+
+ module_node_ids[str(file.path)] = file_id
+
+ # Add import relationships
+ for file in module_files:
+ # Skip if no imports attribute
+ if not hasattr(file, "imports"):
+ continue
+
+ for imp in file.imports:
+ imported_file = None
+
+ # Try to get imported file
+ if hasattr(imp, "resolved_file"):
+ imported_file = imp.resolved_file
+ elif hasattr(imp, "resolved_symbol") and hasattr(
+ imp.resolved_symbol, "file"
+ ):
+ imported_file = imp.resolved_symbol.file
+
+ if not imported_file:
+ continue
+
+ # Skip external modules if configured
+ if (
+ self.config.ignore_external
+ and hasattr(imported_file, "is_external")
+ and imported_file.is_external
+ ):
+ continue
+
+ # Add node for imported file if not already added
+ imported_path = (
+ str(imported_file.path) if hasattr(imported_file, "path") else ""
+ )
+
+ if imported_path not in module_node_ids:
+ imported_name = imported_path.split("/")[-1]
+ imported_module = "/".join(imported_path.split("/")[:-1])
+
+ imported_id = self._add_node(
+ imported_file,
+ name=imported_name,
+ module=imported_module,
+ color=self.config.color_palette.get(
+ "External"
+ if imported_path.startswith(module_path)
+ else "File"
+ ),
+ file_path=imported_path,
+ )
+
+ module_node_ids[imported_path] = imported_id
+
+ # Add edge for import relationship
+ self._add_edge(
+ file,
+ imported_file,
+ type="imports",
+ import_name=imp.name if hasattr(imp, "name") else "",
+ )
+
+ # Generate visualization data
+ if self.config.output_format == OutputFormat.JSON:
+ data = self._convert_graph_to_json()
+ return self._save_visualization(
+ VisualizationType.MODULE_DEPENDENCIES, module_path, data
+ )
+ else:
+ fig = self._plot_graph()
+ return self._save_visualization(
+ VisualizationType.MODULE_DEPENDENCIES, module_path, fig
+ )
+
+    def visualize_dead_code(self, path_filter: str | None = None):
+        """
+        Generate a visualization of dead (unused) code in the codebase.
+
+        Reads the analyzer's "static_analysis" -> "dead_code" results
+        (running an analysis lazily if needed) and draws file nodes linked to
+        their unused functions and variables.
+
+        Args:
+            path_filter: Optional path to filter files
+
+        Returns:
+            Visualization data or path to saved file, or None when no dead
+            code matching the filter is reported.
+        """
+        self.current_visualization_type = VisualizationType.DEAD_CODE
+        self.current_entity_name = path_filter or "codebase"
+
+        # Initialize graph
+        self._initialize_graph()
+
+        # Initialize analyzer if needed
+        if not self.analyzer:
+            logger.info("Initializing analyzer for dead code detection")
+            self.analyzer = CodebaseAnalyzer(
+                codebase=self.codebase,
+                repo_path=self.context.base_path
+                if hasattr(self.context, "base_path")
+                else None,
+            )
+
+        # Perform analysis if not already done
+        if not hasattr(self.analyzer, "results") or not self.analyzer.results:
+            logger.info("Running code analysis")
+            self.analyzer.analyze(AnalysisType.CODEBASE)
+
+        # Extract dead code information from analysis results
+        if not hasattr(self.analyzer, "results"):
+            logger.error("Analysis results not available")
+            return None
+
+        dead_code = {}
+        if (
+            "static_analysis" in self.analyzer.results
+            and "dead_code" in self.analyzer.results["static_analysis"]
+        ):
+            dead_code = self.analyzer.results["static_analysis"]["dead_code"]
+
+        if not dead_code:
+            logger.warning("No dead code detected in analysis results")
+            return None
+
+        # Cache of file objects already added to the graph, keyed by path
+        file_nodes = {}
+
+        # Process unused functions
+        if "unused_functions" in dead_code:
+            for unused_func in dead_code["unused_functions"]:
+                file_path = unused_func.get("file", "")
+
+                # Skip if path filter is specified and doesn't match
+                if path_filter and not file_path.startswith(path_filter):
+                    continue
+
+                # Add file node if not already added
+                if file_path not in file_nodes:
+                    # Find file in codebase (linear scan per new path)
+                    file_obj = None
+                    for file in self.codebase.files:
+                        if hasattr(file, "path") and str(file.path) == file_path:
+                            file_obj = file
+                            break
+
+                    if file_obj:
+                        file_name = file_path.split("/")[-1]
+                        self._add_node(
+                            file_obj,
+                            name=file_name,
+                            color=self.config.color_palette.get("File"),
+                            file_path=file_path,
+                        )
+
+                        file_nodes[file_path] = file_obj
+
+                # Add unused function node
+                func_name = unused_func.get("name", "")
+                func_line = unused_func.get("line", None)
+
+                # Create a placeholder for the function (we don't have the actual object)
+                # NOTE(review): assumes self._add_node accepts plain dicts as
+                # nodes — verify (dicts are unhashable for raw networkx use).
+                func_obj = {
+                    "name": func_name,
+                    "file_path": file_path,
+                    "line": func_line,
+                    "type": "Function",
+                }
+
+                self._add_node(
+                    func_obj,
+                    name=func_name,
+                    color=self.config.color_palette.get("Dead"),
+                    file_path=file_path,
+                    line=func_line,
+                    is_dead=True,
+                )
+
+                # Add edge from file to function (only when the file node exists)
+                if file_path in file_nodes:
+                    self._add_edge(
+                        file_nodes[file_path], func_obj, type="contains_dead"
+                    )
+
+        # Process unused variables
+        if "unused_variables" in dead_code:
+            for unused_var in dead_code["unused_variables"]:
+                file_path = unused_var.get("file", "")
+
+                # Skip if path filter is specified and doesn't match
+                if path_filter and not file_path.startswith(path_filter):
+                    continue
+
+                # Add file node if not already added
+                if file_path not in file_nodes:
+                    # Find file in codebase (linear scan per new path)
+                    file_obj = None
+                    for file in self.codebase.files:
+                        if hasattr(file, "path") and str(file.path) == file_path:
+                            file_obj = file
+                            break
+
+                    if file_obj:
+                        file_name = file_path.split("/")[-1]
+                        self._add_node(
+                            file_obj,
+                            name=file_name,
+                            color=self.config.color_palette.get("File"),
+                            file_path=file_path,
+                        )
+
+                        file_nodes[file_path] = file_obj
+
+                # Add unused variable node
+                var_name = unused_var.get("name", "")
+                var_line = unused_var.get("line", None)
+
+                # Create a placeholder for the variable
+                var_obj = {
+                    "name": var_name,
+                    "file_path": file_path,
+                    "line": var_line,
+                    "type": "Variable",
+                }
+
+                self._add_node(
+                    var_obj,
+                    name=var_name,
+                    color=self.config.color_palette.get("Dead"),
+                    file_path=file_path,
+                    line=var_line,
+                    is_dead=True,
+                )
+
+                # Add edge from file to variable (only when the file node exists)
+                if file_path in file_nodes:
+                    self._add_edge(file_nodes[file_path], var_obj, type="contains_dead")
+
+        # Generate visualization data
+        if self.config.output_format == OutputFormat.JSON:
+            data = self._convert_graph_to_json()
+            return self._save_visualization(
+                VisualizationType.DEAD_CODE, self.current_entity_name, data
+            )
+        else:
+            fig = self._plot_graph()
+            return self._save_visualization(
+                VisualizationType.DEAD_CODE, self.current_entity_name, fig
+            )
+
+ def visualize_cyclomatic_complexity(self, path_filter: str | None = None):
+ """
+ Generate a heatmap visualization of cyclomatic complexity.
+
+ Args:
+ path_filter: Optional path to filter files
+
+ Returns:
+ Visualization data or path to saved file
+ """
+ self.current_visualization_type = VisualizationType.CYCLOMATIC_COMPLEXITY
+ self.current_entity_name = path_filter or "codebase"
+
+ # Initialize analyzer if needed
+ if not self.analyzer:
+ logger.info("Initializing analyzer for complexity analysis")
+ self.analyzer = CodebaseAnalyzer(
+ codebase=self.codebase,
+ repo_path=self.context.base_path
+ if hasattr(self.context, "base_path")
+ else None,
+ )
+
+ # Perform analysis if not already done
+ if not hasattr(self.analyzer, "results") or not self.analyzer.results:
+ logger.info("Running code analysis")
+ self.analyzer.analyze(AnalysisType.CODEBASE)
+
+ # Extract complexity information from analysis results
+ if not hasattr(self.analyzer, "results"):
+ logger.error("Analysis results not available")
+ return None
+
+ complexity_data = {}
+ if (
+ "static_analysis" in self.analyzer.results
+ and "code_complexity" in self.analyzer.results["static_analysis"]
+ ):
+ complexity_data = self.analyzer.results["static_analysis"][
+ "code_complexity"
+ ]
+
+ if not complexity_data:
+ logger.warning("No complexity data found in analysis results")
+ return None
+
+ # Extract function complexities
+ functions = []
+ if "function_complexity" in complexity_data:
+ for func_data in complexity_data["function_complexity"]:
+ # Skip if path filter is specified and doesn't match
+ if path_filter and not func_data.get("file", "").startswith(
+ path_filter
+ ):
+ continue
+
+ functions.append({
+ "name": func_data.get("name", ""),
+ "file": func_data.get("file", ""),
+ "complexity": func_data.get("complexity", 1),
+ "line": func_data.get("line", None),
+ })
+
+ # Sort functions by complexity (descending)
+ functions.sort(key=lambda x: x.get("complexity", 0), reverse=True)
+
+ # Generate heatmap visualization
+ plt.figure(figsize=(12, 10))
+
+ # Extract data for heatmap
+ func_names = [
+ f"{func['name']} ({func['file'].split('/')[-1]})" for func in functions[:30]
+ ]
+ complexities = [func.get("complexity", 0) for func in functions[:30]]
+
+ # Create horizontal bar chart
+ bars = plt.barh(func_names, complexities)
+
+ # Color bars by complexity
+ norm = plt.Normalize(1, max(10, max(complexities)))
+ cmap = plt.cm.get_cmap("YlOrRd")
+
+ for i, bar in enumerate(bars):
+ complexity = complexities[i]
+ bar.set_color(cmap(norm(complexity)))
+
+ # Add labels and title
+ plt.xlabel("Cyclomatic Complexity")
+ plt.title("Top Functions by Cyclomatic Complexity")
+ plt.grid(axis="x", linestyle="--", alpha=0.6)
+
+ # Add colorbar
+ plt.colorbar(plt.cm.ScalarMappable(norm=norm, cmap=cmap), label="Complexity")
+
+ # Save and return visualization
+ return self._save_visualization(
+ VisualizationType.CYCLOMATIC_COMPLEXITY, self.current_entity_name, plt.gcf()
+ )
+
+ def visualize_issues_heatmap(
+ self,
+ severity: IssueSeverity | None = None,
+ path_filter: str | None = None,
+ ):
+ """
+ Generate a heatmap visualization of issues in the codebase.
+
+ Args:
+ severity: Optional severity level to filter issues
+ path_filter: Optional path to filter files
+
+ Returns:
+ Visualization data or path to saved file
+ """
+ self.current_visualization_type = VisualizationType.ISSUES_HEATMAP
+ self.current_entity_name = f"{severity.value if severity else 'all'}_issues"
+
+ # Initialize analyzer if needed
+ if not self.analyzer:
+ logger.info("Initializing analyzer for issues analysis")
+ self.analyzer = CodebaseAnalyzer(
+ codebase=self.codebase,
+ repo_path=self.context.base_path
+ if hasattr(self.context, "base_path")
+ else None,
+ )
+
+ # Perform analysis if not already done
+ if not hasattr(self.analyzer, "results") or not self.analyzer.results:
+ logger.info("Running code analysis")
+ self.analyzer.analyze(AnalysisType.CODEBASE)
+
+ # Extract issues from analysis results
+ if (
+ not hasattr(self.analyzer, "results")
+ or "issues" not in self.analyzer.results
+ ):
+ logger.error("Issues not available in analysis results")
+ return None
+
+ issues = self.analyzer.results["issues"]
+
+ # Filter issues by severity if specified
+ if severity:
+ issues = [issue for issue in issues if issue.get("severity") == severity]
+
+ # Filter issues by path if specified
+ if path_filter:
+ issues = [
+ issue
+ for issue in issues
+ if issue.get("file", "").startswith(path_filter)
+ ]
+
+ if not issues:
+ logger.warning("No issues found matching the criteria")
+ return None
+
+ # Group issues by file
+ file_issues = {}
+ for issue in issues:
+ file_path = issue.get("file", "")
+ if file_path not in file_issues:
+ file_issues[file_path] = []
+
+ file_issues[file_path].append(issue)
+
+ # Generate heatmap visualization
+ plt.figure(figsize=(12, 10))
+
+ # Extract data for heatmap
+ files = list(file_issues.keys())
+ file_names = [file_path.split("/")[-1] for file_path in files]
+ issue_counts = [len(file_issues[file_path]) for file_path in files]
+
+ # Sort by issue count
+ sorted_data = sorted(
+ zip(file_names, issue_counts, files, strict=False),
+ key=lambda x: x[1],
+ reverse=True,
+ )
+ file_names, issue_counts, files = zip(*sorted_data, strict=False)
+
+ # Create horizontal bar chart
+ bars = plt.barh(file_names[:20], issue_counts[:20])
+
+ # Color bars by issue count
+ norm = plt.Normalize(1, max(5, max(issue_counts[:20])))
+ cmap = plt.cm.get_cmap("OrRd")
+
+ for i, bar in enumerate(bars):
+ count = issue_counts[i]
+ bar.set_color(cmap(norm(count)))
+
+ # Add labels and title
+ plt.xlabel("Number of Issues")
+ severity_text = f" ({severity.value})" if severity else ""
+ plt.title(f"Files with the Most Issues{severity_text}")
+ plt.grid(axis="x", linestyle="--", alpha=0.6)
+
+ # Add colorbar
+ plt.colorbar(plt.cm.ScalarMappable(norm=norm, cmap=cmap), label="Issue Count")
+
+ # Save and return visualization
+ return self._save_visualization(
+ VisualizationType.ISSUES_HEATMAP, self.current_entity_name, plt.gcf()
+ )
+
+    def visualize_pr_comparison(self):
+        """
+        Generate a visualization comparing base branch with PR.
+
+        Builds a graph of changed symbols colored by change kind (green =
+        modified, red = removed, blue = added) with child nodes for parameter
+        changes, return-type changes, and call-site issues.
+
+        Returns:
+            Visualization data or path to saved file, or None when the
+            analyzer lacks PR/base codebases or comparison results.
+        """
+        self.current_visualization_type = VisualizationType.PR_COMPARISON
+
+        # Check if analyzer has PR data
+        if (
+            not self.analyzer
+            or not self.analyzer.pr_codebase
+            or not self.analyzer.base_codebase
+        ):
+            logger.error("PR comparison requires analyzer with PR data")
+            return None
+
+        self.current_entity_name = (
+            f"pr_{self.analyzer.pr_number}"
+            if self.analyzer.pr_number
+            else "pr_comparison"
+        )
+
+        # Perform comparison analysis if not already done
+        if not hasattr(self.analyzer, "results") or not self.analyzer.results:
+            logger.info("Running PR comparison analysis")
+            self.analyzer.analyze(AnalysisType.COMPARISON)
+
+        # Extract comparison data from analysis results
+        if (
+            not hasattr(self.analyzer, "results")
+            or "comparison" not in self.analyzer.results
+        ):
+            logger.error("Comparison data not available in analysis results")
+            return None
+
+        comparison = self.analyzer.results["comparison"]
+
+        # Initialize graph
+        self._initialize_graph()
+
+        # Process symbol comparison data
+        if "symbol_comparison" in comparison:
+            for symbol_data in comparison["symbol_comparison"]:
+                symbol_name = symbol_data.get("name", "")
+                in_base = symbol_data.get("in_base", False)
+                in_pr = symbol_data.get("in_pr", False)
+
+                # Create a placeholder for the symbol
+                # NOTE(review): placeholder dicts are used as graph nodes —
+                # assumes self._add_node handles non-hashable objects; verify.
+                symbol_obj = {
+                    "name": symbol_name,
+                    "in_base": in_base,
+                    "in_pr": in_pr,
+                    "type": "Symbol",
+                }
+
+                # Determine node color based on presence in base and PR
+                if in_base and in_pr:
+                    color = "#A5D6A7"  # Light green (modified)
+                elif in_base:
+                    color = "#EF9A9A"  # Light red (removed)
+                else:
+                    color = "#90CAF9"  # Light blue (added)
+
+                # Add node for symbol
+                self._add_node(
+                    symbol_obj,
+                    name=symbol_name,
+                    color=color,
+                    in_base=in_base,
+                    in_pr=in_pr,
+                )
+
+                # Process parameter changes if available
+                if "parameter_changes" in symbol_data:
+                    param_changes = symbol_data["parameter_changes"]
+
+                    # Process removed parameters
+                    for param in param_changes.get("removed", []):
+                        param_obj = {
+                            "name": param,
+                            "change_type": "removed",
+                            "type": "Parameter",
+                        }
+
+                        self._add_node(
+                            param_obj,
+                            name=param,
+                            color="#EF9A9A",  # Light red (removed)
+                            change_type="removed",
+                        )
+
+                        self._add_edge(symbol_obj, param_obj, type="removed_parameter")
+
+                    # Process added parameters
+                    for param in param_changes.get("added", []):
+                        param_obj = {
+                            "name": param,
+                            "change_type": "added",
+                            "type": "Parameter",
+                        }
+
+                        self._add_node(
+                            param_obj,
+                            name=param,
+                            color="#90CAF9",  # Light blue (added)
+                            change_type="added",
+                        )
+
+                        self._add_edge(symbol_obj, param_obj, type="added_parameter")
+
+                # Process return type changes if available
+                if "return_type_change" in symbol_data:
+                    return_type_change = symbol_data["return_type_change"]
+                    old_type = return_type_change.get("old", "None")
+                    new_type = return_type_change.get("new", "None")
+
+                    return_obj = {
+                        "name": f"{old_type} -> {new_type}",
+                        "old_type": old_type,
+                        "new_type": new_type,
+                        "type": "ReturnType",
+                    }
+
+                    self._add_node(
+                        return_obj,
+                        name=f"{old_type} -> {new_type}",
+                        color="#FFD54F",  # Amber (changed)
+                        old_type=old_type,
+                        new_type=new_type,
+                    )
+
+                    self._add_edge(symbol_obj, return_obj, type="return_type_change")
+
+                # Process call site issues if available
+                if "call_site_issues" in symbol_data:
+                    for issue in symbol_data["call_site_issues"]:
+                        issue_file = issue.get("file", "")
+                        issue_line = issue.get("line", None)
+                        issue_text = issue.get("issue", "")
+
+                        # Create a placeholder for the issue
+                        issue_obj = {
+                            "name": issue_text,
+                            "file": issue_file,
+                            "line": issue_line,
+                            "type": "Issue",
+                        }
+
+                        self._add_node(
+                            issue_obj,
+                            name=f"{issue_file.split('/')[-1]}:{issue_line}",
+                            color="#EF5350",  # Red (error)
+                            file_path=issue_file,
+                            line=issue_line,
+                            issue_text=issue_text,
+                        )
+
+                        self._add_edge(symbol_obj, issue_obj, type="call_site_issue")
+
+        # Generate visualization data
+        if self.config.output_format == OutputFormat.JSON:
+            data = self._convert_graph_to_json()
+            return self._save_visualization(
+                VisualizationType.PR_COMPARISON, self.current_entity_name, data
+            )
+        else:
+            fig = self._plot_graph()
+            return self._save_visualization(
+                VisualizationType.PR_COMPARISON, self.current_entity_name, fig
+            )
+
+
+# Command-line interface
+def main():
+    """
+    Command-line interface for the codebase visualizer.
+
+    This function parses command-line arguments and generates visualizations
+    based on the specified parameters.
+
+    Exits with status 1 when a required argument for the chosen visualization
+    type is missing or when the visualization fails.
+    """
+    parser = argparse.ArgumentParser(
+        description="Generate visualizations of codebase structure and analysis."
+    )
+
+    # Repository options
+    repo_group = parser.add_argument_group("Repository Options")
+    repo_group.add_argument("--repo-url", help="URL of the repository to analyze")
+    repo_group.add_argument(
+        "--repo-path", help="Local path to the repository to analyze"
+    )
+    repo_group.add_argument("--language", help="Programming language of the codebase")
+
+    # Visualization options
+    viz_group = parser.add_argument_group("Visualization Options")
+    viz_group.add_argument(
+        "--type",
+        choices=[t.value for t in VisualizationType],
+        required=True,
+        help="Type of visualization to generate",
+    )
+    viz_group.add_argument(
+        "--entity", help="Name of the entity to visualize (function, class, file, etc.)"
+    )
+    viz_group.add_argument(
+        "--max-depth",
+        type=int,
+        default=5,
+        help="Maximum depth for recursive visualizations",
+    )
+    viz_group.add_argument(
+        "--ignore-external", action="store_true", help="Ignore external dependencies"
+    )
+    viz_group.add_argument(
+        "--severity",
+        choices=[s.value for s in IssueSeverity],
+        help="Filter issues by severity",
+    )
+    viz_group.add_argument("--path-filter", help="Filter by file path")
+
+    # PR options
+    pr_group = parser.add_argument_group("PR Options")
+    pr_group.add_argument("--pr-number", type=int, help="PR number to analyze")
+    pr_group.add_argument(
+        "--base-branch", default="main", help="Base branch for comparison"
+    )
+
+    # Output options
+    output_group = parser.add_argument_group("Output Options")
+    output_group.add_argument(
+        "--output-format",
+        choices=[f.value for f in OutputFormat],
+        default="json",
+        help="Output format for the visualization",
+    )
+    output_group.add_argument(
+        "--output-directory", help="Directory to save visualizations"
+    )
+    output_group.add_argument(
+        "--layout",
+        choices=["spring", "kamada_kawai", "spectral"],
+        default="spring",
+        help="Layout algorithm for graph visualization",
+    )
+
+    args = parser.parse_args()
+
+    # Create visualizer configuration
+    config = VisualizationConfig(
+        max_depth=args.max_depth,
+        ignore_external=args.ignore_external,
+        output_format=OutputFormat(args.output_format),
+        output_directory=args.output_directory,
+        layout_algorithm=args.layout,
+    )
+
+    # Create codebase analyzer if needed for PR comparison
+    analyzer = None
+    if args.type == VisualizationType.PR_COMPARISON.value or args.pr_number:
+        analyzer = CodebaseAnalyzer(
+            repo_url=args.repo_url,
+            repo_path=args.repo_path,
+            base_branch=args.base_branch,
+            pr_number=args.pr_number,
+            language=args.language,
+        )
+
+    # Create visualizer
+    visualizer = CodebaseVisualizer(analyzer=analyzer, config=config)
+
+    # Generate visualization based on type; entity-scoped types require
+    # --entity and exit early when it is missing
+    viz_type = VisualizationType(args.type)
+    result = None
+
+    if viz_type == VisualizationType.CALL_GRAPH:
+        if not args.entity:
+            logger.error("Entity name required for call graph visualization")
+            sys.exit(1)
+
+        result = visualizer.visualize_call_graph(args.entity)
+
+    elif viz_type == VisualizationType.DEPENDENCY_GRAPH:
+        if not args.entity:
+            logger.error("Entity name required for dependency graph visualization")
+            sys.exit(1)
+
+        result = visualizer.visualize_dependency_graph(args.entity)
+
+    elif viz_type == VisualizationType.BLAST_RADIUS:
+        if not args.entity:
+            logger.error("Entity name required for blast radius visualization")
+            sys.exit(1)
+
+        result = visualizer.visualize_blast_radius(args.entity)
+
+    elif viz_type == VisualizationType.CLASS_METHODS:
+        if not args.entity:
+            logger.error("Class name required for class methods visualization")
+            sys.exit(1)
+
+        result = visualizer.visualize_class_methods(args.entity)
+
+    elif viz_type == VisualizationType.MODULE_DEPENDENCIES:
+        if not args.entity:
+            logger.error("Module path required for module dependencies visualization")
+            sys.exit(1)
+
+        result = visualizer.visualize_module_dependencies(args.entity)
+
+    elif viz_type == VisualizationType.DEAD_CODE:
+        result = visualizer.visualize_dead_code(args.path_filter)
+
+    elif viz_type == VisualizationType.CYCLOMATIC_COMPLEXITY:
+        result = visualizer.visualize_cyclomatic_complexity(args.path_filter)
+
+    elif viz_type == VisualizationType.ISSUES_HEATMAP:
+        severity = IssueSeverity(args.severity) if args.severity else None
+        result = visualizer.visualize_issues_heatmap(severity, args.path_filter)
+
+    elif viz_type == VisualizationType.PR_COMPARISON:
+        if not args.pr_number:
+            logger.error("PR number required for PR comparison visualization")
+            sys.exit(1)
+
+        result = visualizer.visualize_pr_comparison()
+
+    # Output result
+    if result:
+        logger.info(f"Visualization completed: {result}")
+    else:
+        logger.error("Failed to generate visualization")
+        sys.exit(1)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/codegen-on-oss/codegen_on_oss/analyzers/visualization/dependency_graph/__init__.py b/codegen-on-oss/codegen_on_oss/analyzers/visualization/dependency_graph/__init__.py
new file mode 100644
index 000000000..5b9d135f7
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/analyzers/visualization/dependency_graph/__init__.py
@@ -0,0 +1,6 @@
+"""
+Dependency Graph Visualization Module
+
+This module provides tools for visualizing dependency relationships and impact analysis in a codebase.
+"""
+
diff --git a/codegen-on-oss/codegen_on_oss/analyzers/visualization/dependency_graph/blast_radius.py b/codegen-on-oss/codegen_on_oss/analyzers/visualization/dependency_graph/blast_radius.py
new file mode 100644
index 000000000..42b039632
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/analyzers/visualization/dependency_graph/blast_radius.py
@@ -0,0 +1,119 @@
+import codegen
+import networkx as nx
+from codegen import Codebase
+from codegen.sdk.core.dataclasses.usage import Usage
+from codegen.sdk.core.function import PyFunction
+from codegen.sdk.core.symbol import PySymbol
+
+# Create a directed graph for visualizing relationships between code elements
+G = nx.DiGraph()
+
+# Maximum depth to traverse in the call graph to prevent infinite recursion
+MAX_DEPTH = 5
+
+# Define colors for different types of nodes in the visualization
+COLOR_PALETTE = {
+    "StartFunction": "#9cdcfe",  # Starting function (light blue)
+    "PyFunction": "#a277ff",  # Python functions (purple)
+    "PyClass": "#ffca85",  # Python classes (orange)
+    "ExternalModule": "#f694ff",  # External module imports (pink)
+    "HTTP_METHOD": "#ffca85",  # HTTP method handlers (orange)
+}
+
+# List of common HTTP method names to identify route handlers
+# NOTE(review): "options" and "trace" are excluded — confirm intentional.
+HTTP_METHODS = ["get", "put", "patch", "post", "head", "delete"]
+
+
+def generate_edge_meta(usage: Usage) -> dict:
+ """
+ Generate metadata for graph edges based on a usage relationship.
+
+ Args:
+ usage: A Usage object representing how a symbol is used
+
+ Returns:
+ dict: Edge metadata including source location and symbol info
+ """
+ return {"name": usage.match.source, "file_path": usage.match.filepath, "start_point": usage.match.start_point, "end_point": usage.match.end_point, "symbol_name": usage.match.__class__.__name__}
+
+
+def is_http_method(symbol: PySymbol) -> bool:
+ """
+ Check if a symbol represents an HTTP method handler.
+
+ Args:
+ symbol: A Python symbol to check
+
+ Returns:
+ bool: True if symbol is an HTTP method handler
+ """
+ if isinstance(symbol, PyFunction) and symbol.is_method:
+ return symbol.name in HTTP_METHODS
+ return False
+
+
+def create_blast_radius_visualization(symbol: PySymbol, depth: int = 0):
+    """
+    Recursively build a graph visualization showing how a symbol is used.
+    Shows the "blast radius" - everything that would be affected by changes.
+
+    Args:
+        symbol: Starting symbol to analyze
+        depth: Current recursion depth
+
+    Note:
+        There is no visited-set: a symbol reachable through several paths is
+        re-traversed on each path (bounded only by MAX_DEPTH), which can be
+        slow on densely connected codebases.
+    """
+    # Stop recursion if we hit max depth
+    if depth >= MAX_DEPTH:
+        return
+
+    # Process each usage of the symbol
+    for usage in symbol.usages:
+        usage_symbol = usage.usage_symbol
+
+        # Determine node color based on symbol type
+        if is_http_method(usage_symbol):
+            color = COLOR_PALETTE.get("HTTP_METHOD")
+        else:
+            color = COLOR_PALETTE.get(usage_symbol.__class__.__name__, "#f694ff")
+
+        # Add node and edge to graph (module-level G)
+        G.add_node(usage_symbol, color=color)
+        G.add_edge(symbol, usage_symbol, **generate_edge_meta(usage))
+
+        # Recurse to process usages of this symbol
+        create_blast_radius_visualization(usage_symbol, depth + 1)
+
+
+@codegen.function("visualize-function-blast-radius")
+def run(codebase: Codebase):
+    """
+    Generate a visualization showing the blast radius of changes to a function.
+
+    This codemod:
+    1. Identifies all usages of a target function
+    2. Creates a graph showing how the function is used throughout the codebase
+    3. Highlights HTTP method handlers and different types of code elements
+    """
+    # Reset the module-level graph so repeated runs start clean
+    global G
+    G = nx.DiGraph()
+
+    # Get the target function to analyze (hard-coded demo target)
+    target_func = codebase.get_function("export_asset")
+
+    # Add starting function to graph with special color
+    G.add_node(target_func, color=COLOR_PALETTE.get("StartFunction"))
+
+    # Build the visualization starting from target function
+    create_blast_radius_visualization(target_func)
+
+    print(G)
+    print("Use codegen.sh to visualize the graph!")
+
+
+if __name__ == "__main__":
+    print("Initializing codebase...")
+    # Pinned commit keeps the demo reproducible across runs
+    codebase = Codebase.from_repo("codegen-oss/posthog", commit="b174f2221ea4ae50e715eb6a7e70e9a2b0760800", language="python")
+    print(f"Codebase with {len(codebase.files)} files and {len(codebase.functions)} functions.")
+    print("Creating graph...")
+
+    run(codebase)
+
diff --git a/codegen-on-oss/codegen_on_oss/analyzers/visualization/dependency_graph/dependency_trace.py b/codegen-on-oss/codegen_on_oss/analyzers/visualization/dependency_graph/dependency_trace.py
new file mode 100644
index 000000000..85448ac4f
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/analyzers/visualization/dependency_graph/dependency_trace.py
@@ -0,0 +1,83 @@
+import codegen
+import networkx as nx
+from codegen import Codebase
+from codegen.sdk.core.class_definition import Class
+from codegen.sdk.core.import_resolution import Import
+from codegen.sdk.core.symbol import Symbol
+
+G = nx.DiGraph()
+
+IGNORE_EXTERNAL_MODULE_CALLS = True
+IGNORE_CLASS_CALLS = False
+MAX_DEPTH = 10
+
+COLOR_PALETTE = {
+ "StartFunction": "#9cdcfe", # Light blue for the starting function
+ "PyFunction": "#a277ff", # Purple for Python functions
+ "PyClass": "#ffca85", # Orange for Python classes
+ "ExternalModule": "#f694ff", # Pink for external module references
+}
+
+# Dictionary to track visited nodes and prevent cycles
+visited = {}
+
+
+def create_dependencies_visualization(symbol: Symbol, depth: int = 0):
+ """Creates a visualization of symbol dependencies in the codebase
+
+ Recursively traverses the dependency tree of a symbol (function, class, etc.)
+ and creates a directed graph representation. Dependencies can be either direct
+ symbol references or imports.
+
+ Args:
+ symbol (Symbol): The starting symbol whose dependencies will be mapped
+ depth (int): Current depth in the recursive traversal
+ """
+ if depth >= MAX_DEPTH:
+ return
+
+ for dep in symbol.dependencies:
+ dep_symbol = None
+
+ if isinstance(dep, Symbol):
+ dep_symbol = dep
+ elif isinstance(dep, Import):
+ dep_symbol = dep.resolved_symbol if dep.resolved_symbol else None
+
+ if dep_symbol:
+ G.add_node(dep_symbol, color=COLOR_PALETTE.get(dep_symbol.__class__.__name__, "#f694ff"))
+ G.add_edge(symbol, dep_symbol)
+
+ if not isinstance(dep_symbol, Class):
+ create_dependencies_visualization(dep_symbol, depth + 1)
+
+
+@codegen.function("visualize-symbol-dependencies")
+def run(codebase: Codebase):
+    """Generate a visualization of symbol dependencies in a codebase.
+
+    This codemod:
+    1. Creates a directed graph of symbol dependencies starting from a target function
+    2. Tracks relationships between functions, classes, and imports
+    3. Generates a visual representation of the dependency hierarchy
+
+    Args:
+        codebase: The codebase to analyze; must contain a function named
+            "get_query_runner" (the hard-coded entry point below).
+    """
+    # Reset the module-level graph so repeated runs start from a clean slate
+    global G
+    G = nx.DiGraph()
+
+    # Seed the graph with the entry-point function, highlighted distinctly
+    target_func = codebase.get_function("get_query_runner")
+    G.add_node(target_func, color=COLOR_PALETTE.get("StartFunction"))
+
+    # Recursively add dependency nodes/edges (bounded by MAX_DEPTH)
+    create_dependencies_visualization(target_func)
+
+    print(G)
+    print("Use codegen.sh to visualize the graph!")
+
+
+if __name__ == "__main__":
+ print("Initializing codebase...")
+ codebase = Codebase.from_repo("codegen-oss/posthog", commit="b174f2221ea4ae50e715eb6a7e70e9a2b0760800", language="python")
+ print(f"Codebase with {len(codebase.files)} files and {len(codebase.functions)} functions.")
+ print("Creating graph...")
+
+ run(codebase)
+
diff --git a/codegen-on-oss/codegen_on_oss/analyzers/visualization/dependency_graph/viz_dead_code.py b/codegen-on-oss/codegen_on_oss/analyzers/visualization/dependency_graph/viz_dead_code.py
new file mode 100644
index 000000000..17e72a5a6
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/analyzers/visualization/dependency_graph/viz_dead_code.py
@@ -0,0 +1,154 @@
+from abc import ABC
+
+import networkx as nx
+
+from codegen.sdk.core.codebase import CodebaseType
+from codegen.sdk.core.function import Function
+from codegen.sdk.core.import_resolution import Import
+from codegen.sdk.core.symbol import Symbol
+from codegen.shared.enums.programming_language import ProgrammingLanguage
+from tests.shared.skills.decorators import skill, skill_impl
+from tests.shared.skills.skill import Skill
+from tests.shared.skills.skill_test import SkillTestCase, SkillTestCasePyFile
+
+PyDeadCodeTest = SkillTestCase(
+ [
+ SkillTestCasePyFile(
+ input="""
+# Live code
+def used_function():
+ return "I'm used!"
+
+class UsedClass:
+ def used_method(self):
+ return "I'm a used method!"
+
+# Dead code
+def unused_function():
+ return "I'm never called!"
+
+class UnusedClass:
+ def unused_method(self):
+ return "I'm never used!"
+
+# Second-order dead code
+def second_order_dead():
+ unused_function()
+ UnusedClass().unused_method()
+
+# More live code
+def another_used_function():
+ return used_function()
+
+# Main execution
+def main():
+ print(used_function())
+ print(UsedClass().used_method())
+ print(another_used_function())
+
+if __name__ == "__main__":
+ main()
+""",
+ filepath="example.py",
+ ),
+ SkillTestCasePyFile(
+ input="""
+# This file should be ignored by the DeadCode skill
+
+from example import used_function, UsedClass
+
+def test_used_function():
+ assert used_function() == "I'm used!"
+
+def test_used_class():
+ assert UsedClass().used_method() == "I'm a used method!"
+""",
+ filepath="test_example.py",
+ ),
+ SkillTestCasePyFile(
+ input="""
+# This file contains a decorated function that should be ignored
+
+from functools import lru_cache
+
+@lru_cache
+def cached_function():
+ return "I'm cached!"
+
+# This function is dead code but should be ignored due to decoration
+@deprecated
+def old_function():
+ return "I'm old but decorated!"
+
+# This function is dead code and should be detected
+def real_dead_code():
+ return "I'm really dead!"
+""",
+ filepath="decorated_functions.py",
+ ),
+ ],
+ graph=True,
+)
+
+
+# NOTE(review): the prompt below describes a filtered call-graph visualization,
+# which does not match this skill's dead-code behavior — confirm intended text.
+@skill(
+    eval_skill=False,
+    prompt="Show me a visualization of the call graph from my_class and filter out test files and include only the methods that have the name post, get, patch, delete",
+    uid="ec5e98c9-b57f-43f8-8b3c-af1b30bb91e6",
+)
+class DeadCode(Skill, ABC):
+    """This skill shows a visualization of the dead code in the codebase.
+    It iterates through all functions in the codebase, identifying those
+    that have no usages and are not in test files or decorated. These functions
+    are considered 'dead code' and are added to a directed graph. The skill
+    then explores the dependencies of these dead code functions, adding them to
+    the graph as well. This process helps to identify not only directly unused code
+    but also code that might only be used by other dead code (second-order dead code).
+    The resulting visualization provides a clear picture of potentially removable code,
+    helping developers to clean up and optimize their codebase.
+    """
+
+    @staticmethod
+    @skill_impl(test_cases=[PyDeadCodeTest], language=ProgrammingLanguage.PYTHON)
+    @skill_impl(test_cases=[], skip_test=True, language=ProgrammingLanguage.TYPESCRIPT)
+    def skill_func(codebase: CodebaseType):
+        # Create a directed graph to visualize dead and second-order dead code
+        G = nx.DiGraph()
+
+        # First, identify all dead code
+        dead_code: list[Function] = []
+
+        # Iterate through all functions in the codebase
+        for function in codebase.functions:
+            # Skip anything in a test file (substring match on the full path)
+            if "test" in function.file.filepath:
+                continue
+
+            # Skip decorated functions: decorators (routes, fixtures, caching)
+            # often imply usage the static analysis cannot see
+            if function.decorators:
+                continue
+
+            # Check if the function has no usages
+            if not function.symbol_usages:
+                # Add the function to the dead code list
+                dead_code.append(function)
+                # Add the function to the graph as dead code
+                G.add_node(function, color="red")
+
+        # Now, find second-order dead code: dependencies reachable only from
+        # the dead functions found above
+        for symbol in dead_code:
+            # Get all usages of the dead code symbol
+            for dep in symbol.dependencies:
+                if isinstance(dep, Import):
+                    # NOTE(review): other modules use dep.resolved_symbol for
+                    # this unwrap — confirm imported_symbol is equivalent here
+                    dep = dep.imported_symbol
+                if isinstance(dep, Symbol):
+                    if "test" not in dep.name:
+                        G.add_node(dep)
+                        G.add_edge(symbol, dep, color="red")
+                        # Link in any live (non-test) functions that also use
+                        # this dependency, to show it is not exclusively dead
+                        for usage_symbol in dep.symbol_usages:
+                            if isinstance(usage_symbol, Function):
+                                if "test" not in usage_symbol.name:
+                                    G.add_edge(usage_symbol, dep)
+
+        # Visualize the graph to show dead and second-order dead code
+        codebase.visualize(G)
+
diff --git a/codegen-on-oss/codegen_on_oss/analyzers/visualization/structure_graph/__init__.py b/codegen-on-oss/codegen_on_oss/analyzers/visualization/structure_graph/__init__.py
new file mode 100644
index 000000000..82dfcb765
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/analyzers/visualization/structure_graph/__init__.py
@@ -0,0 +1,6 @@
+"""
+Structure Graph Visualization Module
+
+This module provides tools for visualizing code structure, directory trees, and database relationships.
+"""
+
diff --git a/codegen-on-oss/codegen_on_oss/analyzers/visualization/structure_graph/graph_viz_dir_tree.py b/codegen-on-oss/codegen_on_oss/analyzers/visualization/structure_graph/graph_viz_dir_tree.py
new file mode 100644
index 000000000..67fe5e0a7
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/analyzers/visualization/structure_graph/graph_viz_dir_tree.py
@@ -0,0 +1,111 @@
+from abc import ABC
+
+import networkx as nx
+
+from codegen.sdk.core.codebase import CodebaseType
+from codegen.shared.enums.programming_language import ProgrammingLanguage
+from tests.shared.skills.decorators import skill, skill_impl
+from tests.shared.skills.skill import Skill
+from tests.shared.skills.skill_test import SkillTestCase, SkillTestCasePyFile
+
+PyRepoDirTreeTest = SkillTestCase(
+ [
+ SkillTestCasePyFile(input="# Root level file", filepath="README.md"),
+ SkillTestCasePyFile(input="# Configuration file", filepath="config.yaml"),
+ SkillTestCasePyFile(
+ input="""
+def main():
+ print("Hello, World!")
+
+if __name__ == "__main__":
+ main()
+""",
+ filepath="src/main.py",
+ ),
+ SkillTestCasePyFile(
+ input="""
+class User:
+ def __init__(self, name):
+ self.name = name
+""",
+ filepath="src/models/user.py",
+ ),
+ SkillTestCasePyFile(
+ input="""
+from src.models.user import User
+
+def create_user(name):
+ return User(name)
+""",
+ filepath="src/services/user_service.py",
+ ),
+ SkillTestCasePyFile(
+ input="""
+import unittest
+from src.models.user import User
+
+class TestUser(unittest.TestCase):
+ def test_user_creation(self):
+ user = User("Alice")
+ self.assertEqual(user.name, "Alice")
+""",
+ filepath="tests/test_user.py",
+ ),
+ SkillTestCasePyFile(
+ input="""
+{
+ "name": "my-project",
+ "version": "1.0.0",
+ "description": "A sample project"
+}
+""",
+ filepath="package.json",
+ ),
+ SkillTestCasePyFile(
+ input="""
+node_modules/
+*.log
+.DS_Store
+""",
+ filepath=".gitignore",
+ ),
+ ],
+ graph=True,
+)
+
+
+@skill(eval_skill=False, prompt="Show me the directory structure of this codebase", uid="ef9a5a54-d793-4749-992d-63ea3958056b")
+class RepoDirTree(Skill, ABC):
+ """This skill displays the directory or repository tree structure of a codebase. It analyzes the file paths within the codebase and constructs a hierarchical
+ representation of the directory structure. The skill creates a visual graph where each node represents a directory or file, and edges represent the parent-child
+ relationships between directories. This visualization helps developers understand the overall organization of the codebase, making it easier to navigate and
+ manage large projects. Additionally, it can be useful for identifying potential structural issues or inconsistencies in the project layout.
+ """
+
+ @staticmethod
+ @skill_impl(test_cases=[PyRepoDirTreeTest], language=ProgrammingLanguage.PYTHON)
+ @skill_impl(test_cases=[], skip_test=True, language=ProgrammingLanguage.TYPESCRIPT)
+ def skill_func(codebase: CodebaseType):
+ # Create a directed graph
+ G = nx.DiGraph()
+
+ # Iterate over all files in the codebase
+ for file in codebase.files:
+ # Get the full filepath
+ filepath = file.filepath
+ # Split the filepath into parts
+ parts = filepath.split("/")
+
+ # Add nodes and edges to the graph
+ for i in range(len(parts)):
+ # Create a path from the root to the current part
+ path = "/".join(parts[: i + 1])
+ # Add the node for the current directory
+ G.add_node(path)
+ # If it's not the root, add an edge from the parent directory to the current directory
+ if i > 0:
+ parent_path = "/".join(parts[:i])
+ G.add_edge(parent_path, path)
+
+ codebase.visualize(G)
+
diff --git a/codegen-on-oss/codegen_on_oss/analyzers/visualization/structure_graph/graph_viz_foreign_key.py b/codegen-on-oss/codegen_on_oss/analyzers/visualization/structure_graph/graph_viz_foreign_key.py
new file mode 100644
index 000000000..1f453223b
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/analyzers/visualization/structure_graph/graph_viz_foreign_key.py
@@ -0,0 +1,178 @@
+from abc import ABC
+
+import networkx as nx
+
+from codegen.sdk.core.codebase import CodebaseType
+from codegen.shared.enums.programming_language import ProgrammingLanguage
+from tests.shared.skills.decorators import skill, skill_impl
+from tests.shared.skills.skill import Skill
+from tests.shared.skills.skill_test import SkillTestCase, SkillTestCasePyFile
+
+PyForeignKeyGraphTest = SkillTestCase(
+ [
+ SkillTestCasePyFile(
+ input="""
+from sqlalchemy import Column, Integer, String, ForeignKey, BigInteger
+from app.models.base import BaseModel
+
+class UserModel(BaseModel):
+ __tablename__ = 'users'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(100), nullable=False)
+ email = Column(String(100), unique=True, nullable=False)
+
+class TaskModel(BaseModel):
+ __tablename__ = 'tasks'
+
+ id = Column(Integer, primary_key=True)
+ title = Column(String(200), nullable=False)
+ description = Column(String(500))
+ user_id = Column(Integer, ForeignKey("users.id", ondelete="CASCADE"), nullable=False)
+
+class CommentModel(BaseModel):
+ __tablename__ = 'comments'
+
+ id = Column(Integer, primary_key=True)
+ content = Column(String(500), nullable=False)
+ task_id = Column(Integer, ForeignKey("tasks.id", ondelete="CASCADE"), nullable=False)
+ user_id = Column(Integer, ForeignKey("users.id", ondelete="CASCADE"), nullable=False)
+
+class ProjectModel(BaseModel):
+ __tablename__ = 'projects'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(200), nullable=False)
+ description = Column(String(500))
+
+class TaskProjectModel(BaseModel):
+ __tablename__ = 'task_projects'
+
+ id = Column(Integer, primary_key=True)
+ task_id = Column(Integer, ForeignKey("tasks.id", ondelete="CASCADE"), nullable=False)
+ project_id = Column(Integer, ForeignKey("projects.id", ondelete="CASCADE"), nullable=False)
+
+class AgentRunModel(BaseModel):
+ __tablename__ = 'agent_runs'
+
+ id = Column(BigInteger, primary_key=True)
+ task_id = Column(BigInteger, ForeignKey("tasks.id", ondelete="CASCADE"), nullable=False)
+ agent_id = Column(BigInteger, ForeignKey("agents.id", ondelete="CASCADE"), nullable=False)
+
+class AgentModel(BaseModel):
+ __tablename__ = 'agents'
+
+ id = Column(BigInteger, primary_key=True)
+ name = Column(String(100), nullable=False)
+""",
+ filepath="app/models/schema.py",
+ )
+ ],
+ graph=True,
+)
+
+
+@skill(
+ eval_skill=False,
+ prompt="Help me analyze my data schema. I have a bunch of SQLAlchemy models with foreign keys to each other, all of them are classes like this that inherit BaseModel, like the one in this file.",
+ uid="2a5d8f4d-5f02-445e-9d00-77bdb9a0d268",
+)
+class ForeignKeyGraph(Skill, ABC):
+ """This skill helps analyze a data schema by creating a graph representation of SQLAlchemy models and their foreign key relationships.
+
+ It processes a collection of SQLAlchemy models with foreign keys referencing each other. All of these models are classes that inherit from BaseModel, similar to the one in this file. Foreign keys
+ are typically defined in the following format:
+ agent_run_id = Column(BigInteger, ForeignKey("AgentRun.id", ondelete="CASCADE"), nullable=False)
+
+ The skill iterates through all classes in the codebase, identifying those that are subclasses of BaseModel. For each relevant class, it examines the attributes to find ForeignKey definitions. It
+ then builds a mapping of these relationships.
+
+ Using this mapping, the skill constructs a directed graph where:
+ - Nodes represent the models (with the 'Model' suffix stripped from their names)
+ - Edges represent the foreign key relationships between models
+
+ This graph visualization allows for easy analysis of the data schema, showing how different models are interconnected through their foreign key relationships. The resulting graph can be used to
+ understand data dependencies, optimize queries, or refactor the database schema.
+ """
+
+ @staticmethod
+ @skill_impl(test_cases=[PyForeignKeyGraphTest], language=ProgrammingLanguage.PYTHON)
+ def skill_func(codebase: CodebaseType):
+ # Create a mapping dictionary to hold relationships
+ foreign_key_mapping = {}
+
+ # Iterate through all classes in the codebase
+ for cls in codebase.classes:
+ # Check if the class is a subclass of BaseModel and defined in the correct file
+ if cls.is_subclass_of("BaseModel") and "from app.models.base import BaseModel" in cls.file.content:
+ # Initialize an empty list for the current class
+ foreign_key_mapping[cls.name] = []
+
+ # Iterate through the attributes of the class
+ for attr in cls.attributes:
+ # Check if the attribute's source contains a ForeignKey definition
+ if "ForeignKey" in attr.source:
+ # Extract the table name from the ForeignKey string
+ start_index = attr.source.find('("') + 2
+ end_index = attr.source.find(".id", start_index)
+ if end_index != -1:
+ target_table = attr.source[start_index:end_index]
+ # Append the target table to the mapping, avoiding duplicates
+ if target_table not in foreign_key_mapping[cls.name]:
+ foreign_key_mapping[cls.name].append(target_table)
+
+ # Now foreign_key_mapping contains the desired relationships
+ # print(foreign_key_mapping)
+
+ # Create a directed graph
+ G = nx.DiGraph()
+
+ # Iterate through the foreign_key_mapping to add nodes and edges
+ for model, targets in foreign_key_mapping.items():
+ # Add the model node (strip 'Model' suffix)
+ model_name = model.replace("Model", "")
+ G.add_node(model_name)
+
+ # Add edges to the target tables
+ for target in targets:
+ G.add_node(target) # Ensure the target is also a node
+ G.add_edge(model_name, target)
+
+ # Now G contains the directed graph of models and their foreign key relationships
+ # You can visualize or analyze the graph as needed
+ codebase.visualize(G)
+
+ ##############################################################################################################
+ # IN DEGREE
+ ##############################################################################################################
+
+ # Calculate in-degrees for each node
+ in_degrees = G.in_degree()
+
+ # Create a list of nodes with their in-degree counts
+ in_degree_list = [(node, degree) for node, degree in in_degrees]
+
+ # Sort the list by in-degree in descending order
+ sorted_in_degrees = sorted(in_degree_list, key=lambda x: x[1], reverse=True)
+
+ # Print the nodes with their in-degrees
+ for node, degree in sorted_in_degrees:
+ print(f"Node: {node}, In-Degree: {degree}")
+ if degree == 0:
+ G.nodes[node]["color"] = "red"
+
+ ##############################################################################################################
+ # FIND MODELS MAPPING TO TASK
+ ##############################################################################################################
+
+ # Collect models that map to the Task model
+ models_mapping_to_task = []
+ for model, targets in foreign_key_mapping.items():
+ if "Task" in targets:
+ models_mapping_to_task.append(model)
+
+ # Print the models that map to Task
+ print("Models mapping to 'Task':")
+ for model in models_mapping_to_task:
+ print(f"> {model}")
+
diff --git a/codegen-on-oss/codegen_on_oss/analyzers/visualization/visualizer.py b/codegen-on-oss/codegen_on_oss/analyzers/visualization/visualizer.py
new file mode 100644
index 000000000..81f4f61be
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/analyzers/visualization/visualizer.py
@@ -0,0 +1,360 @@
+#!/usr/bin/env python3
+"""
+Core Visualization Module
+
+This module provides the base visualization capabilities for codebases and PR analyses.
+It defines the core classes and interfaces for generating visual representations
+of code structure, dependencies, and issues.
+"""
+
+import json
+import logging
+import os
+from dataclasses import dataclass, field
+from datetime import datetime
+from enum import Enum
+from typing import Any
+
+try:
+ import matplotlib.pyplot as plt
+ import networkx as nx
+ from matplotlib.colors import LinearSegmentedColormap
+except ImportError:
+ logging.warning(
+ "Visualization dependencies not found. Please install them with: pip install networkx matplotlib"
+ )
+
+
+class VisualizationType(str, Enum):
+ """Types of visualizations supported by this module."""
+
+ CALL_GRAPH = "call_graph"
+ DEPENDENCY_GRAPH = "dependency_graph"
+ BLAST_RADIUS = "blast_radius"
+ CLASS_METHODS = "class_methods"
+ MODULE_DEPENDENCIES = "module_dependencies"
+ DEAD_CODE = "dead_code"
+ CYCLOMATIC_COMPLEXITY = "cyclomatic_complexity"
+ ISSUES_HEATMAP = "issues_heatmap"
+ PR_COMPARISON = "pr_comparison"
+
+
+class OutputFormat(str, Enum):
+ """Output formats for visualizations."""
+
+ JSON = "json"
+ PNG = "png"
+ SVG = "svg"
+ HTML = "html"
+ DOT = "dot"
+
+
+@dataclass
+class VisualizationConfig:
+ """Configuration for visualization generation."""
+
+ max_depth: int = 5
+ ignore_external: bool = True
+ ignore_tests: bool = True
+ node_size_base: int = 300
+ edge_width_base: float = 1.0
+ filename_filter: list[str] | None = None
+ symbol_filter: list[str] | None = None
+ output_format: OutputFormat = OutputFormat.JSON
+ output_directory: str | None = None
+ layout_algorithm: str = "spring"
+ highlight_nodes: list[str] = field(default_factory=list)
+ highlight_color: str = "#ff5555"
+ color_palette: dict[str, str] = field(
+ default_factory=lambda: {
+ "Function": "#a277ff", # Purple
+ "Class": "#ffca85", # Orange
+ "File": "#80CBC4", # Teal
+ "Module": "#81D4FA", # Light Blue
+ "Variable": "#B39DDB", # Light Purple
+ "Root": "#ef5350", # Red
+ "Warning": "#FFCA28", # Amber
+ "Error": "#EF5350", # Red
+ "Dead": "#78909C", # Gray
+ "External": "#B0BEC5", # Light Gray
+ }
+ )
+
+
+class BaseVisualizer:
+ """
+ Base visualizer providing common functionality for different visualization types.
+
+ This class implements the core operations needed for visualization, including
+ graph creation, node and edge management, and output generation.
+ """
+
+ def __init__(self, config: VisualizationConfig | None = None):
+ """
+ Initialize the BaseVisualizer.
+
+ Args:
+ config: Visualization configuration options
+ """
+ self.config = config or VisualizationConfig()
+
+ # Create visualization directory if specified
+ if self.config.output_directory:
+ os.makedirs(self.config.output_directory, exist_ok=True)
+
+ # Initialize graph for visualization
+ self.graph = nx.DiGraph()
+
+ # Tracking current visualization
+ self.current_visualization_type = None
+ self.current_entity_name = None
+
+ def _initialize_graph(self):
+ """Initialize a fresh graph for visualization."""
+ self.graph = nx.DiGraph()
+
+ def _add_node(self, node: Any, **attrs):
+ """
+ Add a node to the visualization graph with attributes.
+
+ Args:
+ node: Node object to add
+ **attrs: Node attributes
+ """
+ # Skip if node already exists
+ if self.graph.has_node(node):
+ return
+
+ # Generate node ID (memory address for unique identification)
+ node_id = id(node)
+
+ # Get node name
+ if "name" in attrs:
+ node_name = attrs["name"]
+ elif hasattr(node, "name"):
+ node_name = node.name
+ elif hasattr(node, "path"):
+ node_name = str(node.path).split("/")[-1]
+ else:
+ node_name = str(node)
+
+ # Determine node type and color
+ node_type = node.__class__.__name__
+ color = attrs.get("color", self.config.color_palette.get(node_type, "#BBBBBB"))
+
+ # Add node with attributes
+ self.graph.add_node(
+ node_id,
+ original_node=node,
+ name=node_name,
+ type=node_type,
+ color=color,
+ **attrs,
+ )
+
+ return node_id
+
+ def _add_edge(self, source: Any, target: Any, **attrs):
+ """
+ Add an edge to the visualization graph with attributes.
+
+ Args:
+ source: Source node
+ target: Target node
+ **attrs: Edge attributes
+ """
+ # Get node IDs
+ source_id = id(source)
+ target_id = id(target)
+
+ # Add edge with attributes
+ self.graph.add_edge(source_id, target_id, **attrs)
+
+ def _generate_filename(
+ self, visualization_type: VisualizationType, entity_name: str
+ ):
+ """
+ Generate a filename for the visualization.
+
+ Args:
+ visualization_type: Type of visualization
+ entity_name: Name of the entity being visualized
+
+ Returns:
+ Generated filename
+ """
+ timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+ sanitized_name = (
+ entity_name.replace("/", "_").replace("\\", "_").replace(".", "_")
+ )
+ return f"{visualization_type.value}_{sanitized_name}_{timestamp}.{self.config.output_format.value}"
+
+ def _save_visualization(
+ self, visualization_type: VisualizationType, entity_name: str, data: Any
+ ):
+ """
+ Save a visualization to file or return it.
+
+ Args:
+ visualization_type: Type of visualization
+ entity_name: Name of the entity being visualized
+ data: Visualization data to save
+
+ Returns:
+ Path to saved file or visualization data
+ """
+ self.current_visualization_type = visualization_type
+ self.current_entity_name = entity_name
+
+ filename = self._generate_filename(visualization_type, entity_name)
+
+ if self.config.output_directory:
+ filepath = os.path.join(self.config.output_directory, filename)
+ else:
+ filepath = filename
+
+ if self.config.output_format == OutputFormat.JSON:
+ with open(filepath, "w") as f:
+ json.dump(data, f, indent=2)
+ elif self.config.output_format in [OutputFormat.PNG, OutputFormat.SVG]:
+ # Save matplotlib figure
+ plt.savefig(
+ filepath, format=self.config.output_format.value, bbox_inches="tight"
+ )
+ plt.close()
+ elif self.config.output_format == OutputFormat.DOT:
+ # Save as DOT file for Graphviz
+ try:
+ from networkx.drawing.nx_agraph import write_dot
+
+ write_dot(self.graph, filepath)
+ except ImportError:
+ logging.exception(
+ "networkx.drawing.nx_agraph not available. Install pygraphviz for DOT format."
+ )
+ return None
+
+ logging.info(f"Visualization saved to {filepath}")
+ return filepath
+
+ def _convert_graph_to_json(self):
+ """
+ Convert the networkx graph to a JSON-serializable dictionary.
+
+ Returns:
+ Dictionary representation of the graph
+ """
+ nodes = []
+ for node, attrs in self.graph.nodes(data=True):
+ # Create a serializable node
+ node_data = {
+ "id": node,
+ "name": attrs.get("name", ""),
+ "type": attrs.get("type", ""),
+ "color": attrs.get("color", "#BBBBBB"),
+ }
+
+ # Add file path if available
+ if "file_path" in attrs:
+ node_data["file_path"] = attrs["file_path"]
+
+ # Add other attributes
+ for key, value in attrs.items():
+ if key not in ["name", "type", "color", "file_path", "original_node"]:
+ if (
+ isinstance(value, str | int | float | bool | list | dict)
+ or value is None
+ ):
+ node_data[key] = value
+
+ nodes.append(node_data)
+
+ edges = []
+ for source, target, attrs in self.graph.edges(data=True):
+ # Create a serializable edge
+ edge_data = {
+ "source": source,
+ "target": target,
+ }
+
+ # Add other attributes
+ for key, value in attrs.items():
+ if (
+ isinstance(value, str | int | float | bool | list | dict)
+ or value is None
+ ):
+ edge_data[key] = value
+
+ edges.append(edge_data)
+
+ return {
+ "nodes": nodes,
+ "edges": edges,
+ "metadata": {
+ "visualization_type": self.current_visualization_type,
+ "entity_name": self.current_entity_name,
+ "timestamp": datetime.now().isoformat(),
+ "node_count": len(nodes),
+ "edge_count": len(edges),
+ },
+ }
+
+ def _plot_graph(self):
+ """
+ Plot the graph using matplotlib.
+
+ Returns:
+ Matplotlib figure
+ """
+ plt.figure(figsize=(12, 10))
+
+ # Extract node positions using specified layout algorithm
+ if self.config.layout_algorithm == "spring":
+ pos = nx.spring_layout(self.graph, seed=42)
+ elif self.config.layout_algorithm == "kamada_kawai":
+ pos = nx.kamada_kawai_layout(self.graph)
+ elif self.config.layout_algorithm == "spectral":
+ pos = nx.spectral_layout(self.graph)
+ else:
+ # Default to spring layout
+ pos = nx.spring_layout(self.graph, seed=42)
+
+ # Extract node colors
+ node_colors = [
+ attrs.get("color", "#BBBBBB") for _, attrs in self.graph.nodes(data=True)
+ ]
+
+ # Extract node sizes (can be based on some metric)
+ node_sizes = [self.config.node_size_base for _ in self.graph.nodes()]
+
+ # Draw nodes
+ nx.draw_networkx_nodes(
+ self.graph, pos, node_color=node_colors, node_size=node_sizes, alpha=0.8
+ )
+
+ # Draw edges
+ nx.draw_networkx_edges(
+ self.graph,
+ pos,
+ width=self.config.edge_width_base,
+ alpha=0.6,
+ arrows=True,
+ arrowsize=10,
+ )
+
+ # Draw labels
+ nx.draw_networkx_labels(
+ self.graph,
+ pos,
+ labels={
+ node: attrs.get("name", "")
+ for node, attrs in self.graph.nodes(data=True)
+ },
+ font_size=8,
+ font_weight="bold",
+ )
+
+ plt.title(f"{self.current_visualization_type} - {self.current_entity_name}")
+ plt.axis("off")
+
+ return plt.gcf()
diff --git a/codegen-on-oss/codegen_on_oss/bucket_store.py b/codegen-on-oss/codegen_on_oss/bucket_store.py
new file mode 100644
index 000000000..f068fd691
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/bucket_store.py
@@ -0,0 +1,26 @@
+from datetime import datetime
+from importlib.metadata import version
+from typing import TYPE_CHECKING
+
+from boto3 import client
+
+if TYPE_CHECKING:
+ from types_boto3_s3 import S3Client
+
+
+class BucketStore:
+    """Uploads local files to an S3 bucket under a version/timestamp-prefixed key."""
+
+    # Forward-reference annotation: the concrete S3Client type only exists
+    # when the types-boto3 stubs are installed (TYPE_CHECKING guard above)
+    s3_client: "S3Client"
+
+    def __init__(self, bucket_name: str):
+        """
+        Args:
+            bucket_name: Name of the target S3 bucket.
+        """
+        self.bucket_name = bucket_name
+        self.s3_client = client("s3")
+        # Installed codegen package version, used as the leading key segment
+        self.key_prefix: str = str(version("codegen"))
+
+    def upload_file(self, local_path: str, remote_path: str) -> str:
+        """Upload a local file to the bucket.
+
+        The object key is ``<codegen version>/<timestamp>/<remote_path>``.
+
+        Args:
+            local_path: Path of the file on disk.
+            remote_path: Suffix appended to the generated key prefix.
+
+        Returns:
+            The full S3 object key the file was uploaded under.
+        """
+        key = f"{self.key_prefix}/{datetime.now().strftime('%Y-%m-%d-%H-%M-%S')}/{remote_path}"
+        self.s3_client.upload_file(
+            local_path,
+            self.bucket_name,
+            key,
+        )
+        return key
diff --git a/codegen-on-oss/codegen_on_oss/cache.py b/codegen-on-oss/codegen_on_oss/cache.py
new file mode 100644
index 000000000..6f1346a98
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/cache.py
@@ -0,0 +1,5 @@
+from pathlib import Path
+
+from platformdirs import user_cache_dir
+
+cachedir = Path(user_cache_dir("codegen-on-oss", "codegen"))
diff --git a/codegen-on-oss/codegen_on_oss/cli.py b/codegen-on-oss/codegen_on_oss/cli.py
new file mode 100644
index 000000000..c1807d13e
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/cli.py
@@ -0,0 +1,128 @@
+import sys
+from pathlib import Path
+
+import click
+from loguru import logger
+
+from codegen_on_oss.cache import cachedir
+from codegen_on_oss.metrics import MetricsProfiler
+from codegen_on_oss.outputs.csv_output import CSVOutput
+from codegen_on_oss.parser import CodegenParser
+from codegen_on_oss.sources import RepoSource, all_sources
+
+logger.remove(0)
+
+
+@click.group()
+def cli():
+ pass
+
+
+@cli.command(name="run-one")
+@click.argument("url", type=str)
+@click.option(
+ "--cache-dir",
+ type=click.Path(dir_okay=True),
+ help="Cache directory",
+ default=cachedir,
+)
+@click.option(
+ "--output-path",
+ type=click.Path(dir_okay=True),
+ help="Output path",
+ default="metrics.csv",
+)
+@click.option(
+ "--commit-hash",
+ type=str,
+ help="Commit hash to parse",
+)
+@click.option(
+ "--error-output-path",
+ type=click.Path(dir_okay=True),
+ help="Error output path",
+ default=cachedir / "errors.log",
+)
+@click.option(
+ "--debug",
+ is_flag=True,
+ help="Debug mode",
+)
+def run_one(
+ url: str,
+ cache_dir: str | Path = str(cachedir),
+ output_path: str = "metrics.csv",
+ commit_hash: str | None = None,
+ error_output_path: Path = str(cachedir / "errors.log"),
+ debug: bool = False,
+):
+ """
+ Parse a repository with codegen
+ """
+ logger.add(error_output_path, level="ERROR")
+ logger.add(sys.stdout, level="DEBUG" if debug else "INFO")
+ output = CSVOutput(MetricsProfiler.fields(), output_path)
+ metrics_profiler = MetricsProfiler(output)
+
+ parser = CodegenParser(Path(cache_dir) / "repositories", metrics_profiler)
+ parser.parse(url, commit_hash)
+
+
+@cli.command()
+@click.option(
+    "--source",
+    type=click.Choice(list(all_sources.keys())),
+    default="csv",
+)
+@click.option(
+    "--output-path",
+    type=click.Path(dir_okay=True),
+    help="Output path",
+    default="metrics.csv",
+)
+@click.option(
+    "--error-output-path",
+    type=click.Path(dir_okay=True),
+    help="Error output path",
+    default="errors.log",
+)
+@click.option(
+    "--cache-dir",
+    type=click.Path(dir_okay=True),
+    help="Cache directory",
+    default=cachedir,
+)
+@click.option(
+    "--debug",
+    is_flag=True,
+    help="Debug mode",
+)
+def run(
+    source: str,
+    output_path: str,
+    error_output_path: str,
+    cache_dir: str,
+    debug: bool,
+):
+    """
+    Run codegen parsing pipeline on repositories from a given repository source.
+
+    Args:
+        source: Key of the repository source to iterate (one of all_sources).
+        output_path: CSV file the collected metrics are written to.
+        error_output_path: Log file that receives ERROR-level records.
+        cache_dir: Directory used to cache cloned repositories.
+        debug: When set, log at DEBUG level instead of INFO.
+    """
+    # Route errors to the error log and normal output to stdout
+    logger.add(
+        error_output_path, format="{time: HH:mm:ss} {level} {message}", level="ERROR"
+    )
+    logger.add(
+        sys.stdout,
+        format="{time: HH:mm:ss} {level} {message}",
+        level="DEBUG" if debug else "INFO",
+    )
+
+    # Iterate (url, commit) pairs from the chosen source and parse each repo
+    repo_source = RepoSource.from_source_type(source)
+    output = CSVOutput(MetricsProfiler.fields(), output_path)
+    metrics_profiler = MetricsProfiler(output)
+    parser = CodegenParser(Path(cache_dir) / "repositories", metrics_profiler)
+    for repo_url, commit_hash in repo_source:
+        parser.parse(repo_url, commit_hash)
+
+
+if __name__ == "__main__":
+ cli()
diff --git a/codegen-on-oss/codegen_on_oss/errors.py b/codegen-on-oss/codegen_on_oss/errors.py
new file mode 100644
index 000000000..7e00dc085
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/errors.py
@@ -0,0 +1,6 @@
+class ParseRunError(Exception):
+ pass
+
+
+class PostValidationError(ParseRunError):
+ pass
diff --git a/codegen-on-oss/codegen_on_oss/frontend/app/favicon.ico b/codegen-on-oss/codegen_on_oss/frontend/app/favicon.ico
new file mode 100644
index 000000000..fd8587746
Binary files /dev/null and b/codegen-on-oss/codegen_on_oss/frontend/app/favicon.ico differ
diff --git a/codegen-on-oss/codegen_on_oss/frontend/app/globals.css b/codegen-on-oss/codegen_on_oss/frontend/app/globals.css
new file mode 100644
index 000000000..da39a2b85
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/frontend/app/globals.css
@@ -0,0 +1,94 @@
+@tailwind base;
+@tailwind components;
+@tailwind utilities;
+
+body {
+ font-family: 'Inter', sans-serif;
+}
+
+@layer utilities {
+ .text-balance {
+ text-wrap: balance;
+ }
+}
+
+@layer base {
+ :root {
+ --background: 0 0% 100%;
+ --foreground: 0 0% 3.9%;
+ --card: 0 0% 100%;
+ --card-foreground: 0 0% 3.9%;
+ --popover: 0 0% 100%;
+ --popover-foreground: 0 0% 3.9%;
+ --primary: 0 0% 9%;
+ --primary-foreground: 0 0% 98%;
+ --secondary: 0 0% 96.1%;
+ --secondary-foreground: 0 0% 9%;
+ --muted: 0 0% 96.1%;
+ --muted-foreground: 0 0% 45.1%;
+ --accent: 0 0% 96.1%;
+ --accent-foreground: 0 0% 9%;
+ --destructive: 0 84.2% 60.2%;
+ --destructive-foreground: 0 0% 98%;
+ --border: 0 0% 89.8%;
+ --input: 0 0% 89.8%;
+ --ring: 0 0% 3.9%;
+ --chart-1: 12 76% 61%;
+ --chart-2: 173 58% 39%;
+ --chart-3: 197 37% 24%;
+ --chart-4: 43 74% 66%;
+ --chart-5: 27 87% 67%;
+ --radius: 0.5rem;
+ --sidebar-background: 0 0% 98%;
+ --sidebar-foreground: 240 5.3% 26.1%;
+ --sidebar-primary: 240 5.9% 10%;
+ --sidebar-primary-foreground: 0 0% 98%;
+ --sidebar-accent: 240 4.8% 95.9%;
+ --sidebar-accent-foreground: 240 5.9% 10%;
+ --sidebar-border: 220 13% 91%;
+ --sidebar-ring: 217.2 91.2% 59.8%;
+ }
+ .dark {
+ --background: 0 0% 3.9%;
+ --foreground: 0 0% 98%;
+ --card: 0 0% 3.9%;
+ --card-foreground: 0 0% 98%;
+ --popover: 0 0% 3.9%;
+ --popover-foreground: 0 0% 98%;
+ --primary: 0 0% 98%;
+ --primary-foreground: 0 0% 9%;
+ --secondary: 0 0% 14.9%;
+ --secondary-foreground: 0 0% 98%;
+ --muted: 0 0% 14.9%;
+ --muted-foreground: 0 0% 63.9%;
+ --accent: 0 0% 14.9%;
+ --accent-foreground: 0 0% 98%;
+ --destructive: 0 62.8% 30.6%;
+ --destructive-foreground: 0 0% 98%;
+ --border: 0 0% 14.9%;
+ --input: 0 0% 14.9%;
+ --ring: 0 0% 83.1%;
+ --chart-1: 220 70% 50%;
+ --chart-2: 160 60% 45%;
+ --chart-3: 30 80% 55%;
+ --chart-4: 280 65% 60%;
+ --chart-5: 340 75% 55%;
+ --sidebar-background: 240 5.9% 10%;
+ --sidebar-foreground: 240 4.8% 95.9%;
+ --sidebar-primary: 224.3 76.3% 48%;
+ --sidebar-primary-foreground: 0 0% 100%;
+ --sidebar-accent: 240 3.7% 15.9%;
+ --sidebar-accent-foreground: 240 4.8% 95.9%;
+ --sidebar-border: 240 3.7% 15.9%;
+ --sidebar-ring: 217.2 91.2% 59.8%;
+ }
+}
+
+@layer base {
+ * {
+ @apply border-border;
+ }
+ body {
+ @apply bg-background text-foreground;
+ }
+}
diff --git a/codegen-on-oss/codegen_on_oss/frontend/app/layout.tsx b/codegen-on-oss/codegen_on_oss/frontend/app/layout.tsx
new file mode 100644
index 000000000..0458c8b53
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/frontend/app/layout.tsx
@@ -0,0 +1,30 @@
+import "@/styles/globals.css"
+import type { Metadata } from "next"
+import type React from "react" // Import React
+
+import { ThemeProvider } from "@/components/theme-provider"
+
+export const metadata: Metadata = {
+ title: "Codebase Analytics Dashboard",
+ description: "Analytics dashboard for public GitHub repositories",
+}
+
+export default function RootLayout({
+ children,
+}: {
+ children: React.ReactNode
+}) {
+ return (
+
+
+
+ {children}
+
+
+
+ )
+}
+
+
+
+import './globals.css'
\ No newline at end of file
diff --git a/codegen-on-oss/codegen_on_oss/frontend/app/page.tsx b/codegen-on-oss/codegen_on_oss/frontend/app/page.tsx
new file mode 100644
index 000000000..04141cce1
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/frontend/app/page.tsx
@@ -0,0 +1,12 @@
+import type { Metadata } from "next"
+import RepoAnalyticsDashboard from "@/components/repo-analytics-dashboard"
+
+export const metadata: Metadata = {
+ title: "Codebase Analytics",
+ description: "Analytics dashboard for public GitHub repositories",
+}
+
+export default function Page() {
+ return
+}
+
diff --git a/codegen-on-oss/codegen_on_oss/frontend/components.json b/codegen-on-oss/codegen_on_oss/frontend/components.json
new file mode 100644
index 000000000..d9ef0ae53
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/frontend/components.json
@@ -0,0 +1,21 @@
+{
+ "$schema": "https://ui.shadcn.com/schema.json",
+ "style": "default",
+ "rsc": true,
+ "tsx": true,
+ "tailwind": {
+ "config": "tailwind.config.ts",
+ "css": "app/globals.css",
+ "baseColor": "neutral",
+ "cssVariables": true,
+ "prefix": ""
+ },
+ "aliases": {
+ "components": "@/components",
+ "utils": "@/lib/utils",
+ "ui": "@/components/ui",
+ "lib": "@/lib",
+ "hooks": "@/hooks"
+ },
+ "iconLibrary": "lucide"
+}
\ No newline at end of file
diff --git a/codegen-on-oss/codegen_on_oss/frontend/components/repo-analytics-dashboard.tsx b/codegen-on-oss/codegen_on_oss/frontend/components/repo-analytics-dashboard.tsx
new file mode 100644
index 000000000..5f9da593e
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/frontend/components/repo-analytics-dashboard.tsx
@@ -0,0 +1,443 @@
+"use client"
+
+import { useState } from "react"
+import { BarChart3, Code2, FileCode2, GitBranch, Github, Settings, MessageSquare, FileText, Code, RefreshCcw, PaintBucket, Brain } from "lucide-react"
+import { Bar, BarChart, ResponsiveContainer, XAxis, YAxis } from "recharts"
+
+import { Button } from "@/components/ui/button"
+import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card"
+import { Input } from "@/components/ui/input"
+
+const mockRepoData = {
+ name: "vercel/next.js",
+ description: "The React Framework for the Web",
+ linesOfCode: 154321,
+ cyclomaticComplexity: 15.7,
+ depthOfInheritance: 3.2,
+ halsteadVolume: 987654,
+ maintainabilityIndex: 85,
+ commentDensity: 18.5,
+ sloc: 132456,
+ lloc: 98765,
+ numberOfFiles: 1200,
+ numberOfFunctions: 4500,
+ numberOfClasses: 300,
+}
+
+const mockCommitData = [
+ { month: "October", commits: 130 },
+ { month: "September", commits: 150 },
+ { month: "August", commits: 120 },
+ { month: "July", commits: 110 },
+ { month: "June", commits: 140 },
+ { month: "May", commits: 160 },
+ { month: "April", commits: 170 },
+ { month: "March", commits: 180 },
+ { month: "February", commits: 190 },
+ { month: "January", commits: 200 },
+ { month: "December", commits: 210 },
+ { month: "November", commits: 220 },
+];
+
+interface RepoAnalyticsResponse {
+ repo_url: string;
+ line_metrics: {
+ total: {
+ loc: number;
+ lloc: number;
+ sloc: number;
+ comments: number;
+ comment_density: number;
+ }
+ };
+ cyclomatic_complexity: { average: number };
+ depth_of_inheritance: { average: number };
+ halstead_metrics: {
+ total_volume: number;
+ average_volume: number;
+ };
+ maintainability_index: { average: number };
+ description: string;
+ num_files: number;
+ num_functions: number;
+ num_classes: number;
+ monthly_commits: Record;
+}
+
+interface RepoData {
+ name: string;
+ description: string;
+ linesOfCode: number;
+ cyclomaticComplexity: number;
+ depthOfInheritance: number;
+ halsteadVolume: number;
+ maintainabilityIndex: number;
+ commentDensity: number;
+ sloc: number;
+ lloc: number;
+ numberOfFiles: number;
+ numberOfFunctions: number;
+ numberOfClasses: number;
+}
+
+export default function RepoAnalyticsDashboard() {
+ const [repoUrl, setRepoUrl] = useState("")
+ const [repoData, setRepoData] = useState(mockRepoData)
+ const [hoveredCard, setHoveredCard] = useState(null)
+ const [commitData, setCommitData] = useState(mockCommitData)
+ const [isLoading, setIsLoading] = useState(false)
+ const [isLandingPage, setIsLandingPage] = useState(true)
+
+ const parseRepoUrl = (input: string): string => {
+ if (input.includes('github.com')) {
+ const url = new URL(input);
+ const pathParts = url.pathname.split('/').filter(Boolean);
+ if (pathParts.length >= 2) {
+ return `${pathParts[0]}/${pathParts[1]}`;
+ }
+ }
+ return input;
+ };
+
+ const handleFetchRepo = async () => {
+ console.log("Fetching repo data...");
+
+ const parsedRepoUrl = parseRepoUrl(repoUrl);
+ console.log(parsedRepoUrl);
+
+ setIsLoading(true);
+ setIsLandingPage(false);
+
+ try {
+ console.log("Fetching repo data...");
+ // https://codegen-sh-staging--analytics-app-fastapi-modal-app.modal.run/analyze_repo
+ // https://codegen-sh-staging--analytics-app-fastapi-modal-app-dev.modal.run/analyze_repo
+ const response = await fetch('https://codegen-sh--analytics-app-fastapi-modal-app.modal.run/analyze_repo', {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ 'Accept': 'application/json'
+ },
+ body: JSON.stringify({ repo_url: parsedRepoUrl }),
+ });
+
+ if (!response.ok) {
+ throw new Error(`HTTP error! status: ${response.status}`);
+ }
+
+ const data: RepoAnalyticsResponse = await response.json();
+
+ setRepoData({
+ name: parsedRepoUrl,
+ description: data.description,
+ linesOfCode: data.line_metrics.total.loc,
+ cyclomaticComplexity: data.cyclomatic_complexity.average,
+ depthOfInheritance: data.depth_of_inheritance.average,
+ halsteadVolume: data.halstead_metrics.total_volume,
+ maintainabilityIndex: data.maintainability_index.average,
+ commentDensity: data.line_metrics.total.comment_density,
+ sloc: data.line_metrics.total.sloc,
+ lloc: data.line_metrics.total.lloc,
+ numberOfFiles: data.num_files,
+ numberOfFunctions: data.num_functions,
+ numberOfClasses: data.num_classes,
+ });
+
+ const transformedCommitData = Object.entries(data.monthly_commits)
+ .map(([date, commits]) => ({
+ month: new Date(date).toLocaleString('default', { month: 'long' }),
+ commits,
+ }))
+ .slice(0, 12)
+ .reverse();
+
+ setCommitData(transformedCommitData);
+ } catch (error) {
+ console.error('Error fetching repo data:', error);
+ alert('Error fetching repository data. Please check the URL and try again.');
+ setIsLandingPage(true);
+ } finally {
+ setIsLoading(false);
+ }
+ };
+
+ const handleMouseEnter = (cardName: string) => {
+ setHoveredCard(cardName)
+ }
+
+ const handleMouseLeave = () => {
+ setHoveredCard(null)
+ }
+
+ const handleKeyPress = (e: React.KeyboardEvent) => {
+ if (e.key === 'Enter') {
+ handleFetchRepo();
+ }
+ }
+
+function calculateCodebaseGrade(data: RepoData) {
+ const { maintainabilityIndex } = data;
+
+ if (maintainabilityIndex >= 90) return 'A+';
+ if (maintainabilityIndex >= 85) return 'A';
+ if (maintainabilityIndex >= 80) return 'A-';
+ if (maintainabilityIndex >= 75) return 'B+';
+ if (maintainabilityIndex >= 70) return 'B';
+ if (maintainabilityIndex >= 65) return 'B-';
+ if (maintainabilityIndex >= 60) return 'C+';
+ if (maintainabilityIndex >= 55) return 'C';
+ if (maintainabilityIndex >= 50) return 'C-';
+ if (maintainabilityIndex >= 45) return 'D+';
+ if (maintainabilityIndex >= 40) return 'D';
+ return 'F';
+}
+
+
+
+
+ return (
+
+ {isLandingPage ? (
+
+
+
+
+ Codebase Analytics
+
+
Effortlessly calculate GitHub repository metrics in seconds
+
+
+ setRepoUrl(e.target.value)}
+ onKeyPress={handleKeyPress}
+ className="flex-1"
+ title="Format: https://github.com/owner/repo or owner/repo"
+ />
+
+ {isLoading ? "Loading..." : "Analyze"}
+
+
+
+
+ ) : isLoading ? (
+
+
+
Analyzing Repository
+
Please wait while we calculate codebase metrics with Codegen...
+
+
+
+ ) : (
+
+
+
+
+
+
window.location.reload()}
+ >
+
+ Codebase Analytics
+
+
+
+ setRepoUrl(e.target.value)}
+ onKeyPress={handleKeyPress}
+ className="w-[320px]"
+ title="Format: https://github.com/owner/repo or owner/repo"
+ />
+
+ {isLoading ? "Loading..." : "Analyze"}
+
+
+
+
+
+
+
+
+
+ Repository
+
+
+
+
+ {repoData.name}
+ {repoData.description}
+
+
+
+
+ {repoData.numberOfFiles.toLocaleString()} Files
+
+
+
+ {repoData.numberOfFunctions.toLocaleString()} Functions
+
+
+
+ {repoData.numberOfClasses.toLocaleString()} Classes
+
+
+
+
+
+
+
handleMouseEnter('Maintainability Index')} onMouseLeave={handleMouseLeave}>
+
+ Maintainability Index
+
+
+
+ {repoData.maintainabilityIndex}
+
+ {hoveredCard === 'Maintainability Index' ? 'This evaluates how easy it is to understand, modify, and maintain a codebase (ranging from 0 to 100).' : 'Code maintainability score (0-100)'}
+
+
+
+
handleMouseEnter('Cyclomatic Complexity')} onMouseLeave={handleMouseLeave}>
+
+ Cyclomatic Complexity
+
+
+
+ {repoData.cyclomaticComplexity.toFixed(1)}
+
+ {hoveredCard === 'Cyclomatic Complexity' ? 'This measures the number of independent paths through a program\'s source code' : 'Average complexity score'}
+
+
+
+
handleMouseEnter('Halstead Volume')} onMouseLeave={handleMouseLeave}>
+
+ Halstead Volume
+
+
+
+ {repoData.halsteadVolume.toLocaleString()}
+
+ {hoveredCard === 'Halstead Volume' ? 'This quantifies the amount of information in a program by measuring the size and complexity of its code using operators and operands.' : 'Code volume metric'}
+
+
+
+
handleMouseEnter('Depth of Inheritance')} onMouseLeave={handleMouseLeave}>
+
+ Depth of Inheritance
+
+
+
+ {repoData.depthOfInheritance.toFixed(1)}
+
+ {hoveredCard === 'Depth of Inheritance' ? 'This is the average measure of the number of classes that a class inherits from.' : 'Average inheritance depth'}
+
+
+
+
handleMouseEnter('Lines of Code')} onMouseLeave={handleMouseLeave}>
+
+ Lines of Code
+
+
+
+ {repoData.linesOfCode.toLocaleString()}
+
+ {hoveredCard === 'Lines of Code' ? 'This is the total number of lines of code within this codebase.' : 'Total lines in the repository'}
+
+
+
+
handleMouseEnter('SLOC')} onMouseLeave={handleMouseLeave}>
+
+ SLOC
+
+
+
+ {repoData.sloc.toLocaleString()}
+
+ {hoveredCard === 'SLOC' ? 'This is the number of textual lines of code within the codebase, ignoring whitespace and comments.' : 'Source Lines of Code'}
+
+
+
+
handleMouseEnter('LLOC')} onMouseLeave={handleMouseLeave}>
+
+ LLOC
+
+
+
+ {repoData.lloc.toLocaleString()}
+
+ {hoveredCard === 'LLOC' ? 'This is the number of lines of code that contribute to executable statements in the codebase.' : 'Logical Lines of Code'}
+
+
+
+
handleMouseEnter('Comment Density')} onMouseLeave={handleMouseLeave}>
+
+ Comment Density
+
+
+
+ {repoData.commentDensity.toFixed(1)}%
+
+ {hoveredCard === 'Comment Density' ? 'This is the percentage of the lines in the codebase that are comments.' : 'Percentage of comments in code'}
+
+
+
+
+
+
+ Monthly Commits
+ Number of commits, batched by month over the past year
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Codebase Grade
+ Overall grade based on code metrics
+
+
+ {calculateCodebaseGrade(repoData)}
+
+
+
+
+
+
+ Codebase Complexity
+ Judgment based on size and complexity
+
+
+ {repoData.numberOfFiles > 1000 ? "Large" : "Moderate"}
+
+
+
+
+
+
+
+ )}
+
+ )
+}
\ No newline at end of file
diff --git a/codegen-on-oss/codegen_on_oss/frontend/components/theme-provider.tsx b/codegen-on-oss/codegen_on_oss/frontend/components/theme-provider.tsx
new file mode 100644
index 000000000..32797a7b5
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/frontend/components/theme-provider.tsx
@@ -0,0 +1,8 @@
+"use client"
+import { ThemeProvider as NextThemesProvider } from "next-themes"
+import type { ThemeProviderProps } from "next-themes"
+
+export function ThemeProvider({ children, ...props }: ThemeProviderProps) {
+ return {children}
+}
+
diff --git a/codegen-on-oss/codegen_on_oss/frontend/components/ui/accordion.tsx b/codegen-on-oss/codegen_on_oss/frontend/components/ui/accordion.tsx
new file mode 100644
index 000000000..24c788c2c
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/frontend/components/ui/accordion.tsx
@@ -0,0 +1,58 @@
+"use client"
+
+import * as React from "react"
+import * as AccordionPrimitive from "@radix-ui/react-accordion"
+import { ChevronDown } from "lucide-react"
+
+import { cn } from "@/lib/utils"
+
+const Accordion = AccordionPrimitive.Root
+
+const AccordionItem = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, ...props }, ref) => (
+
+))
+AccordionItem.displayName = "AccordionItem"
+
+const AccordionTrigger = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, children, ...props }, ref) => (
+
+ svg]:rotate-180",
+ className
+ )}
+ {...props}
+ >
+ {children}
+
+
+
+))
+AccordionTrigger.displayName = AccordionPrimitive.Trigger.displayName
+
+const AccordionContent = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, children, ...props }, ref) => (
+
+ {children}
+
+))
+
+AccordionContent.displayName = AccordionPrimitive.Content.displayName
+
+export { Accordion, AccordionItem, AccordionTrigger, AccordionContent }
diff --git a/codegen-on-oss/codegen_on_oss/frontend/components/ui/alert-dialog.tsx b/codegen-on-oss/codegen_on_oss/frontend/components/ui/alert-dialog.tsx
new file mode 100644
index 000000000..25e7b4744
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/frontend/components/ui/alert-dialog.tsx
@@ -0,0 +1,141 @@
+"use client"
+
+import * as React from "react"
+import * as AlertDialogPrimitive from "@radix-ui/react-alert-dialog"
+
+import { cn } from "@/lib/utils"
+import { buttonVariants } from "@/components/ui/button"
+
+const AlertDialog = AlertDialogPrimitive.Root
+
+const AlertDialogTrigger = AlertDialogPrimitive.Trigger
+
+const AlertDialogPortal = AlertDialogPrimitive.Portal
+
+const AlertDialogOverlay = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, ...props }, ref) => (
+
+))
+AlertDialogOverlay.displayName = AlertDialogPrimitive.Overlay.displayName
+
+const AlertDialogContent = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, ...props }, ref) => (
+
+
+
+
+))
+AlertDialogContent.displayName = AlertDialogPrimitive.Content.displayName
+
+const AlertDialogHeader = ({
+ className,
+ ...props
+}: React.HTMLAttributes) => (
+
+)
+AlertDialogHeader.displayName = "AlertDialogHeader"
+
+const AlertDialogFooter = ({
+ className,
+ ...props
+}: React.HTMLAttributes) => (
+
+)
+AlertDialogFooter.displayName = "AlertDialogFooter"
+
+const AlertDialogTitle = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, ...props }, ref) => (
+
+))
+AlertDialogTitle.displayName = AlertDialogPrimitive.Title.displayName
+
+const AlertDialogDescription = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, ...props }, ref) => (
+
+))
+AlertDialogDescription.displayName =
+ AlertDialogPrimitive.Description.displayName
+
+const AlertDialogAction = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, ...props }, ref) => (
+
+))
+AlertDialogAction.displayName = AlertDialogPrimitive.Action.displayName
+
+const AlertDialogCancel = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, ...props }, ref) => (
+
+))
+AlertDialogCancel.displayName = AlertDialogPrimitive.Cancel.displayName
+
+export {
+ AlertDialog,
+ AlertDialogPortal,
+ AlertDialogOverlay,
+ AlertDialogTrigger,
+ AlertDialogContent,
+ AlertDialogHeader,
+ AlertDialogFooter,
+ AlertDialogTitle,
+ AlertDialogDescription,
+ AlertDialogAction,
+ AlertDialogCancel,
+}
diff --git a/codegen-on-oss/codegen_on_oss/frontend/components/ui/alert.tsx b/codegen-on-oss/codegen_on_oss/frontend/components/ui/alert.tsx
new file mode 100644
index 000000000..41fa7e056
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/frontend/components/ui/alert.tsx
@@ -0,0 +1,59 @@
+import * as React from "react"
+import { cva, type VariantProps } from "class-variance-authority"
+
+import { cn } from "@/lib/utils"
+
+const alertVariants = cva(
+ "relative w-full rounded-lg border p-4 [&>svg~*]:pl-7 [&>svg+div]:translate-y-[-3px] [&>svg]:absolute [&>svg]:left-4 [&>svg]:top-4 [&>svg]:text-foreground",
+ {
+ variants: {
+ variant: {
+ default: "bg-background text-foreground",
+ destructive:
+ "border-destructive/50 text-destructive dark:border-destructive [&>svg]:text-destructive",
+ },
+ },
+ defaultVariants: {
+ variant: "default",
+ },
+ }
+)
+
+const Alert = React.forwardRef<
+ HTMLDivElement,
+ React.HTMLAttributes & VariantProps
+>(({ className, variant, ...props }, ref) => (
+
+))
+Alert.displayName = "Alert"
+
+const AlertTitle = React.forwardRef<
+ HTMLParagraphElement,
+ React.HTMLAttributes
+>(({ className, ...props }, ref) => (
+
+))
+AlertTitle.displayName = "AlertTitle"
+
+const AlertDescription = React.forwardRef<
+ HTMLParagraphElement,
+ React.HTMLAttributes
+>(({ className, ...props }, ref) => (
+
+))
+AlertDescription.displayName = "AlertDescription"
+
+export { Alert, AlertTitle, AlertDescription }
diff --git a/codegen-on-oss/codegen_on_oss/frontend/components/ui/aspect-ratio.tsx b/codegen-on-oss/codegen_on_oss/frontend/components/ui/aspect-ratio.tsx
new file mode 100644
index 000000000..d6a5226f5
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/frontend/components/ui/aspect-ratio.tsx
@@ -0,0 +1,7 @@
+"use client"
+
+import * as AspectRatioPrimitive from "@radix-ui/react-aspect-ratio"
+
+const AspectRatio = AspectRatioPrimitive.Root
+
+export { AspectRatio }
diff --git a/codegen-on-oss/codegen_on_oss/frontend/components/ui/avatar.tsx b/codegen-on-oss/codegen_on_oss/frontend/components/ui/avatar.tsx
new file mode 100644
index 000000000..51e507ba9
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/frontend/components/ui/avatar.tsx
@@ -0,0 +1,50 @@
+"use client"
+
+import * as React from "react"
+import * as AvatarPrimitive from "@radix-ui/react-avatar"
+
+import { cn } from "@/lib/utils"
+
+const Avatar = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, ...props }, ref) => (
+
+))
+Avatar.displayName = AvatarPrimitive.Root.displayName
+
+const AvatarImage = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, ...props }, ref) => (
+
+))
+AvatarImage.displayName = AvatarPrimitive.Image.displayName
+
+const AvatarFallback = React.forwardRef<
+ React.ElementRef,
+ React.ComponentPropsWithoutRef
+>(({ className, ...props }, ref) => (
+
+))
+AvatarFallback.displayName = AvatarPrimitive.Fallback.displayName
+
+export { Avatar, AvatarImage, AvatarFallback }
diff --git a/codegen-on-oss/codegen_on_oss/frontend/components/ui/badge.tsx b/codegen-on-oss/codegen_on_oss/frontend/components/ui/badge.tsx
new file mode 100644
index 000000000..f000e3ef5
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/frontend/components/ui/badge.tsx
@@ -0,0 +1,36 @@
+import * as React from "react"
+import { cva, type VariantProps } from "class-variance-authority"
+
+import { cn } from "@/lib/utils"
+
+const badgeVariants = cva(
+ "inline-flex items-center rounded-full border px-2.5 py-0.5 text-xs font-semibold transition-colors focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2",
+ {
+ variants: {
+ variant: {
+ default:
+ "border-transparent bg-primary text-primary-foreground hover:bg-primary/80",
+ secondary:
+ "border-transparent bg-secondary text-secondary-foreground hover:bg-secondary/80",
+ destructive:
+ "border-transparent bg-destructive text-destructive-foreground hover:bg-destructive/80",
+ outline: "text-foreground",
+ },
+ },
+ defaultVariants: {
+ variant: "default",
+ },
+ }
+)
+
+export interface BadgeProps
+ extends React.HTMLAttributes,
+ VariantProps {}
+
+function Badge({ className, variant, ...props }: BadgeProps) {
+ return (
+
+ )
+}
+
+export { Badge, badgeVariants }
diff --git a/codegen-on-oss/codegen_on_oss/frontend/components/ui/breadcrumb.tsx b/codegen-on-oss/codegen_on_oss/frontend/components/ui/breadcrumb.tsx
new file mode 100644
index 000000000..60e6c96f7
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/frontend/components/ui/breadcrumb.tsx
@@ -0,0 +1,115 @@
+import * as React from "react"
+import { Slot } from "@radix-ui/react-slot"
+import { ChevronRight, MoreHorizontal } from "lucide-react"
+
+import { cn } from "@/lib/utils"
+
+const Breadcrumb = React.forwardRef<
+ HTMLElement,
+ React.ComponentPropsWithoutRef<"nav"> & {
+ separator?: React.ReactNode
+ }
+>(({ ...props }, ref) => )
+Breadcrumb.displayName = "Breadcrumb"
+
+const BreadcrumbList = React.forwardRef<
+ HTMLOListElement,
+ React.ComponentPropsWithoutRef<"ol">
+>(({ className, ...props }, ref) => (
+
+))
+BreadcrumbList.displayName = "BreadcrumbList"
+
+const BreadcrumbItem = React.forwardRef<
+ HTMLLIElement,
+ React.ComponentPropsWithoutRef<"li">
+>(({ className, ...props }, ref) => (
+
+))
+BreadcrumbItem.displayName = "BreadcrumbItem"
+
+const BreadcrumbLink = React.forwardRef<
+ HTMLAnchorElement,
+ React.ComponentPropsWithoutRef<"a"> & {
+ asChild?: boolean
+ }
+>(({ asChild, className, ...props }, ref) => {
+ const Comp = asChild ? Slot : "a"
+
+ return (
+
+ )
+})
+BreadcrumbLink.displayName = "BreadcrumbLink"
+
+const BreadcrumbPage = React.forwardRef<
+ HTMLSpanElement,
+ React.ComponentPropsWithoutRef<"span">
+>(({ className, ...props }, ref) => (
+
+))
+BreadcrumbPage.displayName = "BreadcrumbPage"
+
+const BreadcrumbSeparator = ({
+ children,
+ className,
+ ...props
+}: React.ComponentProps<"li">) => (
+ svg]:w-3.5 [&>svg]:h-3.5", className)}
+ {...props}
+ >
+ {children ?? }
+
+)
+BreadcrumbSeparator.displayName = "BreadcrumbSeparator"
+
+const BreadcrumbEllipsis = ({
+ className,
+ ...props
+}: React.ComponentProps<"span">) => (
+
+
+ More
+
+)
+BreadcrumbEllipsis.displayName = "BreadcrumbElipssis"
+
+export {
+ Breadcrumb,
+ BreadcrumbList,
+ BreadcrumbItem,
+ BreadcrumbLink,
+ BreadcrumbPage,
+ BreadcrumbSeparator,
+ BreadcrumbEllipsis,
+}
diff --git a/codegen-on-oss/codegen_on_oss/frontend/components/ui/button.tsx b/codegen-on-oss/codegen_on_oss/frontend/components/ui/button.tsx
new file mode 100644
index 000000000..36496a287
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/frontend/components/ui/button.tsx
@@ -0,0 +1,56 @@
+import * as React from "react"
+import { Slot } from "@radix-ui/react-slot"
+import { cva, type VariantProps } from "class-variance-authority"
+
+import { cn } from "@/lib/utils"
+
+const buttonVariants = cva(
+ "inline-flex items-center justify-center gap-2 whitespace-nowrap rounded-md text-sm font-medium ring-offset-background transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50 [&_svg]:pointer-events-none [&_svg]:size-4 [&_svg]:shrink-0",
+ {
+ variants: {
+ variant: {
+ default: "bg-primary text-primary-foreground hover:bg-primary/90",
+ destructive:
+ "bg-destructive text-destructive-foreground hover:bg-destructive/90",
+ outline:
+ "border border-input bg-background hover:bg-accent hover:text-accent-foreground",
+ secondary:
+ "bg-secondary text-secondary-foreground hover:bg-secondary/80",
+ ghost: "hover:bg-accent hover:text-accent-foreground",
+ link: "text-primary underline-offset-4 hover:underline",
+ },
+ size: {
+ default: "h-10 px-4 py-2",
+ sm: "h-9 rounded-md px-3",
+ lg: "h-11 rounded-md px-8",
+ icon: "h-10 w-10",
+ },
+ },
+ defaultVariants: {
+ variant: "default",
+ size: "default",
+ },
+ }
+)
+
+export interface ButtonProps
+ extends React.ButtonHTMLAttributes,
+ VariantProps {
+ asChild?: boolean
+}
+
+const Button = React.forwardRef(
+ ({ className, variant, size, asChild = false, ...props }, ref) => {
+ const Comp = asChild ? Slot : "button"
+ return (
+
+ )
+ }
+)
+Button.displayName = "Button"
+
+export { Button, buttonVariants }
diff --git a/codegen-on-oss/codegen_on_oss/frontend/components/ui/calendar.tsx b/codegen-on-oss/codegen_on_oss/frontend/components/ui/calendar.tsx
new file mode 100644
index 000000000..61d2b451e
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/frontend/components/ui/calendar.tsx
@@ -0,0 +1,66 @@
+"use client"
+
+import * as React from "react"
+import { ChevronLeft, ChevronRight } from "lucide-react"
+import { DayPicker } from "react-day-picker"
+
+import { cn } from "@/lib/utils"
+import { buttonVariants } from "@/components/ui/button"
+
+export type CalendarProps = React.ComponentProps
+
+function Calendar({
+ className,
+ classNames,
+ showOutsideDays = true,
+ ...props
+}: CalendarProps) {
+ return (
+ ,
+ IconRight: ({ ...props }) => ,
+ }}
+ {...props}
+ />
+ )
+}
+Calendar.displayName = "Calendar"
+
+export { Calendar }
diff --git a/codegen-on-oss/codegen_on_oss/frontend/components/ui/card.tsx b/codegen-on-oss/codegen_on_oss/frontend/components/ui/card.tsx
new file mode 100644
index 000000000..f62edea57
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/frontend/components/ui/card.tsx
@@ -0,0 +1,79 @@
+import * as React from "react"
+
+import { cn } from "@/lib/utils"
+
+const Card = React.forwardRef<
+ HTMLDivElement,
+ React.HTMLAttributes
+>(({ className, ...props }, ref) => (
+
+))
+Card.displayName = "Card"
+
+const CardHeader = React.forwardRef<
+ HTMLDivElement,
+ React.HTMLAttributes
+>(({ className, ...props }, ref) => (
+
+))
+CardHeader.displayName = "CardHeader"
+
+const CardTitle = React.forwardRef<
+ HTMLDivElement,
+ React.HTMLAttributes
+>(({ className, ...props }, ref) => (
+
+))
+CardTitle.displayName = "CardTitle"
+
+const CardDescription = React.forwardRef<
+ HTMLDivElement,
+ React.HTMLAttributes
+>(({ className, ...props }, ref) => (
+
+))
+CardDescription.displayName = "CardDescription"
+
+const CardContent = React.forwardRef<
+ HTMLDivElement,
+ React.HTMLAttributes
+>(({ className, ...props }, ref) => (
+
+))
+CardContent.displayName = "CardContent"
+
+const CardFooter = React.forwardRef<
+ HTMLDivElement,
+ React.HTMLAttributes
+>(({ className, ...props }, ref) => (
+
+))
+CardFooter.displayName = "CardFooter"
+
+export { Card, CardHeader, CardFooter, CardTitle, CardDescription, CardContent }
diff --git a/codegen-on-oss/codegen_on_oss/frontend/components/ui/carousel.tsx b/codegen-on-oss/codegen_on_oss/frontend/components/ui/carousel.tsx
new file mode 100644
index 000000000..ec505d00d
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/frontend/components/ui/carousel.tsx
@@ -0,0 +1,262 @@
+"use client"
+
+import * as React from "react"
+import useEmblaCarousel, {
+ type UseEmblaCarouselType,
+} from "embla-carousel-react"
+import { ArrowLeft, ArrowRight } from "lucide-react"
+
+import { cn } from "@/lib/utils"
+import { Button } from "@/components/ui/button"
+
+type CarouselApi = UseEmblaCarouselType[1]
+type UseCarouselParameters = Parameters
+type CarouselOptions = UseCarouselParameters[0]
+type CarouselPlugin = UseCarouselParameters[1]
+
+type CarouselProps = {
+ opts?: CarouselOptions
+ plugins?: CarouselPlugin
+ orientation?: "horizontal" | "vertical"
+ setApi?: (api: CarouselApi) => void
+}
+
+type CarouselContextProps = {
+ carouselRef: ReturnType[0]
+ api: ReturnType[1]
+ scrollPrev: () => void
+ scrollNext: () => void
+ canScrollPrev: boolean
+ canScrollNext: boolean
+} & CarouselProps
+
+const CarouselContext = React.createContext(null)
+
+function useCarousel() {
+ const context = React.useContext(CarouselContext)
+
+ if (!context) {
+ throw new Error("useCarousel must be used within a ")
+ }
+
+ return context
+}
+
+const Carousel = React.forwardRef<
+ HTMLDivElement,
+ React.HTMLAttributes & CarouselProps
+>(
+ (
+ {
+ orientation = "horizontal",
+ opts,
+ setApi,
+ plugins,
+ className,
+ children,
+ ...props
+ },
+ ref
+ ) => {
+ const [carouselRef, api] = useEmblaCarousel(
+ {
+ ...opts,
+ axis: orientation === "horizontal" ? "x" : "y",
+ },
+ plugins
+ )
+ const [canScrollPrev, setCanScrollPrev] = React.useState(false)
+ const [canScrollNext, setCanScrollNext] = React.useState(false)
+
+ const onSelect = React.useCallback((api: CarouselApi) => {
+ if (!api) {
+ return
+ }
+
+ setCanScrollPrev(api.canScrollPrev())
+ setCanScrollNext(api.canScrollNext())
+ }, [])
+
+ const scrollPrev = React.useCallback(() => {
+ api?.scrollPrev()
+ }, [api])
+
+ const scrollNext = React.useCallback(() => {
+ api?.scrollNext()
+ }, [api])
+
+ const handleKeyDown = React.useCallback(
+ (event: React.KeyboardEvent) => {
+ if (event.key === "ArrowLeft") {
+ event.preventDefault()
+ scrollPrev()
+ } else if (event.key === "ArrowRight") {
+ event.preventDefault()
+ scrollNext()
+ }
+ },
+ [scrollPrev, scrollNext]
+ )
+
+ React.useEffect(() => {
+ if (!api || !setApi) {
+ return
+ }
+
+ setApi(api)
+ }, [api, setApi])
+
+ React.useEffect(() => {
+ if (!api) {
+ return
+ }
+
+ onSelect(api)
+ api.on("reInit", onSelect)
+ api.on("select", onSelect)
+
+ return () => {
+ api?.off("select", onSelect)
+ }
+ }, [api, onSelect])
+
+ return (
+
+
+ {children}
+
+
+ )
+ }
+)
+Carousel.displayName = "Carousel"
+
+const CarouselContent = React.forwardRef<
+ HTMLDivElement,
+ React.HTMLAttributes
+>(({ className, ...props }, ref) => {
+ const { carouselRef, orientation } = useCarousel()
+
+ return (
+
+ )
+})
+CarouselContent.displayName = "CarouselContent"
+
+const CarouselItem = React.forwardRef<
+ HTMLDivElement,
+ React.HTMLAttributes
+>(({ className, ...props }, ref) => {
+ const { orientation } = useCarousel()
+
+ return (
+
+ )
+})
+CarouselItem.displayName = "CarouselItem"
+
+const CarouselPrevious = React.forwardRef<
+ HTMLButtonElement,
+ React.ComponentProps
+>(({ className, variant = "outline", size = "icon", ...props }, ref) => {
+ const { orientation, scrollPrev, canScrollPrev } = useCarousel()
+
+ return (
+
+
+ Previous slide
+
+ )
+})
+CarouselPrevious.displayName = "CarouselPrevious"
+
+const CarouselNext = React.forwardRef<
+ HTMLButtonElement,
+ React.ComponentProps
+>(({ className, variant = "outline", size = "icon", ...props }, ref) => {
+ const { orientation, scrollNext, canScrollNext } = useCarousel()
+
+ return (
+
+
+ Next slide
+
+ )
+})
+CarouselNext.displayName = "CarouselNext"
+
+export {
+ type CarouselApi,
+ Carousel,
+ CarouselContent,
+ CarouselItem,
+ CarouselPrevious,
+ CarouselNext,
+}
diff --git a/codegen-on-oss/codegen_on_oss/frontend/components/ui/chart.tsx b/codegen-on-oss/codegen_on_oss/frontend/components/ui/chart.tsx
new file mode 100644
index 000000000..8620baa3b
--- /dev/null
+++ b/codegen-on-oss/codegen_on_oss/frontend/components/ui/chart.tsx
@@ -0,0 +1,365 @@
+"use client"
+
+import * as React from "react"
+import * as RechartsPrimitive from "recharts"
+
+import { cn } from "@/lib/utils"
+
+// Format: { THEME_NAME: CSS_SELECTOR }
+const THEMES = { light: "", dark: ".dark" } as const
+
+export type ChartConfig = {
+ [k in string]: {
+ label?: React.ReactNode
+ icon?: React.ComponentType
+ } & (
+ | { color?: string; theme?: never }
+ | { color?: never; theme: Record }
+ )
+}
+
+type ChartContextProps = {
+ config: ChartConfig
+}
+
+const ChartContext = React.createContext(null)
+
+function useChart() {
+ const context = React.useContext(ChartContext)
+
+ if (!context) {
+ throw new Error("useChart must be used within a ")
+ }
+
+ return context
+}
+
+const ChartContainer = React.forwardRef<
+ HTMLDivElement,
+ React.ComponentProps<"div"> & {
+ config: ChartConfig
+ children: React.ComponentProps<
+ typeof RechartsPrimitive.ResponsiveContainer
+ >["children"]
+ }
+>(({ id, className, children, config, ...props }, ref) => {
+ const uniqueId = React.useId()
+ const chartId = `chart-${id || uniqueId.replace(/:/g, "")}`
+
+ return (
+
+
+
+
+ {children}
+
+
+
+ )
+})
+ChartContainer.displayName = "Chart"
+
+const ChartStyle = ({ id, config }: { id: string; config: ChartConfig }) => {
+ const colorConfig = Object.entries(config).filter(
+ ([_, config]) => config.theme || config.color
+ )
+
+ if (!colorConfig.length) {
+ return null
+ }
+
+ return (
+