diff --git a/server/pyproject.toml b/server/pyproject.toml index 3836168..45aba02 100644 --- a/server/pyproject.toml +++ b/server/pyproject.toml @@ -16,6 +16,7 @@ dependencies = [ "openai>=2.14.0", "pandas>=2.3.3", "pydantic>=2.12.5", + "pydantic-ai-slim[openai,openrouter,retries]>=1.52.0", "pytest>=9.0.2", "pytest-asyncio>=1.3.0", "pytest-cov>=7.0.0", diff --git a/server/src/celery/tasks.py b/server/src/celery/tasks.py index 3fe84af..83c6298 100644 --- a/server/src/celery/tasks.py +++ b/server/src/celery/tasks.py @@ -1,8 +1,11 @@ import asyncio import logging -import random from typing import Dict +from httpx import AsyncClient, HTTPStatusError, TransportError +from pydantic_ai.retries import AsyncTenacityTransport, RetryConfig, wait_retry_after +from tenacity import retry_if_exception_type, stop_after_attempt, wait_exponential + from celery import Task from src.crud.jobtask_crud import JobTaskCrud from src.crud.project_crud import ProjectCrud @@ -43,28 +46,24 @@ async def _publish_redis_event(redis, event_name, value): ) -async def _run_with_retry( - func, max_retries: int = 3, base_delay: float = 1.0, jitter: float = 0.3 -): - retries = 0 - while True: - try: - return await func() - except Exception as e: - retries += 1 - if retries > max_retries: - raise - - delay = base_delay * (2 ** (retries - 1)) - - jitter_amount = delay * jitter - delay = delay + random.uniform(-jitter_amount, jitter_amount) - - logger.warning( - f"Retrying job task (attempt {retries}/{max_retries}) - Error: {e}" - ) - - await asyncio.sleep(delay) +def _create_retrying_client(max_attempts: int = 3, max_wait_seconds=60) -> AsyncClient: + def should_retry_status(response): + if response.status_code in (429, 502, 503, 504): + response.raise_for_status() + + transport = AsyncTenacityTransport( + config=RetryConfig( + retry=retry_if_exception_type((HTTPStatusError, TransportError)), + wait=wait_retry_after( + fallback_strategy=wait_exponential(multiplier=1, max=60), + max_wait=max_wait_seconds, + ), + 
stop=stop_after_attempt(max_attempts), + reraise=True, + ), + validate_response=should_retry_status, + ) + return AsyncClient(transport=transport) async def _process_job_task( @@ -76,7 +75,7 @@ async def _process_job_task( redis, counter: Dict[str, int], counter_lock: asyncio.Lock, - max_retries: int, + client: AsyncClient, ): async with semaphore: try: @@ -98,15 +97,13 @@ async def _process_job_task( {"job_task_id": job_task.id, "status": JobTaskStatus.RUNNING}, ) - llm_result = await _run_with_retry( - lambda: get_structured_response( - llm_service, - paper_service, - job_task, - job_data, - project_criteria, - ), - max_retries=max_retries, + llm_result = await get_structured_response( + llm_service, + paper_service, + job_task, + job_data, + project_criteria, + client, ) await jobtask_crud.update_job_task_result(job_task.id, llm_result) @@ -179,6 +176,8 @@ async def process_job( logger.info("process_job: Starting to process job %s", job_id) redis = get_redis_client() + client = _create_retrying_client(max_attempts=max_retries) + async with DBContext() as db_ctx: project_crud = db_ctx.crud(ProjectCrud) jobtask_crud = db_ctx.crud(JobTaskCrud) @@ -211,7 +210,7 @@ async def process_job( redis=redis, counter=counter, counter_lock=counter_lock, - max_retries=max_retries, + client=client, ) for jt_id in job_task_ids ] diff --git a/server/src/core/llm/providers/local_openai_sdk.py b/server/src/core/llm/providers/local_openai_sdk.py index c7d305f..465800b 100644 --- a/server/src/core/llm/providers/local_openai_sdk.py +++ b/server/src/core/llm/providers/local_openai_sdk.py @@ -1,9 +1,14 @@ from typing import Any, List, Type +from httpx import AsyncClient +from openai.types.model import Model from pydantic import BaseModel, Field +from pydantic_ai import Agent +from pydantic_ai.models.openai import OpenAIResponsesModel, OpenAIResponsesModelSettings +from pydantic_ai.output import ToolOutput +from pydantic_ai.providers.openai import OpenAIProvider as PAI_OpenAIProvider 
-from src.core.llm.providers.provider import T, BaseLLMParams, LLMProvider -from openai.types.model import Model +from src.core.llm.providers.provider import BaseLLMParams, LLMProvider, T class LocalOpenAISDKProviderParams(BaseModel): @@ -29,42 +34,51 @@ class LocalOpenAISDKProvider( config_parameters = [] async def generate_answer_async( - self, model_parameters: dict[str, Any], schema: Type[T], prompt: str - ) -> tuple[T, str]: + self, + client: AsyncClient, + model_parameters: dict[str, Any], + schema: Type[T], + prompt: str, + ) -> T: model_cfg = self.parse_model_parameters(model_parameters) - from openai import AsyncOpenAI - if self.provider_parameters is None: raise RuntimeError("Provider parameters needs to be defined") if self.runtime_parameters.model is None: raise RuntimeError("Model needs to be defined") - async with AsyncOpenAI( + provider = PAI_OpenAIProvider( api_key="Foo", base_url=self.provider_parameters.base_url, - ) as client: - try: - response = await client.responses.parse( - model=self.runtime_parameters.model, - input=[ - { - "role": "system", - "content": self.runtime_parameters.system_prompt, - }, - {"role": "user", "content": prompt}, - ], - top_p=model_cfg.top_p, - temperature=model_cfg.temperature, - # Structured Outputs is available in OpenAI's latest large language models, starting with GPT-4o - text_format=schema, - ) - if response.output_parsed is None: - raise RuntimeError("Output from LLM was empty") - return response.output_parsed, "" - except Exception as e: - raise RuntimeError("LLM call failed") from e + http_client=client, + ) + + settings = OpenAIResponsesModelSettings( + temperature=model_cfg.temperature, + top_p=model_cfg.top_p, + ) + + model = OpenAIResponsesModel( + str(self.runtime_parameters.model), + provider=provider, + settings=settings, + ) + + agent = Agent( + model, + system_prompt=self.runtime_parameters.system_prompt, + retries=3, + output_retries=5, + output_type=ToolOutput(schema, 
name=schema.__name__.lower()), + ) + + result = await agent.run(prompt) + + if result.output is None: + raise RuntimeError("Output from LLM was empty") + + return result.output async def get_available_models(self) -> List[Model]: if self.provider_parameters is None: diff --git a/server/src/core/llm/providers/mock.py b/server/src/core/llm/providers/mock.py index 88eb342..75c765b 100644 --- a/server/src/core/llm/providers/mock.py +++ b/server/src/core/llm/providers/mock.py @@ -2,6 +2,7 @@ import random from typing import Any, List +from httpx import AsyncClient from pydantic import BaseModel, Field from src.core.llm.providers.provider import ( @@ -55,8 +56,12 @@ def __init__( config_parameters = [] async def generate_answer_async( - self, model_parameters: dict[str, Any], schema: type[T], prompt - ) -> tuple[StructuredResponse, str]: + self, + client: AsyncClient, + model_parameters: dict[str, Any], + schema: type[T], + prompt, + ) -> StructuredResponse: if self.provider_parameters is None: raise RuntimeError("Provider parameters needs to be defined") @@ -67,38 +72,35 @@ async def generate_answer_async( delay_ms = max(0.0, self.provider_parameters.delay + jitter_ms) await asyncio.sleep(delay_ms / 1000.0) - return ( - StructuredResponse( - overall_decision=Decision( - binary_decision=True, - probability_decision=1.0, - likert_decision=LikertDecision.stronglyAgree, - reason="The paper completely meets the inclusion criteria.", - ), - inclusion_criteria=[ - Criterion( - name="Example criteria", - decision=Decision( - binary_decision=True, - probability_decision=1.0, - likert_decision=LikertDecision.stronglyAgree, - reason="The criteria is met.", - ), - ) - ], - exclusion_criteria=[ - Criterion( - name="Example criteria", - decision=Decision( - binary_decision=False, - probability_decision=0.0, - likert_decision=LikertDecision.stronglyDisagree, - reason="The criteria is not met.", - ), - ) - ], + return StructuredResponse( + overall_decision=Decision( + 
binary_decision=True, + probability_decision=1.0, + likert_decision=LikertDecision.stronglyAgree, + reason="The paper completely meets the inclusion criteria.", ), - "", + inclusion_criteria=[ + Criterion( + name="Example criteria", + decision=Decision( + binary_decision=True, + probability_decision=1.0, + likert_decision=LikertDecision.stronglyAgree, + reason="The criteria is met.", + ), + ) + ], + exclusion_criteria=[ + Criterion( + name="Example criteria", + decision=Decision( + binary_decision=False, + probability_decision=0.0, + likert_decision=LikertDecision.stronglyDisagree, + reason="The criteria is not met.", + ), + ) + ], ) async def get_available_models(self) -> List[Model]: diff --git a/server/src/core/llm/providers/openai.py b/server/src/core/llm/providers/openai.py index e0a1e4b..36932d2 100644 --- a/server/src/core/llm/providers/openai.py +++ b/server/src/core/llm/providers/openai.py @@ -1,15 +1,19 @@ from typing import Any, List +from httpx import AsyncClient +from openai.types.model import Model from pydantic import BaseModel +from pydantic_ai import Agent +from pydantic_ai.models.openai import OpenAIResponsesModel, OpenAIResponsesModelSettings +from pydantic_ai.output import ToolOutput +from pydantic_ai.providers.openai import OpenAIProvider as PAI_OpenAIProvider from src.core.llm.providers.provider import ( - T, BaseLLMParams, ConfigParameter, LLMProvider, + T, ) -from openai.types.model import Model - from src.schemas.llm import ProviderRuntimeParameters @@ -40,56 +44,48 @@ def __init__( config_parameters = [api_key_config_parameter] async def generate_answer_async( - self, model_parameters: dict[str, Any], schema: type[T], prompt - ) -> tuple[T, str]: + self, + client: AsyncClient, + model_parameters: dict[str, Any], + schema: type[T], + prompt, + ) -> T: model_cfg = self.parse_model_parameters(model_parameters) - from openai import AsyncOpenAI - import openai - if self.runtime_parameters.model is None: raise RuntimeError("Model needs to be 
defined") if self.runtime_parameters.api_key is None: raise RuntimeError("API Key is not defined") - async with AsyncOpenAI(api_key=self.runtime_parameters.api_key) as client: - try: - response = await client.responses.parse( - model=self.runtime_parameters.model, - input=[ - { - "role": "system", - "content": self.runtime_parameters.system_prompt, - }, - {"role": "user", "content": prompt}, - ], - top_p=model_cfg.top_p, - temperature=model_cfg.temperature, - text_format=schema, - ) - if response.output_parsed is None: - raise RuntimeError("LLM response was empty") - - return response.output_parsed, "" - except openai.APIConnectionError as e: - print("The server could not be reached") - print(e.__cause__) - raise e - except openai.RateLimitError as e: - print("HTTP 429 status code was received; we should back off a bit.") - print(e.status_code) - print(e.response) - raise e - except openai.APIStatusError as e: - print("Another non-200-range status code was received") - print(e.status_code) - print(e.response) - raise e - except Exception as e: - raise RuntimeError("LLM call failed") from e - - raise RuntimeError("Failed to call LLM") + settings = OpenAIResponsesModelSettings( + extra_headers={ + "X-Title": "AISysRev", + "HTTP-Referer": "https://github.com/EvoTestOps/AISysRev", + }, + temperature=model_cfg.temperature, + top_p=model_cfg.top_p, + ) + + model = OpenAIResponsesModel( + str(self.runtime_parameters.model), + provider=PAI_OpenAIProvider( + api_key=self.runtime_parameters.api_key, http_client=client + ), + settings=settings, + ) + + agent = Agent( + model, + system_prompt=self.runtime_parameters.system_prompt, + retries=3, + output_retries=5, + output_type=ToolOutput(schema, name=schema.__name__.lower()), + ) + + result = await agent.run(prompt) + + return result.output async def get_available_models(self) -> List[Model]: from openai import AsyncOpenAI diff --git a/server/src/core/llm/providers/openrouter.py b/server/src/core/llm/providers/openrouter.py 
index bce6d03..377253e 100644 --- a/server/src/core/llm/providers/openrouter.py +++ b/server/src/core/llm/providers/openrouter.py @@ -1,17 +1,24 @@ from typing import Any, List, Type -from pydantic import BaseModel, ValidationError +from httpx import AsyncClient +from openai.types.model import Model +from pydantic import BaseModel +from pydantic_ai import Agent +from pydantic_ai.models.openrouter import OpenRouterModel, OpenRouterModelSettings +from pydantic_ai.output import ToolOutput +from pydantic_ai.providers.openrouter import ( + OpenRouterProvider as PAI_OpenRouterProvider, +) + from src.core.llm.providers.provider import ( - T, BaseLLMParams, ConfigParameter, LLMProvider, + T, ) - from src.schemas.llm import ( ProviderRuntimeParameters, ) -from openai.types.model import Model class OpenRouterProviderParams(BaseModel): @@ -46,96 +53,61 @@ def __init__( async def generate_answer_async( self, + client: AsyncClient, model_parameters: dict[str, Any], schema: Type[T], prompt: str, - ) -> tuple[T, str]: - cfg = self.parse_model_parameters(model_parameters) - - import aiohttp - from openai.lib._pydantic import to_strict_json_schema - import json + ) -> T: import logging logger = logging.getLogger(__name__) + model_cfg = self.parse_model_parameters(model_parameters) + if self.runtime_parameters.api_key is None: raise RuntimeError("API Key is not defined") - if self.provider_parameters is None: raise RuntimeError("Provider parameters needs to be defined") - content = None - data = None - async with aiohttp.ClientSession() as session: - data = { - "model": self.runtime_parameters.model, - "messages": [ - { - "role": "system", - # "content": system_prompt + "\r\n" + json_instruct_prompt, <-- Test if JSON responses work without this - "content": self.runtime_parameters.system_prompt, - }, - {"role": "user", "content": prompt}, - ], - "provider": {"require_parameters": True, "data_collection": "deny"}, - "max_tokens": 8193, - "response_format": { - "type": "json_schema", 
- "json_schema": { - "name": "structured_response", - "strict": True, - "schema": to_strict_json_schema(schema), - }, - }, - "temperature": cfg.temperature, - "top_p": cfg.top_p, - } - - async with session.post( - "https://openrouter.ai/api/v1/chat/completions", - headers={ - "Authorization": f"Bearer {self.runtime_parameters.api_key}", - "Content-type": "application/json", - }, - json=data, - ) as response: - logger.info("Status: %s", response.status) - logger.info("Content-type: %s", response.headers["content-type"]) - - if response.status != 200: - text = await response.text() - logger.error( - "LLM request failed with response status %s", response.status - ) - logger.error("Response: %s", text) - raise RuntimeError(text) - - completion = await response.json() - data = json.dumps(completion) - - # For type-safety, validate the response JSON - # Things might have gotten better in OpenRouter's infrastructure, so JSON is properly outputted from the OpenRouter interface. - import re - - json_match = re.search( - r"json\s*(\{.*\})", - completion["choices"][0]["message"]["content"], - re.DOTALL, - ) - json_str = ( - # First, check if the response starts with "json" - json_match.group(1).strip() - if json_match - # Assume that the content is valid JSON - else completion["choices"][0]["message"]["content"].strip() - ) - try: - content = schema.model_validate_json(json_str) - except ValidationError as e: - logger.error(e) - raise e - return content, data + settings = OpenRouterModelSettings( + openrouter_provider={ + "require_parameters": True, + "data_collection": "deny", + }, + extra_headers={ + "X-Title": "AISysRev", + "HTTP-Referer": "https://github.com/EvoTestOps/AISysRev", + }, + temperature=model_cfg.temperature, + top_p=model_cfg.top_p, + ) + + model = OpenRouterModel( + str(self.runtime_parameters.model), + provider=PAI_OpenRouterProvider( + api_key=self.runtime_parameters.api_key, http_client=client + ), + settings=settings, + ) + + agent = Agent( + model, + 
system_prompt=self.runtime_parameters.system_prompt, + retries=3, # TODO: Maybe should be configurable + output_retries=5, + output_type=ToolOutput(schema, name=schema.__name__.lower()), + ) + + logger.debug( + "Sending prompt to OpenRouter model %s", self.runtime_parameters.model + ) + logger.debug("Prompt: %s", prompt) + logger.debug("Model parameters: %s", model_cfg.model_dump()) + + result = await agent.run(prompt) + + logger.debug("Received structured response: %s", result.output) + return result.output async def get_available_models(self) -> List[Model]: if self.provider_parameters is None: diff --git a/server/src/core/llm/providers/provider.py b/server/src/core/llm/providers/provider.py index 57928cd..e801c3f 100644 --- a/server/src/core/llm/providers/provider.py +++ b/server/src/core/llm/providers/provider.py @@ -1,5 +1,6 @@ from abc import ABC, abstractmethod from typing import Any, ClassVar, Generic, List, Literal, Optional, Type, TypeVar, Union +from httpx import AsyncClient from pydantic import BaseModel, Field from src.schemas.llm import ProviderRuntimeParameters @@ -94,10 +95,11 @@ async def get_available_models(self) -> List["Model"]: @abstractmethod async def generate_answer_async( self, + client: AsyncClient, model_parameters: dict[str, Any], schema: Type[T], prompt: str, - ) -> tuple[T, str]: + ) -> T: pass diff --git a/server/src/services/llm_service.py b/server/src/services/llm_service.py index a48365a..855fc19 100644 --- a/server/src/services/llm_service.py +++ b/server/src/services/llm_service.py @@ -1,5 +1,6 @@ from typing import Any, TypeVar +from httpx import AsyncClient from pydantic import BaseModel from src.core.llm.providers import llm_providers @@ -30,13 +31,15 @@ async def call_llm( runtime_parameters: ProviderRuntimeParameters, model_parameters: dict[str, Any], user_prompt: str, - ): - response_formatted, response_raw = await llm( + client: AsyncClient, + ) -> T: + response_formatted = await llm( provider_parameters, 
runtime_parameters ).generate_answer_async( model_parameters=model_parameters, prompt=user_prompt, schema=response_schema, + client=client, ) return response_formatted diff --git a/server/src/tools/llm_decision_creator.py b/server/src/tools/llm_decision_creator.py index de42e79..29e82a2 100644 --- a/server/src/tools/llm_decision_creator.py +++ b/server/src/tools/llm_decision_creator.py @@ -1,3 +1,5 @@ +from httpx import AsyncClient + from src.core.prompts import few_shot_task_prompt, zero_shot_task_prompt from src.db.models.jobtask import JobTask from src.schemas.job import ( @@ -17,12 +19,10 @@ def _create_few_shot_examples(papers: list[PaperRead]): txt_parts = [] for paper in papers: - txt_parts.append( - f"""Title: {paper.title} + txt_parts.append(f"""Title: {paper.title} Abstract: \"{paper.abstract}\" Decision: {"Include" if paper.human_result == PaperHumanResult.INCLUDE else "Exclude"} -""" - ) +""") return "\n\n".join(txt_parts) @@ -45,6 +45,7 @@ async def get_structured_response( job_task_data: JobTask, job_data: JobCreate, inc_exc_criteria: Criteria, + client: AsyncClient, ) -> StructuredResponse: # TODO: Move to another place additional_instructions = "The paper is included, if all inclusion criteria match. If the paper matches any exclusion criteria, it is excluded." 
@@ -84,6 +85,7 @@ async def get_structured_response( ), response_schema=StructuredResponse, user_prompt=prompt_text, + client=client, ) return result elif isinstance(cfg, FewShotPromptingConfig): @@ -107,6 +109,7 @@ async def get_structured_response( ), response_schema=StructuredResponse, user_prompt=prompt_text, + client=client, ) return result else: diff --git a/server/uv.lock b/server/uv.lock index bbf8297..4bc2af4 100644 --- a/server/uv.lock +++ b/server/uv.lock @@ -283,6 +283,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, ] +[[package]] +name = "charset-normalizer" +version = "3.4.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" }, + { url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" }, + { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" }, + { url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" }, + { url = "https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" }, + { url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" }, + { url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, upload-time = "2025-10-14T04:41:44.821Z" }, + { url = "https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" }, + { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" }, + { url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", 
size = 99940, upload-time = "2025-10-14T04:41:49.946Z" }, + { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" }, + { url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" }, + { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, +] + [[package]] name = "click" version = "8.3.1" @@ -588,6 +613,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" }, ] +[[package]] +name = "genai-prices" +version = "0.0.52" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx" }, + { name = "pydantic" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8e/87/bdc11c1671e3a3fe701c3c4aaae4aa2bb7a84a6bb1812dfb5693c87d3872/genai_prices-0.0.52.tar.gz", hash = "sha256:0df7420b555fa3a48d09e5c7802ba35b5dfa9fd49b0c3bb2c150c59060d83f52", size = 58364, upload-time = "2026-01-28T12:07:49.386Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/35/33/6316b4907a0bffc1bcc99074c7e2d01184fdfeee401c864146a40d55ad10/genai_prices-0.0.52-py3-none-any.whl", 
hash = "sha256:639e7a2ae7eddf5710febb9779b9c9e31ff5acf464b4eb1f6018798ea642e6d3", size = 60937, upload-time = "2026-01-28T12:07:47.921Z" }, +] + [[package]] name = "greenlet" version = "3.3.0" @@ -611,6 +649,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4f/dc/041be1dff9f23dac5f48a43323cd0789cb798342011c19a248d9c9335536/greenlet-3.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c10513330af5b8ae16f023e8ddbfb486ab355d04467c4679c5cfe4659975dd9", size = 1676034, upload-time = "2025-12-04T14:27:33.531Z" }, ] +[[package]] +name = "griffe" +version = "1.15.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0d/0c/3a471b6e31951dce2360477420d0a8d1e00dea6cf33b70f3e8c3ab6e28e1/griffe-1.15.0.tar.gz", hash = "sha256:7726e3afd6f298fbc3696e67958803e7ac843c1cfe59734b6251a40cdbfb5eea", size = 424112, upload-time = "2025-11-10T15:03:15.52Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9c/83/3b1d03d36f224edded98e9affd0467630fc09d766c0e56fb1498cbb04a9b/griffe-1.15.0-py3-none-any.whl", hash = "sha256:6f6762661949411031f5fcda9593f586e6ce8340f0ba88921a0f2ef7a81eb9a3", size = 150705, upload-time = "2025-11-10T15:03:13.549Z" }, +] + [[package]] name = "h11" version = "0.16.0" @@ -681,6 +731,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, ] +[[package]] +name = "importlib-metadata" +version = "8.7.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f3/49/3b30cad09e7771a4982d9975a8cbf64f00d4a1ececb53297f1d9a7be1b10/importlib_metadata-8.7.1.tar.gz", hash = 
"sha256:49fef1ae6440c182052f407c8d34a68f72efc36db9ca90dc0113398f2fdde8bb", size = 57107, upload-time = "2025-12-21T10:00:19.278Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl", hash = "sha256:5a1f80bf1daa489495071efbb095d75a634cf28a8bc299581244063b53176151", size = 27865, upload-time = "2025-12-21T10:00:18.329Z" }, +] + [[package]] name = "iniconfig" version = "2.3.0" @@ -755,6 +817,15 @@ redis = [ { name = "redis" }, ] +[[package]] +name = "logfire-api" +version = "4.21.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/d5/c183261d5560e33335443b377c921aa6a15e9890ceac63024237e8c1279b/logfire_api-4.21.0.tar.gz", hash = "sha256:5d709a0d3adfd573db70964cb48c03b750966de395ed9c8da4de111707a75fab", size = 59331, upload-time = "2026-01-28T18:55:44.985Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/00/5045f889be4a450b321db998d0a5581d30423138a04dffe18b52730cb926/logfire_api-4.21.0-py3-none-any.whl", hash = "sha256:32f9b48e6b73c270d1aeb6478dcbecc5f82120b8eae70559e0d1b05d1b86541e", size = 98061, upload-time = "2026-01-28T18:55:42.342Z" }, +] + [[package]] name = "mako" version = "1.3.10" @@ -928,6 +999,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/27/4b/7c1a00c2c3fbd004253937f7520f692a9650767aa73894d7a34f0d65d3f4/openai-2.14.0-py3-none-any.whl", hash = "sha256:7ea40aca4ffc4c4a776e77679021b47eec1160e341f42ae086ba949c9dcc9183", size = 1067558, upload-time = "2025-12-19T03:28:43.727Z" }, ] +[[package]] +name = "opentelemetry-api" +version = "1.39.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/97/b9/3161be15bb8e3ad01be8be5a968a9237c3027c5be504362ff800fca3e442/opentelemetry_api-1.39.1.tar.gz", hash 
= "sha256:fbde8c80e1b937a2c61f20347e91c0c18a1940cecf012d62e65a7caf08967c9c", size = 65767, upload-time = "2025-12-11T13:32:39.182Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cf/df/d3f1ddf4bb4cb50ed9b1139cc7b1c54c34a1e7ce8fd1b9a37c0d1551a6bd/opentelemetry_api-1.39.1-py3-none-any.whl", hash = "sha256:2edd8463432a7f8443edce90972169b195e7d6a05500cd29e6d13898187c9950", size = 66356, upload-time = "2025-12-11T13:32:17.304Z" }, +] + [[package]] name = "packaging" version = "25.0" @@ -1081,6 +1165,36 @@ email = [ { name = "email-validator" }, ] +[[package]] +name = "pydantic-ai-slim" +version = "1.52.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "genai-prices" }, + { name = "griffe" }, + { name = "httpx" }, + { name = "opentelemetry-api" }, + { name = "pydantic" }, + { name = "pydantic-graph" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/04/89/09f263b957185a8ae5c088fb37e2513c03618e4fd189410a10babc9121f3/pydantic_ai_slim-1.52.0.tar.gz", hash = "sha256:e89b609163c5465450a1c20c4f44ddcdfc0ec09d7715ab20a4e8e31a4151c47d", size = 405607, upload-time = "2026-02-03T00:51:06.504Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f6/b1/a1e994e26c7a1af409f9114aaff936379bfcccac7c20f5a5c746a77214a5/pydantic_ai_slim-1.52.0-py3-none-any.whl", hash = "sha256:9510a02fdae13e4583ce6e9e500d4c43acb5428238a671b3e1d255c2f466b2f2", size = 529901, upload-time = "2026-02-03T00:50:56.445Z" }, +] + +[package.optional-dependencies] +openai = [ + { name = "openai" }, + { name = "tiktoken" }, +] +openrouter = [ + { name = "openai" }, +] +retries = [ + { name = "tenacity" }, +] + [[package]] name = "pydantic-core" version = "2.41.5" @@ -1120,6 +1234,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = 
"sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, ] +[[package]] +name = "pydantic-graph" +version = "1.52.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx" }, + { name = "logfire-api" }, + { name = "pydantic" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/09/b1/21b868c08e7cac71ee9ea22a3cb2f205df6a9fe8a742be4669120908acf0/pydantic_graph-1.52.0.tar.gz", hash = "sha256:ac63aa93d51940c5411f95af949146eecffaf48e6773173aced4cadff07336d9", size = 58460, upload-time = "2026-02-03T00:51:08.585Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/72/d4/4d299d3a919b0c6cb4a67c906633e5b0f564603aef601f385e121b5f534b/pydantic_graph-1.52.0-py3-none-any.whl", hash = "sha256:3141e5c10a72ad1bdddab58af8d531242d6892d953b6a9fb20c8fe6ba414d735", size = 72343, upload-time = "2026-02-03T00:51:00.264Z" }, +] + [[package]] name = "pygments" version = "2.19.2" @@ -1245,6 +1374,61 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e8/02/89e2ed7e85db6c93dfa9e8f691c5087df4e3551ab39081a4d7c6d1f90e05/redis-6.4.0-py3-none-any.whl", hash = "sha256:f0544fa9604264e9464cdf4814e7d4830f74b165d52f2a330a760a88dd248b7f", size = 279847, upload-time = "2025-08-07T08:10:09.84Z" }, ] +[[package]] +name = "regex" +version = "2026.1.15" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/86/07d5056945f9ec4590b518171c4254a5925832eb727b56d3c38a7476f316/regex-2026.1.15.tar.gz", hash = "sha256:164759aa25575cbc0651bef59a0b18353e54300d79ace8084c818ad8ac72b7d5", size = 414811, upload-time = "2026-01-14T23:18:02.775Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/0a/47fa888ec7cbbc7d62c5f2a6a888878e76169170ead271a35239edd8f0e8/regex-2026.1.15-cp314-cp314-macosx_10_13_universal2.whl", hash = 
"sha256:d920392a6b1f353f4aa54328c867fec3320fa50657e25f64abf17af054fc97ac", size = 489170, upload-time = "2026-01-14T23:16:19.835Z" }, + { url = "https://files.pythonhosted.org/packages/ac/c4/d000e9b7296c15737c9301708e9e7fbdea009f8e93541b6b43bdb8219646/regex-2026.1.15-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b5a28980a926fa810dbbed059547b02783952e2efd9c636412345232ddb87ff6", size = 291146, upload-time = "2026-01-14T23:16:21.541Z" }, + { url = "https://files.pythonhosted.org/packages/f9/b6/921cc61982e538682bdf3bdf5b2c6ab6b34368da1f8e98a6c1ddc503c9cf/regex-2026.1.15-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:621f73a07595d83f28952d7bd1e91e9d1ed7625fb7af0064d3516674ec93a2a2", size = 288986, upload-time = "2026-01-14T23:16:23.381Z" }, + { url = "https://files.pythonhosted.org/packages/ca/33/eb7383dde0bbc93f4fb9d03453aab97e18ad4024ac7e26cef8d1f0a2cff0/regex-2026.1.15-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d7d92495f47567a9b1669c51fc8d6d809821849063d168121ef801bbc213846", size = 799098, upload-time = "2026-01-14T23:16:25.088Z" }, + { url = "https://files.pythonhosted.org/packages/27/56/b664dccae898fc8d8b4c23accd853f723bde0f026c747b6f6262b688029c/regex-2026.1.15-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8dd16fba2758db7a3780a051f245539c4451ca20910f5a5e6ea1c08d06d4a76b", size = 864980, upload-time = "2026-01-14T23:16:27.297Z" }, + { url = "https://files.pythonhosted.org/packages/16/40/0999e064a170eddd237bae9ccfcd8f28b3aa98a38bf727a086425542a4fc/regex-2026.1.15-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1e1808471fbe44c1a63e5f577a1d5f02fe5d66031dcbdf12f093ffc1305a858e", size = 911607, upload-time = "2026-01-14T23:16:29.235Z" }, + { url = 
"https://files.pythonhosted.org/packages/07/78/c77f644b68ab054e5a674fb4da40ff7bffb2c88df58afa82dbf86573092d/regex-2026.1.15-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0751a26ad39d4f2ade8fe16c59b2bf5cb19eb3d2cd543e709e583d559bd9efde", size = 803358, upload-time = "2026-01-14T23:16:31.369Z" }, + { url = "https://files.pythonhosted.org/packages/27/31/d4292ea8566eaa551fafc07797961c5963cf5235c797cc2ae19b85dfd04d/regex-2026.1.15-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0f0c7684c7f9ca241344ff95a1de964f257a5251968484270e91c25a755532c5", size = 775833, upload-time = "2026-01-14T23:16:33.141Z" }, + { url = "https://files.pythonhosted.org/packages/ce/b2/cff3bf2fea4133aa6fb0d1e370b37544d18c8350a2fa118c7e11d1db0e14/regex-2026.1.15-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:74f45d170a21df41508cb67165456538425185baaf686281fa210d7e729abc34", size = 788045, upload-time = "2026-01-14T23:16:35.005Z" }, + { url = "https://files.pythonhosted.org/packages/8d/99/2cb9b69045372ec877b6f5124bda4eb4253bc58b8fe5848c973f752bc52c/regex-2026.1.15-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:f1862739a1ffb50615c0fde6bae6569b5efbe08d98e59ce009f68a336f64da75", size = 859374, upload-time = "2026-01-14T23:16:36.919Z" }, + { url = "https://files.pythonhosted.org/packages/09/16/710b0a5abe8e077b1729a562d2f297224ad079f3a66dce46844c193416c8/regex-2026.1.15-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:453078802f1b9e2b7303fb79222c054cb18e76f7bdc220f7530fdc85d319f99e", size = 763940, upload-time = "2026-01-14T23:16:38.685Z" }, + { url = "https://files.pythonhosted.org/packages/dd/d1/7585c8e744e40eb3d32f119191969b91de04c073fca98ec14299041f6e7e/regex-2026.1.15-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:a30a68e89e5a218b8b23a52292924c1f4b245cb0c68d1cce9aec9bbda6e2c160", size = 850112, upload-time = "2026-01-14T23:16:40.646Z" }, + { url = 
"https://files.pythonhosted.org/packages/af/d6/43e1dd85df86c49a347aa57c1f69d12c652c7b60e37ec162e3096194a278/regex-2026.1.15-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:9479cae874c81bf610d72b85bb681a94c95722c127b55445285fb0e2c82db8e1", size = 789586, upload-time = "2026-01-14T23:16:42.799Z" }, + { url = "https://files.pythonhosted.org/packages/93/38/77142422f631e013f316aaae83234c629555729a9fbc952b8a63ac91462a/regex-2026.1.15-cp314-cp314-win32.whl", hash = "sha256:d639a750223132afbfb8f429c60d9d318aeba03281a5f1ab49f877456448dcf1", size = 271691, upload-time = "2026-01-14T23:16:44.671Z" }, + { url = "https://files.pythonhosted.org/packages/4a/a9/ab16b4649524ca9e05213c1cdbb7faa85cc2aa90a0230d2f796cbaf22736/regex-2026.1.15-cp314-cp314-win_amd64.whl", hash = "sha256:4161d87f85fa831e31469bfd82c186923070fc970b9de75339b68f0c75b51903", size = 280422, upload-time = "2026-01-14T23:16:46.607Z" }, + { url = "https://files.pythonhosted.org/packages/be/2a/20fd057bf3521cb4791f69f869635f73e0aaf2b9ad2d260f728144f9047c/regex-2026.1.15-cp314-cp314-win_arm64.whl", hash = "sha256:91c5036ebb62663a6b3999bdd2e559fd8456d17e2b485bf509784cd31a8b1705", size = 273467, upload-time = "2026-01-14T23:16:48.967Z" }, + { url = "https://files.pythonhosted.org/packages/ad/77/0b1e81857060b92b9cad239104c46507dd481b3ff1fa79f8e7f865aae38a/regex-2026.1.15-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ee6854c9000a10938c79238de2379bea30c82e4925a371711af45387df35cab8", size = 492073, upload-time = "2026-01-14T23:16:51.154Z" }, + { url = "https://files.pythonhosted.org/packages/70/f3/f8302b0c208b22c1e4f423147e1913fd475ddd6230565b299925353de644/regex-2026.1.15-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2c2b80399a422348ce5de4fe40c418d6299a0fa2803dd61dc0b1a2f28e280fcf", size = 292757, upload-time = "2026-01-14T23:16:53.08Z" }, + { url = 
"https://files.pythonhosted.org/packages/bf/f0/ef55de2460f3b4a6da9d9e7daacd0cb79d4ef75c64a2af316e68447f0df0/regex-2026.1.15-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:dca3582bca82596609959ac39e12b7dad98385b4fefccb1151b937383cec547d", size = 291122, upload-time = "2026-01-14T23:16:55.383Z" }, + { url = "https://files.pythonhosted.org/packages/cf/55/bb8ccbacabbc3a11d863ee62a9f18b160a83084ea95cdfc5d207bfc3dd75/regex-2026.1.15-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef71d476caa6692eea743ae5ea23cde3260677f70122c4d258ca952e5c2d4e84", size = 807761, upload-time = "2026-01-14T23:16:57.251Z" }, + { url = "https://files.pythonhosted.org/packages/8f/84/f75d937f17f81e55679a0509e86176e29caa7298c38bd1db7ce9c0bf6075/regex-2026.1.15-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c243da3436354f4af6c3058a3f81a97d47ea52c9bd874b52fd30274853a1d5df", size = 873538, upload-time = "2026-01-14T23:16:59.349Z" }, + { url = "https://files.pythonhosted.org/packages/b8/d9/0da86327df70349aa8d86390da91171bd3ca4f0e7c1d1d453a9c10344da3/regex-2026.1.15-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8355ad842a7c7e9e5e55653eade3b7d1885ba86f124dd8ab1f722f9be6627434", size = 915066, upload-time = "2026-01-14T23:17:01.607Z" }, + { url = "https://files.pythonhosted.org/packages/2a/5e/f660fb23fc77baa2a61aa1f1fe3a4eea2bbb8a286ddec148030672e18834/regex-2026.1.15-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f192a831d9575271a22d804ff1a5355355723f94f31d9eef25f0d45a152fdc1a", size = 812938, upload-time = "2026-01-14T23:17:04.366Z" }, + { url = "https://files.pythonhosted.org/packages/69/33/a47a29bfecebbbfd1e5cd3f26b28020a97e4820f1c5148e66e3b7d4b4992/regex-2026.1.15-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:166551807ec20d47ceaeec380081f843e88c8949780cd42c40f18d16168bed10", size = 781314, upload-time = "2026-01-14T23:17:06.378Z" }, + { url = "https://files.pythonhosted.org/packages/65/ec/7ec2bbfd4c3f4e494a24dec4c6943a668e2030426b1b8b949a6462d2c17b/regex-2026.1.15-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f9ca1cbdc0fbfe5e6e6f8221ef2309988db5bcede52443aeaee9a4ad555e0dac", size = 795652, upload-time = "2026-01-14T23:17:08.521Z" }, + { url = "https://files.pythonhosted.org/packages/46/79/a5d8651ae131fe27d7c521ad300aa7f1c7be1dbeee4d446498af5411b8a9/regex-2026.1.15-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b30bcbd1e1221783c721483953d9e4f3ab9c5d165aa709693d3f3946747b1aea", size = 868550, upload-time = "2026-01-14T23:17:10.573Z" }, + { url = "https://files.pythonhosted.org/packages/06/b7/25635d2809664b79f183070786a5552dd4e627e5aedb0065f4e3cf8ee37d/regex-2026.1.15-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:2a8d7b50c34578d0d3bf7ad58cde9652b7d683691876f83aedc002862a35dc5e", size = 769981, upload-time = "2026-01-14T23:17:12.871Z" }, + { url = "https://files.pythonhosted.org/packages/16/8b/fc3fcbb2393dcfa4a6c5ffad92dc498e842df4581ea9d14309fcd3c55fb9/regex-2026.1.15-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:9d787e3310c6a6425eb346be4ff2ccf6eece63017916fd77fe8328c57be83521", size = 854780, upload-time = "2026-01-14T23:17:14.837Z" }, + { url = "https://files.pythonhosted.org/packages/d0/38/dde117c76c624713c8a2842530be9c93ca8b606c0f6102d86e8cd1ce8bea/regex-2026.1.15-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:619843841e220adca114118533a574a9cd183ed8a28b85627d2844c500a2b0db", size = 799778, upload-time = "2026-01-14T23:17:17.369Z" }, + { url = "https://files.pythonhosted.org/packages/e3/0d/3a6cfa9ae99606afb612d8fb7a66b245a9d5ff0f29bb347c8a30b6ad561b/regex-2026.1.15-cp314-cp314t-win32.whl", hash = "sha256:e90b8db97f6f2c97eb045b51a6b2c5ed69cedd8392459e0642d4199b94fabd7e", size = 274667, upload-time = 
"2026-01-14T23:17:19.301Z" }, + { url = "https://files.pythonhosted.org/packages/5b/b2/297293bb0742fd06b8d8e2572db41a855cdf1cae0bf009b1cb74fe07e196/regex-2026.1.15-cp314-cp314t-win_amd64.whl", hash = "sha256:5ef19071f4ac9f0834793af85bd04a920b4407715624e40cb7a0631a11137cdf", size = 284386, upload-time = "2026-01-14T23:17:21.231Z" }, + { url = "https://files.pythonhosted.org/packages/95/e4/a3b9480c78cf8ee86626cb06f8d931d74d775897d44201ccb813097ae697/regex-2026.1.15-cp314-cp314t-win_arm64.whl", hash = "sha256:ca89c5e596fc05b015f27561b3793dc2fa0917ea0d7507eebb448efd35274a70", size = 274837, upload-time = "2026-01-14T23:17:23.146Z" }, +] + +[[package]] +name = "requests" +version = "2.32.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, +] + [[package]] name = "rich" version = "14.2.0" @@ -1365,6 +1549,7 @@ dependencies = [ { name = "openai" }, { name = "pandas" }, { name = "pydantic" }, + { name = "pydantic-ai-slim", extra = ["openai", "openrouter", "retries"] }, { name = "pytest" }, { name = "pytest-asyncio" }, { name = "pytest-cov" }, @@ -1388,6 +1573,7 @@ requires-dist = [ { name = "openai", specifier = ">=2.14.0" }, { name = "pandas", specifier = ">=2.3.3" }, { name = "pydantic", specifier = ">=2.12.5" }, + { name = "pydantic-ai-slim", extras = ["openai", 
"openrouter", "retries"], specifier = ">=1.52.0" }, { name = "pytest", specifier = ">=9.0.2" }, { name = "pytest-asyncio", specifier = ">=1.3.0" }, { name = "pytest-cov", specifier = ">=7.0.0" }, @@ -1463,6 +1649,41 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d9/52/1064f510b141bd54025f9b55105e26d1fa970b9be67ad766380a3c9b74b0/starlette-0.50.0-py3-none-any.whl", hash = "sha256:9e5391843ec9b6e472eed1365a78c8098cfceb7a74bfd4d6b1c0c0095efb3bca", size = 74033, upload-time = "2025-11-01T15:25:25.461Z" }, ] +[[package]] +name = "tenacity" +version = "9.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0a/d4/2b0cd0fe285e14b36db076e78c93766ff1d529d70408bd1d2a5a84f1d929/tenacity-9.1.2.tar.gz", hash = "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb", size = 48036, upload-time = "2025-04-02T08:25:09.966Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/30/643397144bfbfec6f6ef821f36f33e57d35946c44a2352d3c9f0ae847619/tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138", size = 28248, upload-time = "2025-04-02T08:25:07.678Z" }, +] + +[[package]] +name = "tiktoken" +version = "0.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "regex" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7d/ab/4d017d0f76ec3171d469d80fc03dfbb4e48a4bcaddaa831b31d526f05edc/tiktoken-0.12.0.tar.gz", hash = "sha256:b18ba7ee2b093863978fcb14f74b3707cdc8d4d4d3836853ce7ec60772139931", size = 37806, upload-time = "2025-10-06T20:22:45.419Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/72/05/3abc1db5d2c9aadc4d2c76fa5640134e475e58d9fbb82b5c535dc0de9b01/tiktoken-0.12.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a90388128df3b3abeb2bfd1895b0681412a8d7dc644142519e6f0a97c2111646", size = 1050188, upload-time = 
"2025-10-06T20:22:19.563Z" }, + { url = "https://files.pythonhosted.org/packages/e3/7b/50c2f060412202d6c95f32b20755c7a6273543b125c0985d6fa9465105af/tiktoken-0.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:da900aa0ad52247d8794e307d6446bd3cdea8e192769b56276695d34d2c9aa88", size = 993978, upload-time = "2025-10-06T20:22:20.702Z" }, + { url = "https://files.pythonhosted.org/packages/14/27/bf795595a2b897e271771cd31cb847d479073497344c637966bdf2853da1/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:285ba9d73ea0d6171e7f9407039a290ca77efcdb026be7769dccc01d2c8d7fff", size = 1129271, upload-time = "2025-10-06T20:22:22.06Z" }, + { url = "https://files.pythonhosted.org/packages/f5/de/9341a6d7a8f1b448573bbf3425fa57669ac58258a667eb48a25dfe916d70/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:d186a5c60c6a0213f04a7a802264083dea1bbde92a2d4c7069e1a56630aef830", size = 1151216, upload-time = "2025-10-06T20:22:23.085Z" }, + { url = "https://files.pythonhosted.org/packages/75/0d/881866647b8d1be4d67cb24e50d0c26f9f807f994aa1510cb9ba2fe5f612/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:604831189bd05480f2b885ecd2d1986dc7686f609de48208ebbbddeea071fc0b", size = 1194860, upload-time = "2025-10-06T20:22:24.602Z" }, + { url = "https://files.pythonhosted.org/packages/b3/1e/b651ec3059474dab649b8d5b69f5c65cd8fcd8918568c1935bd4136c9392/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8f317e8530bb3a222547b85a58583238c8f74fd7a7408305f9f63246d1a0958b", size = 1254567, upload-time = "2025-10-06T20:22:25.671Z" }, + { url = "https://files.pythonhosted.org/packages/80/57/ce64fd16ac390fafde001268c364d559447ba09b509181b2808622420eec/tiktoken-0.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:399c3dd672a6406719d84442299a490420b458c44d3ae65516302a99675888f3", size = 921067, upload-time = "2025-10-06T20:22:26.753Z" }, + { url = 
"https://files.pythonhosted.org/packages/ac/a4/72eed53e8976a099539cdd5eb36f241987212c29629d0a52c305173e0a68/tiktoken-0.12.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2c714c72bc00a38ca969dae79e8266ddec999c7ceccd603cc4f0d04ccd76365", size = 1050473, upload-time = "2025-10-06T20:22:27.775Z" }, + { url = "https://files.pythonhosted.org/packages/e6/d7/0110b8f54c008466b19672c615f2168896b83706a6611ba6e47313dbc6e9/tiktoken-0.12.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cbb9a3ba275165a2cb0f9a83f5d7025afe6b9d0ab01a22b50f0e74fee2ad253e", size = 993855, upload-time = "2025-10-06T20:22:28.799Z" }, + { url = "https://files.pythonhosted.org/packages/5f/77/4f268c41a3957c418b084dd576ea2fad2e95da0d8e1ab705372892c2ca22/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:dfdfaa5ffff8993a3af94d1125870b1d27aed7cb97aa7eb8c1cefdbc87dbee63", size = 1129022, upload-time = "2025-10-06T20:22:29.981Z" }, + { url = "https://files.pythonhosted.org/packages/4e/2b/fc46c90fe5028bd094cd6ee25a7db321cb91d45dc87531e2bdbb26b4867a/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:584c3ad3d0c74f5269906eb8a659c8bfc6144a52895d9261cdaf90a0ae5f4de0", size = 1150736, upload-time = "2025-10-06T20:22:30.996Z" }, + { url = "https://files.pythonhosted.org/packages/28/c0/3c7a39ff68022ddfd7d93f3337ad90389a342f761c4d71de99a3ccc57857/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:54c891b416a0e36b8e2045b12b33dd66fb34a4fe7965565f1b482da50da3e86a", size = 1194908, upload-time = "2025-10-06T20:22:32.073Z" }, + { url = "https://files.pythonhosted.org/packages/ab/0d/c1ad6f4016a3968c048545f5d9b8ffebf577774b2ede3e2e352553b685fe/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5edb8743b88d5be814b1a8a8854494719080c28faaa1ccbef02e87354fe71ef0", size = 1253706, upload-time = "2025-10-06T20:22:33.385Z" }, + { url = 
"https://files.pythonhosted.org/packages/af/df/c7891ef9d2712ad774777271d39fdef63941ffba0a9d59b7ad1fd2765e57/tiktoken-0.12.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f61c0aea5565ac82e2ec50a05e02a6c44734e91b51c10510b084ea1b8e633a71", size = 920667, upload-time = "2025-10-06T20:22:34.444Z" }, +] + [[package]] name = "tornado" version = "6.5.4" @@ -1710,3 +1931,12 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/48/b7/503c98092fb3b344a179579f55814b613c1fbb1c23b3ec14a7b008a66a6e/yarl-1.22.0-cp314-cp314t-win_arm64.whl", hash = "sha256:9f6d73c1436b934e3f01df1e1b21ff765cd1d28c77dfb9ace207f746d4610ee1", size = 85171, upload-time = "2025-10-06T14:12:16.935Z" }, { url = "https://files.pythonhosted.org/packages/73/ae/b48f95715333080afb75a4504487cbe142cae1268afc482d06692d605ae6/yarl-1.22.0-py3-none-any.whl", hash = "sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff", size = 46814, upload-time = "2025-10-06T14:12:53.872Z" }, ] + +[[package]] +name = "zipp" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, +]