Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[project]
name = "infermesh"
version = "0.3.1"
version = "0.3.2"
description = "Run large LLM batches from notebooks and scripts without rewriting concurrency or rate-limit glue."
readme = "README.md"
license = { text = "Apache-2.0" }
Expand Down
13 changes: 11 additions & 2 deletions src/infermesh/_client_runtime.py
Original file line number Diff line number Diff line change
Expand Up @@ -524,7 +524,16 @@ def _create_litellm_module(self) -> Any:
import litellm
import litellm._logging

litellm._logging.verbose_logger.setLevel(logging.WARNING)
self._set_default_litellm_logger_level(litellm._logging.verbose_logger)
if hasattr(litellm._logging, "verbose_router_logger"):
litellm._logging.verbose_router_logger.setLevel(logging.WARNING)
self._set_default_litellm_logger_level(
litellm._logging.verbose_router_logger
)
return litellm

@staticmethod
def _set_default_litellm_logger_level(litellm_logger: logging.Logger) -> None:
"""Keep LiteLLM quiet unless the caller already configured its logger."""

if litellm_logger.level == logging.NOTSET:
litellm_logger.setLevel(logging.WARNING)
46 changes: 46 additions & 0 deletions tests/test_client_runtime.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
from __future__ import annotations

import base64
import logging
from pathlib import Path
from typing import Any
from unittest.mock import AsyncMock, patch
Expand All @@ -18,6 +19,51 @@
)


def _litellm_verbose_loggers() -> list[logging.Logger]:
    """Return LiteLLM loggers that ``LMClient`` quiets by default."""

    import litellm._logging

    # The router logger only exists on some LiteLLM versions; include it
    # when present (and not None), after the always-present verbose logger.
    found: list[logging.Logger] = [litellm._logging.verbose_logger]
    maybe_router = getattr(litellm._logging, "verbose_router_logger", None)
    if maybe_router is not None:
        found.append(maybe_router)
    return found


def _restore_logger_levels(levels: dict[logging.Logger, int]) -> None:
for logger, level in levels.items():
logger.setLevel(level)


def test_litellm_debug_loggers_are_preserved() -> None:
    """A caller-configured DEBUG level must survive ``LMClient`` construction."""

    targets = _litellm_verbose_loggers()
    saved = {target: target.level for target in targets}
    for target in targets:
        target.setLevel(logging.DEBUG)

    client = LMClient(model="openai/test", api_base="http://localhost")
    try:
        for target in targets:
            assert target.level == logging.DEBUG
    finally:
        client.close()
        _restore_logger_levels(saved)


def test_litellm_loggers_default_to_warning_when_unconfigured() -> None:
    """Loggers left at NOTSET are quieted to WARNING by ``LMClient``."""

    targets = _litellm_verbose_loggers()
    saved = {target: target.level for target in targets}
    for target in targets:
        target.setLevel(logging.NOTSET)

    client = LMClient(model="openai/test", api_base="http://localhost")
    try:
        for target in targets:
            assert target.level == logging.WARNING
    finally:
        client.close()
        _restore_logger_levels(saved)


@pytest.mark.asyncio
async def test_generate_responses_path(fake_client: LMClient) -> None:
result = await fake_client.agenerate(
Expand Down
2 changes: 1 addition & 1 deletion uv.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading