diff --git a/.env.template b/.env.template
index 4866554..532625c 100644
--- a/.env.template
+++ b/.env.template
@@ -1,4 +1,5 @@
 # Rename this file to .env and add your API keys
 ANTHROPIC_API_KEY=your_anthropic_api_key_here
 OPENAI_API_KEY=your_openai_api_key_here
-GOOGLE_API_KEY=your_google_api_key_here
\ No newline at end of file
+GOOGLE_API_KEY=your_google_api_key_here
+LAMBDA_API_KEY=your_lambda_api_key_here
\ No newline at end of file
diff --git a/config.py b/config.py
index c35e5ce..31b3501 100644
--- a/config.py
+++ b/config.py
@@ -6,7 +6,8 @@
 API_KEYS = {
     "anthropic": os.getenv("ANTHROPIC_API_KEY"),
     "openai": os.getenv("OPENAI_API_KEY"),
-    "google": os.getenv("GOOGLE_API_KEY")
+    "google": os.getenv("GOOGLE_API_KEY"),
+    "lambda": os.getenv("LAMBDA_API_KEY")
 }
 
 # Print diagnostic info
diff --git a/llm/lambda_client.py b/llm/lambda_client.py
new file mode 100644
index 0000000..f9a3560
--- /dev/null
+++ b/llm/lambda_client.py
@@ -0,0 +1,63 @@
+try:
+    import openai
+    OPENAI_AVAILABLE = True
+except ImportError:
+    print("OpenAI library not installed. Install with: pip install openai")
+    OPENAI_AVAILABLE = False
+
+from typing import List, Dict
+from llm.base import LLMClient, retry_with_backoff
+import asyncio
+import logging
+
+class LambdaClient(LLMClient):
+    def __init__(self, api_key: str):
+        if not OPENAI_AVAILABLE:
+            raise ImportError("OpenAI library not installed. Run: pip install openai")
+
+        if not api_key or api_key == "your_lambda_api_key_here":
+            raise ValueError("Invalid Lambda API key. Please check your .env file")
+
+        try:
+            self.client = openai.OpenAI(
+                api_key=api_key,
+                base_url="https://api.lambda.ai/v1"
+            )
+            logging.info("Lambda client initialized successfully")
+        except Exception as e:
+            logging.error(f"Failed to initialize Lambda client: {e}")
+            raise
+
+    async def generate_response(
+        self,
+        system_prompt: str,
+        messages: List[Dict],
+        temperature: float = 0.7,
+        max_tokens: int = 2048
+    ) -> str:
+        async def _generate():
+            try:
+                messages_formatted = [{"role": "system", "content": system_prompt}] + messages
+
+                # Convert to sync call wrapped in async
+                response = await asyncio.to_thread(
+                    self.client.chat.completions.create,
+                    model="deepseek-llama3.3-70b",
+                    messages=messages_formatted,
+                    temperature=temperature,
+                    max_tokens=max_tokens
+                )
+
+                if response and response.choices and response.choices[0].message:
+                    return response.choices[0].message.content
+                else:
+                    raise ValueError("Empty response from Lambda API")
+
+            except openai.APIError as e:
+                logging.error(f"Lambda API error: {e}")
+                raise
+            except Exception as e:
+                logging.error(f"Unexpected error calling Lambda API: {e}")
+                raise
+
+        return await retry_with_backoff(_generate)
\ No newline at end of file
diff --git a/main.py b/main.py
index d5fa51d..003fde4 100644
--- a/main.py
+++ b/main.py
@@ -6,6 +6,7 @@
 from llm.anthropic_client import ClaudeClient
 from llm.openai_client import GPTClient
 from llm.google_client import GeminiClient
+from llm.lambda_client import LambdaClient
 from moderator.turn_manager import TurnManager
 from ui.terminal import TerminalUI
 from storage.session_logger import SessionLogger
@@ -29,7 +30,8 @@ def __init__(self):
                 "claude_moderator": ClaudeClient(API_KEYS["anthropic"]),
                 "claude": ClaudeClient(API_KEYS["anthropic"]),
                 "gpt5": GPTClient(API_KEYS["openai"]),
-                "gemini": GeminiClient(API_KEYS["google"])
+                "gemini": GeminiClient(API_KEYS["google"]),
+                "deepseek": LambdaClient(API_KEYS["lambda"])
             }
         except Exception as e:
             self.ui.console.print(f"[red]Error initializing LLM clients: {e}[/red]")
@@ -40,7 +42,8 @@ def __init__(self):
             "claude_moderator": "Claude 4.1 Opus",
             "claude": "Claude 4.1 Opus",
             "gpt5": "GPT-5 Thinking",
-            "gemini": "Gemini 2.5 Pro"
+            "gemini": "Gemini 2.5 Pro",
+            "deepseek": "DeepSeek LLaMA 3.3 70B"
         }
 
         self.current_session_file = None
diff --git a/moderator/turn_manager.py b/moderator/turn_manager.py
index 01e7bde..dbc4fc0 100644
--- a/moderator/turn_manager.py
+++ b/moderator/turn_manager.py
@@ -4,7 +4,7 @@
 
 class TurnManager:
     def __init__(self):
-        self.panelist_ids = ["gpt5", "claude", "gemini"]
+        self.panelist_ids = ["gpt5", "claude", "gemini", "deepseek"]
         self.moderator_id = "claude_moderator"
 
     def determine_next_speaker(self, state: DiscussionState) -> str:
diff --git a/tests/test_basic.py b/tests/test_basic.py
index 6744dd0..c539b20 100644
--- a/tests/test_basic.py
+++ b/tests/test_basic.py
@@ -87,6 +87,7 @@ def test_turn_manager_initialization():
     assert "gpt5" in manager.panelist_ids
     assert "claude" in manager.panelist_ids
     assert "gemini" in manager.panelist_ids
+    assert "deepseek" in manager.panelist_ids
 
 def test_turn_manager_agenda_speaker():
     """Test that moderator speaks first in agenda round"""
@@ -218,6 +219,7 @@ def test_llm_client_initialization_mocked(mock_gemini_model, mock_gemini_config,
     from llm.anthropic_client import ClaudeClient
     from llm.openai_client import GPTClient
     from llm.google_client import GeminiClient
+    from llm.lambda_client import LambdaClient
 
     # These should not raise errors with valid keys
     claude = ClaudeClient("sk-ant-api03-valid-key-for-testing")
@@ -228,6 +230,9 @@ def test_llm_client_initialization_mocked(mock_gemini_model, mock_gemini_config,
 
     gemini = GeminiClient("AIza-valid-key-for-testing")
     assert gemini.model is not None
+
+    lambda_client = LambdaClient("lambda-valid-key-for-testing")
+    assert lambda_client.client is not None
 
 def test_config_loading():
     """Test configuration loading"""
 
@@ -238,7 +243,8 @@ def test_config_loading():
         with patch.dict(os.environ, {
             'ANTHROPIC_API_KEY': 'test_anthropic',
             'OPENAI_API_KEY': 'test_openai',
-            'GOOGLE_API_KEY': 'test_google'
+            'GOOGLE_API_KEY': 'test_google',
+            'LAMBDA_API_KEY': 'test_lambda'
         }):
             # Reimport config to get mocked values
             import importlib
@@ -247,4 +253,5 @@ def test_config_loading():
 
         assert config.API_KEYS['anthropic'] == 'test_anthropic'
         assert config.API_KEYS['openai'] == 'test_openai'
-        assert config.API_KEYS['google'] == 'test_google'
\ No newline at end of file
+        assert config.API_KEYS['google'] == 'test_google'
+        assert config.API_KEYS['lambda'] == 'test_lambda'
\ No newline at end of file