-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathLLM.py
53 lines (38 loc) · 1.5 KB
/
LLM.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
# Standard-library, then third-party imports (python-dotenv, openai SDK).
import os
from typing import List, Tuple
from dotenv import load_dotenv
from openai import OpenAI
# Load OPENAI_API_KEY / OPENAI_API_BASE / LLM_MODEL from a local .env file, if present.
load_dotenv()
class BaseModel:
    """Abstract interface for chat-capable language-model backends.

    Subclasses must override :meth:`chat`; the base implementation
    raises ``NotImplementedError`` instead of silently returning ``None``.
    """

    def __init__(self) -> None:
        pass

    def chat(self, prompt: str, history: List[dict], meta_instruction: str = ''):
        """Send ``prompt`` (with conversation ``history`` and an optional
        system ``meta_instruction``) to the model.

        Raises:
            NotImplementedError: always; subclasses must provide the implementation.
        """
        raise NotImplementedError("Subclasses of BaseModel must implement chat()")
class OpenAIChat(BaseModel):
def __init__(self) -> None:
self.client = OpenAI(
api_key=os.getenv('OPENAI_API_KEY'),
base_url=os.getenv('OPENAI_API_BASE'))
def chat(self, prompt: str, history: List[dict], meta_instruction: str = '') -> Tuple[str, List[dict]]:
messages = []
if meta_instruction:
messages.append({"role": "system", "content": meta_instruction})
for msg in history:
if 'user' in msg:
messages.append({"role": "user", "content": msg['user']})
if 'assistant' in msg:
messages.append({"role": "assistant", "content": msg['assistant']})
messages.append({"role": "user", "content": prompt})
response = self.client.chat.completions.create(
model=os.getenv('LLM_MODEL'),
messages=messages,
temperature=0.1
)
assistant_message = response.choices[0].message.content
history.append({"user": prompt, "assistant": assistant_message})
return assistant_message, history
# Usage example.
if __name__ == '__main__':
    # Make sure the required environment variables are set.
    chat_model = OpenAIChat()
    reply, _history = chat_model.chat('Hello', [])
    print(reply)