diff --git a/terminalgpt/chat_utils.py b/terminalgpt/chat_utils.py
index 0d23c64..0c4d59e 100644
--- a/terminalgpt/chat_utils.py
+++ b/terminalgpt/chat_utils.py
@@ -3,7 +3,8 @@
 import os
 import sys
 import time
-
+import litellm
+from litellm import completion
 import openai
 import tiktoken
 from colorama import Back, Fore, Style
@@ -106,7 +107,7 @@ def get_user_answer(messages, model):
                 color="blue",
                 side="right",
             ):
-                answer = openai.ChatCompletion.create(model=model, messages=messages)
+                answer = completion(model=model, messages=messages)
                 return answer
         except openai.InvalidRequestError as error:
             if "Please reduce the length of the messages" in str(error):
diff --git a/terminalgpt/conversations.py b/terminalgpt/conversations.py
index db4d99f..c358696 100644
--- a/terminalgpt/conversations.py
+++ b/terminalgpt/conversations.py
@@ -5,6 +5,8 @@
 import time
 
 import openai
+import litellm
+from litellm import completion
 from colorama import Back, Style
 
 from terminalgpt import config, print_utils
@@ -79,7 +81,7 @@ def get_system_answer(messages):
 
     while True:
         try:
-            answer = openai.ChatCompletion.create(
+            answer = completion(
                 model=config.DEFAULT_MODEL, messages=messages
             )
             return answer
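
For context, `litellm.completion` is documented as a drop-in replacement for the OpenAI chat call: it accepts the same `model` and `messages` arguments and returns an OpenAI-compatible response object. Below is a minimal standalone sketch of the call pattern this diff switches to; the model name and prompt are placeholders, not values from this change.

```python
from litellm import completion

# Same message format as openai.ChatCompletion.create
messages = [{"role": "user", "content": "Hello!"}]

# Placeholder model name; terminalgpt passes its configured model here
answer = completion(model="gpt-3.5-turbo", messages=messages)

# litellm returns an OpenAI-compatible response, so existing code that reads
# answer["choices"][0]["message"]["content"] keeps working unchanged
print(answer["choices"][0]["message"]["content"])
```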