From c53fc986a426db646206a7410ed42c020cdb2ba2 Mon Sep 17 00:00:00 2001
From: LeonOstrez <41999013+LeonOstrez@users.noreply.github.com>
Date: Tue, 1 Oct 2024 19:49:48 +0200
Subject: [PATCH 1/3] Update README.md
---
README.md | 13 -------------
1 file changed, 13 deletions(-)
diff --git a/README.md b/README.md
index 7b8497061..24df917f9 100644
--- a/README.md
+++ b/README.md
@@ -44,16 +44,6 @@
---
-
-
-

-
-
-
-GPT Pilot is the core technology for the [Pythagora VS Code extension](https://bit.ly/3IeZxp6) that aims to provide **the first real AI developer companion**. Not just an autocomplete or a helper for PR messages but rather a real AI developer that can write full features, debug them, talk to you about issues, ask for review, etc.
-
----
-
📫 If you would like to get updates on future releases or just get in touch, join our [Discord server](https://discord.gg/HaqXugmxr9) or you [can add your email here](http://eepurl.com/iD6Mpo). 📬
---
@@ -97,9 +87,6 @@ If you are interested in our learnings during this project, you can check [our l
- **Python 3.9+**
# 🚦How to start using gpt-pilot?
-👉 If you are using VS Code as your IDE, the easiest way to start is by downloading [GPT Pilot VS Code extension](https://bit.ly/3IeZxp6). 👈
-
-Otherwise, you can use the CLI tool.
### If you're new to GPT Pilot:
From 5985ed3987b2204dad6ce0f4d7181e6f5b6e73cf Mon Sep 17 00:00:00 2001
From: Ivan
Date: Wed, 23 Oct 2024 11:47:58 +0400
Subject: [PATCH 2/3] Added base support for requests to aiml
---
core/config/__init__.py | 1 +
core/llm/aiml_client.py | 24 ++++++++++++++++++++++++
core/llm/base.py | 3 +++
example-config.json | 9 ++++++++-
4 files changed, 36 insertions(+), 1 deletion(-)
create mode 100644 core/llm/aiml_client.py
diff --git a/core/config/__init__.py b/core/config/__init__.py
index 90ac3c6a0..f77864794 100644
--- a/core/config/__init__.py
+++ b/core/config/__init__.py
@@ -70,6 +70,7 @@ class LLMProvider(str, Enum):
GROQ = "groq"
LM_STUDIO = "lm-studio"
AZURE = "azure"
+ AIML = "aiml"
class UIAdapter(str, Enum):
diff --git a/core/llm/aiml_client.py b/core/llm/aiml_client.py
new file mode 100644
index 000000000..76688f515
--- /dev/null
+++ b/core/llm/aiml_client.py
@@ -0,0 +1,24 @@
+from httpx import Timeout
+from openai import AsyncOpenAI
+
+from core.config import LLMProvider
+from core.llm.openai_client import OpenAIClient
+from core.log import get_logger
+
+log = get_logger(__name__)
+
+
+class AIMLClient(OpenAIClient):
+ provider = LLMProvider.AIML
+ stream_options = None
+
+ def _init_client(self):
+ self.client = AsyncOpenAI(
+ api_key=self.config.api_key,
+ base_url=self.config.base_url,
+ timeout=Timeout(
+ max(self.config.connect_timeout, self.config.read_timeout),
+ connect=self.config.connect_timeout,
+ read=self.config.read_timeout,
+ ),
+ )
diff --git a/core/llm/base.py b/core/llm/base.py
index 1c1143ffa..4c28d474b 100644
--- a/core/llm/base.py
+++ b/core/llm/base.py
@@ -330,6 +330,7 @@ def for_provider(provider: LLMProvider) -> type["BaseLLMClient"]:
:param provider: Provider to return the client for.
:return: Client class for the specified provider.
"""
+ from .aiml_client import AIMLClient
from .anthropic_client import AnthropicClient
from .azure_client import AzureClient
from .groq_client import GroqClient
@@ -337,6 +338,8 @@ def for_provider(provider: LLMProvider) -> type["BaseLLMClient"]:
if provider == LLMProvider.OPENAI:
return OpenAIClient
+ elif provider == LLMProvider.AIML:
+ return AIMLClient
elif provider == LLMProvider.ANTHROPIC:
return AnthropicClient
elif provider == LLMProvider.GROQ:
diff --git a/example-config.json b/example-config.json
index b6c00729f..e9fa7bed7 100644
--- a/example-config.json
+++ b/example-config.json
@@ -10,6 +10,13 @@
"connect_timeout": 60.0,
"read_timeout": 20.0
},
+ "aiml": {
+        // Example config for AIML (to access 100+ models - see https://docs.aimlapi.com/api-overview/text-models-llm)
+ "base_url": "https://api.aimlapi.com/v1",
+ "api_key": "your-aiml-api-key",
+ "connect_timeout": 60.0,
+ "read_timeout": 20.0
+ },
// Example config for Anthropic (see https://docs.anthropic.com/docs/api-reference)
"anthropic": {
"base_url": "https://api.anthropic.com",
@@ -86,4 +93,4 @@
// Files larger than 50KB will be ignored, even if they otherwise wouldn't be.
"ignore_size_threshold": 50000
}
-}
+}
\ No newline at end of file
From b03a42d3e5c9bd2e1d4e4b759dfefff5ab94b72b Mon Sep 17 00:00:00 2001
From: Ivan
Date: Thu, 24 Oct 2024 11:49:36 +0400
Subject: [PATCH 3/3] Added a mention of aiml
---
README.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/README.md b/README.md
index 24df917f9..dcf9afe45 100644
--- a/README.md
+++ b/README.md
@@ -99,7 +99,7 @@ After you have Python and (optionally) PostgreSQL installed, follow these steps:
5. `pip install -r requirements.txt` (install the dependencies)
6. `cp example-config.json config.json` (create `config.json` file)
7. Set your key and other settings in `config.json` file:
- - LLM Provider (`openai`, `anthropic` or `groq`) key and endpoints (leave `null` for default) (note that Azure and OpenRouter are suppored via the `openai` setting)
+   - LLM Provider (`openai`, `aiml`, `anthropic` or `groq`) key and endpoints (leave `null` for default) (note that Azure and OpenRouter are supported via the `openai` setting)
- Your API key (if `null`, will be read from the environment variables)
- database settings: sqlite is used by default, PostgreSQL should also work
- optionally update `fs.ignore_paths` and add files or folders which shouldn't be tracked by GPT Pilot in workspace, useful to ignore folders created by compilers