Commit 6239fdd
Signed-off-by: Sean Smith <[email protected]>
1 parent: 3c01c98
Showing 10 changed files with 248 additions and 0 deletions.
19 changes: 19 additions & 0 deletions
llama-index-integrations/llms/llama-index-llms-contextual/README.md
@@ -0,0 +1,19 @@
# Contextual LLM Integration for LlamaIndex

This package provides a Contextual LLM integration for LlamaIndex.

## Installation

```bash
pip install llama-index-llms-contextual
```

## Usage

```python
from llama_index.llms.contextual import Contextual

llm = Contextual(model="v1", api_key="your_api_key")

response = llm.complete("Explain the importance of Grounded Language Models.")
```
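The constructor also exposes the grounding controls defined on the class in `base.py` (`system_prompt`, `avoid_commentary`). A minimal sketch of combining them; the prompt strings are illustrative, not part of this commit:

```python
from llama_index.llms.contextual import Contextual

# system_prompt steers generation; avoid_commentary suppresses conversational
# filler that is not strictly grounded in the supplied context.
llm = Contextual(
    model="v1",
    api_key="your_api_key",
    system_prompt="Answer concisely, using only the provided context.",
    avoid_commentary=True,
)

response = llm.complete("Explain the importance of Grounded Language Models.")
print(response)
```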
1 change: 1 addition & 0 deletions
llama-index-integrations/llms/llama-index-llms-contextual/llama_index/llms/contextual/BUILD
@@ -0,0 +1 @@
python_sources()
3 changes: 3 additions & 0 deletions
...dex-integrations/llms/llama-index-llms-contextual/llama_index/llms/contextual/__init__.py
@@ -0,0 +1,3 @@
from llama_index.llms.contextual.base import Contextual

__all__ = ["Contextual"]
117 changes: 117 additions & 0 deletions
...a-index-integrations/llms/llama-index-llms-contextual/llama_index/llms/contextual/base.py
@@ -0,0 +1,117 @@
import os
from typing import Any, Optional

from llama_index.llms.openai_like import OpenAILike
from pydantic import Field
from llama_index.core.llms.callbacks import (
    llm_chat_callback,
    llm_completion_callback,
)
from llama_index.core.base.llms.types import (
    CompletionResponse,
    CompletionResponseGen,
    ChatResponse,
    ChatResponseGen,
    ChatResponseAsyncGen,
    CompletionResponseAsyncGen,
    LLMMetadata,
    MessageRole,
    ChatMessage,
)

from contextual import ContextualAI


class Contextual(OpenAILike):
    """
    Generate a response using Contextual's Grounded Language Model (GLM),
    an LLM engineered specifically to prioritize faithfulness to in-context
    retrievals over parametric knowledge, reducing hallucinations in
    Retrieval-Augmented Generation.

    The total request cannot exceed 32,000 tokens. Email
    [email protected] with any feedback or questions.

    Examples:
        `pip install llama-index-llms-contextual`

        ```python
        from llama_index.llms.contextual import Contextual

        # Set up the Contextual class with the required model and API key
        llm = Contextual(model="v1", api_key="your_api_key")

        # Call the complete method with a query
        response = llm.complete("Explain the importance of low latency LLMs")
        print(response)
        ```
    """

    model: str = Field(description="The model to use. Currently only supports `v1`.")
    api_key: str = Field(
        description="The API key to use.",
        default=os.environ.get("API_KEY", None),
    )
    base_url: str = Field(
        description="The base URL to use.",
        default="https://api.contextual.com",
    )
    system_prompt: str = Field(
        # Default added so the field stays optional, matching the __init__ signature.
        default="",
        description=(
            "Instructions that the model follows when generating responses. Note "
            "that we do not guarantee that the model follows these instructions "
            "exactly."
        ),
    )
    avoid_commentary: bool = Field(
        default=False,
        description=(
            "Flag to indicate whether the model should avoid providing additional "
            "commentary in responses. Commentary is conversational in nature and "
            "does not contain verifiable claims; therefore, commentary is not "
            "strictly grounded in available context. However, commentary may "
            "provide useful context which improves the helpfulness of responses."
        ),
    )
    client: Any = Field(default=None, exclude=True, description="Contextual AI Client")

    def __init__(
        self,
        model: str,
        api_key: Optional[str] = None,
        base_url: str = "https://api.contextual.com",
        system_prompt: str = "",
        avoid_commentary: bool = False,
        **openai_llm_kwargs: Any,
    ) -> None:
        api_key = api_key or os.environ.get("API_KEY", None)

        # Initialize the pydantic model first; attributes cannot be assigned
        # on a pydantic model before __init__ has run.
        super().__init__(
            model=model,
            api_key=api_key,
            api_base=base_url,
            base_url=base_url,
            system_prompt=system_prompt,
            avoid_commentary=avoid_commentary,
            is_chat_model=openai_llm_kwargs.pop("is_chat_model", True),
            **openai_llm_kwargs,
        )

        try:
            self.client = ContextualAI(api_key=api_key, base_url=base_url)
        except Exception as e:
            raise ValueError(f"Failed to initialize Contextual client: {e}")

    @classmethod
    def class_name(cls) -> str:
        """Get class name."""
        return "contextual-clm"

    # Synchronous Methods
    @llm_completion_callback()
    def complete(self, prompt: str, **kwargs) -> CompletionResponse:
        """
        Generate a completion for the given prompt.

        Args:
            prompt (str): The input prompt to generate a completion for.
            **kwargs: Additional keyword arguments for the API request.

        Returns:
            CompletionResponse: The generated text completion.
        """
        return self._generate(
            knowledge=None,
            messages=[ChatMessage(role=MessageRole.USER, content=prompt)],
            model=self.model,
            system_prompt=self.system_prompt,
            **kwargs,
        )

    def _generate(
        self, knowledge, messages, model, system_prompt, **kwargs
    ) -> CompletionResponse:
        """Generate a completion through the Contextual AI client."""
        raw_message = self.client.generate.create(
            # The generate endpoint expects plain role/content dicts rather
            # than ChatMessage objects.
            messages=[
                {"role": message.role.value, "content": message.content}
                for message in messages
            ],
            # The API expects a list of knowledge strings; pass an empty list
            # when no retrievals are supplied.
            knowledge=knowledge or [],
            model=model,
            system_prompt=system_prompt,
            extra_body={
                "avoid_commentary": self.avoid_commentary,
            },
        )
        # The generated text is exposed on the `response` attribute of the result.
        return CompletionResponse(text=raw_message.response)
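Because `complete` always passes `knowledge=None`, grounded generation with explicit retrievals currently goes through `_generate`. A hedged sketch of that path, assuming the signature above; the knowledge snippet and prompts are illustrative:

```python
from llama_index.core.base.llms.types import ChatMessage, MessageRole
from llama_index.llms.contextual import Contextual

llm = Contextual(model="v1", api_key="your_api_key")

# Supply retrieved snippets as the knowledge the GLM must stay faithful to.
response = llm._generate(
    knowledge=["The GLM prioritizes in-context retrievals over parametric knowledge."],
    messages=[ChatMessage(role=MessageRole.USER, content="What does the GLM prioritize?")],
    model="v1",
    system_prompt="Answer only from the provided knowledge.",
)
print(response.text)
```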
52 changes: 52 additions & 0 deletions
...ations/llms/llama-index-llms-contextual/llama_index/llms/contextual/test-contextual.ipynb
@@ -0,0 +1,52 @@
{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Contextual LLM"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "vscode": {
     "languageId": "plaintext"
    }
   },
   "outputs": [],
   "source": [
    "!pip install llama-index-llms-contextual"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "vscode": {
     "languageId": "plaintext"
    }
   },
   "outputs": [],
   "source": [
    "from llama_index.llms.contextual import Contextual\n",
    "\n",
    "# Set up the Contextual class with the required model and API key\n",
    "llm = Contextual(model=\"v1\", api_key=\"your_api_key\")\n",
    "\n",
    "# Call the complete method with a query\n",
    "response = llm.complete(\"Explain the importance of Grounded Language Models.\")\n",
    "\n",
    "print(response)"
   ]
  }
 ],
 "metadata": {
  "language_info": {
   "name": "python"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
Empty file.
46 changes: 46 additions & 0 deletions
llama-index-integrations/llms/llama-index-llms-contextual/pyproject.toml
@@ -0,0 +1,46 @@
[build-system]
build-backend = "poetry.core.masonry.api"
requires = ["poetry-core"]

[tool.codespell]
check-filenames = true
check-hidden = true
skip = "*.csv,*.html,*.json,*.jsonl,*.pdf,*.txt,*.ipynb"

[tool.llamahub]
contains_example = false
import_path = "llama_index.llms.contextual"

[tool.llamahub.class_authors]
Contextual = "sean-smith"

[tool.mypy]
disallow_untyped_defs = true
exclude = ["_static", "build", "examples", "notebooks", "venv"]
ignore_missing_imports = true
python_version = "3.9"

[tool.poetry]
authors = ["Sean Smith <[email protected]>"]
description = "llama-index contextual integration"
exclude = ["**/BUILD"]
license = "MIT"
name = "llama-index-llms-contextual"
readme = "README.md"
version = "0.0.1"

[tool.poetry.dependencies]
python = ">=3.9,<4.0"
llama-index-llms-openai-like = "^0.3.3"
contextual-client = "^0.2.0"

[tool.poetry.group.dev.dependencies.black]
extras = ["jupyter"]
version = "<=23.9.1,>=23.7.0"

[tool.poetry.group.dev.dependencies.codespell]
extras = ["toml"]
version = ">=v2.2.6"

[[tool.poetry.packages]]
include = "llama_index/"
1 change: 1 addition & 0 deletions
llama-index-integrations/llms/llama-index-llms-contextual/tests/BUILD
@@ -0,0 +1 @@
python_sources()
Empty file.
9 changes: 9 additions & 0 deletions
llama-index-integrations/llms/llama-index-llms-contextual/tests/test.py
@@ -0,0 +1,9 @@
from llama_index.llms.contextual import Contextual

# Set up the Contextual class with the required model and API key.
# Use a placeholder or an environment variable; never commit a real key.
llm = Contextual(model="v1", api_key="your_api_key")

# Call the complete method with a query
response = llm.complete("Explain the importance of low latency LLMs")

print(response)
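Since this script calls the live API, a mocked variant is safer for CI. A minimal sketch, assuming the client layout from `base.py` above (`llm.client.generate.create` returning an object with a `response` attribute); the mocked response text is illustrative:

```python
from types import SimpleNamespace
from unittest.mock import MagicMock

from llama_index.llms.contextual import Contextual


def test_complete_returns_text_from_client():
    llm = Contextual(model="v1", api_key="fake-key")

    # Swap the real Contextual AI client for a mock so no network call is made.
    llm.client = MagicMock()
    llm.client.generate.create.return_value = SimpleNamespace(
        response="Low latency LLMs enable responsive applications."
    )

    result = llm.complete("Explain the importance of low latency LLMs")

    assert result.text == "Low latency LLMs enable responsive applications."
    llm.client.generate.create.assert_called_once()
```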