-
Notifications
You must be signed in to change notification settings - Fork 5.7k
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Signed-off-by: Sean Smith <[email protected]>
- Loading branch information
1 parent
ebc728c
commit 0c332c3
Showing
10 changed files
with
146 additions
and
0 deletions.
There are no files selected for viewing
19 changes: 19 additions & 0 deletions
19
llama-index-integrations/llms/llama-index-llms-contextual/README.md
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,19 @@ | ||
# Contextual LLM Integration for LlamaIndex

This package provides a Contextual LLM integration for LlamaIndex.

## Installation

```bash
pip install llama-index-llms-contextual
```

## Usage

```python
from llama_index.llms.contextual import Contextual

llm = Contextual(model="v1", api_key="your_api_key")

response = llm.complete("Explain the importance of Grounded Language Models.")
```
1 change: 1 addition & 0 deletions
1
llama-index-integrations/llms/llama-index-llms-contextual/llama_index/BUILD
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1 @@ | ||
python_sources() |
3 changes: 3 additions & 0 deletions
3
llama-index-integrations/llms/llama-index-llms-contextual/llama_index/__utils__.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
from llama_index.llms.contextual.base import Contextual | ||
|
||
__all__ = ["Contextual"] |
61 changes: 61 additions & 0 deletions
61
llama-index-integrations/llms/llama-index-llms-contextual/llama_index/base.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,61 @@ | ||
import os | ||
from typing import Any, Optional | ||
|
||
from llama_index.core.llms.base import OpenAILike | ||
from llama_index.llms.contextual.utils import get_context_window, FUNCTION_CALLING_MODELS | ||
from pydantic import Field | ||
|
||
class Contextual(OpenAILike): | ||
""" | ||
Generate a response using Contextual's Grounded Language Model (GLM), an LLM engineered specifically to prioritize faithfulness to in-context retrievals over parametric knowledge to reduce hallucinations in Retrieval-Augmented Generation. | ||
The total request cannot exceed 32,000 tokens. Email [email protected] with any feedback or questions. | ||
Examples: | ||
`pip install llama-index-llms-contextual` | ||
```python | ||
from llama_index.llms.contextual import Contextual | ||
# Set up the Contextual class with the required model and API key | ||
llm = Contextual(model="contextual-clm", api_key="your_api_key") | ||
# Call the complete method with a query | ||
response = llm.complete("Explain the importance of low latency LLMs") | ||
print(response) | ||
``` | ||
""" | ||
|
||
model: str = Field(description="The model to use. Currently only supports `v1`.") | ||
base_url: str = Field(description="The base URL to use.", default="https://api.contextual.com") | ||
system_prompt: str = Field(description="Instructions that the model follows when generating responses. Note that we do not guarantee that the model follows these instructions exactly.") | ||
avoid_commentary: bool = Field(description="Flag to indicate whether the model should avoid providing additional commentary in responses. Commentary is conversational in nature and does not contain verifiable claims; therefore, commentary is not strictly grounded in available context. However, commentary may provide useful context which improves the helpfulness of responses.", default=False) | ||
|
||
|
||
def __init__( | ||
self, | ||
model: str, | ||
api_key: Optional[str] = None, | ||
base_url: str = "https://api.contextual.com", | ||
**openai_llm_kwargs: Any, | ||
) -> None: | ||
api_key = api_key or os.environ.get("API_KEY", None) | ||
context_window = openai_llm_kwargs.pop( | ||
"context_window", get_context_window(model) | ||
) | ||
super().__init__( | ||
model=model, | ||
api_key=api_key, | ||
api_base=base_url, | ||
is_chat_model=openai_llm_kwargs.pop("is_chat_model", True), | ||
is_function_calling_model=openai_llm_kwargs.pop( | ||
"is_function_calling_model", model in FUNCTION_CALLING_MODELS | ||
), | ||
**openai_llm_kwargs, | ||
) | ||
|
||
@classmethod | ||
def class_name(cls) -> str: | ||
"""Get class name.""" | ||
return "contextual-clm" |
9 changes: 9 additions & 0 deletions
9
llama-index-integrations/llms/llama-index-llms-contextual/llama_index/test.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,9 @@ | ||
from llama_index.llms.contextual import Contextual | ||
|
||
# Set up the Contextual class with the required model and API key | ||
llm = Contextual(model="v1", api_key="your_api_key") | ||
|
||
# Call the complete method with a query | ||
response = llm.complete("Explain the importance of Grounded Language Models.") | ||
|
||
print(response) |
Empty file.
45 changes: 45 additions & 0 deletions
45
llama-index-integrations/llms/llama-index-llms-contextual/pyproject.toml
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,45 @@ | ||
[build-system] | ||
build-backend = "poetry.core.masonry.api" | ||
requires = ["poetry-core"] | ||
|
||
[tool.codespell] | ||
check-filenames = true | ||
check-hidden = true | ||
skip = "*.csv,*.html,*.json,*.jsonl,*.pdf,*.txt,*.ipynb" | ||
|
||
[tool.llamahub] | ||
contains_example = false | ||
import_path = "llama_index.llms.contextual" | ||
|
||
[tool.llamahub.class_authors] | ||
Contextual = "sean-smith" | ||
|
||
[tool.mypy] | ||
disallow_untyped_defs = true | ||
exclude = ["_static", "build", "examples", "notebooks", "venv"] | ||
ignore_missing_imports = true | ||
python_version = "3.9"
|
||
[tool.poetry] | ||
authors = ["Sean Smith <[email protected]>"] | ||
description = "llama-index contextual integration" | ||
exclude = ["**/BUILD"] | ||
license = "MIT" | ||
name = "llama-index-llms-contextual" | ||
readme = "README.md" | ||
version = "0.0.1" | ||
|
||
[tool.poetry.dependencies] | ||
python = ">=3.9,<4.0" | ||
llama-index-llms-openai-like = "^0.3.3" | ||
|
||
[tool.poetry.group.dev.dependencies.black] | ||
extras = ["jupyter"] | ||
version = "<=23.9.1,>=23.7.0" | ||
|
||
[tool.poetry.group.dev.dependencies.codespell] | ||
extras = ["toml"] | ||
version = ">=v2.2.6" | ||
|
||
[[tool.poetry.packages]] | ||
include = "llama_index/" |
1 change: 1 addition & 0 deletions
1
llama-index-integrations/llms/llama-index-llms-contextual/tests/BUILD
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1 @@ | ||
python_sources() |
Empty file.
7 changes: 7 additions & 0 deletions
7
llama-index-integrations/llms/llama-index-llms-contextual/tests/test.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,7 @@ | ||
from llama_index.core.base.llms.base import BaseLLM | ||
from llama_index.llms.contextual import Contextual | ||
|
||
|
||
def test_llm_class(): | ||
names_of_base_classes = [b.__name__ for b in Contextual.__mro__] | ||
assert BaseLLM.__name__ in names_of_base_classes |