Add agents personality to extract_references flow as well, for each respective chat model
sabaimran committed Sep 16, 2024
1 parent 4ffcc0e commit 5b7bc46
Showing 8 changed files with 28 additions and 7 deletions.
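The change follows one pattern across every chat backend: each extract-questions function gains an optional personality_context argument, and that string is interpolated into the query-extraction prompt. Below is a minimal, self-contained sketch of that pattern; the prompt wording and date are invented for illustration and are not khoj's actual template.

from typing import Optional

# Hypothetical prompt text standing in for khoj's extract-questions templates in
# src/khoj/processor/conversation/prompts.py; the wording here is invented.
EXTRACT_QUESTIONS_SYSTEM = (
    "Construct search queries to retrieve notes relevant to the user's message.\n"
    "{personality_context}"
    "Current Date: {current_date}\n"
)

def extract_questions(query: str, personality_context: Optional[str] = None) -> str:
    """Build the query-extraction prompt, optionally flavoured by the agent's personality."""
    # Fall back to an empty string so the placeholder vanishes when no agent is attached,
    # mirroring the `if agent else ""` guard added in api.py below.
    system_prompt = EXTRACT_QUESTIONS_SYSTEM.format(
        personality_context=personality_context or "",
        current_date="2024-09-16",
    )
    return f"{system_prompt}\nUser: {query}"

# With and without an agent personality attached:
print(extract_questions("what did I write about gardening?"))
print(extract_questions("what did I write about gardening?", "You are a terse research aide.\n"))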
2 changes: 2 additions & 0 deletions src/khoj/processor/conversation/anthropic/anthropic_chat.py
@@ -27,6 +27,7 @@ def extract_questions_anthropic(
temperature=0.7,
location_data: LocationData = None,
user: KhojUser = None,
personality_context: Optional[str] = None,
):
"""
Infer search queries to retrieve relevant notes to answer user query
@@ -59,6 +60,7 @@ def extract_questions_anthropic(
yesterday_date=(today - timedelta(days=1)).strftime("%Y-%m-%d"),
location=location,
username=username,
personality_context=personality_context,
)

prompt = prompts.extract_questions_anthropic_user_message.format(
2 changes: 2 additions & 0 deletions src/khoj/processor/conversation/google/gemini_chat.py
@@ -28,6 +28,7 @@ def extract_questions_gemini(
max_tokens=None,
location_data: LocationData = None,
user: KhojUser = None,
personality_context: Optional[str] = None,
):
"""
Infer search queries to retrieve relevant notes to answer user query
@@ -60,6 +61,7 @@ def extract_questions_gemini(
yesterday_date=(today - timedelta(days=1)).strftime("%Y-%m-%d"),
location=location,
username=username,
personality_context=personality_context,
)

prompt = prompts.extract_questions_anthropic_user_message.format(
4 changes: 3 additions & 1 deletion src/khoj/processor/conversation/offline/chat_model.py
@@ -2,7 +2,7 @@
import logging
from datetime import datetime, timedelta
from threading import Thread
from typing import Any, Iterator, List, Union
from typing import Any, Iterator, List, Optional, Union

from langchain.schema import ChatMessage
from llama_cpp import Llama
@@ -33,6 +33,7 @@ def extract_questions_offline(
user: KhojUser = None,
max_prompt_size: int = None,
temperature: float = 0.7,
personality_context: Optional[str] = None,
) -> List[str]:
"""
Infer search queries to retrieve relevant notes to answer user query
@@ -73,6 +74,7 @@ def extract_questions_offline(
this_year=today.year,
location=location,
username=username,
personality_context=personality_context,
)

messages = generate_chatml_messages_with_context(
2 changes: 2 additions & 0 deletions src/khoj/processor/conversation/openai/gpt.py
@@ -32,6 +32,7 @@ def extract_questions(
user: KhojUser = None,
uploaded_image_url: Optional[str] = None,
vision_enabled: bool = False,
personality_context: Optional[str] = None,
):
"""
Infer search queries to retrieve relevant notes to answer user query
@@ -68,6 +69,7 @@ def extract_questions(
yesterday_date=(today - timedelta(days=1)).strftime("%Y-%m-%d"),
location=location,
username=username,
personality_context=personality_context,
)

prompt = construct_structured_message(
5 changes: 3 additions & 2 deletions src/khoj/processor/conversation/prompts.py
@@ -211,6 +211,7 @@
- Add date filters to your search queries from questions and answers when required to retrieve the relevant information.
- When asked a meta, vague or random questions, search for a variety of broad topics to answer the user's question.
- Share relevant search queries as a JSON list of strings. Do not say anything else.
{personality_context}
Current Date: {day_of_week}, {current_date}
User's Location: {location}
@@ -261,7 +262,7 @@
- Break messages into multiple search queries when required to retrieve the relevant information.
- Add date filters to your search queries from questions and answers when required to retrieve the relevant information.
- When asked a meta, vague or random questions, search for a variety of broad topics to answer the user's question.
{personality_context}
What searches will you perform to answer the users question? Respond with search queries as list of strings in a JSON object.
Current Date: {day_of_week}, {current_date}
User's Location: {location}
@@ -318,7 +319,7 @@
- Break messages into multiple search queries when required to retrieve the relevant information.
- Add date filters to your search queries from questions and answers when required to retrieve the relevant information.
- When asked a meta, vague or random questions, search for a variety of broad topics to answer the user's question.
{personality_context}
What searches will you perform to answer the users question? Respond with a JSON object with the key "queries" mapping to a list of searches you would perform on the user's knowledge base. Just return the queries and nothing else.
Current Date: {day_of_week}, {current_date}
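The personality_context template that fills the new placeholder is defined elsewhere in prompts.py and is not part of this diff. The snippet below is a hypothetical illustration of how the placeholder behaves once formatted, including the empty-string fallback when no agent is selected; the template wording is invented.

# Hypothetical wording for the personality_context template; khoj's real template is
# defined elsewhere in prompts.py and is not shown in this commit.
personality_context = "You have the following personality: {personality}\n"

extract_questions_system_prompt = (
    "Construct search queries to retrieve relevant notes.\n"
    "- Share relevant search queries as a JSON list of strings. Do not say anything else.\n"
    "{personality_context}"
    "Current Date: {day_of_week}, {current_date}\n"
)

with_agent = extract_questions_system_prompt.format(
    personality_context=personality_context.format(personality="a patient gardening coach"),
    day_of_week="Monday",
    current_date="2024-09-16",
)
without_agent = extract_questions_system_prompt.format(
    personality_context="",  # the empty-string fallback used when no agent is selected
    day_of_week="Monday",
    current_date="2024-09-16",
)
print(with_agent)
print(without_agent)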
15 changes: 14 additions & 1 deletion src/khoj/routers/api.py
@@ -27,7 +27,13 @@
get_user_photo,
get_user_search_model_or_default,
)
from khoj.database.models import ChatModelOptions, KhojUser, SpeechToTextModelOptions
from khoj.database.models import (
Agent,
ChatModelOptions,
KhojUser,
SpeechToTextModelOptions,
)
from khoj.processor.conversation import prompts
from khoj.processor.conversation.anthropic.anthropic_chat import (
extract_questions_anthropic,
)
@@ -333,6 +339,7 @@ async def extract_references_and_questions(
location_data: LocationData = None,
send_status_func: Optional[Callable] = None,
uploaded_image_url: Optional[str] = None,
agent: Agent = None,
):
user = request.user.object if request.user.is_authenticated else None

@@ -368,6 +375,8 @@ async def extract_references_and_questions(
using_offline_chat = False
logger.debug(f"Filters in query: {filters_in_query}")

personality_context = prompts.personality_context(personality=agent.personality) if agent else ""

# Infer search queries from user message
with timer("Extracting search queries took", logger):
# If we've reached here, either the user has enabled offline chat or the openai model is enabled.
@@ -392,6 +401,7 @@ async def extract_references_and_questions(
location_data=location_data,
user=user,
max_prompt_size=conversation_config.max_prompt_size,
personality_context=personality_context,
)
elif conversation_config.model_type == ChatModelOptions.ModelType.OPENAI:
openai_chat_config = conversation_config.openai_config
@@ -408,6 +418,7 @@ async def extract_references_and_questions(
user=user,
uploaded_image_url=uploaded_image_url,
vision_enabled=vision_enabled,
personality_context=personality_context,
)
elif conversation_config.model_type == ChatModelOptions.ModelType.ANTHROPIC:
api_key = conversation_config.openai_config.api_key
@@ -419,6 +430,7 @@ async def extract_references_and_questions(
conversation_log=meta_log,
location_data=location_data,
user=user,
personality_context=personality_context,
)
elif conversation_config.model_type == ChatModelOptions.ModelType.GOOGLE:
api_key = conversation_config.openai_config.api_key
@@ -431,6 +443,7 @@ async def extract_references_and_questions(
location_data=location_data,
max_tokens=conversation_config.max_prompt_size,
user=user,
personality_context=personality_context,
)

# Collate search results as context for GPT
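On the caller side, api.py now resolves the agent (if any) into a personality snippet once and forwards the same keyword argument to whichever backend's extractor the model type selects. A simplified, self-contained sketch of that dispatch follows; the function bodies and model-type keys are placeholders, not khoj's real signatures.

# Simplified stand-ins for the dispatch inside extract_references_and_questions;
# the real khoj extractors take many more arguments (conversation log, location, model, ...).
def extract_questions_offline(text, personality_context=""):
    return [f"[offline] {text} {personality_context}".strip()]

def extract_questions_openai(text, personality_context=""):
    return [f"[openai] {text} {personality_context}".strip()]

EXTRACTORS = {
    "offline": extract_questions_offline,
    "openai": extract_questions_openai,
}

def extract_references_and_questions(query, model_type, agent_personality=None):
    # Mirror the new guard: only build a personality snippet when an agent is attached.
    personality_context = (
        f"You have the following personality: {agent_personality}" if agent_personality else ""
    )
    # The same keyword argument is forwarded regardless of which backend is selected.
    return EXTRACTORS[model_type](query, personality_context=personality_context)

print(extract_references_and_questions("notes on travel", "openai", "a cheerful travel planner"))
print(extract_references_and_questions("notes on travel", "offline"))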
1 change: 1 addition & 0 deletions src/khoj/routers/api_chat.py
@@ -849,6 +849,7 @@ def collect_telemetry():
location,
partial(send_event, ChatEvent.STATUS),
uploaded_image_url=uploaded_image_url,
agent=agent,
):
if isinstance(result, dict) and ChatEvent.STATUS in result:
yield result[ChatEvent.STATUS]
4 changes: 1 addition & 3 deletions src/khoj/routers/helpers.py
@@ -703,15 +703,13 @@ async def send_message_to_model_wrapper(
model_type=conversation_config.model_type,
)

openai_response = send_message_to_model(
return send_message_to_model(
messages=truncated_messages,
api_key=api_key,
model=chat_model,
response_type=response_type,
api_base_url=api_base_url,
)

return openai_response
elif model_type == ChatModelOptions.ModelType.ANTHROPIC:
api_key = conversation_config.openai_config.api_key
truncated_messages = generate_chatml_messages_with_context(
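Reading this hunk without diff markers: the `openai_response = send_message_to_model(` line, the blank line before `return openai_response`, and `return openai_response` itself are the three deletions; the single addition, `return send_message_to_model(`, returns the call result directly instead of staging it in a temporary variable.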
