33 changes: 29 additions & 4 deletions agents/lg101-weather-agent.py
@@ -1,11 +1,36 @@
from langchain_core.tools import tool
from langchain.agents import create_agent
import requests
import json

@tool
def get_weather(city: str) -> str:
"""Get the current weather for a given city."""
# In a real app, this would call a weather API
return f"It's 72Β°F and sunny in {city}!"
def get_weather(latitude: float, longitude: float) -> str:
"""Get current temperature in Fahrenheit and weather code for given coordinates.

Args:
latitude: Latitude coordinate
longitude: Longitude coordinate

Returns:
JSON string with temperature_fahrenheit and weather_code (do not include the code in your response, translate it to plain English)
"""
url = "https://api.open-meteo.com/v1/forecast"
params = {
"latitude": latitude,
"longitude": longitude,
"current": "temperature_2m,weather_code",
"temperature_unit": "fahrenheit"
}

weather = requests.get(url, params=params).json()["current"]
temperature = weather["temperature_2m"]
weather_code = weather["weather_code"]
result = {
"temperature_fahrenheit": temperature,
"weather_code": weather_code
}

return json.dumps(result)

@tool
def get_user_preferences(user_id: str) -> str:
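The updated tool returns the raw WMO weather code and leaves the plain-English translation to the model, as the docstring instructs. A rough sketch of what that translation could look like on the caller side — the `WMO_CODES` mapping below is abbreviated and `describe_weather` is a hypothetical helper, not part of this change:

```python
import json

# Abbreviated WMO weather-code mapping (Open-Meteo reports WMO codes); extend as needed.
WMO_CODES = {
    0: "clear sky",
    1: "mainly clear",
    2: "partly cloudy",
    3: "overcast",
    45: "fog",
    61: "light rain",
    63: "moderate rain",
    65: "heavy rain",
    71: "light snow",
    95: "thunderstorm",
}

def describe_weather(weather_json: str) -> str:
    """Hypothetical helper: turn the tool's JSON result into plain English."""
    data = json.loads(weather_json)
    conditions = WMO_CODES.get(data["weather_code"], "unknown conditions")
    return f"{data['temperature_fahrenheit']}°F with {conditions}"

# Example usage: describe_weather(get_weather(52.52, 13.41))
```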
16 changes: 12 additions & 4 deletions agents/memory_enabled_music_store_supervisor_with_interrupt.py
@@ -124,15 +124,22 @@ def format_user_memory(user_data):
"""Formats music preferences from users, if available."""
profile = user_data['memory']
result = ""
if hasattr(profile, 'music_preferences') and profile.music_preferences:
result += f"Music Preferences: {', '.join(profile.music_preferences)}"

# Handle both Pydantic model (attributes) and dict (keys) representations
if isinstance(profile, dict):
music_prefs = profile.get('music_preferences', [])
else:
music_prefs = getattr(profile, 'music_preferences', [])

if music_prefs:
result += f"Music Preferences: {', '.join(music_prefs)}"
return result.strip()

# Node
def load_memory(state: State, store: BaseStore):
"""Loads music preferences from users, if available."""

user_id = state["customer_id"]
user_id = str(state["customer_id"]) # Convert to string to match create_memory
namespace = ("memory_profile", user_id)
existing_memory = store.get(namespace, "user_memory")
formatted_memory = ""
@@ -194,7 +201,8 @@ def create_memory(state: State, store: BaseStore):
formatted_system_message = SystemMessage(content=create_memory_prompt.format(conversation=state["messages"], memory_profile=formatted_memory))
updated_memory = llm.with_structured_output(UserProfile).invoke([formatted_system_message])
key = "user_memory"
store.put(namespace, key, {"memory": updated_memory})
# Convert Pydantic model to dict to avoid pickle issues during hot-reload
store.put(namespace, key, {"memory": updated_memory.model_dump()})


multi_agent_final = StateGraph(State, input_schema = InputState)
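The two hunks above work together: create_memory now persists the profile as a plain dict via model_dump(), and format_user_memory accepts either the Pydantic object or the dict it reads back from the store. A minimal standalone sketch, assuming UserProfile exposes a `music_preferences` list (the real model is defined elsewhere in the repo):

```python
from pydantic import BaseModel

class UserProfile(BaseModel):
    # Assumed shape; the actual UserProfile model lives elsewhere in the repo.
    music_preferences: list[str] = []

profile_model = UserProfile(music_preferences=["jazz", "blues"])
profile_dict = profile_model.model_dump()  # what store.put() now persists

for profile in (profile_model, profile_dict):
    # Same branching as the updated format_user_memory
    if isinstance(profile, dict):
        music_prefs = profile.get("music_preferences", [])
    else:
        music_prefs = getattr(profile, "music_preferences", [])
    print(f"Music Preferences: {', '.join(music_prefs)}")  # identical output for both
```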
2 changes: 1 addition & 1 deletion agents/music_agent.py
@@ -15,10 +15,10 @@

class InputState(TypedDict):
messages: Annotated[list[AnyMessage], add_messages]
loaded_memory: NotRequired[str]

class State(InputState):
customer_id: NotRequired[str]
loaded_memory: NotRequired[str]
remaining_steps: NotRequired[RemainingSteps]


12 changes: 8 additions & 4 deletions agents/music_store_supervisor.py
@@ -7,6 +7,7 @@
from typing_extensions import TypedDict
from typing import Annotated
from langgraph.graph.message import AnyMessage, add_messages
from langchain.messages import HumanMessage

class InputState(TypedDict):
messages: Annotated[list[AnyMessage], add_messages]
@@ -16,14 +17,17 @@ class State(InputState):
loaded_memory: str
remaining_steps: int



supervisor_prompt = """You are an expert customer support assistant for a digital music store. You can handle music catalog or invoice-related questions regarding past purchases and song or album availability.
You are dedicated to providing exceptional service and ensuring customer queries are answered thoroughly, and you have a team of subagents that you can use to help answer queries from customers.
Your primary role is to serve as a supervisor/planner for this multi-agent team that helps answer queries from customers. Always respond to the customer by summarizing the conversation, including the individual responses from subagents.
If a question is unrelated to music or invoices, politely remind the customer of your scope of work. Do not answer unrelated questions.

Your team is composed of two subagents that you can use to help answer the customer's request:
1. music_catalog_information_subagent: this subagent has access to user's saved music preferences. It can also retrieve information about the digital music store's music
catalog (albums, tracks, songs, etc.) from the database.
catalog (albums, tracks, songs, etc.) from the database. This subagent has access to the user's memory profile and music preferences, and it will automatically infer the user's music preferences from that profile.
There is no need to pass a customer identifier to this subagent.
2. invoice_information_subagent: this subagent is able to retrieve information about a customer's past purchases or invoices
from the database.

@@ -40,7 +44,7 @@ def call_invoice_information_subagent(runtime: ToolRuntime, query: str):
print('made it here')
print(f"invoice subagent input: {query}")
result = invoice_agent.invoke({
"messages": [{"role": "user", "content": query}],
"messages": [HumanMessage(content=query)],
"customer_id": runtime.state.get("customer_id", {})
})
subagent_response = result["messages"][-1].content
@@ -55,8 +59,8 @@ def call_invoice_information_subagent(runtime: ToolRuntime, query: str):
)
def call_music_catalog_subagent(runtime: ToolRuntime, query: str):
result = music_agent.invoke({
"messages": [{"role": "user", "content": query}],
"customer_id": runtime.state.get("customer_id", {})
"messages": [HumanMessage(content=query)],
"loaded_memory": runtime.state.get("loaded_memory", {})
})
subagent_response = result["messages"][-1].content
return subagent_response
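With loaded_memory now declared on music_agent's InputState (see agents/music_agent.py above), the supervisor tool can forward the memory profile directly in the invoke payload instead of a customer_id. A small illustrative sketch — the query and memory strings are made up, and the invoke call is commented out because music_agent is defined elsewhere in the repo:

```python
from langchain.messages import HumanMessage

# Illustrative values only; in call_music_catalog_subagent the memory string
# comes from runtime.state.get("loaded_memory", ...).
payload = {
    "messages": [HumanMessage(content="Recommend an album similar to Kind of Blue")],
    "loaded_memory": "Music Preferences: jazz, blues",
}

# Because loaded_memory is part of music_agent's input schema, the value above
# is accepted as part of the subagent's input.
# result = music_agent.invoke(payload)
# print(result["messages"][-1].content)
```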