Skip to content

Commit 5ec1947

Browse files
committed
support request api
1 parent cbebd03 commit 5ec1947

File tree

2 files changed

+29
-23
lines changed

2 files changed

+29
-23
lines changed

templates/types/streaming/fastapi/app/api/routers/chat.py

+29-22
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,8 @@
1+
import json
12
import logging
23

34
from fastapi import APIRouter, BackgroundTasks, HTTPException, Request, status
5+
from llama_index.core.agent.workflow import AgentOutput
46
from llama_index.core.llms import MessageRole
57

68
from app.api.callbacks.llamacloud import LlamaCloudFileDownload
@@ -10,7 +12,6 @@
1012
ChatData,
1113
Message,
1214
Result,
13-
SourceNodes,
1415
)
1516
from app.engine.engine import get_engine
1617
from app.engine.query_filter import generate_filters
@@ -58,27 +59,33 @@ async def chat(
5859
) from e
5960

6061

61-
# TODO: Update non-streaming endpoint
62-
# Would be better if we use same chat.py endpoint for both agent and multiagent templates
63-
# # non-streaming endpoint - delete if not needed
64-
# @r.post("/request")
65-
# async def chat_request(
66-
# data: ChatData,
67-
# ) -> Result:
68-
# last_message_content = data.get_last_message_content()
69-
# messages = data.get_history_messages()
62+
# non-streaming endpoint - delete if not needed
@r.post("/request")
async def chat_request(
    data: ChatData,
) -> Result:
    """Run the chat workflow to completion and return a single response.

    Non-streaming counterpart of the streaming chat endpoint: the whole
    agent workflow runs before anything is returned to the client.

    Args:
        data: Chat payload carrying the message history, optional chat
            document ids, and optional extra engine parameters.

    Returns:
        Result wrapping the assistant's final message.
    """
    last_message_content = data.get_last_message_content()
    messages = data.get_history_messages()

    # Restrict retrieval to the documents attached to this chat, if any.
    doc_ids = data.get_chat_document_ids()
    filters = generate_filters(doc_ids)
    params = data.data or {}
    # Lazy %-formatting: the filter repr is only built when INFO is enabled.
    logger.info("Creating chat engine with filters: %s", filters)
    engine = get_engine(filters=filters, params=params)

    response = await engine.run(
        user_msg=last_message_content,
        chat_history=messages,
        stream=False,
    )
    if isinstance(response, AgentOutput):
        content = response.response.content
    else:
        # NOTE(review): assumes any non-AgentOutput workflow result is
        # JSON-serializable; a non-serializable result raises TypeError here.
        content = json.dumps(response)

    return Result(
        result=Message(role=MessageRole.ASSISTANT, content=content),
    )

templates/types/streaming/fastapi/app/api/routers/models.py

-1
Original file line numberDiff line numberDiff line change
@@ -317,7 +317,6 @@ def from_source_nodes(cls, source_nodes: List[NodeWithScore]):
317317

318318
class Result(BaseModel):
    """Response payload for the non-streaming chat endpoint."""

    # Final assistant message produced by the chat workflow.
    result: Message
321320

322321

323322
class ChatConfig(BaseModel):

0 commit comments

Comments (0)