@@ -1,6 +1,8 @@
+import json
 import logging
 
 from fastapi import APIRouter, BackgroundTasks, HTTPException, Request, status
+from llama_index.core.agent.workflow import AgentOutput
 from llama_index.core.llms import MessageRole
 
 from app.api.callbacks.llamacloud import LlamaCloudFileDownload
@@ -10,7 +12,6 @@
     ChatData,
     Message,
     Result,
-    SourceNodes,
 )
 from app.engine.engine import get_engine
 from app.engine.query_filter import generate_filters
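The import changes pair with the endpoint rewrite below: AgentOutput is the final result type of an agent workflow run, and json covers any other output shape. One caveat with that fallback: json.dumps raises TypeError when the engine returns something that is not JSON-serializable. A more defensive variant of the extraction logic might look like this (a sketch; the to_text helper and the default=str fallback are my additions, not part of this change):

import json

from llama_index.core.agent.workflow import AgentOutput

def to_text(output) -> str:
    # AgentOutput carries the final ChatMessage in .response.
    if isinstance(output, AgentOutput):
        return output.response.content or ""
    # default=str keeps json.dumps from raising on arbitrary,
    # non-serializable values (e.g. datetimes or custom objects).
    return json.dumps(output, default=str)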
@@ -58,27 +59,33 @@ async def chat(
     ) from e
 
 
-# TODO: Update non-streaming endpoint
-# Would be better if we use same chat.py endpoint for both agent and multiagent templates
-# # non-streaming endpoint - delete if not needed
-# @r.post("/request")
-# async def chat_request(
-#     data: ChatData,
-# ) -> Result:
-#     last_message_content = data.get_last_message_content()
-#     messages = data.get_history_messages()
+# non-streaming endpoint - delete if not needed
+@r.post("/request")
+async def chat_request(
+    data: ChatData,
+) -> Result:
+    last_message_content = data.get_last_message_content()
+    messages = data.get_history_messages()
 
-#     doc_ids = data.get_chat_document_ids()
-#     filters = generate_filters(doc_ids)
-#     params = data.data or {}
-#     logger.info(
-#         f"Creating chat engine with filters: {str(filters)}",
-#     )
+    doc_ids = data.get_chat_document_ids()
+    filters = generate_filters(doc_ids)
+    params = data.data or {}
+    logger.info(
+        f"Creating chat engine with filters: {str(filters)}",
+    )
+    engine = get_engine(filters=filters, params=params)
 
-#     chat_engine = get_chat_engine(filters=filters, params=params)
+    response = await engine.run(
+        user_msg=last_message_content,
+        chat_history=messages,
+        stream=False,
+    )
+    output = response
+    if isinstance(output, AgentOutput):
+        content = output.response.content
+    else:
+        content = json.dumps(output)
 
-#     response = await chat_engine.achat(last_message_content, messages)
-#     return Result(
-#         result=Message(role=MessageRole.ASSISTANT, content=response.response),
-#         nodes=SourceNodes.from_source_nodes(response.source_nodes),
-#     )
+    return Result(
+        result=Message(role=MessageRole.ASSISTANT, content=content),
+    )
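For a quick smoke test of the restored endpoint, a small client call works. A sketch, assuming the router is mounted under /api/chat and that ChatData accepts an OpenAI-style messages list (both assumptions about the surrounding app, not shown in this diff):

import httpx

# Hypothetical base URL and payload shape; adjust to the app's actual
# router prefix and ChatData schema.
payload = {"messages": [{"role": "user", "content": "Summarize the indexed documents."}]}
resp = httpx.post("http://localhost:8000/api/chat/request", json=payload, timeout=60.0)
resp.raise_for_status()
# Result now carries only the assistant message; source nodes are no
# longer attached, since SourceNodes was dropped from the response model.
print(resp.json()["result"]["content"])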