1
- import json
2
1
import logging
3
2
4
3
from fastapi import APIRouter , BackgroundTasks , HTTPException , Request , status
5
- from llama_index .core .agent .workflow import AgentOutput
6
- from llama_index .core .llms import MessageRole
7
4
8
5
from app .api .callbacks .llamacloud import LlamaCloudFileDownload
9
6
from app .api .callbacks .next_question import SuggestNextQuestions
10
- from app .api .callbacks .source_nodes import AddNodeUrl
11
7
from app .api .callbacks .stream_handler import StreamHandler
8
+ from app .api .callbacks .source_nodes import AddNodeUrl
12
9
from app .api .routers .models import (
13
10
ChatData ,
14
- Message ,
15
- Result ,
16
11
)
17
- from app .engine .engine import get_engine
18
12
from app .engine .query_filter import generate_filters
13
+ from app .workflows import create_workflow
19
14
20
15
# Router for the chat endpoints; `r` is the short alias used by the
# `@r.post(...)` route decorators below. Both names point to the same object.
chat_router = r = APIRouter ()
21
16
22
17
# Module-level logger; named "uvicorn" — presumably so messages are emitted
# through uvicorn's already-configured server log handlers (verify against
# the deployment's logging config).
logger = logging .getLogger ("uvicorn" )
23
18
24
19
25
- # streaming endpoint - delete if not needed
26
20
@r .post ("" )
27
21
async def chat (
28
22
request : Request ,
@@ -31,16 +25,18 @@ async def chat(
31
25
):
32
26
try :
33
27
last_message_content = data .get_last_message_content ()
34
- messages = data .get_history_messages ()
28
+ messages = data .get_history_messages (include_agent_messages = True )
35
29
36
30
doc_ids = data .get_chat_document_ids ()
37
31
filters = generate_filters (doc_ids )
38
32
params = data .data or {}
39
- logger .info (
40
- f"Creating chat engine with filters: { str (filters )} " ,
33
+
34
+ workflow = create_workflow (
35
+ params = params ,
36
+ filters = filters ,
41
37
)
42
- engine = get_engine ( filters = filters , params = params )
43
- handler = engine .run (
38
+
39
+ handler = workflow .run (
44
40
user_msg = last_message_content ,
45
41
chat_history = messages ,
46
42
stream = True ,
@@ -59,35 +55,3 @@ async def chat(
59
55
status_code = status .HTTP_500_INTERNAL_SERVER_ERROR ,
60
56
detail = f"Error in chat engine: { e } " ,
61
57
) from e
62
-
63
-
64
- # non-streaming endpoint - delete if not needed
65
- @r .post ("/request" )
66
- async def chat_request (
67
- data : ChatData ,
68
- ) -> Result :
69
- last_message_content = data .get_last_message_content ()
70
- messages = data .get_history_messages ()
71
-
72
- doc_ids = data .get_chat_document_ids ()
73
- filters = generate_filters (doc_ids )
74
- params = data .data or {}
75
- logger .info (
76
- f"Creating chat engine with filters: { str (filters )} " ,
77
- )
78
- engine = get_engine (filters = filters , params = params )
79
-
80
- response = await engine .run (
81
- user_msg = last_message_content ,
82
- chat_history = messages ,
83
- stream = False ,
84
- )
85
- output = response
86
- if isinstance (output , AgentOutput ):
87
- content = output .response .content
88
- else :
89
- content = json .dumps (output )
90
-
91
- return Result (
92
- result = Message (role = MessageRole .ASSISTANT , content = content ),
93
- )