11from chatfaq_sdk import ChatFAQSDK
2- from chatfaq_sdk .clients import llm_request
2+ from chatfaq_sdk .clients . agent import Agent
33from chatfaq_sdk .fsm import FSMDefinition , State , Transition
4- from chatfaq_sdk .layers import Message , StreamingMessage , ToolUse , ToolResult
5- from chatfaq_sdk .utils import convert_mml_to_llm_format
6-
4+ from chatfaq_sdk .layers import Message
75
# LLM model identifier used by this FSM's answering logic.
# NOTE(review): after the refactor to the Agent client below, this constant is
# no longer referenced in the visible code — confirm the Agent picks it up
# elsewhere (e.g. a default model) or remove it.
MODEL_NAME = "gemini-2.0-flash"
97
@@ -21,77 +19,19 @@ async def send_greeting(sdk: ChatFAQSDK, ctx: dict):
2119
2220
async def send_answer(sdk: ChatFAQSDK, ctx: dict):
    """Answer the latest user message using an LLM agent.

    Builds an ``Agent`` equipped with the ``get_weather`` tool and a weather
    assistant system instruction, runs it over the conversation context, and
    forwards every item it yields (presumably message/tool layers — confirm
    against the Agent client) back to the FSM.

    :param sdk: The ChatFAQ SDK instance used by the agent to reach the LLM.
    :param ctx: FSM context for the current conversation.
    """
    weather_agent = Agent(
        sdk=sdk,
        tools=[get_weather],
        system_instruction=(
            "You are a knowledgeable weather assistant. "
            "Use provided tools when necessary."
        ),
    )
    async for layer in weather_agent.run(ctx):
        yield layer
8329
8430
# Initial state: greets the user when the conversation starts.
greeting_state = State(name="Greeting", events=[send_greeting], initial=True)

# State that handles user messages by running send_answer.
answering_state = State(name="Answering", events=[send_answer])

# Transition into the answering state (no guard condition given here, so it
# presumably fires on any incoming message — confirm against Transition's API).
_to_answer = Transition(dest=answering_state)
9535
9636fsm_definition = FSMDefinition (
9737 states = [greeting_state , answering_state ],
0 commit comments