11import json
22import uuid
33from logging import getLogger
4- from typing import Dict , List , Optional
4+ from typing import Dict , List , Optional , Callable , Awaitable
55
66from channels .db import database_sync_to_async
77from channels .generic .websocket import AsyncJsonWebsocketConsumer
@@ -210,16 +210,19 @@ async def query_llm(
210210 cache_config : Optional [Dict ] = None ,
211211 response_schema : Optional [Dict ] = None ,
212212 stream : bool = False ,
213+ error_handler : Callable [[dict ], Awaitable [None ]] = None ,
213214):
214215 try :
215216 llm_config = await database_sync_to_async (LLMConfig .enabled_objects .get )(
216217 name = llm_config_name
217218 )
218219 except LLMConfig .DoesNotExist :
219- yield {
220- "content" : [{"type" : "text" , "text" : f"LLM config with name: { llm_config_name } does not exist." }],
221- "last_chunk" : True ,
222- }
220+ await error_handler ({
221+ "payload" : {
222+ "errors" : f"LLM config with name: { llm_config_name } does not exist." ,
223+ "request_info" : {"llm_config_name" : llm_config_name },
224+ }
225+ })
223226 return
224227
225228 conv = await database_sync_to_async (Conversation .objects .get )(pk = conversation_id )
@@ -238,25 +241,24 @@ async def query_llm(
238241 # pop the system message
239242 messages = messages [1 :]
240243 elif not prev_messages :
241- yield {
242- "content" : [
243- {
244- "type" : "text" ,
245- "text" : "Error: No previous messages and no messages provided." ,
246- }
247- ],
248- "last_chunk" : True ,
249- }
244+ await error_handler ({
245+ "payload" : {
246+ "errors" : "Error: No previous messages and no messages provided." ,
247+ "request_info" : {"conversation_id" : conversation_id },
248+ }
249+ })
250250 return
251251 if messages :
252252 new_messages .extend (messages )
253253 else :
254254 new_messages = messages
255255 if new_messages is None :
256- yield {
257- "content" : [{"type" : "text" , "text" : "Error: No messages provided." }],
258- "last_chunk" : True ,
259- }
256+ await error_handler ({
257+ "payload" : {
258+ "errors" : "Error: No messages provided." ,
259+ "request_info" : {"conversation_id" : conversation_id },
260+ }
261+ })
260262 return
261263
262264 try :
@@ -323,10 +325,12 @@ async def query_llm(
323325
324326 except Exception as e :
325327 logger .error ("Error during LLM query" , exc_info = e )
326- yield {
327- "content" : [{"type" : "text" , "text" : "There was an error generating the response. Please try again or contact the administrator." }],
328- "last_chunk" : True ,
329- }
328+ await error_handler ({
329+ "payload" : {
330+ "errors" : "There was an error generating the response. Please try again or contact the administrator." ,
331+ "request_info" : {"conversation_id" : conversation_id },
332+ }
333+ })
330334 return
331335
332336
@@ -425,6 +429,7 @@ async def process_llm_request(self, data):
425429 data .get ("cache_config" ),
426430 data .get ("response_schema" ),
427431 data .get ("stream" ),
432+ error_handler = self .error_response ,
428433 ):
429434 await self .send (
430435 json .dumps (