Skip to content

Commit 7836429

Browse files
committed
Merge branch 'develop'
2 parents 58fa536 + ae9dbbe commit 7836429

52 files changed

Lines changed: 3088 additions & 994 deletions

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

Dogefile

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -41,6 +41,7 @@ env:
4141
TOGETHER_API_KEY: '{{ secret("TOGETHER_API_KEY") }}'
4242
SENTRY_DSN: '{{ secret("SENTRY_DSN_BACK") }}'
4343
USE_RAY: 'False'
44+
AZOR_PRIVATE_KEY: '{{ secret("AZOR_PRIVATE_KEY") }}'
4445
# - name: ray_worker
4546
# variables:
4647
# BACKEND_HOST: "{{ back.PRIVATE_URL }}"

back/.env-template

Lines changed: 8 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -69,8 +69,11 @@ BACKEND_TOKEN=<BACKEND_TOKEN>
6969
# --------------------------- AZOR Config ---------------------
7070
# This is used to encrypt LLM API Keys in the DB.
7171
# These are used as a way to use multiple api keys for the same LLM provider.
72-
# It can be generated with:
73-
# openssl genpkey -algorithm RSA -out private_key.pem -pkeyopt rsa_keygen_bits:4096 && cat private_key.pem
74-
AZOR_PRIVATE_KEY="-----BEGIN PRIVATE KEY-----
75-
<AZOR_PRIVATE_KEY>
76-
-----END PRIVATE KEY-----"
72+
#
73+
# REQUIRED: The private key MUST be Base64 encoded to avoid newline issues.
74+
# Generate a new private key and encode it:
75+
# openssl genpkey -algorithm RSA -out private_key.pem -pkeyopt rsa_keygen_bits:4096
76+
# cat private_key.pem | base64 -w 0
77+
#
78+
# Then paste the Base64 encoded string below:
79+
AZOR_PRIVATE_KEY=<BASE64_ENCODED_PRIVATE_KEY>

back/back/apps/broker/serializers/messages/__init__.py

Lines changed: 14 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -2,22 +2,19 @@
22
from logging import getLogger
33
from typing import TYPE_CHECKING
44

5-
from drf_spectacular.utils import (
6-
PolymorphicProxySerializer,
7-
extend_schema_field,
8-
)
5+
from drf_spectacular.utils import PolymorphicProxySerializer, extend_schema_field
96
from lxml import etree
107
from lxml.etree import XMLSyntaxError
118
from rest_framework import serializers
129
from rest_framework.exceptions import ValidationError
1310

1411
from back.apps.broker.models.message import AgentType, Satisfaction
12+
from back.apps.fsm.models import FSMDefinition
1513
from back.common.abs.bot_consumers import BotConsumer
1614
from back.common.serializer_fields import JSTimestampField
17-
from back.apps.fsm.models import FSMDefinition
1815
from back.config.storage_backends import (
1916
PrivateMediaLocalStorage,
20-
select_private_storage
17+
select_private_storage,
2118
)
2219

2320
if TYPE_CHECKING:
@@ -110,20 +107,23 @@ class Reference(serializers.Serializer):
110107
knowledge_base_id = serializers.CharField(required=False, allow_null=True, allow_blank=True)
111108

112109

113-
class ToolUse(serializers.Serializer):
110+
class ToolUsePayload(serializers.Serializer):
114111
id = serializers.CharField(required=True)
115112
name = serializers.CharField(required=True)
116113
args = serializers.JSONField(required=True)
117114
text = serializers.CharField(required=False, allow_null=True, allow_blank=True)
118115

119116

117+
class ToolResultPayload(serializers.Serializer):
118+
id = serializers.CharField(required=False, allow_null=True, allow_blank=True)
119+
name = serializers.CharField(required=False, allow_null=True, allow_blank=True)
120+
result = serializers.CharField(required=True)
121+
122+
120123
class MessagePayload(serializers.Serializer):
121-
class _MessagePayload(serializers.Serializer):
122-
content = serializers.CharField(trim_whitespace=False, allow_blank=True)
123-
references = Reference(required=False, allow_null=True)
124-
tool_use = ToolUse(many=True, required=False, allow_null=True, allow_empty=True)
124+
content = serializers.SerializerMethodField()
125+
references = Reference(required=False, allow_null=True)
125126

126-
payload = _MessagePayload()
127127

128128
class HTMLPayload(serializers.Serializer):
129129
@staticmethod
@@ -163,6 +163,8 @@ class QuickRepliesPayload(serializers.Serializer):
163163
"ImagePayload": ImagePayload,
164164
"SatisfactionPayload": SatisfactionPayload,
165165
"QuickRepliesPayload": QuickRepliesPayload,
166+
"ToolUsePayload": ToolUsePayload,
167+
"ToolResultPayload": ToolResultPayload,
166168
},
167169
)
168170
)

back/back/apps/broker/serializers/rpc.py

Lines changed: 24 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -55,6 +55,13 @@ def validate(self, attrs):
5555
"type": AgentType.human.value,
5656
"id": attrs["ctx"]["user_id"],
5757
}
58+
59+
# Tool results are categorized as human messages although not exactly true
60+
if attrs.get('stack', []) and attrs['stack'][0].get('type') == 'tool_result':
61+
attrs['sender'] = {
62+
"type": AgentType.human.value,
63+
}
64+
5865
if attrs.get("node_type") == RPCNodeType.condition.value:
5966
return super().validate(attrs)
6067

@@ -103,32 +110,43 @@ class RPCLLMRequestSerializer(serializers.Serializer):
103110
The maximum number of tokens to generate
104111
seed: int
105112
The seed to use in the LLM
106-
streaming: bool
113+
stream: bool
107114
Whether the LLM response should be streamed or not
108-
cache_config: dict
109-
The cache configuration for the LLM request
110115
"""
111116

112117
llm_config_name = serializers.CharField(required=True, allow_blank=False, allow_null=False)
113118
conversation_id = serializers.CharField()
114119
bot_channel_name = serializers.CharField()
115-
messages = serializers.ListField(child=serializers.DictField())
120+
messages = serializers.ListField(child=serializers.DictField(), allow_empty=True, required=False, allow_null=True)
116121
temperature = serializers.FloatField(default=0.7, required=False)
117122
max_tokens = serializers.IntegerField(default=1024, required=False)
118123
seed = serializers.IntegerField(default=42, required=False)
119124
tools = serializers.ListField(
120125
child=serializers.DictField(), allow_empty=True, required=False, allow_null=True
121126
)
122127
tool_choice = serializers.CharField(allow_blank=True, required=False, allow_null=True)
123-
streaming = serializers.BooleanField(default=True)
128+
stream = serializers.BooleanField(default=False)
124129
use_conversation_context = serializers.BooleanField(default=True)
130+
response_schema = serializers.JSONField(default=dict, required=False, allow_null=True)
125131
cache_config = CacheConfigSerializer(required=False, allow_null=True)
126-
132+
127133
def validate(self, attrs):
128134
if not attrs.get("messages") and not attrs.get("use_conversation_context"):
129135
raise serializers.ValidationError(
130136
"If there are no messages then use_conversation_context should be always True"
131137
)
138+
139+
if attrs.get("tools") and attrs.get("response_schema"):
140+
raise serializers.ValidationError(
141+
"There cannot be tools and response schema at the same time"
142+
)
143+
144+
if attrs.get("tools") and attrs.get("stream"):
145+
raise serializers.ValidationError("ChatFAQ doesn't support streaming when using tools")
146+
147+
if attrs.get("response_schema") and attrs.get("stream"):
148+
raise serializers.ValidationError("ChatFAQ doesn't support structured output when streaming")
149+
132150
return attrs
133151

134152

back/back/apps/fsm/lib/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -301,7 +301,7 @@ async def run_condition(self, condition_name):
301301
payload = await self.rpc_result_future
302302
logger.debug(f"...Receive RCP call {condition_name} (condition)")
303303
self.waiting_for_rpc = None
304-
return payload["stack"]["score"], payload["stack"]["data"]
304+
return payload["stack"][0]["score"], payload["stack"][0]["data"] # Stack is a list of dicts, so only access the first one
305305

306306
async def save_cache(self):
307307
from back.apps.fsm.models import CachedFSM # TODO: Resolve CI

0 commit comments

Comments
 (0)