Skip to content

Commit 99c4f52

Browse files
lunikaAntoLC
authored and committed
✨(back) manage streaming with the ai service
We want to handle both streaming and non-streaming responses when interacting with the AI backend service.
1 parent 86d2866 commit 99c4f52

File tree

8 files changed

+98
-27
lines changed

8 files changed

+98
-27
lines changed

src/backend/core/api/viewsets.py

Lines changed: 15 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1361,8 +1361,20 @@ def ai_proxy(self, request, *args, **kwargs):
13611361
serializer = serializers.AIProxySerializer(data=request.data)
13621362
serializer.is_valid(raise_exception=True)
13631363

1364-
response = AIService().proxy(request.data)
1365-
return drf.response.Response(response, status=drf.status.HTTP_200_OK)
1364+
ai_service = AIService()
1365+
1366+
if settings.AI_STREAM:
1367+
return StreamingHttpResponse(
1368+
ai_service.stream(request.data),
1369+
content_type="text/event-stream",
1370+
status=drf.status.HTTP_200_OK,
1371+
)
1372+
1373+
ai_response = ai_service.proxy(request.data)
1374+
return drf.response.Response(
1375+
ai_response.model_dump(),
1376+
status=drf.status.HTTP_200_OK,
1377+
)
13661378

13671379
@drf.decorators.action(
13681380
detail=True,
@@ -1821,6 +1833,7 @@ def get(self, request):
18211833
"AI_BOT",
18221834
"AI_FEATURE_ENABLED",
18231835
"AI_MODEL",
1836+
"AI_STREAM",
18241837
"COLLABORATION_WS_URL",
18251838
"COLLABORATION_WS_NOT_CONNECTED_READY_ONLY",
18261839
"CRISP_WEBSITE_ID",

src/backend/core/services/ai_services.py

Lines changed: 11 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
"""AI services."""
22

33
import logging
4+
from typing import Generator
45

56
from django.conf import settings
67
from django.core.exceptions import ImproperlyConfigured
@@ -23,9 +24,15 @@ def __init__(self):
2324
raise ImproperlyConfigured("AI configuration not set")
2425
self.client = OpenAI(base_url=settings.AI_BASE_URL, api_key=settings.AI_API_KEY)
2526

26-
def proxy(self, data: dict) -> dict:
27+
def proxy(self, data: dict, stream: bool = False) -> Generator[str, None, None]:
2728
"""Proxy AI API requests to the configured AI provider."""
28-
data["stream"] = False
29+
data["stream"] = stream
30+
return self.client.chat.completions.create(**data)
2931

30-
response = self.client.chat.completions.create(**data)
31-
return response.model_dump()
32+
def stream(self, data: dict) -> Generator[str, None, None]:
33+
"""Stream AI API requests to the configured AI provider."""
34+
stream = self.proxy(data, stream=True)
35+
for chunk in stream:
36+
yield f"data: {chunk.model_dump_json()}\n\n"
37+
38+
yield "data: [DONE]\n\n"

src/backend/core/tests/test_api_config.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,7 @@
2121
AI_BOT={"name": "Test Bot", "color": "#000000"},
2222
AI_FEATURE_ENABLED=False,
2323
AI_MODEL="test-model",
24+
AI_STREAM=False,
2425
COLLABORATION_WS_URL="http://testcollab/",
2526
COLLABORATION_WS_NOT_CONNECTED_READY_ONLY=True,
2627
CRISP_WEBSITE_ID="123",
@@ -46,6 +47,7 @@ def test_api_config(is_authenticated):
4647
"AI_BOT": {"name": "Test Bot", "color": "#000000"},
4748
"AI_FEATURE_ENABLED": False,
4849
"AI_MODEL": "test-model",
50+
"AI_STREAM": False,
4951
"COLLABORATION_WS_URL": "http://testcollab/",
5052
"COLLABORATION_WS_NOT_CONNECTED_READY_ONLY": True,
5153
"CRISP_WEBSITE_ID": "123",

src/backend/core/tests/test_services_ai_services.py

Lines changed: 64 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -2,10 +2,9 @@
22
Test ai API endpoints in the impress core app.
33
"""
44

5-
from unittest.mock import MagicMock, patch
5+
from unittest.mock import patch
66

77
from django.core.exceptions import ImproperlyConfigured
8-
from django.test.utils import override_settings
98

109
import pytest
1110
from openai import OpenAIError
@@ -15,6 +14,15 @@
1514
pytestmark = pytest.mark.django_db
1615

1716

17+
@pytest.fixture(autouse=True)
18+
def ai_settings(settings):
19+
"""Fixture to set AI settings."""
20+
settings.AI_MODEL = "llama"
21+
settings.AI_BASE_URL = "http://example.com"
22+
settings.AI_API_KEY = "test-key"
23+
settings.AI_FEATURE_ENABLED = True
24+
25+
1826
@pytest.mark.parametrize(
1927
"setting_name, setting_value",
2028
[
@@ -23,22 +31,19 @@
2331
("AI_MODEL", None),
2432
],
2533
)
26-
def test_api_ai_setting_missing(setting_name, setting_value):
34+
def test_services_ai_setting_missing(setting_name, setting_value, settings):
2735
"""Setting should be set"""
36+
setattr(settings, setting_name, setting_value)
2837

29-
with override_settings(**{setting_name: setting_value}):
30-
with pytest.raises(
31-
ImproperlyConfigured,
32-
match="AI configuration not set",
33-
):
34-
AIService()
38+
with pytest.raises(
39+
ImproperlyConfigured,
40+
match="AI configuration not set",
41+
):
42+
AIService()
3543

3644

37-
@override_settings(
38-
AI_BASE_URL="http://example.com", AI_API_KEY="test-key", AI_MODEL="test-model"
39-
)
4045
@patch("openai.resources.chat.completions.Completions.create")
41-
def test_api_ai__client_error(mock_create):
46+
def test_services_ai_proxy_client_error(mock_create):
4247
"""Fail when the client raises an error"""
4348

4449
mock_create.side_effect = OpenAIError("Mocked client error")
@@ -50,15 +55,11 @@ def test_api_ai__client_error(mock_create):
5055
AIService().proxy({"messages": [{"role": "user", "content": "hello"}]})
5156

5257

53-
@override_settings(
54-
AI_BASE_URL="http://example.com", AI_API_KEY="test-key", AI_MODEL="test-model"
55-
)
5658
@patch("openai.resources.chat.completions.Completions.create")
57-
def test_api_ai__success(mock_create):
59+
def test_services_ai_proxy_success(mock_create):
5860
"""The AI request should work as expect when called with valid arguments."""
5961

60-
mock_response = MagicMock()
61-
mock_response.model_dump.return_value = {
62+
mock_create.return_value = {
6263
"id": "chatcmpl-test",
6364
"object": "chat.completion",
6465
"created": 1234567890,
@@ -71,7 +72,6 @@ def test_api_ai__success(mock_create):
7172
}
7273
],
7374
}
74-
mock_create.return_value = mock_response
7575

7676
response = AIService().proxy({"messages": [{"role": "user", "content": "hello"}]})
7777

@@ -89,3 +89,47 @@ def test_api_ai__success(mock_create):
8989
],
9090
}
9191
assert response == expected_response
92+
mock_create.assert_called_once_with(
93+
messages=[{"role": "user", "content": "hello"}], stream=False
94+
)
95+
96+
97+
@patch("openai.resources.chat.completions.Completions.create")
98+
def test_services_ai_proxy_with_stream(mock_create):
99+
"""The AI request should work as expect when called with valid arguments."""
100+
101+
mock_create.return_value = {
102+
"id": "chatcmpl-test",
103+
"object": "chat.completion",
104+
"created": 1234567890,
105+
"model": "test-model",
106+
"choices": [
107+
{
108+
"index": 0,
109+
"message": {"role": "assistant", "content": "Salut"},
110+
"finish_reason": "stop",
111+
}
112+
],
113+
}
114+
115+
response = AIService().proxy(
116+
{"messages": [{"role": "user", "content": "hello"}]}, stream=True
117+
)
118+
119+
expected_response = {
120+
"id": "chatcmpl-test",
121+
"object": "chat.completion",
122+
"created": 1234567890,
123+
"model": "test-model",
124+
"choices": [
125+
{
126+
"index": 0,
127+
"message": {"role": "assistant", "content": "Salut"},
128+
"finish_reason": "stop",
129+
}
130+
],
131+
}
132+
assert response == expected_response
133+
mock_create.assert_called_once_with(
134+
messages=[{"role": "user", "content": "hello"}], stream=True
135+
)

src/backend/impress/settings.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -629,6 +629,9 @@ class Base(Configuration):
629629
default=False, environ_name="AI_FEATURE_ENABLED", environ_prefix=None
630630
)
631631
AI_MODEL = values.Value(None, environ_name="AI_MODEL", environ_prefix=None)
632+
AI_STREAM = values.BooleanValue(
633+
default=False, environ_name="AI_STREAM", environ_prefix=None
634+
)
632635
AI_USER_RATE_THROTTLE_RATES = {
633636
"minute": 3,
634637
"hour": 50,

src/frontend/apps/e2e/__tests__/app-impress/common.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@ export const CONFIG = {
99
},
1010
AI_FEATURE_ENABLED: true,
1111
AI_MODEL: 'llama',
12+
AI_STREAM: false,
1213
CRISP_WEBSITE_ID: null,
1314
COLLABORATION_WS_URL: 'ws://localhost:4444/collaboration/ws/',
1415
COLLABORATION_WS_NOT_CONNECTED_READY_ONLY: false,

src/frontend/apps/impress/src/core/config/api/useConfig.tsx

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@ export interface ConfigResponse {
1515
AI_BOT: { name: string; color: string };
1616
AI_FEATURE_ENABLED?: boolean;
1717
AI_MODEL?: string;
18+
AI_STREAM: boolean;
1819
COLLABORATION_WS_URL?: string;
1920
COLLABORATION_WS_NOT_CONNECTED_READY_ONLY?: boolean;
2021
CRISP_WEBSITE_ID?: string;

src/frontend/apps/impress/src/features/docs/doc-editor/components/AI/useAI.tsx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,7 @@ export const useAI = (docId: Doc['id'], aiAllowed: boolean) => {
3333
const model = openai.chat(conf.AI_MODEL);
3434

3535
const extension = createAIExtension({
36-
stream: false,
36+
stream: conf.AI_STREAM,
3737
model,
3838
agentCursor: conf?.AI_BOT,
3939
promptBuilder: promptBuilder(llmFormats.html.defaultPromptBuilder),

0 commit comments

Comments
 (0)