Test AI services in the impress core app.
"""
5- from unittest .mock import MagicMock , patch
5+ from unittest .mock import patch
66
77from django .core .exceptions import ImproperlyConfigured
8- from django .test .utils import override_settings
98
109import pytest
1110from openai import OpenAIError
# Every test in this module touches Django settings / the ORM layer.
pytestmark = pytest.mark.django_db
1615
1716
@pytest.fixture(autouse=True)
def ai_settings(settings):
    """Provide a complete, valid AI configuration for every test in this module.

    Uses pytest-django's ``settings`` fixture so each override is rolled back
    automatically after the test. ``autouse=True`` keeps the individual tests
    free of configuration boilerplate; tests that need a *missing* setting
    override it explicitly (see ``test_services_ai_setting_missing``).
    """
    settings.AI_MODEL = "llama"
    settings.AI_BASE_URL = "http://example.com"
    settings.AI_API_KEY = "test-key"
    settings.AI_FEATURE_ENABLED = True
25+
@pytest.mark.parametrize(
    "setting_name, setting_value",
    [
        # NOTE(review): the collapsed diff hunk hides two neighboring entries
        # (presumably AI_BASE_URL / AI_API_KEY) — confirm against the full file.
        ("AI_MODEL", None),
    ],
)
def test_services_ai_setting_missing(setting_name, setting_value, settings):
    """AIService must refuse to initialize when a required AI setting is unset."""
    # Blank out one setting; the autouse ``ai_settings`` fixture provided a
    # valid value, so this isolates exactly the missing-setting code path.
    setattr(settings, setting_name, setting_value)

    with pytest.raises(
        ImproperlyConfigured,
        match="AI configuration not set",
    ):
        AIService()
3543
3644
@patch("openai.resources.chat.completions.Completions.create")
def test_services_ai_proxy_client_error(mock_create):
    """proxy() must surface errors raised by the underlying OpenAI client."""
    mock_create.side_effect = OpenAIError("Mocked client error")

    # NOTE(review): the diff hunk header hides the expectation lines here; the
    # call is presumably wrapped in ``pytest.raises(OpenAIError)`` — confirm
    # against the full file.
    with pytest.raises(OpenAIError):
        AIService().proxy({"messages": [{"role": "user", "content": "hello"}]})
5156
5257
@patch("openai.resources.chat.completions.Completions.create")
def test_services_ai_proxy_success(mock_create):
    """proxy() should forward the client payload unchanged for valid arguments."""
    # A plain dict return value (no MagicMock/model_dump indirection): the
    # service is expected to pass the client response through as-is.
    # NOTE(review): the middle of this dict and of ``expected_response`` is
    # hidden by collapsed diff hunks; reconstructed from the byte-identical
    # ``test_services_ai_proxy_with_stream`` below — confirm against the file.
    mock_create.return_value = {
        "id": "chatcmpl-test",
        "object": "chat.completion",
        "created": 1234567890,
        "model": "test-model",
        "choices": [
            {
                "index": 0,
                "message": {"role": "assistant", "content": "Salut"},
                "finish_reason": "stop",
            }
        ],
    }

    response = AIService().proxy({"messages": [{"role": "user", "content": "hello"}]})

    expected_response = {
        "id": "chatcmpl-test",
        "object": "chat.completion",
        "created": 1234567890,
        "model": "test-model",
        "choices": [
            {
                "index": 0,
                "message": {"role": "assistant", "content": "Salut"},
                "finish_reason": "stop",
            }
        ],
    }
    assert response == expected_response
    # The client must be invoked exactly once, and without streaming.
    mock_create.assert_called_once_with(
        messages=[{"role": "user", "content": "hello"}], stream=False
    )
95+
96+
@patch("openai.resources.chat.completions.Completions.create")
def test_services_ai_proxy_with_stream(mock_create):
    """proxy(..., stream=True) should forward the stream flag to the client."""
    mock_create.return_value = {
        "id": "chatcmpl-test",
        "object": "chat.completion",
        "created": 1234567890,
        "model": "test-model",
        "choices": [
            {
                "index": 0,
                "message": {"role": "assistant", "content": "Salut"},
                "finish_reason": "stop",
            }
        ],
    }

    response = AIService().proxy(
        {"messages": [{"role": "user", "content": "hello"}]}, stream=True
    )

    expected_response = {
        "id": "chatcmpl-test",
        "object": "chat.completion",
        "created": 1234567890,
        "model": "test-model",
        "choices": [
            {
                "index": 0,
                "message": {"role": "assistant", "content": "Salut"},
                "finish_reason": "stop",
            }
        ],
    }
    assert response == expected_response
    # Same single call as the non-streaming test, but with stream=True.
    mock_create.assert_called_once_with(
        messages=[{"role": "user", "content": "hello"}], stream=True
    )
0 commit comments