Skip to content

Commit 14f9042

Browse files
Add connector endpoint to trusted URLs step and remove stream parameter (opensearch-project#3460)
Signed-off-by: Nathalie Jonathan <[email protected]>
1 parent d90e6e9 commit 14f9042

File tree

1 file changed

+157
-0
lines changed

1 file changed

+157
-0
lines changed
Original file line numberDiff line numberDiff line change
# DeepSeek connector blueprint example for Chat

This blueprint integrates [DeepSeek Chat Model](https://api-docs.deepseek.com/api/create-chat-completion) for question-answering capabilities for standalone interactions. Full conversational functionality requires additional development.

Adapt and extend this blueprint as needed for your specific use case.

## 1. Add connector endpoint to trusted URLs:

Note: Skip this step starting with OpenSearch version 2.19.0.

```json
PUT /_cluster/settings
{
  "persistent": {
    "plugins.ml_commons.trusted_connector_endpoints_regex": [
      "^https://api\\.deepseek\\.com/.*$"
    ]
  }
}
```

## 2. Create connector for DeepSeek Chat:

```json
POST /_plugins/_ml/connectors/_create
{
  "name": "DeepSeek Chat",
  "description": "Test connector for DeepSeek Chat",
  "version": "1",
  "protocol": "http",
  "parameters": {
    "endpoint": "api.deepseek.com",
    "model": "deepseek-chat"
  },
  "credential": {
    "deepSeek_key": "<PLEASE ADD YOUR DEEPSEEK API KEY HERE>"
  },
  "actions": [
    {
      "action_type": "predict",
      "method": "POST",
      "url": "https://${parameters.endpoint}/v1/chat/completions",
      "headers": {
        "Content-Type": "application/json",
        "Authorization": "Bearer ${credential.deepSeek_key}"
      },
      "request_body": "{ \"model\": \"${parameters.model}\", \"messages\": ${parameters.messages} }"
    }
  ]
}
```

#### Sample response
```json
{
  "connector_id": "n0dOqZQBQwAL8-GO1pYI"
}
```

## 3. Create model group:

```json
POST /_plugins/_ml/model_groups/_register
{
  "name": "remote_model_group_chat",
  "description": "This is an example description"
}
```

#### Sample response
```json
{
  "model_group_id": "b0cjqZQBQwAL8-GOVJZ4",
  "status": "CREATED"
}
```

## 4. Register model to model group & deploy model:

```json
POST /_plugins/_ml/models/_register?deploy=true
{
  "name": "DeepSeek Chat model",
  "function_name": "remote",
  "model_group_id": "b0cjqZQBQwAL8-GOVJZ4",
  "description": "DeepSeek Chat",
  "connector_id": "n0dOqZQBQwAL8-GO1pYI"
}
```

#### Sample response
```json
{
  "task_id": "oEdPqZQBQwAL8-GOCJbw",
  "status": "CREATED",
  "model_id": "oUdPqZQBQwAL8-GOCZYL"
}
```

## 5. Test model inference

```json
POST /_plugins/_ml/models/oUdPqZQBQwAL8-GOCZYL/_predict
{
  "parameters": {
    "messages": [
      {
        "role": "system",
        "content": "You are a helpful assistant."
      },
      {
        "role": "user",
        "content": "Hello!"
      }
    ]
  }
}
```

#### Sample response
```json
{
  "inference_results": [
    {
      "output": [
        {
          "name": "response",
          "dataAsMap": {
            "id": "9d9bd689-88a5-44b0-b73f-2daa92518761",
            "object": "chat.completion",
            "created": 1.738011126E9,
            "model": "deepseek-chat",
            "choices": [
              {
                "index": 0.0,
                "message": {
                  "role": "assistant",
                  "content": "Hello! How can I assist you today? 😊"
                },
                "finish_reason": "stop"
              }
            ],
            "usage": {
              "prompt_tokens": 11.0,
              "completion_tokens": 11.0,
              "total_tokens": 22.0,
              "prompt_tokens_details": {
                "cached_tokens": 0.0
              },
              "prompt_cache_hit_tokens": 0.0,
              "prompt_cache_miss_tokens": 11.0
            },
            "system_fingerprint": "fp_3a5770e1b4"
          }
        }
      ],
      "status_code": 200
    }
  ]
}
```

0 commit comments

Comments
 (0)