@@ -8,6 +8,16 @@ const getContentFilteredMessage = (): string => {
  return i18n.t('ai.content_filtered', { ns: 'errors' });
};

+// This function provides a healthcare-focused system prompt
+const getHealthcareSystemPrompt = (): string => {
+  return `You are a healthcare assistant that only responds to medical and healthcare-related questions.
+  If a user asks a question that is not directly related to healthcare, medicine, medical reports,
+  health conditions, treatments, or medical terminology, respond with:
+  "${i18n.t('ai.non_healthcare_topic', { ns: 'errors', defaultValue: "I couldn't find an answer. Please try rephrasing your question or consult your healthcare provider." })}"
+
+  Only provide information about healthcare topics, and always mention that users should consult healthcare professionals for personalized medical advice.`;
+};
+
export interface ChatMessage {
  role: 'user' | 'assistant' | 'system';
  content: string;
@@ -20,21 +30,44 @@ export interface ChatSession {
  updatedAt: Date;
}

-// Interfaces for Bedrock API responses
-interface BedrockResult {
-  tokenCount: number;
-  outputText: string;
-  completionReason: 'CONTENT_FILTERED' | 'COMPLETE' | 'LENGTH' | 'STOP_SEQUENCE' | string;
+// Interfaces for Claude 3.7 Sonnet response
+interface ClaudeContentBlock {
+  type: string;
+  text?: string;
+  reasoningContent?: {
+    reasoningText: string;
+  };
+}
+
+interface ClaudeResponse {
+  id: string;
+  type: string;
+  role: string;
+  content: ClaudeContentBlock[];
+  model: string;
+  stop_reason: string;
+  stop_sequence: string | null;
+  usage: {
+    input_tokens: number;
+    output_tokens: number;
+  };
}

-interface BedrockResponse {
-  inputTextTokenCount: number;
-  results: BedrockResult[];
+// Claude request body interface
+interface ClaudeRequestBody {
+  anthropic_version: string;
+  max_tokens: number;
+  messages: {
+    role: 'user' | 'assistant' | 'system';
+    content: { type: string; text: string; }[];
+  }[];
+  temperature: number;
+  top_p: number;
+  system?: string;
}

class BedrockService {
  private client: BedrockRuntimeClient | null = null;
-  private readonly MODEL_ID = 'amazon.titan-text-lite-v1';
  private sessions: Map<string, ChatSession> = new Map();
  private isTestEnvironment: boolean;
  private contentFilteredCount: number = 0; // Track number of filtered responses
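
Note: as a quick illustration of the new ClaudeRequestBody shape, a payload built the way invokeModel does further down would look roughly like the sketch below. This is illustrative only; the user message text is made up, and the defaults mirror the values hard-coded in this diff.

const exampleRequest: ClaudeRequestBody = {
  anthropic_version: 'bedrock-2023-05-31',
  max_tokens: 4096,
  messages: [
    // Illustrative user message wrapped in a single text content block
    { role: 'user', content: [{ type: 'text', text: 'What does an elevated A1C mean?' }] }
  ],
  temperature: 0.7,
  top_p: 0.9,
  system: getHealthcareSystemPrompt()
};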
@@ -86,57 +119,74 @@ class BedrockService {
    }
  }

-  private handleBedrockResponse(parsedResponse: BedrockResponse): string {
-    // Check if we have results
-    if (!parsedResponse.results || !parsedResponse.results.length) {
-      throw new Error('Invalid response structure: missing results');
+  private handleClaudeResponse(response: ClaudeResponse): string {
+    // Check if response has content
+    if (!response.content || !response.content.length) {
+      throw new Error('Invalid response structure: missing content');
    }

-    const result = parsedResponse.results[0];
-
    // Check for content filtering
-    if (result.completionReason === "CONTENT_FILTERED") {
+    if (response.stop_reason === "content_filtered") {
      // Increment counter for analytics
      this.contentFilteredCount++;

      // Return the translated message
      return getContentFilteredMessage();
    }

+    // Extract text from content blocks
+    const textContent = response.content
+      .filter(block => block.type === 'text' && block.text)
+      .map(block => block.text)
+      .join('\n');

-    return result.outputText;
+    return textContent || '';
  }

-  private async invokeModel(prompt: string): Promise<string> {
+  private async invokeModel(messages: ChatMessage[], systemPrompt?: string): Promise<string> {
    // In test environment, return a mock response
    if (this.isTestEnvironment || !this.client) {
-      return `This is a test response to: "${prompt}"`;
+      return `This is a test response to: "${messages[messages.length - 1]?.content || 'No message'}"`;
    }

+    // Format messages for Claude API
+    const formattedMessages = messages.map(msg => ({
+      role: msg.role,
+      content: [{ type: 'text', text: msg.content }]
+    }));
+
+    // Prepare request body for Claude 3.7 Sonnet
+    const requestBody: ClaudeRequestBody = {
+      anthropic_version: "bedrock-2023-05-31",
+      max_tokens: 4096,
+      messages: formattedMessages,
+      temperature: 0.7,
+      top_p: 0.9
+    };
+
+    // Add system prompt if provided
+    if (systemPrompt) {
+      requestBody.system = systemPrompt;
+    }
+
+    // Use the cross-region inference profile ID as the model ID (following AWS docs)
+    // Do not specify inferenceProfileArn separately
    const input = {
-      modelId: this.MODEL_ID,
+      modelId: 'us.anthropic.claude-3-7-sonnet-20250219-v1:0',
      contentType: 'application/json',
      accept: 'application/json',
-      body: JSON.stringify({
-        inputText: prompt,
-        textGenerationConfig: {
-          maxTokenCount: 4096,
-          stopSequences: [],
-          temperature: 0.7,
-          topP: 1,
-        },
-      }),
+      body: JSON.stringify(requestBody),
    };

    try {
      const command = new InvokeModelCommand(input);
      const response = await this.client.send(command);
      const responseBody = new TextDecoder().decode(response.body);
-      const parsedResponse = JSON.parse(responseBody) as BedrockResponse;
+      const parsedResponse = JSON.parse(responseBody) as ClaudeResponse;

-      return this.handleBedrockResponse(parsedResponse);
+      return this.handleClaudeResponse(parsedResponse);
    } catch (error) {
-      console.error('Error invoking Bedrock model:', error);
+      console.error('Error invoking Claude model:', error);
      throw error;
    }
  }
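
Note: to make the new handleClaudeResponse path concrete, a minimal parsed response that would flow through the text-extraction branch might look like the sketch below. The field values (id, type, stop_reason, token counts) are assumptions for illustration, not output captured from a real invocation.

const exampleResponse: ClaudeResponse = {
  id: 'msg_example',   // hypothetical id
  type: 'message',
  role: 'assistant',
  content: [
    { type: 'text', text: 'Please consult your healthcare provider for personalized advice.' }
  ],
  model: 'us.anthropic.claude-3-7-sonnet-20250219-v1:0',
  stop_reason: 'end_turn',
  stop_sequence: null,
  usage: { input_tokens: 42, output_tokens: 12 }
};
// handleClaudeResponse would join the 'text' blocks with '\n' and return that
// string, or fall back to getContentFilteredMessage() when stop_reason is
// 'content_filtered'.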
@@ -152,7 +202,7 @@ class BedrockService {
    return sessionId;
  }

-  public async sendMessage(sessionId: string, message: string): Promise<string> {
+  public async sendMessage(sessionId: string, message: string, systemPrompt?: string): Promise<string> {
    const session = this.sessions.get(sessionId);
    if (!session) {
      throw new Error('Chat session not found');
@@ -164,11 +214,11 @@ class BedrockService {
      content: message,
    });

-    // Prepare context for the model
-    const context = this.prepareContext(session.messages);
-
-    // Get response from Bedrock
-    const response = await this.invokeModel(context);
+    // Use healthcare system prompt by default, or allow custom override
+    const effectiveSystemPrompt = systemPrompt || getHealthcareSystemPrompt();
+
+    // Get response from Claude
+    const response = await this.invokeModel(session.messages, effectiveSystemPrompt);

    // Add assistant response to context
    session.messages.push({
@@ -183,19 +233,6 @@ class BedrockService {
    return response;
  }

-  private prepareContext(messages: ChatMessage[]): string {
-    // Format the conversation history into a prompt
-    const formattedMessages = messages.map(msg => {
-      const role = msg.role === 'assistant' ? 'Assistant' : 'Human';
-      return `${role}: ${msg.content}`;
-    });
-
-    // Add a final prompt for the assistant
-    formattedMessages.push('Assistant:');
-
-    return formattedMessages.join('\n');
-  }
-
  public getChatSession(sessionId: string): ChatSession | undefined {
    return this.sessions.get(sessionId);
  }
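
Note: a rough usage sketch of the updated sendMessage signature. The session-creation call below uses a hypothetical method name, since this diff only shows the tail of that method; omitting the third argument falls back to getHealthcareSystemPrompt().

// Inside an async function. bedrockService is assumed to be the module's
// exported BedrockService instance; createChatSession is a hypothetical
// name for the session-creation method not shown in this diff.
const sessionId = bedrockService.createChatSession();
const reply = await bedrockService.sendMessage(
  sessionId,
  'Can you summarise my latest blood test report?'
  // no systemPrompt passed, so the default healthcare prompt is applied
);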