@@ -9,11 +9,12 @@ import {
 import { concat } from "@langchain/core/utils/stream";
 import {
   ConversationRole as BedrockConversationRole,
+  BedrockRuntimeClient,
   type Message as BedrockMessage,
   type SystemContentBlock as BedrockSystemContentBlock,
 } from "@aws-sdk/client-bedrock-runtime";
 import { z } from "zod/v3";
-import { describe, expect, test, it } from "vitest";
+import { describe, expect, test, it, vi } from "vitest";
 import { convertToConverseMessages } from "../utils/message_inputs.js";
 import { handleConverseStreamContentBlockDelta } from "../utils/message_outputs.js";
 import { ChatBedrockConverse } from "../chat_models.js";
@@ -451,6 +452,206 @@ test("Streaming supports empty string chunks", async () => {
   expect(finalChunk.content).toBe("Hello world!");
 });

+describe("applicationInferenceProfile parameter", () => {
+  const baseConstructorArgs = {
+    region: "us-east-1",
+    credentials: {
+      secretAccessKey: "test-secret-key",
+      accessKeyId: "test-access-key",
+    },
+  };
+
+  it("should initialize applicationInferenceProfile from constructor", () => {
+    const testArn =
+      "arn:aws:bedrock:eu-west-1:123456789012:application-inference-profile/test-profile";
+    const model = new ChatBedrockConverse({
+      ...baseConstructorArgs,
+      model: "anthropic.claude-3-haiku-20240307-v1:0",
+      applicationInferenceProfile: testArn,
+    });
+    expect(model.model).toBe("anthropic.claude-3-haiku-20240307-v1:0");
+    expect(model.applicationInferenceProfile).toBe(testArn);
+  });
+
+  it("should be undefined when not provided in constructor", () => {
+    const model = new ChatBedrockConverse({
+      ...baseConstructorArgs,
+      model: "anthropic.claude-3-haiku-20240307-v1:0",
+    });
+
+    expect(model.model).toBe("anthropic.claude-3-haiku-20240307-v1:0");
+    expect(model.applicationInferenceProfile).toBeUndefined();
+  });
+
+  it("should send applicationInferenceProfile as modelId in ConverseCommand when provided", async () => {
+    const testArn =
+      "arn:aws:bedrock:eu-west-1:123456789012:application-inference-profile/test-profile";
+    const mockSend = vi.fn().mockResolvedValue({
+      output: {
+        message: {
+          role: "assistant",
+          content: [{ text: "Test response" }],
+        },
+      },
+      stopReason: "end_turn",
+      usage: {
+        inputTokens: 10,
+        outputTokens: 5,
+        totalTokens: 15,
+      },
+    });
+
+    const mockClient = {
+      send: mockSend,
+    } as unknown as BedrockRuntimeClient;
+
+    const model = new ChatBedrockConverse({
+      ...baseConstructorArgs,
+      model: "anthropic.claude-3-haiku-20240307-v1:0",
+      applicationInferenceProfile: testArn,
+      client: mockClient,
+    });
+
+    await model.invoke([new HumanMessage("Hello")]);
+
+    // Verify that send was called
+    expect(mockSend).toHaveBeenCalledTimes(1);
+
+    // Verify that the command was created with applicationInferenceProfile as modelId
+    const commandArg = mockSend.mock.calls[0][0];
+    expect(commandArg.input.modelId).toBe(testArn);
+    expect(commandArg.input.modelId).not.toBe(
+      "anthropic.claude-3-haiku-20240307-v1:0"
+    );
+  });
+
+  it("should send model as modelId in ConverseCommand when applicationInferenceProfile is not provided", async () => {
+    const mockSend = vi.fn().mockResolvedValue({
+      output: {
+        message: {
+          role: "assistant",
+          content: [{ text: "Test response" }],
+        },
+      },
+      stopReason: "end_turn",
+      usage: {
+        inputTokens: 10,
+        outputTokens: 5,
+        totalTokens: 15,
+      },
+    });
+
+    const mockClient = {
+      send: mockSend,
+    } as unknown as BedrockRuntimeClient;
+
+    const model = new ChatBedrockConverse({
+      ...baseConstructorArgs,
+      model: "anthropic.claude-3-haiku-20240307-v1:0",
+      client: mockClient,
+    });
+
+    await model.invoke([new HumanMessage("Hello")]);
+
+    // Verify that send was called
+    expect(mockSend).toHaveBeenCalledTimes(1);
+
+    // Verify that the command was created with model as modelId
+    const commandArg = mockSend.mock.calls[0][0];
+    expect(commandArg.input.modelId).toBe(
+      "anthropic.claude-3-haiku-20240307-v1:0"
+    );
+  });
+
+  it("should send applicationInferenceProfile as modelId in ConverseStreamCommand when provided", async () => {
+    const testArn =
+      "arn:aws:bedrock:eu-west-1:123456789012:application-inference-profile/test-profile";
+    const mockSend = vi.fn().mockResolvedValue({
+      stream: (async function* () {
+        yield {
+          contentBlockDelta: {
+            contentBlockIndex: 0,
+            delta: { text: "Test" },
+          },
+        };
+        yield {
+          metadata: {
+            usage: {
+              inputTokens: 10,
+              outputTokens: 5,
+              totalTokens: 15,
+            },
+          },
+        };
+      })(),
+    });
+
+    const mockClient = {
+      send: mockSend,
+    } as unknown as BedrockRuntimeClient;
+
+    const model = new ChatBedrockConverse({
+      ...baseConstructorArgs,
+      model: "anthropic.claude-3-haiku-20240307-v1:0",
+      applicationInferenceProfile: testArn,
+      streaming: true,
+      client: mockClient,
+    });
+
+    await model.invoke([new HumanMessage("Hello")]);
+
+    expect(mockSend).toHaveBeenCalledTimes(1);
+
+    const commandArg = mockSend.mock.calls[0][0];
+    expect(commandArg.input.modelId).toBe(testArn);
+    expect(commandArg.input.modelId).not.toBe(
+      "anthropic.claude-3-haiku-20240307-v1:0"
+    );
+  });
+
+  it("should send model as modelId in ConverseStreamCommand when applicationInferenceProfile is not provided", async () => {
+    const mockSend = vi.fn().mockResolvedValue({
+      stream: (async function* () {
+        yield {
+          contentBlockDelta: {
+            contentBlockIndex: 0,
+            delta: { text: "Test" },
+          },
+        };
+        yield {
+          metadata: {
+            usage: {
+              inputTokens: 10,
+              outputTokens: 5,
+              totalTokens: 15,
+            },
+          },
+        };
+      })(),
+    });
+
+    const mockClient = {
+      send: mockSend,
+    } as unknown as BedrockRuntimeClient;
+
+    const model = new ChatBedrockConverse({
+      ...baseConstructorArgs,
+      model: "anthropic.claude-3-haiku-20240307-v1:0",
+      streaming: true,
+      client: mockClient,
+    });
+
+    await model.invoke([new HumanMessage("Hello")]);
+
+    expect(mockSend).toHaveBeenCalledTimes(1);
+
+    const commandArg = mockSend.mock.calls[0][0];
+    expect(commandArg.input.modelId).toBe(
+      "anthropic.claude-3-haiku-20240307-v1:0"
+    );
+  });
+});
+
 describe("tool_choice works for supported models", () => {
   const tool = {
     name: "weather",
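For reference, a minimal usage sketch of the applicationInferenceProfile option exercised by the tests above; the package entry point "@langchain/aws" and the ARN value are assumptions for illustration, not part of this diff:

import { ChatBedrockConverse } from "@langchain/aws";
import { HumanMessage } from "@langchain/core/messages";

// Hypothetical application inference profile ARN. Per the tests above, when this
// option is set it is sent as the Converse/ConverseStream modelId instead of `model`.
const model = new ChatBedrockConverse({
  model: "anthropic.claude-3-haiku-20240307-v1:0",
  region: "us-east-1",
  applicationInferenceProfile:
    "arn:aws:bedrock:us-east-1:123456789012:application-inference-profile/example-profile",
});

const res = await model.invoke([new HumanMessage("Hello")]);
console.log(res.content);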