
Commit 4a8fbcb

fix: update Gemini model names (#3674)
1 parent f377daa commit 4a8fbcb

27 files changed (+27 −27 lines)
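
Every hunk below makes the same one-line change: each snippet's default `model` argument moves from an unpinned alias (`gemini-1.0-pro`, `gemini-1.0-pro-vision`) or an older pin (`gemini-1.0-pro-001`) to a pinned version (`gemini-1.0-pro-002`, `gemini-1.0-pro-vision-001`). As a minimal sketch of where that default ends up, assuming the same `@google-cloud/vertexai` SDK the snippets already import (the prompt text and console output are illustrative only, not part of the commit):

const {VertexAI} = require('@google-cloud/vertexai');

async function generateContent(
  projectId = 'PROJECT_ID',
  location = 'us-central1',
  // Pinned model version, matching the updated default in this commit
  model = 'gemini-1.0-pro-002'
) {
  // Initialize Vertex with the Cloud project and location
  const vertexAI = new VertexAI({project: projectId, location: location});

  // The default model name is passed straight through here
  const generativeModel = vertexAI.getGenerativeModel({model: model});

  // Illustrative request; each real snippet builds its own
  const request = {
    contents: [{role: 'user', parts: [{text: 'Why is the sky blue?'}]}],
  };
  const result = await generativeModel.generateContent(request);
  console.log(JSON.stringify(result.response));
}
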

generative-ai/snippets/countTokens.js (+1 −1)

@@ -21,7 +21,7 @@ const {VertexAI} = require('@google-cloud/vertexai');
 async function countTokens(
   projectId = 'PROJECT_ID',
   location = 'us-central1',
-  model = 'gemini-1.0-pro'
+  model = 'gemini-1.0-pro-002'
 ) {
   // Initialize Vertex with your Cloud project and location
   const vertexAI = new VertexAI({project: projectId, location: location});

generative-ai/snippets/functionCallingStreamChat.js (+1 −1)

@@ -55,7 +55,7 @@ const functionResponseParts = [
 async function functionCallingStreamChat(
   projectId = 'PROJECT_ID',
   location = 'us-central1',
-  model = 'gemini-1.0-pro'
+  model = 'gemini-1.0-pro-002'
 ) {
   // Initialize Vertex with your Cloud project and location
   const vertexAI = new VertexAI({project: projectId, location: location});

generative-ai/snippets/functionCallingStreamContent.js (+1 −1)

@@ -55,7 +55,7 @@ const functionResponseParts = [
 async function functionCallingStreamChat(
   projectId = 'PROJECT_ID',
   location = 'us-central1',
-  model = 'gemini-1.0-pro'
+  model = 'gemini-1.0-pro-002'
 ) {
   // Initialize Vertex with your Cloud project and location
   const vertexAI = new VertexAI({project: projectId, location: location});

generative-ai/snippets/inference/nonStreamTextBasic.js (+1 −1)

@@ -21,7 +21,7 @@ const {VertexAI} = require('@google-cloud/vertexai');
 async function generateContent(
   projectId = 'PROJECT_ID',
   location = 'us-central1',
-  model = 'gemini-1.0-pro'
+  model = 'gemini-1.0-pro-002'
 ) {
   // Initialize Vertex with your Cloud project and location
   const vertexAI = new VertexAI({project: projectId, location: location});

generative-ai/snippets/inference/streamTextBasic.js (+1 −1)

@@ -21,7 +21,7 @@ const {VertexAI} = require('@google-cloud/vertexai');
 async function generateContent(
   projectId = 'PROJECT_ID',
   location = 'us-central1',
-  model = 'gemini-1.0-pro'
+  model = 'gemini-1.0-pro-002'
 ) {
   // Initialize Vertex with your Cloud project and location
   const vertexAI = new VertexAI({project: projectId, location: location});

generative-ai/snippets/nonStreamingChat.js (+1 −1)

@@ -21,7 +21,7 @@ const {VertexAI} = require('@google-cloud/vertexai');
 async function createNonStreamingChat(
   projectId = 'PROJECT_ID',
   location = 'us-central1',
-  model = 'gemini-1.0-pro'
+  model = 'gemini-1.0-pro-002'
 ) {
   // Initialize Vertex with your Cloud project and location
   const vertexAI = new VertexAI({project: projectId, location: location});

generative-ai/snippets/nonStreamingContent.js (+1 −1)

@@ -21,7 +21,7 @@ const {VertexAI} = require('@google-cloud/vertexai');
 async function createNonStreamingContent(
   projectId = 'PROJECT_ID',
   location = 'us-central1',
-  model = 'gemini-1.0-pro'
+  model = 'gemini-1.0-pro-002'
 ) {
   // Initialize Vertex with your Cloud project and location
   const vertexAI = new VertexAI({project: projectId, location: location});

generative-ai/snippets/nonStreamingMultipartContent.js (+1 −1)

@@ -21,7 +21,7 @@ const {VertexAI} = require('@google-cloud/vertexai');
 async function createNonStreamingMultipartContent(
   projectId = 'PROJECT_ID',
   location = 'us-central1',
-  model = 'gemini-1.0-pro-vision',
+  model = 'gemini-1.0-pro-vision-001',
   image = 'gs://generativeai-downloads/images/scones.jpg',
   mimeType = 'image/jpeg'
 ) {

generative-ai/snippets/safetySettings.js (+1 −1)

@@ -25,7 +25,7 @@ const {
 async function setSafetySettings(
   projectId = 'PROJECT_ID',
   location = 'us-central1',
-  model = 'gemini-1.0-pro-001'
+  model = 'gemini-1.0-pro-002'
 ) {
   // Initialize Vertex with your Cloud project and location
   const vertexAI = new VertexAI({project: projectId, location: location});

generative-ai/snippets/sendMultiModalPromptWithImage.js (+1 −1)

@@ -27,7 +27,7 @@ async function getBase64(url) {
 async function sendMultiModalPromptWithImage(
   projectId = 'PROJECT_ID',
   location = 'us-central1',
-  model = 'gemini-1.0-pro-vision'
+  model = 'gemini-1.0-pro-vision-001'
 ) {
   // For images, the SDK supports base64 strings
   const landmarkImage1 = await getBase64(

generative-ai/snippets/sendMultiModalPromptWithVideo.js (+1 −1)

@@ -21,7 +21,7 @@ const {VertexAI} = require('@google-cloud/vertexai');
 async function sendMultiModalPromptWithVideo(
   projectId = 'PROJECT_ID',
   location = 'us-central1',
-  model = 'gemini-1.0-pro-vision'
+  model = 'gemini-1.0-pro-vision-001'
 ) {
   // Initialize Vertex with your Cloud project and location
   const vertexAI = new VertexAI({project: projectId, location: location});

generative-ai/snippets/streamChat.js (+1 −1)

@@ -21,7 +21,7 @@ const {VertexAI} = require('@google-cloud/vertexai');
 async function createStreamChat(
   projectId = 'PROJECT_ID',
   location = 'us-central1',
-  model = 'gemini-1.0-pro'
+  model = 'gemini-1.0-pro-002'
 ) {
   // Initialize Vertex with your Cloud project and location
   const vertexAI = new VertexAI({project: projectId, location: location});

generative-ai/snippets/streamContent.js (+1 −1)

@@ -21,7 +21,7 @@ const {VertexAI} = require('@google-cloud/vertexai');
 async function createStreamContent(
   projectId = 'PROJECT_ID',
   location = 'us-central1',
-  model = 'gemini-1.0-pro'
+  model = 'gemini-1.0-pro-002'
 ) {
   // Initialize Vertex with your Cloud project and location
   const vertexAI = new VertexAI({project: projectId, location: location});

generative-ai/snippets/streamMultipartContent.js (+1 −1)

@@ -21,7 +21,7 @@ const {VertexAI} = require('@google-cloud/vertexai');
 async function createStreamMultipartContent(
   projectId = 'PROJECT_ID',
   location = 'us-central1',
-  model = 'gemini-1.0-pro-vision',
+  model = 'gemini-1.0-pro-vision-001',
   image = 'gs://generativeai-downloads/images/scones.jpg',
   mimeType = 'image/jpeg'
 ) {

generative-ai/snippets/test/countTokens.test.js (+1 −1)

@@ -21,7 +21,7 @@ const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'});
 
 const projectId = process.env.CAIP_PROJECT_ID;
 const location = process.env.LOCATION;
-const model = 'gemini-1.0-pro';
+const model = 'gemini-1.0-pro-002';
 
 describe('Count tokens', async () => {
   /**
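
The test-side constant above is only a CLI argument: each test shells out to its snippet with the execSync helper visible in the unchanged hunk header and asserts on the output, so the pinned model name has to match in both places. A hypothetical sketch of that pattern, with the command string and assertion invented for illustration:

const assert = require('assert');
const cp = require('child_process');
const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'});

const projectId = process.env.CAIP_PROJECT_ID;
const location = process.env.LOCATION;
const model = 'gemini-1.0-pro-002';

describe('Count tokens', () => {
  it('should count tokens in a prompt', () => {
    // Hypothetical invocation: pass the pinned model name through to the snippet
    const output = execSync(
      `node ./countTokens.js ${projectId} ${location} ${model}`
    );
    // Hypothetical assertion on the snippet's logged response
    assert(output.length > 0);
  });
});
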

generative-ai/snippets/test/functionCallingStreamContent.test.js (+1 −1)

@@ -21,7 +21,7 @@ const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'});
 
 const projectId = process.env.CAIP_PROJECT_ID;
 const location = process.env.LOCATION;
-const model = 'gemini-1.0-pro';
+const model = 'gemini-1.0-pro-002';
 
 describe('Generative AI Function Calling Stream Content', () => {
   /**

generative-ai/snippets/test/inference/nonStreamTextBasic.test.js (+1 −1)

@@ -21,7 +21,7 @@ const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'});
 
 const projectId = process.env.CAIP_PROJECT_ID;
 const location = process.env.LOCATION;
-const model = 'gemini-1.0-pro';
+const model = 'gemini-1.0-pro-002';
 
 describe('Generative AI Basic Text Inference', () => {
   /**

generative-ai/snippets/test/inference/streamTextBasic.test.js (+1 −1)

@@ -21,7 +21,7 @@ const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'});
 
 const projectId = process.env.CAIP_PROJECT_ID;
 const location = process.env.LOCATION;
-const model = 'gemini-1.0-pro';
+const model = 'gemini-1.0-pro-002';
 
 describe('Generative AI Basic Text Inference Streaming', () => {
   /**

generative-ai/snippets/test/nonStreamingChat.test.js (+1 −1)

@@ -21,7 +21,7 @@ const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'});
 
 const projectId = process.env.CAIP_PROJECT_ID;
 const location = process.env.LOCATION;
-const model = 'gemini-1.0-pro';
+const model = 'gemini-1.0-pro-002';
 
 describe('Generative AI NonStreaming Chat', async () => {
   /**

generative-ai/snippets/test/nonStreamingContent.test.js (+1 −1)

@@ -21,7 +21,7 @@ const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'});
 
 const projectId = process.env.CAIP_PROJECT_ID;
 const location = process.env.LOCATION;
-const model = 'gemini-1.0-pro';
+const model = 'gemini-1.0-pro-002';
 
 describe('Generative AI NonStreaming Content', () => {
   /**

generative-ai/snippets/test/nonStreamingMultipartContent.test.js (+1 −1)

@@ -21,7 +21,7 @@ const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'});
 
 const projectId = process.env.CAIP_PROJECT_ID;
 const location = process.env.LOCATION;
-const model = 'gemini-1.0-pro-vision';
+const model = 'gemini-1.0-pro-vision-001';
 
 describe('Generative AI NonStreaming Multipart Content', () => {
   /**

generative-ai/snippets/test/safetySettings.test.js (+1 −1)

@@ -21,7 +21,7 @@ const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'});
 
 const projectId = process.env.CAIP_PROJECT_ID;
 const location = process.env.LOCATION;
-const model = 'gemini-1.0-pro-001';
+const model = 'gemini-1.0-pro-002';
 
 describe('Safety settings', async () => {
   /**

generative-ai/snippets/test/sendMultiModalPromptWithImage.test.js (+1 −1)

@@ -21,7 +21,7 @@ const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'});
 
 const projectId = process.env.CAIP_PROJECT_ID;
 const location = process.env.LOCATION;
-const model = 'gemini-1.0-pro-vision';
+const model = 'gemini-1.0-pro-vision-001';
 
 describe('Generative AI Stream MultiModal with Image', () => {
   /**

generative-ai/snippets/test/sendMultiModalPromptWithVideo.test.js (+1 −1)

@@ -21,7 +21,7 @@ const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'});
 
 const projectId = process.env.CAIP_PROJECT_ID;
 const location = process.env.LOCATION;
-const model = 'gemini-1.0-pro-vision';
+const model = 'gemini-1.0-pro-vision-001';
 
 describe('Generative AI Stream MultiModal with Video', () => {
   /**

generative-ai/snippets/test/streamChat.test.js (+1 −1)

@@ -21,7 +21,7 @@ const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'});
 
 const projectId = process.env.CAIP_PROJECT_ID;
 const location = process.env.LOCATION;
-const model = 'gemini-1.0-pro';
+const model = 'gemini-1.0-pro-002';
 
 describe('Generative AI Stream Chat', () => {
   /**

generative-ai/snippets/test/streamContent.test.js (+1 −1)

@@ -21,7 +21,7 @@ const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'});
 
 const projectId = process.env.CAIP_PROJECT_ID;
 const location = process.env.LOCATION;
-const model = 'gemini-1.0-pro';
+const model = 'gemini-1.0-pro-002';
 
 describe('Generative AI Stream Content', () => {
   /**

generative-ai/snippets/test/streamMultipartContent.test.js (+1 −1)

@@ -21,7 +21,7 @@ const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'});
 
 const projectId = process.env.CAIP_PROJECT_ID;
 const location = process.env.LOCATION;
-const model = 'gemini-1.0-pro-vision';
+const model = 'gemini-1.0-pro-vision-001';
 
 describe('Generative AI Stream Multipart Content', () => {
   /**
