docs: update to latest stable gemini models (#633)

Update to the latest stable Gemini models. See the [latest stable models list](https://cloud.google.com/vertex-ai/generative-ai/docs/learn/model-versions#latest-stable).

Closes https://github.com/googleapis/genai-toolbox/issues/518
Authored by Twisha Bansal on 2025-06-02 10:40:52 +05:30; committed by GitHub
parent 386bb23e7c
commit 4700dd363c
4 changed files with 22 additions and 22 deletions
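
For quick reference, here is a minimal standalone sketch of calling the pinned stable model with the google-genai client, mirroring the notebook snippets updated below. The project ID, location, prompt, and system instruction are placeholders, not values from this change.

```python
# Minimal sketch, assuming the google-genai SDK and Vertex AI application-default
# credentials; project, location, and prompt strings are placeholders.
from google import genai
from google.genai.types import GenerateContentConfig

genai_client = genai.Client(vertexai=True, project="my-project", location="us-central1")

response = genai_client.models.generate_content(
    # Pinned stable version; the bare "gemini-2.0-flash" alias auto-updates
    # to whichever stable version is latest.
    model="gemini-2.0-flash-001",
    contents="Find hotels in Basel with Basel in their name.",
    config=GenerateContentConfig(system_instruction="You are a helpful hotel assistant."),
)
print(response.text)
```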

View File

@@ -583,7 +583,7 @@
" history.append(user_prompt_content)\n",
"\n",
" response = genai_client.models.generate_content(\n",
" model=\"gemini-2.0-flash\",\n",
" model=\"gemini-2.0-flash-001\",\n",
" contents=history,\n",
" config=GenerateContentConfig(\n",
" system_instruction=prompt,\n",
@@ -686,7 +686,7 @@
"\"\"\"\n",
"\n",
"root_agent = Agent(\n",
" model='gemini-2.0-flash',\n",
" model='gemini-2.0-flash-001',\n",
" name='hotel_agent',\n",
" description='A helpful AI assistant.',\n",
" instruction=prompt,\n",
@@ -802,8 +802,8 @@
"async def run_application():\n",
" # Create an LLM to bind with the agent.\n",
" # TODO(developer): replace this with another model if needed\n",
" model = ChatVertexAI(model_name=\"gemini-1.5-pro\", project=project_id)\n",
" # model = ChatGoogleGenerativeAI(model=\"gemini-1.5-pro\")\n",
" model = ChatVertexAI(model_name=\"gemini-2.0-flash-001\", project=project_id)\n",
" # model = ChatGoogleGenerativeAI(model=\"gemini-2.0-flash-001\")\n",
" # model = ChatAnthropic(model=\"claude-3-5-sonnet-20240620\")\n",
"\n",
" # Load the tools from the Toolbox server\n",
@@ -898,12 +898,12 @@
" # Create an LLM to bind with the agent.\n",
" # TODO(developer): replace this with another model if needed\n",
" llm = GoogleGenAI(\n",
" model=\"gemini-1.5-pro\",\n",
" model=\"gemini-2.0-flash-001\",\n",
" vertexai_config={\"project\": project_id, \"location\": \"us-central1\"},\n",
" )\n",
" # llm = GoogleGenAI(\n",
" # api_key=os.getenv(\"GOOGLE_API_KEY\"),\n",
" # model=\"gemini-1.5-pro\",\n",
" # model=\"gemini-2.0-flash-001\",\n",
" # )\n",
" # llm = Anthropic(\n",
" # model=\"claude-3-7-sonnet-latest\",\n",

View File

@@ -369,7 +369,7 @@ async def run_application():
history.append(user_prompt_content)
response = genai_client.models.generate_content(
model="gemini-2.0-flash",
model="gemini-2.0-flash-001",
contents=history,
config=GenerateContentConfig(
system_instruction=prompt,
@@ -445,7 +445,7 @@ with ToolboxSyncClient("http://127.0.0.1:5000") as toolbox_client:
"""
root_agent = Agent(
-model='gemini-2.0-flash',
+model='gemini-2.0-flash-001',
name='hotel_agent',
description='A helpful AI assistant.',
instruction=prompt,
@@ -522,8 +522,8 @@ queries = [
async def run_application():
# TODO(developer): replace this with another model if needed
-model = ChatVertexAI(model_name="gemini-1.5-pro")
-# model = ChatGoogleGenerativeAI(model="gemini-1.5-pro")
+model = ChatVertexAI(model_name="gemini-2.0-flash-001")
+# model = ChatGoogleGenerativeAI(model="gemini-2.0-flash-001")
# model = ChatAnthropic(model="claude-3-5-sonnet-20240620")
# Load the tools from the Toolbox server
@@ -576,12 +576,12 @@ queries = [
async def run_application():
# TODO(developer): replace this with another model if needed
llm = GoogleGenAI(
model="gemini-1.5-pro",
model="gemini-2.0-flash-001",
vertexai_config={"project": "project-id", "location": "us-central1"},
)
# llm = GoogleGenAI(
# api_key=os.getenv("GOOGLE_API_KEY"),
# model="gemini-1.5-pro",
# model="gemini-2.0-flash-001",
# )
# llm = Anthropic(
# model="claude-3-7-sonnet-latest",

View File

@@ -513,8 +513,8 @@
"\n",
"# Create an LLM to bind with the agent.\n",
"# TODO(developer): replace this with another model if needed\n",
"model = ChatVertexAI(model_name=\"gemini-1.5-pro\", project=BIGQUERY_PROJECT)\n",
"# model = ChatGoogleGenerativeAI(model=\"gemini-1.5-pro\")\n",
"model = ChatVertexAI(model_name=\"gemini-2.0-flash-001\", project=BIGQUERY_PROJECT)\n",
"# model = ChatGoogleGenerativeAI(model=\"gemini-2.0-flash-001\")\n",
"# model = ChatAnthropic(model=\"claude-3-5-sonnet-20240620\")\n",
"\n",
"# Load the tools from the Toolbox server\n",
@@ -615,12 +615,12 @@
" # Create an LLM to bind with the agent.\n",
" # TODO(developer): replace this with another model if needed\n",
" llm = GoogleGenAI(\n",
" model=\"gemini-2.0-flash\",\n",
" model=\"gemini-2.0-flash-001\",\n",
" vertexai_config={\"project\": BIGQUERY_PROJECT, \"location\": \"us-central1\"},\n",
" )\n",
" # llm = GoogleGenAI(\n",
" # api_key=os.getenv(\"GOOGLE_API_KEY\"),\n",
" # model=\"gemini-1.5-pro\",\n",
" # model=\"gemini-2.0-flash-001\",\n",
" # )\n",
" # llm = Anthropic(\n",
" # model=\"claude-3-7-sonnet-latest\",\n",
@@ -712,7 +712,7 @@
"\"\"\"\n",
"\n",
"root_agent = Agent(\n",
" model='gemini-2.0-flash',\n",
" model='gemini-2.0-flash-001',\n",
" name='hotel_agent',\n",
" description='A helpful AI assistant.',\n",
" instruction=prompt,\n",

View File

@@ -316,8 +316,8 @@ queries = [
def main():
# TODO(developer): replace this with another model if needed
-model = ChatVertexAI(model_name="gemini-1.5-pro")
-# model = ChatGoogleGenerativeAI(model="gemini-1.5-pro")
+model = ChatVertexAI(model_name="gemini-2.0-flash-001")
+# model = ChatGoogleGenerativeAI(model="gemini-2.0-flash-001")
# model = ChatAnthropic(model="claude-3-5-sonnet-20240620")
# Load the tools from the Toolbox server
@@ -368,12 +368,12 @@ queries = [
async def main():
# TODO(developer): replace this with another model if needed
llm = GoogleGenAI(
model="gemini-1.5-pro",
model="gemini-2.0-flash-001",
vertexai_config={"location": "us-central1"},
)
# llm = GoogleGenAI(
# api_key=os.getenv("GOOGLE_API_KEY"),
# model="gemini-1.5-pro",
# model="gemini-2.0-flash-001",
# )
# llm = Anthropic(
# model="claude-3-7-sonnet-latest",
@@ -432,7 +432,7 @@ with ToolboxSyncClient("http://127.0.0.1:5000") as toolbox_client:
# --- Configure the Agent ---
root_agent = Agent(
-model='gemini-2.0-flash',
+model='gemini-2.0-flash-001',
name='hotel_agent',
description='A helpful AI assistant that can search and book hotels.',
instruction=prompt,
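
Finally, a minimal sketch of the ADK agent configuration shown in the last hunk, assuming the google-adk package; the instruction string is a placeholder and no Toolbox tools are wired in here:

```python
# Minimal sketch, assuming the google-adk package; the instruction string is a
# placeholder and the agent is only constructed, not run.
from google.adk.agents import Agent

root_agent = Agent(
    model='gemini-2.0-flash-001',
    name='hotel_agent',
    description='A helpful AI assistant that can search and book hotels.',
    instruction='Answer hotel questions using the tools loaded from the Toolbox server.',
)
```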