
Commit c79f7d5

feat(genai): gemini 2.5 model name update (#13438)
* feat(genai): Update google gemini model names
* feat(genai): Update google genai SDK version
* fix(genai): add bug fixes
1 parent 242fb36 commit c79f7d5

62 files changed: +66 additions, -64 deletions

Some content is hidden: large commits have part of the diff hidden by default, so a few file paths below are not shown.
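In practice, every code change below is the same one-line model rename, paired with bumping the SDK pin to google-genai==1.20.0 in each requirements.txt. A minimal sketch of the before/after, modeled on the textgen_with_txt.py sample (the prompt and print are illustrative):

from google import genai
from google.genai.types import HttpOptions

client = genai.Client(http_options=HttpOptions(api_version="v1"))

# Before this commit the samples pinned a preview model:
#   model="gemini-2.5-flash-preview-05-20"
# After this commit they reference the GA model name:
response = client.models.generate_content(
    model="gemini-2.5-flash",
    contents="How does AI work?",
)
print(response.text)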
(file path hidden) (1 addition, 1 deletion)

@@ -1 +1 @@
-google-genai==1.16.1
+google-genai==1.20.0

genai/bounding_box/boundingbox_with_txt_img.py (1 addition, 1 deletion)

@@ -94,7 +94,7 @@ def plot_bounding_boxes(image_uri: str, bounding_boxes: list[BoundingBox]) -> None:
     image_uri = "https://storage.googleapis.com/generativeai-downloads/images/socks.jpg"

     response = client.models.generate_content(
-        model="gemini-2.5-flash-preview-05-20",
+        model="gemini-2.5-flash",
         contents=[
             Part.from_uri(
                 file_uri=image_uri,

genai/bounding_box/requirements.txt (1 addition, 1 deletion)

@@ -1,2 +1,2 @@
-google-genai==1.16.1
+google-genai==1.20.0
 pillow==11.1.0

genai/content_cache/contentcache_create_with_txt_gcs_pdf.py (3 additions, 1 deletion)

@@ -42,10 +42,12 @@ def create_content_cache() -> str:
     ]

     content_cache = client.caches.create(
-        model="gemini-2.5-flash-preview-05-20",
+        model="gemini-2.5-flash",
         config=CreateCachedContentConfig(
             contents=contents,
             system_instruction=system_instruction,
+            # (Optional) For enhanced security, the content cache can be encrypted using a Cloud KMS key
+            # kms_key_name = "projects/.../locations/us-central1/keyRings/.../cryptoKeys/..."
             display_name="example-cache",
             ttl="86400s",
         ),
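The two comment lines added above point at an optional CMEK setting. A hedged sketch of what cache creation looks like with that option enabled; the KMS key path and the PDF URI are placeholders, not values from this commit:

from google import genai
from google.genai.types import Content, CreateCachedContentConfig, HttpOptions, Part

client = genai.Client(http_options=HttpOptions(api_version="v1"))

system_instruction = "You are an expert researcher."
contents = [
    Content(
        role="user",
        parts=[
            Part.from_uri(
                file_uri="gs://your-bucket/your-document.pdf",  # placeholder GCS URI
                mime_type="application/pdf",
            )
        ],
    )
]

content_cache = client.caches.create(
    model="gemini-2.5-flash",
    config=CreateCachedContentConfig(
        contents=contents,
        system_instruction=system_instruction,
        # Encrypt the cached content with a customer-managed key (placeholder resource name)
        kms_key_name="projects/PROJECT_ID/locations/us-central1/keyRings/KEY_RING/cryptoKeys/KEY",
        display_name="example-cache",
        ttl="86400s",
    ),
)
print(content_cache.name)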

genai/content_cache/contentcache_use_with_txt.py (1 addition, 1 deletion)

@@ -22,7 +22,7 @@ def generate_content(cache_name: str) -> str:
     # Use content cache to generate text response
     # E.g cache_name = 'projects/111111111111/locations/us-central1/cachedContents/1111111111111111111'
     response = client.models.generate_content(
-        model="gemini-2.5-flash-preview-05-20",
+        model="gemini-2.5-flash",
         contents="Summarize the pdfs",
         config=GenerateContentConfig(
             cached_content=cache_name,

genai/content_cache/requirements.txt (1 addition, 1 deletion)

@@ -1 +1 @@
-google-genai==1.16.1
+google-genai==1.20.0

genai/controlled_generation/ctrlgen_with_class_schema.py (1 addition, 1 deletion)

@@ -26,7 +26,7 @@ class Recipe(BaseModel):

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
     response = client.models.generate_content(
-        model="gemini-2.5-flash-preview-05-20",
+        model="gemini-2.5-flash",
         contents="List a few popular cookie recipes.",
         config=GenerateContentConfig(
             response_mime_type="application/json",

genai/controlled_generation/ctrlgen_with_enum_class_schema.py (1 addition, 1 deletion)

@@ -29,7 +29,7 @@ class InstrumentClass(enum.Enum):

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
     response = client.models.generate_content(
-        model="gemini-2.5-flash-preview-05-20",
+        model="gemini-2.5-flash",
         contents="What type of instrument is a guitar?",
         config={
             "response_mime_type": "text/x.enum",

genai/controlled_generation/ctrlgen_with_enum_schema.py (1 addition, 1 deletion)

@@ -20,7 +20,7 @@ def generate_content() -> str:

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
     response = client.models.generate_content(
-        model="gemini-2.5-flash-preview-05-20",
+        model="gemini-2.5-flash",
         contents="What type of instrument is an oboe?",
         config=GenerateContentConfig(
             response_mime_type="text/x.enum",

genai/controlled_generation/ctrlgen_with_nested_class_schema.py (1 addition, 1 deletion)

@@ -36,7 +36,7 @@ class Recipe(BaseModel):

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
     response = client.models.generate_content(
-        model="gemini-2.5-flash-preview-05-20",
+        model="gemini-2.5-flash",
         contents="List about 10 home-baked cookies and give them grades based on tastiness.",
         config=GenerateContentConfig(
             response_mime_type="application/json",

genai/controlled_generation/ctrlgen_with_nullable_schema.py (1 addition, 1 deletion)

@@ -51,7 +51,7 @@ def generate_content() -> str:

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
     response = client.models.generate_content(
-        model="gemini-2.5-flash-preview-05-20",
+        model="gemini-2.5-flash",
         contents=prompt,
         config=GenerateContentConfig(
             response_mime_type="application/json",

genai/controlled_generation/ctrlgen_with_resp_schema.py (1 addition, 1 deletion)

@@ -36,7 +36,7 @@ def generate_content() -> str:

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
     response = client.models.generate_content(
-        model="gemini-2.5-flash-preview-05-20",
+        model="gemini-2.5-flash",
         contents=prompt,
         config={
             "response_mime_type": "application/json",
(file path hidden) (1 addition, 1 deletion)

@@ -1 +1 @@
-google-genai==1.16.1
+google-genai==1.20.0

genai/count_tokens/counttoken_compute_with_txt.py (1 addition, 1 deletion)

@@ -20,7 +20,7 @@ def compute_tokens_example() -> int:

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
     response = client.models.compute_tokens(
-        model="gemini-2.5-flash-preview-05-20",
+        model="gemini-2.5-flash",
         contents="What's the longest word in the English language?",
     )

genai/count_tokens/counttoken_resp_with_txt.py (1 addition, 1 deletion)

@@ -24,7 +24,7 @@ def count_tokens_example() -> int:

     # Send text to Gemini
     response = client.models.generate_content(
-        model="gemini-2.5-flash-preview-05-20", contents=prompt
+        model="gemini-2.5-flash", contents=prompt
     )

     # Prompt and response tokens count
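For context, this sample reads token counts off the response rather than calling count_tokens up front. A minimal sketch of that pattern with the updated model name; the usage_metadata field names come from the google-genai response type, not from this diff:

from google import genai
from google.genai.types import HttpOptions

client = genai.Client(http_options=HttpOptions(api_version="v1"))

response = client.models.generate_content(
    model="gemini-2.5-flash",
    contents="Why is the sky blue?",
)

# Prompt and response token counts are reported on the response itself
print(response.usage_metadata.prompt_token_count)
print(response.usage_metadata.candidates_token_count)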

genai/count_tokens/counttoken_with_txt.py (1 addition, 1 deletion)

@@ -20,7 +20,7 @@ def count_tokens() -> int:

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
     response = client.models.count_tokens(
-        model="gemini-2.5-flash-preview-05-20",
+        model="gemini-2.5-flash",
         contents="What's the highest mountain in Africa?",
     )
     print(response)

genai/count_tokens/counttoken_with_txt_vid.py (1 addition, 1 deletion)

@@ -29,7 +29,7 @@ def count_tokens() -> int:
     ]

     response = client.models.count_tokens(
-        model="gemini-2.5-flash-preview-05-20",
+        model="gemini-2.5-flash",
         contents=contents,
     )
     print(response)

genai/count_tokens/requirements.txt (1 addition, 1 deletion)

@@ -1 +1 @@
-google-genai==1.16.1
+google-genai==1.20.0

genai/embeddings/requirements.txt (1 addition, 1 deletion)

@@ -1 +1 @@
-google-genai==1.16.1
+google-genai==1.20.0

genai/express_mode/api_key_example.py (1 addition, 1 deletion)

@@ -23,7 +23,7 @@ def generate_content() -> str:
     client = genai.Client(vertexai=True, api_key=API_KEY)

     response = client.models.generate_content(
-        model="gemini-2.5-flash-preview-05-20",
+        model="gemini-2.5-flash",
         contents="Explain bubble sort to me.",
     )
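The express-mode sample differs from the others only in how the client is built: it authenticates with an API key rather than application default credentials. A short sketch, with the key value as a placeholder:

from google import genai

API_KEY = "YOUR_API_KEY"  # placeholder: a Vertex AI express mode API key

client = genai.Client(vertexai=True, api_key=API_KEY)

response = client.models.generate_content(
    model="gemini-2.5-flash",
    contents="Explain bubble sort to me.",
)
print(response.text)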

genai/express_mode/requirements.txt (1 addition, 1 deletion)

@@ -1 +1 @@
-google-genai==1.16.1
+google-genai==1.20.0

genai/express_mode/test_express_mode_examples.py (1 addition, 1 deletion)

@@ -40,7 +40,7 @@ def test_api_key_example(mock_genai_client: MagicMock) -> None:

     mock_genai_client.assert_called_once_with(vertexai=True, api_key="YOUR_API_KEY")
     mock_genai_client.return_value.models.generate_content.assert_called_once_with(
-        model="gemini-2.5-flash-preview-05-20",
+        model="gemini-2.5-flash",
         contents="Explain bubble sort to me.",
     )
     assert response == "This is a mocked bubble sort explanation."
(file path hidden) (1 addition, 1 deletion)

@@ -1,2 +1,2 @@
-google-genai==1.16.1
+google-genai==1.20.0
 pillow==11.1.0

genai/live/requirements.txt (1 addition, 1 deletion)

@@ -1,3 +1,3 @@
-google-genai==1.19.0
+google-genai==1.20.0
 scipy==1.15.3
 websockets==15.0.1
(file path hidden) (1 addition, 1 deletion)

@@ -1 +1 @@
-google-genai==1.16.1
+google-genai==1.20.0

genai/provisioned_throughput/provisionedthroughput_with_txt.py (1 addition, 1 deletion)

@@ -31,7 +31,7 @@ def generate_content() -> str:
         )
     )
     response = client.models.generate_content(
-        model="gemini-2.5-flash-preview-05-20",
+        model="gemini-2.5-flash",
         contents="How does AI work?",
     )
     print(response.text)
(file path hidden) (1 addition, 1 deletion)

@@ -1 +1 @@
-google-genai==1.16.1
+google-genai==1.20.0

genai/safety/requirements.txt (1 addition, 1 deletion)

@@ -1 +1 @@
-google-genai==1.16.1
+google-genai==1.20.0

genai/safety/safety_with_txt.py (1 addition, 1 deletion)

@@ -54,7 +54,7 @@ def generate_content() -> GenerateContentResponse:
     ]

     response = client.models.generate_content(
-        model="gemini-2.5-flash-preview-05-20",
+        model="gemini-2.5-flash",
         contents=prompt,
         config=GenerateContentConfig(
             system_instruction=system_instruction,
(file path hidden) (1 addition, 1 deletion)

@@ -1 +1 @@
-google-genai==1.16.1
+google-genai==1.20.0
(file path hidden) (1 addition, 1 deletion)

@@ -1 +1 @@
-google-genai==1.16.1
+google-genai==1.20.0

genai/text_generation/textgen_async_with_txt.py (1 addition, 1 deletion)

@@ -21,7 +21,7 @@ async def generate_content() -> str:
     from google.genai.types import GenerateContentConfig, HttpOptions

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
-    model_id = "gemini-2.5-flash-preview-05-20"
+    model_id = "gemini-2.5-flash"

     response = await client.aio.models.generate_content(
         model=model_id,

genai/text_generation/textgen_chat_stream_with_txt.py (1 addition, 1 deletion)

@@ -19,7 +19,7 @@ def generate_content() -> bool:
     from google.genai.types import HttpOptions

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
-    chat_session = client.chats.create(model="gemini-2.5-flash-preview-05-20")
+    chat_session = client.chats.create(model="gemini-2.5-flash")

     for chunk in chat_session.send_message_stream("Why is the sky blue?"):
         print(chunk.text, end="")

genai/text_generation/textgen_chat_with_txt.py (1 addition, 1 deletion)

@@ -20,7 +20,7 @@ def generate_content() -> str:

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
     chat_session = client.chats.create(
-        model="gemini-2.5-flash-preview-05-20",
+        model="gemini-2.5-flash",
         history=[
             UserContent(parts=[Part(text="Hello")]),
             ModelContent(
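A minimal sketch of the chat pattern these two samples exercise, combining the seeded history shown above with a follow-up message; the model turn and the follow-up prompt are illustrative:

from google import genai
from google.genai.types import HttpOptions, ModelContent, Part, UserContent

client = genai.Client(http_options=HttpOptions(api_version="v1"))

chat_session = client.chats.create(
    model="gemini-2.5-flash",
    history=[
        UserContent(parts=[Part(text="Hello")]),
        ModelContent(parts=[Part(text="Great to meet you. What would you like to know?")]),
    ],
)

response = chat_session.send_message("Tell me a story.")
print(response.text)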

genai/text_generation/textgen_config_with_txt.py (2 additions, 2 deletions)

@@ -20,7 +20,7 @@ def generate_content() -> str:

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
     response = client.models.generate_content(
-        model="gemini-2.5-flash-preview-05-20",
+        model="gemini-2.5-flash",
         contents="Why is the sky blue?",
         # See the SDK documentation at
         # https://googleapis.github.io/python-genai/genai.html#genai.types.GenerateContentConfig
@@ -31,7 +31,7 @@ def generate_content() -> str:
             top_p=0.95,
             top_k=20,
             seed=5,
-            max_output_tokens=100,
+            max_output_tokens=500,
             stop_sequences=["STOP!"],
             presence_penalty=0.0,
             frequency_penalty=0.0,
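Putting the two hunks together, the configured call now looks roughly like this; it is a reconstruction from the diff context only, so any config fields hidden by the diff (for example temperature) are omitted:

from google import genai
from google.genai.types import GenerateContentConfig, HttpOptions

client = genai.Client(http_options=HttpOptions(api_version="v1"))

response = client.models.generate_content(
    model="gemini-2.5-flash",
    contents="Why is the sky blue?",
    # See the SDK documentation at
    # https://googleapis.github.io/python-genai/genai.html#genai.types.GenerateContentConfig
    config=GenerateContentConfig(
        top_p=0.95,
        top_k=20,
        seed=5,
        max_output_tokens=500,  # raised from 100 in this commit
        stop_sequences=["STOP!"],
        presence_penalty=0.0,
        frequency_penalty=0.0,
    ),
)
print(response.text)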

genai/text_generation/textgen_sys_instr_with_txt.py (1 addition, 1 deletion)

@@ -20,7 +20,7 @@ def generate_content() -> str:

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
     response = client.models.generate_content(
-        model="gemini-2.5-flash-preview-05-20",
+        model="gemini-2.5-flash",
         contents="Why is the sky blue?",
         config=GenerateContentConfig(
             system_instruction=[

genai/text_generation/textgen_transcript_with_gcs_audio.py (1 addition, 1 deletion)

@@ -24,7 +24,7 @@ def generate_content() -> str:
     Use speaker A, speaker B, etc. to identify speakers.
     """
     response = client.models.generate_content(
-        model="gemini-2.5-flash-preview-05-20",
+        model="gemini-2.5-flash",
         contents=[
             prompt,
             Part.from_uri(

genai/text_generation/textgen_with_gcs_audio.py (1 addition, 1 deletion)

@@ -23,7 +23,7 @@ def generate_content() -> str:
     Provide a concise summary of the main points in the audio file.
     """
     response = client.models.generate_content(
-        model="gemini-2.5-flash-preview-05-20",
+        model="gemini-2.5-flash",
         contents=[
             prompt,
             Part.from_uri(

genai/text_generation/textgen_with_local_video.py (1 addition, 1 deletion)

@@ -19,7 +19,7 @@ def generate_content() -> str:
     from google.genai.types import HttpOptions, Part

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
-    model_id = "gemini-2.5-flash-preview-05-20"
+    model_id = "gemini-2.5-flash"

     # Read local video file content
     with open("test_data/describe_video_content.mp4", "rb") as fp:

genai/text_generation/textgen_with_multi_img.py (1 addition, 1 deletion)

@@ -28,7 +28,7 @@ def generate_content() -> str:
         local_file_img_bytes = f.read()

     response = client.models.generate_content(
-        model="gemini-2.5-flash-preview-05-20",
+        model="gemini-2.5-flash",
         contents=[
             "Generate a list of all the objects contained in both images.",
             Part.from_uri(file_uri=gcs_file_img_path, mime_type="image/jpeg"),

genai/text_generation/textgen_with_multi_local_img.py (1 addition, 1 deletion)

@@ -28,7 +28,7 @@ def generate_content(image_path_1: str, image_path_2: str) -> str:
         image_2_bytes = f.read()

     response = client.models.generate_content(
-        model="gemini-2.5-flash-preview-05-20",
+        model="gemini-2.5-flash",
         contents=[
             "Generate a list of all the objects contained in both images.",
             Part.from_bytes(data=image_1_bytes, mime_type="image/jpeg"),

genai/text_generation/textgen_with_mute_video.py (1 addition, 1 deletion)

@@ -20,7 +20,7 @@ def generate_content() -> str:

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
     response = client.models.generate_content(
-        model="gemini-2.5-flash-preview-05-20",
+        model="gemini-2.5-flash",
         contents=[
             Part.from_uri(
                 file_uri="gs://cloud-samples-data/generative-ai/video/ad_copy_from_video.mp4",

genai/text_generation/textgen_with_pdf.py (1 addition, 1 deletion)

@@ -21,7 +21,7 @@ def generate_content() -> str:
     from google.genai.types import HttpOptions, Part

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
-    model_id = "gemini-2.5-flash-preview-05-20"
+    model_id = "gemini-2.5-flash"

     prompt = """
     You are a highly skilled document summarization specialist.

genai/text_generation/textgen_with_txt.py (1 addition, 1 deletion)

@@ -20,7 +20,7 @@ def generate_content() -> str:

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
     response = client.models.generate_content(
-        model="gemini-2.5-flash-preview-05-20",
+        model="gemini-2.5-flash",
         contents="How does AI work?",
     )
     print(response.text)

genai/text_generation/textgen_with_txt_img.py (1 addition, 1 deletion)

@@ -20,7 +20,7 @@ def generate_content() -> str:

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
     response = client.models.generate_content(
-        model="gemini-2.5-flash-preview-05-20",
+        model="gemini-2.5-flash",
         contents=[
             "What is shown in this image?",
             Part.from_uri(

genai/text_generation/textgen_with_txt_stream.py (1 addition, 1 deletion)

@@ -21,7 +21,7 @@ def generate_content() -> bool:
     client = genai.Client(http_options=HttpOptions(api_version="v1"))

     for chunk in client.models.generate_content_stream(
-        model="gemini-2.5-flash-preview-05-20",
+        model="gemini-2.5-flash",
         contents="Why is the sky blue?",
     ):
         print(chunk.text, end="")

genai/text_generation/textgen_with_video.py (1 addition, 1 deletion)

@@ -25,7 +25,7 @@ def generate_content() -> str:
     Create a chapter breakdown with timestamps for key sections or topics discussed.
     """
     response = client.models.generate_content(
-        model="gemini-2.5-flash-preview-05-20",
+        model="gemini-2.5-flash",
         contents=[
             Part.from_uri(
                 file_uri="gs://cloud-samples-data/generative-ai/video/pixel8.mp4",

genai/text_generation/textgen_with_youtube_video.py (1 addition, 1 deletion)

@@ -21,7 +21,7 @@ def generate_content() -> str:
     from google.genai.types import HttpOptions, Part

     client = genai.Client(http_options=HttpOptions(api_version="v1"))
-    model_id = "gemini-2.5-flash-preview-05-20"
+    model_id = "gemini-2.5-flash"

     response = client.models.generate_content(
         model=model_id,

genai/text_generation/thinking_textgen_with_txt.py (1 addition, 1 deletion)

@@ -20,7 +20,7 @@ def generate_content() -> str:

     client = genai.Client()
     response = client.models.generate_content(
-        model="gemini-2.5-pro-preview-05-06",
+        model="gemini-2.5-pro",
         contents="solve x^2 + 4x + 4 = 0",
     )
     print(response.text)

genai/thinking/requirements.txt (1 addition, 1 deletion)

@@ -1 +1 @@
-google-genai==1.16.1
+google-genai==1.20.0

genai/thinking/thinking_budget_with_txt.py (1 addition, 1 deletion)

@@ -21,7 +21,7 @@ def generate_content() -> str:
     client = genai.Client()

     response = client.models.generate_content(
-        model="gemini-2.5-flash-preview-05-20",
+        model="gemini-2.5-flash",
         contents="solve x^2 + 4x + 4 = 0",
         config=GenerateContentConfig(
             thinking_config=ThinkingConfig(
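The diff cuts off just as ThinkingConfig opens. A hedged sketch of the pattern this sample configures; the budget value is illustrative, not taken from the commit:

from google import genai
from google.genai.types import GenerateContentConfig, ThinkingConfig

client = genai.Client()

response = client.models.generate_content(
    model="gemini-2.5-flash",
    contents="solve x^2 + 4x + 4 = 0",
    config=GenerateContentConfig(
        thinking_config=ThinkingConfig(
            thinking_budget=1024,  # illustrative token budget for the model's internal reasoning
        )
    ),
)
print(response.text)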

genai/thinking/thinking_includethoughts_with_txt.py (1 addition, 1 deletion)

@@ -20,7 +20,7 @@ def generate_content() -> str:

     client = genai.Client()
     response = client.models.generate_content(
-        model="gemini-2.5-pro-preview-05-06",
+        model="gemini-2.5-pro",
         contents="solve x^2 + 4x + 4 = 0",
         config=GenerateContentConfig(
             thinking_config=ThinkingConfig(include_thoughts=True)
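For completeness, a sketch of how thought summaries can be read back when include_thoughts=True; the loop over response parts is an assumption about the rest of the sample, which the diff truncates:

from google import genai
from google.genai.types import GenerateContentConfig, ThinkingConfig

client = genai.Client()

response = client.models.generate_content(
    model="gemini-2.5-pro",
    contents="solve x^2 + 4x + 4 = 0",
    config=GenerateContentConfig(
        thinking_config=ThinkingConfig(include_thoughts=True)
    ),
)

for part in response.candidates[0].content.parts:
    if part.thought:
        # Parts flagged as thoughts carry the model's thought summary
        print("Thought summary:", part.text)
    else:
        print("Answer:", part.text)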
