1 change: 1 addition & 0 deletions lib/crewai/src/crewai/cli/constants.py
@@ -145,6 +145,7 @@
         "claude-3-haiku-20240307",
     ],
     "gemini": [
+        "gemini/gemini-3-pro-preview",
         "gemini/gemini-1.5-flash",
         "gemini/gemini-1.5-pro",
         "gemini/gemini-2.0-flash-lite-001",
1 change: 1 addition & 0 deletions lib/crewai/src/crewai/llm.py
@@ -179,6 +179,7 @@ def writable(self) -> bool:
     "o3-mini": 200000,
     "o4-mini": 200000,
     # gemini
+    "gemini-3-pro-preview": 1048576,
     "gemini-2.0-flash": 1048576,
     "gemini-2.0-flash-thinking-exp-01-21": 32768,
     "gemini-2.0-flash-lite-001": 1048576,
2 changes: 2 additions & 0 deletions lib/crewai/src/crewai/llms/constants.py
@@ -235,6 +235,7 @@
 ]

 GeminiModels: TypeAlias = Literal[
+    "gemini-3-pro-preview",
     "gemini-2.5-pro",
     "gemini-2.5-pro-preview-03-25",
     "gemini-2.5-pro-preview-05-06",
@@ -287,6 +288,7 @@
     "learnlm-2.0-flash-experimental",
 ]
 GEMINI_MODELS: list[GeminiModels] = [
+    "gemini-3-pro-preview",
     "gemini-2.5-pro",
     "gemini-2.5-pro-preview-03-25",
     "gemini-2.5-pro-preview-05-06",
7 changes: 4 additions & 3 deletions lib/crewai/src/crewai/llms/providers/gemini/completion.py
@@ -1,5 +1,6 @@
 import logging
 import os
+import re
 from typing import Any, cast

 from pydantic import BaseModel
@@ -100,9 +101,8 @@ def __init__(
         self.stop_sequences = stop_sequences or []

         # Model-specific settings
-        self.is_gemini_2 = "gemini-2" in model.lower()
-        self.is_gemini_1_5 = "gemini-1.5" in model.lower()
-        self.supports_tools = self.is_gemini_1_5 or self.is_gemini_2
+        version_match = re.search(r"gemini-(\d+(?:\.\d+)?)", model.lower())
+        self.supports_tools = bool(version_match and float(version_match.group(1)) >= 1.5)

     @property
     def stop(self) -> list[str]:
@@ -559,6 +559,7 @@ def get_context_window_size(self) -> int:
         )

         context_windows = {
+            "gemini-3-pro-preview": 1048576,  # 1M tokens
             "gemini-2.0-flash": 1048576,  # 1M tokens
             "gemini-2.0-flash-thinking": 32768,
             "gemini-2.0-flash-lite": 1048576,
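For reviewers, a minimal sketch of how the new regex-based capability check behaves for a few model ids. The supports_tools helper below is hypothetical and only mirrors the two lines added to GeminiCompletion.__init__ above; it is not part of this diff.

import re

def supports_tools(model: str) -> bool:
    # Same regex as the added code: extract the gemini-<version> number and
    # treat anything >= 1.5 as tool-capable, so gemini-3-* passes as well.
    version_match = re.search(r"gemini-(\d+(?:\.\d+)?)", model.lower())
    return bool(version_match and float(version_match.group(1)) >= 1.5)

assert supports_tools("gemini-3-pro-preview") is True         # 3 >= 1.5
assert supports_tools("gemini/gemini-2.0-flash-001") is True  # 2.0 >= 1.5
assert supports_tools("gemini-1.5-pro") is True               # 1.5 >= 1.5
assert supports_tools("gemini-1.0-pro") is False              # 1.0 < 1.5
assert supports_tools("gemma-7b") is False                    # no gemini-<version> match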
69 additions & 0 deletions: new VCR cassette recording a gemini-3-pro-preview generateContent interaction
@@ -0,0 +1,69 @@
interactions:
- request:
body: '{"contents":[{"role":"user","parts":[{"text":"What is the capital of France?"}]}],"generationConfig":{"stop_sequences":[]}}'
headers:
accept:
- '*/*'
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '123'
content-type:
- application/json
host:
- generativelanguage.googleapis.com
user-agent:
- litellm/1.78.5
method: POST
uri: https://generativelanguage.googleapis.com/v1beta/models/gemini-3-pro-preview:generateContent
response:
body:
string: !!binary |
H4sIAAAAAAAC/21UW4+iSBh9719heGxmBgFvbDIPgKAgNwUV3OxDCSWU3KFApdP/fWl77XF2l6RI
5ftOnVN1ku+8vQwGhA+yAAUAw5r4Y/BnXxkM3u7/j16eYZjhvvEo9cUCVPgX9vN7e9r3EAyvH4cI
J4IDHxQIg2SQnwZyBTIfDlA9eH21QIXq19cfxLd/HY3yJoywjcIM4KaCHzRSvZbEWpL4YIlRytG8
a3eoGiukHPHm3jH2FNvMTC1qLlgS05RL42PVyPMdz1uFHpQuytZSBqcHf7PexMHK3mjJQjWKIbM+
MxFL6cvWMMfQFsOJ3UQk5j1hWmoxK1DrLqncyrpcQ+UY0uZog2oqkTmXiQ2f27ZBpS58MXBTxRbX
qdfsl25Vn5tswrUHeVhVxenW7kaG0cKdt2hjjxPUBYY26BAUvbqqw30AoG0eTMmzdImnIrI51+VY
xeqUl/HKs8ZgfBPF0bbtMDjMzxZSkv3KNuJgwTlYMkw9YEyKMcfkRvUmkiPpBqL486niJEuQKtE7
XibhpJy1AltrXSrjq+iEucKfK5z43Ci6bTu+VIVuRNecmwRN2gnbqQHH6lQ06eNM5ttpwEjZVOI3
umesM9qbcxMySprtbDYXaboQdioPMpuEy3U4VZrM6njN0rAk8Fh3/ON+E58FJPDtxD8upIWTbI/D
MrqM7RWj7VWo6kMFUgaj5Dpzsg8bE6GoIc+rJEcnau8qGNnZygGNcRO61nD5sXgyWbUQ+Z4XQhrX
3C6UyS2OTHAp2cUJVp0eSZqtyTuTy48XjmW0xLJVYRqYYmSZhatQ45ROKPZiXTZTxiq2ceDPIhii
7tBurqtSL7ylp5NRw5FUzJXsLkiRJs1BIi05Oxit51ToBF2oTGOvYTXjfJptR62SVdTB7W5aaJzq
nb9adAVFIii3gZE5Qz87C+ViVKa3eJ2f4pyiSzasywoHJA2klNL01IIYX6o55V8n3BUc8vKagLIp
d/pRZoatSfor/yx4bAYp/udP4mlc3r/2f/2aIqLKk/vUpHkAkwf8/QEgTihDdbSBoM6zD5jtmNbX
EBIoC+C1Lw9fHgJ3aqKpQQh1iEGfFOArD4iiytMCO3kMMzFv7kkx++R6ypX/beO8D4XfOvSI/vYf
1nrea6LkOW+eoqh/IkgQvt2zRnKdpzDpBZ5VHza8PLn1yJrfL0gz45d//Pq0cAerGn16FcK0d+87
+72/Yb9gi+DlrklUsC7yrIZK8IHbeV4/2Sy/LL9r50a3aquVZ2uPeHl/+RvdmjG6dAUAAA==
headers:
Alt-Svc:
- h3=":443"; ma=2592000,h3-29=":443"; ma=2592000
Content-Encoding:
- gzip
Content-Type:
- application/json; charset=UTF-8
Date:
- Wed, 19 Nov 2025 08:56:53 GMT
Server:
- scaffolding on HTTPServer2
Server-Timing:
- gfet4t7; dur=2508
Transfer-Encoding:
- chunked
Vary:
- Origin
- X-Origin
- Referer
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-XSS-Protection:
- '0'
status:
code: 200
message: OK
version: 1
2 changes: 0 additions & 2 deletions lib/crewai/tests/llms/google/test_google.py
@@ -455,13 +455,11 @@ def test_gemini_model_capabilities():
     llm_2_0 = LLM(model="google/gemini-2.0-flash-001")
     from crewai.llms.providers.gemini.completion import GeminiCompletion
     assert isinstance(llm_2_0, GeminiCompletion)
-    assert llm_2_0.is_gemini_2 == True
     assert llm_2_0.supports_tools == True

     # Test Gemini 1.5 model
     llm_1_5 = LLM(model="google/gemini-1.5-pro")
     assert isinstance(llm_1_5, GeminiCompletion)
-    assert llm_1_5.is_gemini_1_5 == True
     assert llm_1_5.supports_tools == True


1 change: 1 addition & 0 deletions lib/crewai/tests/test_llm.py
@@ -259,6 +259,7 @@ def test_validate_call_params_no_response_format():
 @pytest.mark.parametrize(
     "model",
     [
+        "gemini/gemini-3-pro-preview",
         "gemini/gemini-2.0-flash-thinking-exp-01-21",
         "gemini/gemini-2.0-flash-001",
         "gemini/gemini-2.0-flash-lite-001",