diff --git a/docs/mcp/client.md b/docs/mcp/client.md
index 4b643d493a..5f8a9b3f6d 100644
--- a/docs/mcp/client.md
+++ b/docs/mcp/client.md
@@ -18,22 +18,19 @@ pip/uv-add "pydantic-ai-slim[mcp]"

 PydanticAI comes with two ways to connect to MCP servers:

-- [`MCPServerHTTP`][pydantic_ai.mcp.MCPServerHTTP] which connects to an MCP server using the [HTTP SSE](https://spec.modelcontextprotocol.io/specification/2024-11-05/basic/transports/#http-with-sse) transport
+- [`MCPServerHTTP`][pydantic_ai.mcp.MCPServerHTTP] which connects to an MCP server using the [HTTP SSE](https://spec.modelcontextprotocol.io/specification/2024-11-05/basic/transports/#http-with-sse) or [Streamable HTTP](https://modelcontextprotocol.io/specification/2025-03-26/basic/transports#streamable-http) transport
 - [`MCPServerStdio`][pydantic_ai.mcp.MCPServerStdio] which runs the server as a subprocess and connects to it using the [stdio](https://spec.modelcontextprotocol.io/specification/2024-11-05/basic/transports/#stdio) transport

 Examples of both are shown below; [mcp-run-python](run-python.md) is used as the MCP server in both examples.

-### SSE Client
+### SSE or Streamable HTTP Client

-[`MCPServerHTTP`][pydantic_ai.mcp.MCPServerHTTP] connects over HTTP using the [HTTP + Server Sent Events transport](https://spec.modelcontextprotocol.io/specification/2024-11-05/basic/transports/#http-with-sse) to a server.
+[`MCPServerHTTP`][pydantic_ai.mcp.MCPServerHTTP] connects over HTTP using the [HTTP + Server Sent Events transport](https://spec.modelcontextprotocol.io/specification/2024-11-05/basic/transports/#http-with-sse) or the [Streamable HTTP transport](https://modelcontextprotocol.io/specification/2025-03-26/basic/transports#streamable-http) to a server.

 !!! note
     [`MCPServerHTTP`][pydantic_ai.mcp.MCPServerHTTP] requires an MCP server to be running and accepting HTTP connections before calling [`agent.run_mcp_servers()`][pydantic_ai.Agent.run_mcp_servers]. Running the server is not managed by PydanticAI.

-The name "HTTP" is used since this implemented will be adapted in future to use the new
-[Streamable HTTP](https://github.com/modelcontextprotocol/specification/pull/206) currently in development.
-
-Before creating the SSE client, we need to run the server (docs [here](run-python.md)):
+Before creating the client, we need to run the server (docs [here](run-python.md)):

 ```bash {title="terminal (run sse server)"}
 deno run \
@@ -45,7 +42,7 @@ deno run \
 from pydantic_ai import Agent
 from pydantic_ai.mcp import MCPServerHTTP

-server = MCPServerHTTP(url='http://localhost:3001/sse')  # (1)!
+server = MCPServerHTTP(url='http://localhost:3001/sse', transport='sse')  # (1)!

 agent = Agent('openai:gpt-4o', mcp_servers=[server])  # (2)!

@@ -56,7 +53,7 @@ async def main():
     #> There are 9,208 days between January 1, 2000, and March 18, 2025.
 ```

-1. Define the MCP server with the URL used to connect.
+1. Define the MCP server with the URL and transport used to connect. The URL will typically end in `/mcp` for Streamable HTTP servers and `/sse` for SSE servers. The transport can be either `sse` or `streamable-http`.
 2. Create an agent with the MCP server attached.
 3. Create a client session to connect to the server.
@@ -84,6 +81,10 @@ Will display as follows:

 ![Logfire run python code](../img/logfire-run-python-code.png)

+#### Transport
+
+`MCPServerHTTP` supports both SSE (`sse`) and Streamable HTTP (`streamable-http`) servers; the transport to use is selected via the `transport` parameter.
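To make the new `transport` parameter concrete, here is a minimal sketch of constructing a client for each transport; the URLs are illustrative and assume servers are already listening locally, as the note above requires:

```python
from pydantic_ai.mcp import MCPServerHTTP

# SSE transport: the URL conventionally ends in `/sse`.
# `transport='sse'` is the default, kept for backwards compatibility.
sse_server = MCPServerHTTP(url='http://localhost:3001/sse', transport='sse')

# Streamable HTTP transport: the URL conventionally ends in `/mcp`.
streamable_server = MCPServerHTTP(url='http://localhost:3001/mcp', transport='streamable-http')
```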
+
 ### MCP "stdio" Server

 The other transport offered by MCP is the [stdio transport](https://spec.modelcontextprotocol.io/specification/2024-11-05/basic/transports/#stdio) where the server is run as a subprocess and communicates with the client over `stdin` and `stdout`. In this case, you'd use the [`MCPServerStdio`][pydantic_ai.mcp.MCPServerStdio] class.
diff --git a/pydantic_ai_slim/pydantic_ai/mcp.py b/pydantic_ai_slim/pydantic_ai/mcp.py
index a8b95079f2..da48024d63 100644
--- a/pydantic_ai_slim/pydantic_ai/mcp.py
+++ b/pydantic_ai_slim/pydantic_ai/mcp.py
@@ -6,9 +6,10 @@
 from collections.abc import AsyncIterator, Sequence
 from contextlib import AsyncExitStack, asynccontextmanager
 from dataclasses import dataclass
+from datetime import timedelta
 from pathlib import Path
 from types import TracebackType
-from typing import Any
+from typing import Any, Literal

 import anyio
 from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream
@@ -16,7 +17,6 @@
     BlobResourceContents,
     EmbeddedResource,
     ImageContent,
-    JSONRPCMessage,
     LoggingLevel,
     TextContent,
     TextResourceContents,
@@ -31,6 +31,8 @@
     from mcp.client.session import ClientSession
     from mcp.client.sse import sse_client
     from mcp.client.stdio import StdioServerParameters, stdio_client
+    from mcp.client.streamable_http import streamablehttp_client
+    from mcp.shared.message import SessionMessage
 except ImportError as _import_error:
     raise ImportError(
         'Please install the `mcp` package to use the MCP server, '
@@ -56,8 +58,8 @@ class MCPServer(ABC):
     """

     _client: ClientSession
-    _read_stream: MemoryObjectReceiveStream[JSONRPCMessage | Exception]
-    _write_stream: MemoryObjectSendStream[JSONRPCMessage]
+    _read_stream: MemoryObjectReceiveStream[SessionMessage | Exception]
+    _write_stream: MemoryObjectSendStream[SessionMessage]
     _exit_stack: AsyncExitStack

     @abstractmethod
@@ -66,8 +68,8 @@ async def client_streams(
         self,
     ) -> AsyncIterator[
         tuple[
-            MemoryObjectReceiveStream[JSONRPCMessage | Exception],
-            MemoryObjectSendStream[JSONRPCMessage],
+            MemoryObjectReceiveStream[SessionMessage | Exception],
+            MemoryObjectSendStream[SessionMessage],
         ]
     ]:
         """Create the streams for the MCP server."""
@@ -266,8 +268,8 @@ async def client_streams(
         self,
     ) -> AsyncIterator[
         tuple[
-            MemoryObjectReceiveStream[JSONRPCMessage | Exception],
-            MemoryObjectSendStream[JSONRPCMessage],
+            MemoryObjectReceiveStream[SessionMessage | Exception],
+            MemoryObjectSendStream[SessionMessage],
         ]
     ]:
         server = StdioServerParameters(command=self.command, args=list(self.args), env=self.env, cwd=self.cwd)
@@ -288,11 +290,8 @@ def _get_client_initialize_timeout(self) -> float:
 class MCPServerHTTP(MCPServer):
     """An MCP server that connects over streamable HTTP connections.

-    This class implements the SSE transport from the MCP specification.
-    See <https://spec.modelcontextprotocol.io/specification/2024-11-05/basic/transports/#http-with-sse> for more information.
-
-    The name "HTTP" is used since this implemented will be adapted in future to use the new
-    [Streamable HTTP](https://github.com/modelcontextprotocol/specification/pull/206) currently in development.
+    This class implements both the SSE and Streamable HTTP transports from the MCP specification.
+    See <https://spec.modelcontextprotocol.io/specification/2024-11-05/basic/transports/#http-with-sse> and <https://modelcontextprotocol.io/specification/2025-03-26/basic/transports#streamable-http> for more information.

     !!! note
         Using this class as an async context manager will create a new pool of HTTP connections to connect
@@ -303,7 +302,7 @@ class MCPServerHTTP(MCPServer):
     from pydantic_ai import Agent
     from pydantic_ai.mcp import MCPServerHTTP

-    server = MCPServerHTTP('http://localhost:3001/sse')  # (1)!
+    server = MCPServerHTTP('http://localhost:3001/sse', transport='sse')  # (1)!
     agent = Agent('openai:gpt-4o', mcp_servers=[server])

     async def main():
@@ -358,22 +357,38 @@ async def main():
         For example, if `tool_prefix='foo'`, then a tool named `bar` will be registered as `foo_bar`
     """

+    transport: Literal['sse', 'streamable-http'] = 'sse'
+    """The transport the MCP server is using.
+
+    If not specified, `sse` is assumed for backwards-compatibility reasons.
+    """
+
     @asynccontextmanager
     async def client_streams(
         self,
     ) -> AsyncIterator[
         tuple[
-            MemoryObjectReceiveStream[JSONRPCMessage | Exception],
-            MemoryObjectSendStream[JSONRPCMessage],
+            MemoryObjectReceiveStream[SessionMessage | Exception],
+            MemoryObjectSendStream[SessionMessage],
         ]
     ]:  # pragma: no cover
-        async with sse_client(
-            url=self.url,
-            headers=self.headers,
-            timeout=self.timeout,
-            sse_read_timeout=self.sse_read_timeout,
-        ) as (read_stream, write_stream):
-            yield read_stream, write_stream
+        if self.transport == 'sse':
+            async with sse_client(
+                url=self.url,
+                headers=self.headers,
+                timeout=self.timeout,
+                sse_read_timeout=self.sse_read_timeout,
+            ) as (read_stream, write_stream):
+                yield read_stream, write_stream
+        elif self.transport == 'streamable-http':
+            async with streamablehttp_client(
+                url=self.url,
+                headers=self.headers,
+                timeout=self.timeout if isinstance(self.timeout, timedelta) else timedelta(seconds=self.timeout),
+            ) as (read_stream, write_stream, _):
+                yield read_stream, write_stream
+        else:
+            raise ValueError(f'Unsupported transport type: {self.transport}')

     def _get_log_level(self) -> LoggingLevel | None:
         return self.log_level
diff --git a/pydantic_ai_slim/pyproject.toml b/pydantic_ai_slim/pyproject.toml
index 631cc196d0..63037f9256 100644
--- a/pydantic_ai_slim/pyproject.toml
+++ b/pydantic_ai_slim/pyproject.toml
@@ -75,7 +75,7 @@ tavily = ["tavily-python>=0.5.0"]
 # CLI
 cli = ["rich>=13", "prompt-toolkit>=3", "argcomplete>=3.5.0"]
 # MCP
-mcp = ["mcp>=1.9.0; python_version >= '3.10'"]
+mcp = ["mcp>=1.9.1; python_version >= '3.10'"]
 # Evals
 evals = ["pydantic-evals=={{ version }}"]
 # A2A
diff --git a/tests/test_mcp.py b/tests/test_mcp.py
index 735f76e55c..c1f7145e90 100644
--- a/tests/test_mcp.py
+++ b/tests/test_mcp.py
@@ -1,7 +1,11 @@
 """Tests for the MCP (Model Context Protocol) server implementation."""

+import multiprocessing
 import re
+import socket
+import time
 from pathlib import Path
+from typing import Literal

 import pytest
 from inline_snapshot import snapshot
@@ -21,6 +25,7 @@
 from pydantic_ai.usage import Usage

 from .conftest import IsDatetime, try_import
+from .mcp_server import mcp as test_mcp_server

 with try_import() as imports_successful:
     from pydantic_ai.mcp import MCPServerHTTP, MCPServerStdio
@@ -31,7 +36,6 @@
 pytestmark = [
     pytest.mark.skipif(not imports_successful(), reason='mcp and openai not installed'),
     pytest.mark.anyio,
-    pytest.mark.vcr,
 ]


@@ -42,6 +46,71 @@ def agent(openai_api_key: str):
     return Agent(model, mcp_servers=[server])


+def run_sse_server() -> None:
+    """Run the SSE MCP server."""
+    test_mcp_server.run(transport='sse')
+
+
+def run_streamable_http_server() -> None:
+    """Run the Streamable HTTP MCP server."""
+    test_mcp_server.run(transport='streamable-http')
+
+
+@pytest.fixture
+def mcp_server(request: pytest.FixtureRequest):
+    proc = multiprocessing.Process(target=request.param, daemon=True)
+    print('Starting MCP server process on port 8000')
+    proc.start()
+
+    max_attempts = 20
+    attempt = 0
+
+    while attempt < max_attempts:
+        try:
+            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+                s.connect(('localhost', 8000))
+                print('MCP server started.')
+                break
+        except ConnectionRefusedError:
+            time.sleep(0.1)
+            attempt += 1
+    else:
+        proc.kill()  # Ensure the process is killed if it fails to start
+        proc.join(timeout=1)
+        raise RuntimeError(f'MCP server failed to start on port 8000 after {max_attempts} attempts')
+
+    yield
+    proc.kill()
+
+
+@pytest.mark.parametrize(
+    'mcp_server, path, transport',
+    [
+        (
+            run_sse_server,
+            'sse',
+            'sse',
+        ),
+        (
+            run_streamable_http_server,
+            'mcp',
+            'streamable-http',
+        ),
+    ],
+    indirect=['mcp_server'],
+)
+async def test_http_servers(mcp_server: None, path: str, transport: Literal['sse', 'streamable-http']):
+    server = MCPServerHTTP(url=f'http://localhost:8000/{path}', transport=transport)
+    async with server:
+        tools = await server.list_tools()
+        assert len(tools) == 10
+        assert tools[0].name == 'celsius_to_fahrenheit'
+        assert tools[0].description.startswith('Convert Celsius to Fahrenheit.')
+
+        result = await server.call_tool('celsius_to_fahrenheit', {'celsius': 0})
+        assert result == snapshot('32.0')
+
+
 async def test_stdio_server():
     server = MCPServerStdio('python', ['-m', 'tests.mcp_server'])
     async with server:
diff --git a/uv.lock b/uv.lock
index e946208234..c310f25a11 100644
--- a/uv.lock
+++ b/uv.lock
@@ -1748,7 +1748,7 @@ wheels = [

 [[package]]
 name = "mcp"
-version = "1.6.0"
+version = "1.9.2"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "anyio", marker = "python_full_version >= '3.10'" },
@@ -1756,13 +1756,14 @@ dependencies = [
     { name = "httpx-sse", marker = "python_full_version >= '3.10'" },
     { name = "pydantic", marker = "python_full_version >= '3.10'" },
     { name = "pydantic-settings", marker = "python_full_version >= '3.10'" },
+    { name = "python-multipart", marker = "python_full_version >= '3.10'" },
     { name = "sse-starlette", marker = "python_full_version >= '3.10'" },
     { name = "starlette", marker = "python_full_version >= '3.10'" },
-    { name = "uvicorn", marker = "python_full_version >= '3.10'" },
+    { name = "uvicorn", marker = "python_full_version >= '3.10' and sys_platform != 'emscripten'" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/95/d2/f587cb965a56e992634bebc8611c5b579af912b74e04eb9164bd49527d21/mcp-1.6.0.tar.gz", hash = "sha256:d9324876de2c5637369f43161cd71eebfd803df5a95e46225cab8d280e366723", size = 200031, upload-time = "2025-03-27T16:46:32.336Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/ea/03/77c49cce3ace96e6787af624611b627b2828f0dca0f8df6f330a10eea51e/mcp-1.9.2.tar.gz", hash = "sha256:3c7651c053d635fd235990a12e84509fe32780cd359a5bbef352e20d4d963c05", size = 333066, upload-time = "2025-05-29T14:42:17.76Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/10/30/20a7f33b0b884a9d14dd3aa94ff1ac9da1479fe2ad66dd9e2736075d2506/mcp-1.6.0-py3-none-any.whl", hash = "sha256:7bd24c6ea042dbec44c754f100984d186620d8b841ec30f1b19eda9b93a634d0", size = 76077, upload-time = "2025-03-27T16:46:29.919Z" },
+    { url = "https://files.pythonhosted.org/packages/5d/a6/8f5ee9da9f67c0fd8933f63d6105f02eabdac8a8c0926728368ffbb6744d/mcp-1.9.2-py3-none-any.whl", hash = "sha256:bc29f7fd67d157fef378f89a4210384f5fecf1168d0feb12d22929818723f978", size = 131083, upload-time = "2025-05-29T14:42:16.211Z" },
 ]

 [package.optional-dependencies]
@@ -3084,7 +3085,7 @@ requires-dist = [
     { name = "groq", marker = "extra == 'groq'", specifier = ">=0.15.0" },
     { name = "httpx", specifier = ">=0.27" },
     { name = "logfire", marker = "extra == 'logfire'", specifier = ">=3.11.0" },
-    { name = "mcp", marker = "python_full_version >= '3.10' and extra == 'mcp'", specifier = ">=1.9.0" },
+    { name = "mcp", marker = "python_full_version >= '3.10' and extra == 'mcp'", specifier = ">=1.9.1" },
     { name = "mistralai", marker = "extra == 'mistral'", specifier = ">=1.2.5" },
     { name = "openai", marker = "extra == 'openai'", specifier = ">=1.75.0" },
     { name = "opentelemetry-api", specifier = ">=1.28.0" },