Commit 475e6c1

Wh1isper and DouweM authored

Add tool_prefix option to MCP servers and error on conflicting tool names (#1266)
Co-authored-by: Douwe Maan <douwe@pydantic.dev>
1 parent 76ea1b3 commit 475e6c1

5 files changed: +254 -19 lines changed

docs/mcp/client.md

Lines changed: 67 additions & 0 deletions
````diff
@@ -118,3 +118,70 @@ async def main():
 ```
 
 1. See [MCP Run Python](run-python.md) for more information.
+
+## Using Tool Prefixes to Avoid Naming Conflicts
+
+When connecting to multiple MCP servers that might provide tools with the same name, you can use the `tool_prefix` parameter to avoid naming conflicts. This parameter adds a prefix to all tool names from a specific server.
+
+### How It Works
+
+- If `tool_prefix` is set, all tools from that server will be prefixed with `{tool_prefix}_`
+- When listing tools, the prefixed names are shown to the model
+- When calling tools, the prefix is automatically removed before sending the request to the server
+
+This allows you to use multiple servers that might have overlapping tool names without conflicts.
+
+### Example with HTTP Server
+
+```python {title="mcp_tool_prefix_http_client.py" py="3.10"}
+from pydantic_ai import Agent
+from pydantic_ai.mcp import MCPServerHTTP
+
+# Create two servers with different prefixes
+weather_server = MCPServerHTTP(
+    url='http://localhost:3001/sse',
+    tool_prefix='weather'  # Tools will be prefixed with 'weather_'
+)
+
+calculator_server = MCPServerHTTP(
+    url='http://localhost:3002/sse',
+    tool_prefix='calc'  # Tools will be prefixed with 'calc_'
+)
+
+# Both servers might have a tool named 'get_data', but they'll be exposed as:
+# - 'weather_get_data'
+# - 'calc_get_data'
+agent = Agent('openai:gpt-4o', mcp_servers=[weather_server, calculator_server])
+```
+
+### Example with Stdio Server
+
+```python {title="mcp_tool_prefix_stdio_client.py" py="3.10"}
+from pydantic_ai import Agent
+from pydantic_ai.mcp import MCPServerStdio
+
+python_server = MCPServerStdio(
+    'deno',
+    args=[
+        'run',
+        '-N',
+        'jsr:@pydantic/mcp-run-python',
+        'stdio',
+    ],
+    tool_prefix='py'  # Tools will be prefixed with 'py_'
+)
+
+js_server = MCPServerStdio(
+    'node',
+    args=[
+        'run',
+        'mcp-js-server.js',
+        'stdio',
+    ],
+    tool_prefix='js'  # Tools will be prefixed with 'js_'
+)
+
+agent = Agent('openai:gpt-4o', mcp_servers=[python_server, js_server])
+```
+
+When the model interacts with these servers, it will see the prefixed tool names, but the prefixes will be automatically handled when making tool calls.
````
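For a quick check of the documented behaviour, you can list a prefixed server's tools directly, in the same spirit as the `test_stdio_server_with_tool_prefix` test added in this commit. This is a minimal sketch, assuming the `deno`-based MCP Run Python server from the stdio example above is installed:

```python
import asyncio

from pydantic_ai.mcp import MCPServerStdio

# Same server as in the stdio example, with a 'py' prefix (assumes `deno` is installed)
server = MCPServerStdio(
    'deno',
    args=['run', '-N', 'jsr:@pydantic/mcp-run-python', 'stdio'],
    tool_prefix='py',
)


async def main():
    async with server:
        tools = await server.list_tools()
        # Every name reported to the model carries the 'py_' prefix
        print([tool.name for tool in tools])
        assert all(tool.name.startswith('py_') for tool in tools)


asyncio.run(main())
```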

pydantic_ai_slim/pydantic_ai/_agent_graph.py

Lines changed: 18 additions & 5 deletions
```diff
@@ -222,27 +222,40 @@ async def _prepare_request_parameters(
     ctx: GraphRunContext[GraphAgentState, GraphAgentDeps[DepsT, NodeRunEndT]],
 ) -> models.ModelRequestParameters:
     """Build tools and create an agent model."""
-    function_tool_defs: list[ToolDefinition] = []
+    function_tool_defs_map: dict[str, ToolDefinition] = {}
 
     run_context = build_run_context(ctx)
 
     async def add_tool(tool: Tool[DepsT]) -> None:
         ctx = run_context.replace_with(retry=tool.current_retry, tool_name=tool.name)
         if tool_def := await tool.prepare_tool_def(ctx):
-            function_tool_defs.append(tool_def)
+            # prepare_tool_def may change tool_def.name
+            if tool_def.name in function_tool_defs_map:
+                if tool_def.name != tool.name:
+                    # Prepare tool def may have renamed the tool
+                    raise exceptions.UserError(
+                        f"Renaming tool '{tool.name}' to '{tool_def.name}' conflicts with existing tool."
+                    )
+                else:
+                    raise exceptions.UserError(f'Tool name conflicts with existing tool: {tool.name!r}.')
+            function_tool_defs_map[tool_def.name] = tool_def
 
     async def add_mcp_server_tools(server: MCPServer) -> None:
         if not server.is_running:
             raise exceptions.UserError(f'MCP server is not running: {server}')
         tool_defs = await server.list_tools()
-        # TODO(Marcelo): We should check if the tool names are unique. If not, we should raise an error.
-        function_tool_defs.extend(tool_defs)
+        for tool_def in tool_defs:
+            if tool_def.name in function_tool_defs_map:
+                raise exceptions.UserError(
+                    f"MCP Server '{server}' defines a tool whose name conflicts with existing tool: {tool_def.name!r}. Consider using `tool_prefix` to avoid name conflicts."
+                )
+            function_tool_defs_map[tool_def.name] = tool_def
 
     await asyncio.gather(
         *map(add_tool, ctx.deps.function_tools.values()),
         *map(add_mcp_server_tools, ctx.deps.mcp_servers),
     )
-
+    function_tool_defs = list(function_tool_defs_map.values())
     if ctx.deps.prepare_tools:
         # Prepare the tools using the provided function
         # This also acts over tool definitions pulled from MCP servers
```
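The heart of this change is the switch from appending to a flat list to inserting into a dict keyed by tool name, so the first duplicate fails fast. Here is that pattern in isolation as a minimal sketch; the `ToolDef` dataclass and `merge_tool_defs` helper are illustrative stand-ins, not part of the pydantic-ai codebase:

```python
from dataclasses import dataclass


@dataclass
class ToolDef:
    """Stand-in for pydantic_ai.tools.ToolDefinition; only the name matters here."""
    name: str


def merge_tool_defs(*groups: list[ToolDef]) -> list[ToolDef]:
    """Merge tool definitions, raising on the first name collision (mirrors the map-based check above)."""
    defs_map: dict[str, ToolDef] = {}
    for group in groups:
        for tool_def in group:
            if tool_def.name in defs_map:
                raise ValueError(f'Tool name conflicts with existing tool: {tool_def.name!r}.')
            defs_map[tool_def.name] = tool_def
    return list(defs_map.values())


agent_tools = [ToolDef('get_none')]
mcp_tools = [ToolDef('foo_get_none'), ToolDef('foo_get_data')]  # prefixed, so no collision
print([t.name for t in merge_tool_defs(agent_tools, mcp_tools)])

unprefixed_mcp_tools = [ToolDef('get_none')]  # same name as the agent tool -> error
try:
    merge_tool_defs(agent_tools, unprefixed_mcp_tools)
except ValueError as exc:
    print(exc)
```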

pydantic_ai_slim/pydantic_ai/mcp.py

Lines changed: 59 additions & 7 deletions
```diff
@@ -46,6 +46,13 @@ class MCPServer(ABC):
     """
 
     is_running: bool = False
+    tool_prefix: str | None = None
+    """A prefix to add to all tools that are registered with the server.
+
+    If not empty, will include a trailing underscore(`_`).
+
+    e.g. if `tool_prefix='foo'`, then a tool named `bar` will be registered as `foo_bar`
+    """
 
     _client: ClientSession
     _read_stream: MemoryObjectReceiveStream[JSONRPCMessage | Exception]
@@ -57,7 +64,10 @@ class MCPServer(ABC):
     async def client_streams(
         self,
     ) -> AsyncIterator[
-        tuple[MemoryObjectReceiveStream[JSONRPCMessage | Exception], MemoryObjectSendStream[JSONRPCMessage]]
+        tuple[
+            MemoryObjectReceiveStream[JSONRPCMessage | Exception],
+            MemoryObjectSendStream[JSONRPCMessage],
+        ]
     ]:
         """Create the streams for the MCP server."""
         raise NotImplementedError('MCP Server subclasses must implement this method.')
@@ -68,6 +78,14 @@ def _get_log_level(self) -> LoggingLevel | None:
         """Get the log level for the MCP server."""
         raise NotImplementedError('MCP Server subclasses must implement this method.')
 
+    def get_prefixed_tool_name(self, tool_name: str) -> str:
+        """Get the tool name with prefix if `tool_prefix` is set."""
+        return f'{self.tool_prefix}_{tool_name}' if self.tool_prefix else tool_name
+
+    def get_unprefixed_tool_name(self, tool_name: str) -> str:
+        """Get original tool name without prefix for calling tools."""
+        return tool_name.removeprefix(f'{self.tool_prefix}_') if self.tool_prefix else tool_name
+
     async def list_tools(self) -> list[ToolDefinition]:
         """Retrieve tools that are currently active on the server.
 
@@ -78,7 +96,7 @@ async def list_tools(self) -> list[ToolDefinition]:
         tools = await self._client.list_tools()
         return [
             ToolDefinition(
-                name=tool.name,
+                name=self.get_prefixed_tool_name(tool.name),
                 description=tool.description or '',
                 parameters_json_schema=tool.inputSchema,
             )
@@ -100,7 +118,7 @@ async def call_tool(
         Raises:
             ModelRetry: If the tool call fails.
         """
-        result = await self._client.call_tool(tool_name, arguments)
+        result = await self._client.call_tool(self.get_unprefixed_tool_name(tool_name), arguments)
 
         content = [self._map_tool_result_part(part) for part in result.content]
 
@@ -126,7 +144,10 @@ async def __aenter__(self) -> Self:
         return self
 
     async def __aexit__(
-        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
+        self,
+        exc_type: type[BaseException] | None,
+        exc_value: BaseException | None,
+        traceback: TracebackType | None,
     ) -> bool | None:
         await self._exit_stack.aclose()
         self.is_running = False
@@ -223,11 +244,22 @@ async def main():
     cwd: str | Path | None = None
     """The working directory to use when spawning the process."""
 
+    tool_prefix: str | None = None
+    """A prefix to add to all tools that are registered with the server.
+
+    If not empty, will include a trailing underscore(`_`).
+
+    e.g. if `tool_prefix='foo'`, then a tool named `bar` will be registered as `foo_bar`
+    """
+
     @asynccontextmanager
     async def client_streams(
         self,
     ) -> AsyncIterator[
-        tuple[MemoryObjectReceiveStream[JSONRPCMessage | Exception], MemoryObjectSendStream[JSONRPCMessage]]
+        tuple[
+            MemoryObjectReceiveStream[JSONRPCMessage | Exception],
+            MemoryObjectSendStream[JSONRPCMessage],
+        ]
     ]:
         server = StdioServerParameters(command=self.command, args=list(self.args), env=self.env, cwd=self.cwd)
         async with stdio_client(server=server) as (read_stream, write_stream):
@@ -236,6 +268,9 @@ async def client_streams(
     def _get_log_level(self) -> LoggingLevel | None:
         return self.log_level
 
+    def __repr__(self) -> str:
+        return f'MCPServerStdio(command={self.command!r}, args={self.args!r}, tool_prefix={self.tool_prefix!r})'
+
 
 @dataclass
 class MCPServerHTTP(MCPServer):
@@ -303,16 +338,33 @@ async def main():
     If `None`, no log level will be set.
     """
 
+    tool_prefix: str | None = None
+    """A prefix to add to all tools that are registered with the server.
+
+    If not empty, will include a trailing underscore (`_`).
+
+    For example, if `tool_prefix='foo'`, then a tool named `bar` will be registered as `foo_bar`
+    """
+
     @asynccontextmanager
     async def client_streams(
         self,
     ) -> AsyncIterator[
-        tuple[MemoryObjectReceiveStream[JSONRPCMessage | Exception], MemoryObjectSendStream[JSONRPCMessage]]
+        tuple[
+            MemoryObjectReceiveStream[JSONRPCMessage | Exception],
+            MemoryObjectSendStream[JSONRPCMessage],
+        ]
     ]:  # pragma: no cover
         async with sse_client(
-            url=self.url, headers=self.headers, timeout=self.timeout, sse_read_timeout=self.sse_read_timeout
+            url=self.url,
+            headers=self.headers,
+            timeout=self.timeout,
+            sse_read_timeout=self.sse_read_timeout,
         ) as (read_stream, write_stream):
             yield read_stream, write_stream
 
     def _get_log_level(self) -> LoggingLevel | None:
         return self.log_level
+
+    def __repr__(self) -> str:  # pragma: no cover
+        return f'MCPServerHTTP(url={self.url!r}, tool_prefix={self.tool_prefix!r})'
```
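To make the prefix handling concrete, here is a minimal sketch exercising `get_prefixed_tool_name` and `get_unprefixed_tool_name` on an `MCPServerStdio` instance; the command and args are placeholders, and the server is never started, since the helpers are pure string transformations:

```python
from pydantic_ai.mcp import MCPServerStdio

# Placeholder command/args: the process is never spawned in this sketch
server = MCPServerStdio('python', ['-m', 'my_mcp_server'], tool_prefix='foo')

# Names shown to the model are prefixed...
assert server.get_prefixed_tool_name('get_data') == 'foo_get_data'
# ...and the prefix is stripped again before the call reaches the server
assert server.get_unprefixed_tool_name('foo_get_data') == 'get_data'

# Without a prefix, both helpers return the name unchanged
plain = MCPServerStdio('python', ['-m', 'my_mcp_server'])
assert plain.get_prefixed_tool_name('get_data') == 'get_data'
assert plain.get_unprefixed_tool_name('get_data') == 'get_data'
```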

tests/test_mcp.py

Lines changed: 63 additions & 6 deletions
```diff
@@ -1,5 +1,6 @@
 """Tests for the MCP (Model Context Protocol) server implementation."""
 
+import re
 from pathlib import Path
 
 import pytest
@@ -54,6 +55,13 @@ async def test_stdio_server():
         assert result == snapshot('32.0')
 
 
+async def test_stdio_server_with_tool_prefix():
+    server = MCPServerStdio('python', ['-m', 'tests.mcp_server'], tool_prefix='foo')
+    async with server:
+        tools = await server.list_tools()
+        assert all(tool.name.startswith('foo_') for tool in tools)
+
+
 async def test_stdio_server_with_cwd():
     test_dir = Path(__file__).parent
     server = MCPServerStdio('python', ['mcp_server.py'], cwd=test_dir)
@@ -156,6 +164,41 @@ async def test_agent_with_stdio_server(allow_model_requests: None, agent: Agent)
     )
 
 
+async def test_agent_with_conflict_tool_name(agent: Agent):
+    @agent.tool_plain
+    def get_none() -> None:  # pragma: no cover
+        """Return nothing"""
+        return None
+
+    async with agent.run_mcp_servers():
+        with pytest.raises(
+            UserError,
+            match=re.escape(
+                "MCP Server 'MCPServerStdio(command='python', args=['-m', 'tests.mcp_server'], tool_prefix=None)' defines a tool whose name conflicts with existing tool: 'get_none'. Consider using `tool_prefix` to avoid name conflicts."
+            ),
+        ):
+            await agent.run('Get me a conflict')
+
+
+async def test_agent_with_prefix_tool_name(openai_api_key: str):
+    server = MCPServerStdio('python', ['-m', 'tests.mcp_server'], tool_prefix='foo')
+    model = OpenAIModel('gpt-4o', provider=OpenAIProvider(api_key=openai_api_key))
+    agent = Agent(
+        model,
+        mcp_servers=[server],
+    )
+
+    @agent.tool_plain
+    def get_none() -> None:  # pragma: no cover
+        """Return nothing"""
+        return None
+
+    async with agent.run_mcp_servers():
+        # This means that we passed the _prepare_request_parameters check and there is no conflict in the tool name
+        with pytest.raises(RuntimeError, match='Model requests are not allowed, since ALLOW_MODEL_REQUESTS is False'):
+            await agent.run('No conflict')
+
+
 async def test_agent_with_server_not_running(openai_api_key: str):
     server = MCPServerStdio('python', ['-m', 'tests.mcp_server'])
     model = OpenAIModel('gpt-4o', provider=OpenAIProvider(api_key=openai_api_key))
@@ -281,7 +324,9 @@ async def test_tool_returning_text_resource(allow_model_requests: None, agent: A
             ModelResponse(
                 parts=[
                     ToolCallPart(
-                        tool_name='get_product_name', args='{}', tool_call_id='call_LaiWltzI39sdquflqeuF0EyE'
+                        tool_name='get_product_name',
+                        args='{}',
+                        tool_call_id='call_LaiWltzI39sdquflqeuF0EyE',
                     )
                 ],
                 usage=Usage(
@@ -354,7 +399,9 @@ async def test_tool_returning_image_resource(allow_model_requests: None, agent:
             ModelResponse(
                 parts=[
                     ToolCallPart(
-                        tool_name='get_image_resource', args='{}', tool_call_id='call_nFsDHYDZigO0rOHqmChZ3pmt'
+                        tool_name='get_image_resource',
+                        args='{}',
+                        tool_call_id='call_nFsDHYDZigO0rOHqmChZ3pmt',
                     )
                 ],
                 usage=Usage(
@@ -435,7 +482,11 @@ async def test_tool_returning_image(allow_model_requests: None, agent: Agent, im
             ),
             ModelResponse(
                 parts=[
-                    ToolCallPart(tool_name='get_image', args='{}', tool_call_id='call_Q7xG8CCG0dyevVfUS0ubsDdN')
+                    ToolCallPart(
+                        tool_name='get_image',
+                        args='{}',
+                        tool_call_id='call_Q7xG8CCG0dyevVfUS0ubsDdN',
+                    )
                 ],
                 usage=Usage(
                     requests=1,
@@ -581,7 +632,9 @@ async def test_tool_returning_error(allow_model_requests: None, agent: Agent):
             ModelResponse(
                 parts=[
                     ToolCallPart(
-                        tool_name='get_error', args='{"value":false}', tool_call_id='call_rETXZWddAGZSHyVHAxptPGgc'
+                        tool_name='get_error',
+                        args='{"value":false}',
+                        tool_call_id='call_rETXZWddAGZSHyVHAxptPGgc',
                     )
                 ],
                 usage=Usage(
@@ -614,7 +667,9 @@ async def test_tool_returning_error(allow_model_requests: None, agent: Agent):
             ModelResponse(
                 parts=[
                     ToolCallPart(
-                        tool_name='get_error', args='{"value":true}', tool_call_id='call_4xGyvdghYKHN8x19KWkRtA5N'
+                        tool_name='get_error',
+                        args='{"value":true}',
+                        tool_call_id='call_4xGyvdghYKHN8x19KWkRtA5N',
                    )
                 ],
                 usage=Usage(
@@ -758,7 +813,9 @@ async def test_tool_returning_multiple_items(allow_model_requests: None, agent:
             ModelResponse(
                 parts=[
                     ToolCallPart(
-                        tool_name='get_multiple_items', args='{}', tool_call_id='call_kL0TvjEVQBDGZrn1Zv7iNYOW'
+                        tool_name='get_multiple_items',
+                        args='{}',
+                        tool_call_id='call_kL0TvjEVQBDGZrn1Zv7iNYOW',
                     )
                 ],
                 usage=Usage(
```
