Skip to content

API Reference

This page documents the public Python API exposed by deepmcpagent.

Public API for deepmcpagent.

ServerSpec = StdioServerSpec | HTTPServerSpec module-attribute

Union of supported server specifications.

FastMCPMulti

Create a single FastMCP client wired to multiple servers.

The client is configured using the mcpServers dictionary generated from the typed server specifications.

Parameters:

Name Type Description Default
servers Mapping[str, ServerSpec]

Mapping of server name to server spec.

required
Source code in src/deepmcpagent/clients.py
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
class FastMCPMulti:
    """A single FastMCP client wired to several MCP servers at once.

    The underlying client is constructed from the ``mcpServers`` configuration
    dictionary derived from the typed server specifications.

    Args:
        servers: Mapping of server name to server spec.
    """

    def __init__(self, servers: Mapping[str, ServerSpec]) -> None:
        # Build the FastMCP configuration dict once, then hand it to the client.
        config = {"mcpServers": servers_to_mcp_config(servers)}
        # Annotated as Any to avoid depending on the client's generic parameters.
        self._client: Any = FastMCPClient(config)

    @property
    def client(self) -> Any:
        """Return the underlying FastMCP client instance."""
        return self._client

client property

Return the underlying FastMCP client instance.

HTTPServerSpec

Bases: _BaseServer

Specification for a remote MCP server reachable via HTTP/SSE.

Attributes:

Name Type Description
url str

Full endpoint URL for the MCP server (e.g., http://127.0.0.1:8000/mcp).

transport Literal['http', 'streamable-http', 'sse']

The transport mechanism ("http", "streamable-http", or "sse").

headers dict[str, str]

Optional request headers (e.g., Authorization tokens).

auth str | None

Optional auth hint if your FastMCP deployment consumes it.

Source code in src/deepmcpagent/config.py
41
42
43
44
45
46
47
48
49
50
51
52
53
54
class HTTPServerSpec(_BaseServer):
    """Specification for a remote MCP server reachable via HTTP/SSE.

    Attributes:
        url: Full endpoint URL for the MCP server (e.g., http://127.0.0.1:8000/mcp).
        transport: The transport mechanism ("http", "streamable-http", or "sse").
        headers: Optional request headers (e.g., Authorization tokens).
        auth: Optional auth hint if your FastMCP deployment consumes it.
    """

    # Full endpoint URL including the path component (e.g., .../mcp).
    url: str
    # Wire protocol to use; plain "http" is the default.
    transport: Literal["http", "streamable-http", "sse"] = "http"
    # Extra request headers; default_factory avoids a shared mutable default.
    headers: dict[str, str] = Field(default_factory=dict)
    # Opaque auth hint forwarded to FastMCP when it supports one.
    auth: str | None = None

MCPToolLoader

Discover MCP tools via FastMCP and convert them to LangChain tools.

Source code in src/deepmcpagent/tools.py
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
class MCPToolLoader:
    """Discover MCP tools via FastMCP and convert them to LangChain tools."""

    def __init__(
        self,
        multi: FastMCPMulti,
        *,
        on_before: OnBefore | None = None,
        on_after: OnAfter | None = None,
        on_error: OnError | None = None,
    ) -> None:
        # Keep the shared multi-server client and optional tracing callbacks.
        self._multi = multi
        self._on_before = on_before
        self._on_after = on_after
        self._on_error = on_error

    async def _list_tools_raw(self) -> tuple[Any, list[Any]]:
        """Fetch raw tool descriptors from all configured MCP servers."""
        client = self._multi.client
        try:
            async with client:
                raw = await client.list_tools()
        except Exception as exc:
            raise MCPClientError(
                f"Failed to list tools from MCP servers: {exc}. "
                "Check server URLs, network connectivity, and authentication headers."
            ) from exc
        # Normalize a possibly-None result into a concrete list.
        return client, list(raw or [])

    async def get_all_tools(self) -> list[BaseTool]:
        """Return all available tools as LangChain `BaseTool` instances."""
        client, descriptors = await self._list_tools_raw()

        tools: list[BaseTool] = []
        for descriptor in descriptors:
            tool_name = descriptor.name
            raw_schema = getattr(descriptor, "inputSchema", None) or {}
            # Derive a pydantic args model from the tool's JSON input schema.
            args_model = _jsonschema_to_pydantic(raw_schema, model_name=f"Args_{tool_name}")
            tools.append(
                _FastMCPTool(
                    name=tool_name,
                    description=getattr(descriptor, "description", "") or "",
                    args_schema=args_model,
                    tool_name=tool_name,
                    client=client,
                    on_before=self._on_before,
                    on_after=self._on_after,
                    on_error=self._on_error,
                )
            )
        return tools

    async def list_tool_info(self) -> list[ToolInfo]:
        """Return human-readable tool metadata for introspection or debugging."""
        _, descriptors = await self._list_tools_raw()
        infos: list[ToolInfo] = []
        for descriptor in descriptors:
            # Server attribution is best-effort: descriptors don't always carry it.
            server = getattr(descriptor, "server", None) or getattr(descriptor, "serverName", None) or ""
            infos.append(
                ToolInfo(
                    server_guess=server,
                    name=descriptor.name,
                    description=getattr(descriptor, "description", "") or "",
                    input_schema=getattr(descriptor, "inputSchema", None) or {},
                )
            )
        return infos

get_all_tools() async

Return all available tools as LangChain BaseTool instances.

Source code in src/deepmcpagent/tools.py
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
async def get_all_tools(self) -> list[BaseTool]:
    """Return all available tools as LangChain `BaseTool` instances."""
    client, descriptors = await self._list_tools_raw()

    wrapped: list[BaseTool] = []
    for descriptor in descriptors:
        tool_name = descriptor.name
        raw_schema = getattr(descriptor, "inputSchema", None) or {}
        # Build a pydantic args model from the tool's JSON input schema.
        args_model = _jsonschema_to_pydantic(raw_schema, model_name=f"Args_{tool_name}")
        wrapped.append(
            _FastMCPTool(
                name=tool_name,
                description=getattr(descriptor, "description", "") or "",
                args_schema=args_model,
                tool_name=tool_name,
                client=client,
                on_before=self._on_before,
                on_after=self._on_after,
                on_error=self._on_error,
            )
        )
    return wrapped

list_tool_info() async

Return human-readable tool metadata for introspection or debugging.

Source code in src/deepmcpagent/tools.py
188
189
190
191
192
193
194
195
196
197
198
199
async def list_tool_info(self) -> list[ToolInfo]:
    """Return human-readable tool metadata for introspection or debugging."""
    _, descriptors = await self._list_tools_raw()
    infos: list[ToolInfo] = []
    for t in descriptors:
        # Server attribution is best-effort: not every descriptor carries it.
        server = getattr(t, "server", None) or getattr(t, "serverName", None) or ""
        infos.append(
            ToolInfo(
                server_guess=server,
                name=t.name,
                description=getattr(t, "description", "") or "",
                input_schema=getattr(t, "inputSchema", None) or {},
            )
        )
    return infos

StdioServerSpec

Bases: _BaseServer

Specification for a local MCP server launched via stdio.

NOTE

The FastMCP Python client typically expects HTTP/SSE transports. Using StdioServerSpec requires a different adapter or an HTTP shim in front of the stdio server. Keep this for future expansion or custom runners.

Attributes:

Name Type Description
command str

Executable to launch (e.g., "python").

args list[str]

Positional arguments for the process.

env dict[str, str]

Environment variables to set for the process.

cwd str | None

Optional working directory.

keep_alive bool

Whether the client should try to keep a persistent session.

Source code in src/deepmcpagent/config.py
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
class StdioServerSpec(_BaseServer):
    """Specification for a local MCP server launched via stdio.

    NOTE:
        The FastMCP Python client typically expects HTTP/SSE transports. Using
        `StdioServerSpec` requires a different adapter or an HTTP shim in front
        of the stdio server. Keep this for future expansion or custom runners.

    Attributes:
        command: Executable to launch (e.g., "python").
        args: Positional arguments for the process.
        env: Environment variables to set for the process.
        cwd: Optional working directory.
        keep_alive: Whether the client should try to keep a persistent session.
    """

    # Executable name or path to spawn.
    command: str
    # Positional argv entries; default_factory avoids a shared mutable default.
    args: list[str] = Field(default_factory=list)
    # Environment variables for the spawned process.
    env: dict[str, str] = Field(default_factory=dict)
    # Working directory for the process; None means inherit the current one.
    cwd: str | None = None
    # Ask the client to keep the stdio session open between calls.
    keep_alive: bool = True

ToolInfo dataclass

Human-friendly metadata for a discovered MCP tool.

Source code in src/deepmcpagent/tools.py
22
23
24
25
26
27
28
29
@dataclass(frozen=True)
class ToolInfo:
    """Human-friendly metadata for a discovered MCP tool.

    Attributes:
        server_guess: Best-effort name of the originating server ("" when unknown).
        name: Tool name as reported by the MCP server.
        description: Tool description ("" when none was provided).
        input_schema: JSON schema of the tool's input arguments ({} when absent).
    """

    server_guess: str
    name: str
    description: str
    input_schema: dict[str, Any]

build_deep_agent(*, servers, model, instructions=None, trace_tools=False, cross_agents=None) async

Build an MCP-first agent graph.

This function discovers tools from the configured MCP servers, converts them into LangChain tools, and then builds an agent. If the optional deepagents package is installed, a Deep Agent loop is created. Otherwise, a LangGraph ReAct agent is used.

Parameters:

Name Type Description Default
servers Mapping[str, ServerSpec]

Mapping of server name to spec (HTTP/SSE recommended for FastMCP).

required
model ModelLike

REQUIRED. Either a LangChain chat model instance, a provider id string accepted by init_chat_model, or a Runnable.

required
instructions str | None

Optional system prompt. If not provided, uses DEFAULT_SYSTEM_PROMPT.

None
trace_tools bool

If True, print each tool invocation and result from inside the tool wrapper (works for both DeepAgents and LangGraph prebuilt).

False
cross_agents Mapping[str, CrossAgent] | None

Optional mapping of peer name -> CrossAgent. When provided, each peer is exposed as a tool (e.g., ask_agent_<name>) and an optional broadcast_to_agents tool is added to consult multiple peers.

None

Returns:

Type Description
tuple[Runnable[Any, Any], MCPToolLoader]

Tuple of (graph, loader), where graph is a LangGraph or DeepAgents runnable with .ainvoke, and loader can be used to introspect the discovered tools.

Source code in src/deepmcpagent/agent.py
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
async def build_deep_agent(
    *,
    servers: Mapping[str, ServerSpec],
    model: ModelLike,
    instructions: str | None = None,
    trace_tools: bool = False,
    cross_agents: Mapping[str, CrossAgent] | None = None,
) -> tuple[Runnable[Any, Any], MCPToolLoader]:
    """Build an MCP-first agent graph.

    Tools are discovered from the configured MCP servers and converted into
    LangChain tools. When the optional `deepagents` package is importable, a
    Deep Agent loop is built; otherwise a LangGraph ReAct agent is used.

    Args:
        servers: Mapping of server name to spec (HTTP/SSE recommended for FastMCP).
        model: REQUIRED. A LangChain chat model instance, a provider id string
            accepted by `init_chat_model`, or a Runnable.
        instructions: Optional system prompt; defaults to DEFAULT_SYSTEM_PROMPT.
        trace_tools: If True, print each tool invocation and result from inside
            the tool wrapper (works for both DeepAgents and LangGraph prebuilt).
        cross_agents: Optional mapping of peer name -> CrossAgent. Each peer is
            exposed as a tool (e.g., `ask_agent_<name>`), plus an optional
            `broadcast_to_agents` tool for consulting multiple peers.

    Returns:
        Tuple `(graph, loader)`: `graph` is a runnable with `.ainvoke`, and
        `loader` can be used to introspect the discovered tools.

    Raises:
        ValueError: If `model` is None.
        RuntimeError: If tool discovery fails.
    """
    if model is None:
        # Defensive check; CLI/code must always pass a model now.
        raise ValueError("A model is required. Provide a model instance or a provider id string.")

    # Lightweight, dependency-free tracing callbacks.
    def _before(name: str, kwargs: dict[str, Any]) -> None:
        if trace_tools:
            print(f"→ Invoking tool: {name} with {kwargs}")

    def _after(name: str, res: Any) -> None:
        if not trace_tools:
            return
        pretty = res
        # Prefer a human-readable payload attribute when one is populated.
        # The condition stays inside the try: exotic __eq__ may raise here.
        for attr in ("data", "text", "content", "result"):
            try:
                candidate = getattr(res, attr, None)
                if candidate not in (None, ""):
                    pretty = candidate
                    break
            except Exception:
                continue
        print(f"✔ Tool result from {name}: {pretty}")

    def _error(name: str, exc: Exception) -> None:
        if trace_tools:
            print(f"✖ {name} error: {exc}")

    multi = FastMCPMulti(servers)
    loader = MCPToolLoader(
        multi,
        on_before=_before if trace_tools else None,
        on_after=_after if trace_tools else None,
        on_error=_error if trace_tools else None,
    )

    try:
        discovered = await loader.get_all_tools()
        tools: list[BaseTool] = list(discovered) if discovered else []
    except MCPClientError as exc:
        raise RuntimeError(
            f"Failed to initialize agent because tool discovery failed. Details: {exc}"
        ) from exc

    # Expose peer agents as additional tools when requested.
    if cross_agents:
        tools.extend(make_cross_agent_tools(cross_agents))

    if not tools:
        print("[deepmcpagent] No tools discovered from MCP servers; agent will run without tools.")

    chat: Runnable[Any, Any] = _normalize_model(model)
    prompt = instructions or DEFAULT_SYSTEM_PROMPT

    # Prefer the optional DeepAgents loop; fall back to LangGraph otherwise.
    try:
        from deepagents import create_deep_agent  # type: ignore

        graph = cast(
            Runnable[Any, Any],
            create_deep_agent(tools=tools, instructions=prompt, model=chat),
        )

    except ImportError:
        # LangGraph's ReAct agent changed its prompt kwarg across versions;
        # inspect the signature and pass only what this version accepts.
        import inspect

        try:
            accepted = set(inspect.signature(create_react_agent).parameters)

            react_kwargs: dict[str, Any] = {"model": chat, "tools": tools}
            if "system_prompt" in accepted:
                react_kwargs["system_prompt"] = prompt
            elif "state_modifier" in accepted:
                react_kwargs["state_modifier"] = prompt
            # Newer versions (>=0.6) accept no prompt kwarg at all.

            graph = cast(Runnable[Any, Any], create_react_agent(**react_kwargs))

        except TypeError:
            # Absolute fallback for the latest versions: no prompt args allowed.
            graph = cast(Runnable[Any, Any], create_react_agent(model=chat, tools=tools))

    return graph, loader

servers_to_mcp_config(servers)

Convert programmatic server specs to the FastMCP configuration dict.

Parameters:

Name Type Description Default
servers Mapping[str, ServerSpec]

Mapping of server name to specification.

required

Returns:

Type Description
dict[str, dict[str, object]]

Dict suitable for initializing fastmcp.Client({"mcpServers": ...}).

Source code in src/deepmcpagent/config.py
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
def servers_to_mcp_config(servers: Mapping[str, ServerSpec]) -> dict[str, dict[str, object]]:
    """Convert programmatic server specs to the FastMCP configuration dict.

    Args:
        servers: Mapping of server name to specification.

    Returns:
        Dict suitable for initializing `fastmcp.Client({"mcpServers": ...})`.
    """
    config: dict[str, dict[str, object]] = {}
    for name, spec in servers.items():
        if isinstance(spec, StdioServerSpec):
            # Local process launched over stdio.
            config[name] = {
                "transport": "stdio",
                "command": spec.command,
                "args": spec.args,
                "env": spec.env or None,  # empty mapping collapses to None
                "cwd": spec.cwd or None,
                "keep_alive": spec.keep_alive,
            }
            continue
        # Remote HTTP/SSE server; optional fields are included only when set.
        remote: dict[str, object] = {
            "transport": spec.transport,
            "url": spec.url,
        }
        if spec.headers:
            remote["headers"] = spec.headers
        if spec.auth is not None:
            remote["auth"] = spec.auth
        config[name] = remote
    return config