diff --git a/.github/actions/conformance/client.py b/.github/actions/conformance/client.py new file mode 100644 index 0000000000..7ca88110a7 --- /dev/null +++ b/.github/actions/conformance/client.py @@ -0,0 +1,346 @@ +"""MCP unified conformance test client. + +This client is designed to work with the @modelcontextprotocol/conformance npm package. +It handles all conformance test scenarios via environment variables and CLI arguments. + +Contract: + - MCP_CONFORMANCE_SCENARIO env var -> scenario name + - MCP_CONFORMANCE_CONTEXT env var -> optional JSON (for client-credentials scenarios) + - Server URL as last CLI argument (sys.argv[1]) + - Must exit 0 within 30 seconds + +Scenarios: + initialize - Connect, initialize, list tools, close + tools_call - Connect, call add_numbers(a=5, b=3), close + sse-retry - Connect, call test_reconnection, close + elicitation-sep1034-client-defaults - Elicitation with default accept callback + auth/client-credentials-jwt - Client credentials with private_key_jwt + auth/client-credentials-basic - Client credentials with client_secret_basic + auth/* - Authorization code flow (default for auth scenarios) +""" + +import asyncio +import json +import logging +import os +import sys +from collections.abc import Callable, Coroutine +from typing import Any, cast +from urllib.parse import parse_qs, urlparse + +import httpx +from pydantic import AnyUrl + +from mcp import ClientSession, types +from mcp.client.auth import OAuthClientProvider, TokenStorage +from mcp.client.auth.extensions.client_credentials import ( + ClientCredentialsOAuthProvider, + PrivateKeyJWTOAuthProvider, + SignedJWTParameters, +) +from mcp.client.streamable_http import streamable_http_client +from mcp.shared.auth import OAuthClientInformationFull, OAuthClientMetadata, OAuthToken +from mcp.shared.context import RequestContext + +# Set up logging to stderr (stdout is for conformance test output) +logging.basicConfig( + level=logging.DEBUG, + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + stream=sys.stderr, +) +logger = logging.getLogger(__name__) + +# Type for async scenario handler functions +ScenarioHandler = Callable[[str], Coroutine[Any, None, None]] + +# Registry of scenario handlers +HANDLERS: dict[str, ScenarioHandler] = {} + + +def register(name: str) -> Callable[[ScenarioHandler], ScenarioHandler]: + """Register a scenario handler.""" + + def decorator(fn: ScenarioHandler) -> ScenarioHandler: + HANDLERS[name] = fn + return fn + + return decorator + + +def get_conformance_context() -> dict[str, Any]: + """Load conformance test context from MCP_CONFORMANCE_CONTEXT environment variable.""" + context_json = os.environ.get("MCP_CONFORMANCE_CONTEXT") + if not context_json: + raise RuntimeError( + "MCP_CONFORMANCE_CONTEXT environment variable not set. " + "Expected JSON with client_id, client_secret, and/or private_key_pem." 
+ ) + try: + return json.loads(context_json) + except json.JSONDecodeError as e: + raise RuntimeError(f"Failed to parse MCP_CONFORMANCE_CONTEXT as JSON: {e}") from e + + +class InMemoryTokenStorage(TokenStorage): + """Simple in-memory token storage for conformance testing.""" + + def __init__(self) -> None: + self._tokens: OAuthToken | None = None + self._client_info: OAuthClientInformationFull | None = None + + async def get_tokens(self) -> OAuthToken | None: + return self._tokens + + async def set_tokens(self, tokens: OAuthToken) -> None: + self._tokens = tokens + + async def get_client_info(self) -> OAuthClientInformationFull | None: + return self._client_info + + async def set_client_info(self, client_info: OAuthClientInformationFull) -> None: + self._client_info = client_info + + +class ConformanceOAuthCallbackHandler: + """OAuth callback handler that automatically fetches the authorization URL + and extracts the auth code, without requiring user interaction. + """ + + def __init__(self) -> None: + self._auth_code: str | None = None + self._state: str | None = None + + async def handle_redirect(self, authorization_url: str) -> None: + """Fetch the authorization URL and extract the auth code from the redirect.""" + logger.debug(f"Fetching authorization URL: {authorization_url}") + + async with httpx.AsyncClient() as client: + response = await client.get( + authorization_url, + follow_redirects=False, + ) + + if response.status_code in (301, 302, 303, 307, 308): + location = cast(str, response.headers.get("location")) + if location: + redirect_url = urlparse(location) + query_params: dict[str, list[str]] = parse_qs(redirect_url.query) + + if "code" in query_params: + self._auth_code = query_params["code"][0] + state_values = query_params.get("state") + self._state = state_values[0] if state_values else None + logger.debug(f"Got auth code from redirect: {self._auth_code[:10]}...") + return + else: + raise RuntimeError(f"No auth code in redirect URL: {location}") + else: + raise RuntimeError(f"No redirect location received from {authorization_url}") + else: + raise RuntimeError(f"Expected redirect response, got {response.status_code} from {authorization_url}") + + async def handle_callback(self) -> tuple[str, str | None]: + """Return the captured auth code and state.""" + if self._auth_code is None: + raise RuntimeError("No authorization code available - was handle_redirect called?") + auth_code = self._auth_code + state = self._state + self._auth_code = None + self._state = None + return auth_code, state + + +# --- Scenario Handlers --- + + +@register("initialize") +async def run_initialize(server_url: str) -> None: + """Connect, initialize, list tools, close.""" + async with streamable_http_client(url=server_url) as (read_stream, write_stream, _): + async with ClientSession(read_stream, write_stream) as session: + await session.initialize() + logger.debug("Initialized successfully") + await session.list_tools() + logger.debug("Listed tools successfully") + + +@register("tools_call") +async def run_tools_call(server_url: str) -> None: + """Connect, initialize, list tools, call add_numbers(a=5, b=3), close.""" + async with streamable_http_client(url=server_url) as (read_stream, write_stream, _): + async with ClientSession(read_stream, write_stream) as session: + await session.initialize() + await session.list_tools() + result = await session.call_tool("add_numbers", {"a": 5, "b": 3}) + logger.debug(f"add_numbers result: {result}") + + +@register("sse-retry") +async def 
run_sse_retry(server_url: str) -> None: + """Connect, initialize, list tools, call test_reconnection, close.""" + async with streamable_http_client(url=server_url) as (read_stream, write_stream, _): + async with ClientSession(read_stream, write_stream) as session: + await session.initialize() + await session.list_tools() + result = await session.call_tool("test_reconnection", {}) + logger.debug(f"test_reconnection result: {result}") + + +async def default_elicitation_callback( + context: RequestContext[ClientSession, Any], # noqa: ARG001 + params: types.ElicitRequestParams, +) -> types.ElicitResult | types.ErrorData: + """Accept elicitation and apply defaults from the schema (SEP-1034).""" + content: dict[str, str | int | float | bool | list[str] | None] = {} + + # For form mode, extract defaults from the requested_schema + if isinstance(params, types.ElicitRequestFormParams): + schema = params.requested_schema + logger.debug(f"Elicitation schema: {schema}") + properties = schema.get("properties", {}) + for prop_name, prop_schema in properties.items(): + if "default" in prop_schema: + content[prop_name] = prop_schema["default"] + logger.debug(f"Applied defaults: {content}") + + return types.ElicitResult(action="accept", content=content) + + +@register("elicitation-sep1034-client-defaults") +async def run_elicitation_defaults(server_url: str) -> None: + """Connect with elicitation callback that applies schema defaults.""" + async with streamable_http_client(url=server_url) as (read_stream, write_stream, _): + async with ClientSession( + read_stream, write_stream, elicitation_callback=default_elicitation_callback + ) as session: + await session.initialize() + await session.list_tools() + result = await session.call_tool("test_client_elicitation_defaults", {}) + logger.debug(f"test_client_elicitation_defaults result: {result}") + + +@register("auth/client-credentials-jwt") +async def run_client_credentials_jwt(server_url: str) -> None: + """Client credentials flow with private_key_jwt authentication.""" + context = get_conformance_context() + client_id = context.get("client_id") + private_key_pem = context.get("private_key_pem") + signing_algorithm = context.get("signing_algorithm", "ES256") + + if not client_id: + raise RuntimeError("MCP_CONFORMANCE_CONTEXT missing 'client_id'") + if not private_key_pem: + raise RuntimeError("MCP_CONFORMANCE_CONTEXT missing 'private_key_pem'") + + jwt_params = SignedJWTParameters( + issuer=client_id, + subject=client_id, + signing_algorithm=signing_algorithm, + signing_key=private_key_pem, + ) + + oauth_auth = PrivateKeyJWTOAuthProvider( + server_url=server_url, + storage=InMemoryTokenStorage(), + client_id=client_id, + assertion_provider=jwt_params.create_assertion_provider(), + ) + + await _run_auth_session(server_url, oauth_auth) + + +@register("auth/client-credentials-basic") +async def run_client_credentials_basic(server_url: str) -> None: + """Client credentials flow with client_secret_basic authentication.""" + context = get_conformance_context() + client_id = context.get("client_id") + client_secret = context.get("client_secret") + + if not client_id: + raise RuntimeError("MCP_CONFORMANCE_CONTEXT missing 'client_id'") + if not client_secret: + raise RuntimeError("MCP_CONFORMANCE_CONTEXT missing 'client_secret'") + + oauth_auth = ClientCredentialsOAuthProvider( + server_url=server_url, + storage=InMemoryTokenStorage(), + client_id=client_id, + client_secret=client_secret, + token_endpoint_auth_method="client_secret_basic", + ) + + await 
_run_auth_session(server_url, oauth_auth) + + +async def run_auth_code_client(server_url: str) -> None: + """Authorization code flow (default for auth/* scenarios).""" + callback_handler = ConformanceOAuthCallbackHandler() + + oauth_auth = OAuthClientProvider( + server_url=server_url, + client_metadata=OAuthClientMetadata( + client_name="conformance-client", + redirect_uris=[AnyUrl("http://localhost:3000/callback")], + grant_types=["authorization_code", "refresh_token"], + response_types=["code"], + ), + storage=InMemoryTokenStorage(), + redirect_handler=callback_handler.handle_redirect, + callback_handler=callback_handler.handle_callback, + client_metadata_url="https://conformance-test.local/client-metadata.json", + ) + + await _run_auth_session(server_url, oauth_auth) + + +async def _run_auth_session(server_url: str, oauth_auth: OAuthClientProvider) -> None: + """Common session logic for all OAuth flows.""" + client = httpx.AsyncClient(auth=oauth_auth, timeout=30.0) + async with streamable_http_client(url=server_url, http_client=client) as (read_stream, write_stream, _): + async with ClientSession( + read_stream, write_stream, elicitation_callback=default_elicitation_callback + ) as session: + await session.initialize() + logger.debug("Initialized successfully") + + tools_result = await session.list_tools() + logger.debug(f"Listed tools: {[t.name for t in tools_result.tools]}") + + # Call the first available tool (different tests have different tools) + if tools_result.tools: + tool_name = tools_result.tools[0].name + try: + result = await session.call_tool(tool_name, {}) + logger.debug(f"Called {tool_name}, result: {result}") + except Exception as e: + logger.debug(f"Tool call result/error: {e}") + + logger.debug("Connection closed successfully") + + +def main() -> None: + """Main entry point for the conformance client.""" + if len(sys.argv) < 2: + print(f"Usage: {sys.argv[0]} ", file=sys.stderr) + sys.exit(1) + + server_url = sys.argv[1] + scenario = os.environ.get("MCP_CONFORMANCE_SCENARIO") + + if scenario: + logger.debug(f"Running explicit scenario '{scenario}' against {server_url}") + handler = HANDLERS.get(scenario) + if handler: + asyncio.run(handler(server_url)) + elif scenario.startswith("auth/"): + asyncio.run(run_auth_code_client(server_url)) + else: + print(f"Unknown scenario: {scenario}", file=sys.stderr) + sys.exit(1) + else: + logger.debug(f"Running default auth flow against {server_url}") + asyncio.run(run_auth_code_client(server_url)) + + +if __name__ == "__main__": + main() diff --git a/.github/actions/conformance/run-server.sh b/.github/actions/conformance/run-server.sh new file mode 100755 index 0000000000..01af136120 --- /dev/null +++ b/.github/actions/conformance/run-server.sh @@ -0,0 +1,30 @@ +#!/bin/bash +set -e + +PORT="${PORT:-3001}" +SERVER_URL="http://localhost:${PORT}/mcp" + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +cd "$SCRIPT_DIR/../../.." + +# Start everything-server +uv run --frozen mcp-everything-server --port "$PORT" & +SERVER_PID=$! +trap "kill $SERVER_PID 2>/dev/null || true; wait $SERVER_PID 2>/dev/null || true" EXIT + +# Wait for server to be ready +MAX_RETRIES=30 +RETRY_COUNT=0 +while ! 
curl -s "$SERVER_URL" > /dev/null 2>&1; do + RETRY_COUNT=$((RETRY_COUNT + 1)) + if [ $RETRY_COUNT -ge $MAX_RETRIES ]; then + echo "Server failed to start after ${MAX_RETRIES} retries" >&2 + exit 1 + fi + sleep 0.5 +done + +echo "Server ready at $SERVER_URL" + +# Run conformance tests +npx @modelcontextprotocol/conformance@0.1.10 server --url "$SERVER_URL" "$@" diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000000..00dc69828b --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,10 @@ +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: monthly + groups: + github-actions: + patterns: + - "*" diff --git a/.github/workflows/comment-on-release.yml b/.github/workflows/comment-on-release.yml new file mode 100644 index 0000000000..6e734e18e5 --- /dev/null +++ b/.github/workflows/comment-on-release.yml @@ -0,0 +1,149 @@ +name: Comment on PRs in Release + +on: + release: + types: [published] + +permissions: + pull-requests: write + contents: read + +jobs: + comment-on-prs: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + fetch-depth: 0 + + - name: Get previous release + id: previous_release + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 + with: + script: | + const currentTag = '${{ github.event.release.tag_name }}'; + + // Get all releases + const { data: releases } = await github.rest.repos.listReleases({ + owner: context.repo.owner, + repo: context.repo.repo, + per_page: 100 + }); + + // Find current release index + const currentIndex = releases.findIndex(r => r.tag_name === currentTag); + + if (currentIndex === -1) { + console.log('Current release not found in list'); + return null; + } + + // Get previous release (next in the list since they're sorted by date desc) + const previousRelease = releases[currentIndex + 1]; + + if (!previousRelease) { + console.log('No previous release found, this might be the first release'); + return null; + } + + console.log(`Found previous release: ${previousRelease.tag_name}`); + + return previousRelease.tag_name; + + - name: Get merged PRs between releases + id: get_prs + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 + with: + script: | + const currentTag = '${{ github.event.release.tag_name }}'; + const previousTag = ${{ steps.previous_release.outputs.result }}; + + if (!previousTag) { + console.log('No previous release found, skipping'); + return []; + } + + console.log(`Finding PRs between ${previousTag} and ${currentTag}`); + + // Get commits between previous and current release + const comparison = await github.rest.repos.compareCommits({ + owner: context.repo.owner, + repo: context.repo.repo, + base: previousTag, + head: currentTag + }); + + const commits = comparison.data.commits; + console.log(`Found ${commits.length} commits`); + + // Get PRs associated with each commit using GitHub API + const prNumbers = new Set(); + + for (const commit of commits) { + try { + const { data: prs } = await github.rest.repos.listPullRequestsAssociatedWithCommit({ + owner: context.repo.owner, + repo: context.repo.repo, + commit_sha: commit.sha + }); + + for (const pr of prs) { + if (pr.merged_at) { + prNumbers.add(pr.number); + console.log(`Found merged PR: #${pr.number}`); + } + } + } catch (error) { + console.log(`Failed to get PRs for commit ${commit.sha}: ${error.message}`); + } + } + + console.log(`Found ${prNumbers.size} 
merged PRs`); + return Array.from(prNumbers); + + - name: Comment on PRs + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 + with: + script: | + const prNumbers = ${{ steps.get_prs.outputs.result }}; + const releaseTag = '${{ github.event.release.tag_name }}'; + const releaseUrl = '${{ github.event.release.html_url }}'; + + const comment = `This pull request is included in [${releaseTag}](${releaseUrl})`; + + let commentedCount = 0; + + for (const prNumber of prNumbers) { + try { + // Check if we've already commented on this PR for this release + const { data: comments } = await github.rest.issues.listComments({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: prNumber, + per_page: 100 + }); + + const alreadyCommented = comments.some(c => + c.user.type === 'Bot' && c.body.includes(releaseTag) + ); + + if (alreadyCommented) { + console.log(`Skipping PR #${prNumber} - already commented for ${releaseTag}`); + continue; + } + + await github.rest.issues.createComment({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: prNumber, + body: comment + }); + commentedCount++; + console.log(`Successfully commented on PR #${prNumber}`); + } catch (error) { + console.error(`Failed to comment on PR #${prNumber}:`, error.message); + } + } + + console.log(`Commented on ${commentedCount} of ${prNumbers.length} PRs`); diff --git a/.github/workflows/conformance.yml b/.github/workflows/conformance.yml new file mode 100644 index 0000000000..248e5bf6ac --- /dev/null +++ b/.github/workflows/conformance.yml @@ -0,0 +1,45 @@ +name: Conformance Tests + +on: + push: + branches: [main] + pull_request: + workflow_dispatch: + +concurrency: + group: conformance-${{ github.ref }} + cancel-in-progress: true + +permissions: + contents: read + +jobs: + server-conformance: + runs-on: ubuntu-latest + continue-on-error: true + steps: + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7.2.0 + with: + enable-cache: true + version: 0.9.5 + - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 + with: + node-version: 24 + - run: uv sync --frozen --all-extras --package mcp-everything-server + - run: ./.github/actions/conformance/run-server.sh + + client-conformance: + runs-on: ubuntu-latest + continue-on-error: true + steps: + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7.2.0 + with: + enable-cache: true + version: 0.9.5 + - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 + with: + node-version: 24 + - run: uv sync --frozen --all-extras --package mcp + - run: npx @modelcontextprotocol/conformance@0.1.10 client --command 'uv run --frozen python .github/actions/conformance/client.py' --suite all diff --git a/.github/workflows/main-checks.yml b/.github/workflows/main-checks.yml deleted file mode 100644 index e2b2a97a14..0000000000 --- a/.github/workflows/main-checks.yml +++ /dev/null @@ -1,14 +0,0 @@ -name: Main branch checks - -on: - push: - branches: - - main - - "v*.*.*" - - "v1.x" - tags: - - "v*.*.*" - -jobs: - checks: - uses: ./.github/workflows/shared.yml diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml new file mode 100644 index 0000000000..d34e438fc9 --- /dev/null +++ b/.github/workflows/main.yml @@ -0,0 +1,24 @@ +name: CI + +on: + push: + branches: ["main", "v1.x"] + tags: ["v*.*.*"] + 
pull_request: + branches: ["main", "v1.x"] + +permissions: + contents: read + +jobs: + checks: + uses: ./.github/workflows/shared.yml + + all-green: + if: always() + needs: [checks] + runs-on: ubuntu-latest + steps: + - uses: re-actors/alls-green@05ac9388f0aebcb5727afa17fcccfecd6f8ec5fe # v1.2.2 + with: + jobs: ${{ toJSON(needs) }} diff --git a/.github/workflows/publish-docs-manually.yml b/.github/workflows/publish-docs-manually.yml index befe44d31c..d77c267222 100644 --- a/.github/workflows/publish-docs-manually.yml +++ b/.github/workflows/publish-docs-manually.yml @@ -9,20 +9,20 @@ jobs: permissions: contents: write steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 - name: Configure Git Credentials run: | git config user.name github-actions[bot] git config user.email 41898282+github-actions[bot]@users.noreply.github.com - name: Install uv - uses: astral-sh/setup-uv@v3 + uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7.2.0 with: enable-cache: true version: 0.9.5 - run: echo "cache_id=$(date --utc '+%V')" >> $GITHUB_ENV - - uses: actions/cache@v4 + - uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1 with: key: mkdocs-material-${{ env.cache_id }} path: .cache diff --git a/.github/workflows/publish-pypi.yml b/.github/workflows/publish-pypi.yml index 59ede84172..f96b30f864 100644 --- a/.github/workflows/publish-pypi.yml +++ b/.github/workflows/publish-pypi.yml @@ -10,10 +10,10 @@ jobs: runs-on: ubuntu-latest needs: [checks] steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 - name: Install uv - uses: astral-sh/setup-uv@v3 + uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7.2.0 with: enable-cache: true version: 0.9.5 @@ -25,7 +25,7 @@ jobs: run: uv build - name: Upload artifacts - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 with: name: release-dists path: dist/ @@ -44,13 +44,13 @@ jobs: steps: - name: Retrieve release distributions - uses: actions/download-artifact@v4 + uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 with: name: release-dists path: dist/ - name: Publish package distributions to PyPI - uses: pypa/gh-action-pypi-publish@release/v1 + uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # release/v1 docs-publish: runs-on: ubuntu-latest @@ -58,20 +58,20 @@ jobs: permissions: contents: write steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 - name: Configure Git Credentials run: | git config user.name github-actions[bot] git config user.email 41898282+github-actions[bot]@users.noreply.github.com - name: Install uv - uses: astral-sh/setup-uv@v3 + uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7.2.0 with: enable-cache: true version: 0.9.5 - run: echo "cache_id=$(date --utc '+%V')" >> $GITHUB_ENV - - uses: actions/cache@v4 + - uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1 with: key: mkdocs-material-${{ env.cache_id }} path: .cache diff --git a/.github/workflows/pull-request-checks.yml b/.github/workflows/pull-request-checks.yml deleted file mode 100644 index a7e7a8bf13..0000000000 --- a/.github/workflows/pull-request-checks.yml +++ /dev/null @@ -1,8 +0,0 @@ -name: Pull request checks - -on: - pull_request: - -jobs: - checks: - uses: ./.github/workflows/shared.yml 
diff --git a/.github/workflows/shared.yml b/.github/workflows/shared.yml index 531487db5a..108e6c6676 100644 --- a/.github/workflows/shared.yml +++ b/.github/workflows/shared.yml @@ -13,16 +13,16 @@ jobs: pre-commit: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 - - uses: astral-sh/setup-uv@v7 + - uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7.2.0 with: enable-cache: true version: 0.9.5 - name: Install dependencies run: uv sync --frozen --all-extras --python 3.10 - - uses: pre-commit/action@v3.0.1 + - uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd # v3.0.1 with: extra_args: --all-files --verbose env: @@ -35,19 +35,19 @@ jobs: continue-on-error: true strategy: matrix: - python-version: ["3.10", "3.11", "3.12", "3.13"] + python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"] dep-resolution: - name: lowest-direct install-flags: "--upgrade --resolution lowest-direct" - - name: highest - install-flags: "--upgrade --resolution highest" + - name: locked + install-flags: "--frozen" os: [ubuntu-latest, windows-latest] steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 - name: Install uv - uses: astral-sh/setup-uv@v7 + uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7.2.0 with: enable-cache: true version: 0.9.5 @@ -56,17 +56,18 @@ jobs: run: uv sync ${{ matrix.dep-resolution.install-flags }} --all-extras --python ${{ matrix.python-version }} - name: Run pytest with coverage + shell: bash run: | - uv run --frozen --no-sync coverage run -m pytest + uv run --frozen --no-sync coverage run -m pytest -n auto uv run --frozen --no-sync coverage combine uv run --frozen --no-sync coverage report readme-snippets: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 - - uses: astral-sh/setup-uv@v7 + - uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # v7.2.0 with: enable-cache: true version: 0.9.5 diff --git a/.github/workflows/weekly-lockfile-update.yml b/.github/workflows/weekly-lockfile-update.yml new file mode 100644 index 0000000000..09e1efe511 --- /dev/null +++ b/.github/workflows/weekly-lockfile-update.yml @@ -0,0 +1,40 @@ +name: Weekly Lockfile Update + +on: + workflow_dispatch: + schedule: + # Every Thursday at 8:00 UTC + - cron: "0 8 * * 4" + +permissions: + contents: write + pull-requests: write + +jobs: + update-lockfile: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v6.0.1 + + - uses: astral-sh/setup-uv@v7.2.0 + with: + version: 0.9.5 + + - name: Update lockfile + run: | + echo '## Updated Dependencies' > pr_body.md + echo '' >> pr_body.md + echo '```' >> pr_body.md + uv lock --upgrade 2>&1 | tee -a pr_body.md + echo '```' >> pr_body.md + + - name: Create pull request + uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v7 + with: + commit-message: "chore: update uv.lock with latest dependencies" + title: "chore: weekly dependency update" + body-path: pr_body.md + branch: weekly-lockfile-update + delete-branch: true + add-paths: uv.lock + labels: dependencies diff --git a/.gitignore b/.gitignore index 2478cac4b3..de16995594 100644 --- a/.gitignore +++ b/.gitignore @@ -172,3 +172,4 @@ cython_debug/ # claude code .claude/ +results/ diff --git a/CLAUDE.md b/CLAUDE.md index cc2d360602..93ddf44e92 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -6,9 
+6,9 @@ This document contains critical information about working with this codebase. Fo 1. Package Management - ONLY use uv, NEVER pip - - Installation: `uv add package` - - Running tools: `uv run tool` - - Upgrading: `uv add --dev package --upgrade-package package` + - Installation: `uv add ` + - Running tools: `uv run ` + - Upgrading: `uv lock --upgrade-package ` - FORBIDDEN: `uv pip install`, `@latest` syntax 2. Code Quality @@ -17,13 +17,17 @@ This document contains critical information about working with this codebase. Fo - Functions must be focused and small - Follow existing patterns exactly - Line length: 120 chars maximum + - FORBIDDEN: imports inside functions 3. Testing Requirements - Framework: `uv run --frozen pytest` - Async testing: use anyio, not asyncio + - Do not use `Test` prefixed classes, use functions - Coverage: test edge cases and errors - New features require tests - Bug fixes require regression tests + - IMPORTANT: The `tests/client/test_client.py` is the most well designed test file. Follow its patterns. + - IMPORTANT: Be minimal, and focus on E2E tests: Use the `mcp.client.Client` whenever possible. - For commits fixing bugs or adding features based on user reports add: @@ -51,6 +55,17 @@ This document contains critical information about working with this codebase. Fo - NEVER ever mention a `co-authored-by` or similar aspects. In particular, never mention the tool used to create the commit message or PR. +## Breaking Changes + +When making breaking changes, document them in `docs/migration.md`. Include: + +- What changed +- Why it changed +- How to migrate existing code + +Search for related sections in the migration guide and group related changes together +rather than adding new standalone sections. + ## Python Tools ## Code Formatting @@ -66,12 +81,11 @@ This document contains critical information about working with this codebase. Fo - Line wrapping: - Strings: use parentheses - Function calls: multi-line with proper indent - - Imports: split into multiple lines + - Imports: try to use a single line 2. Type Checking - Tool: `uv run --frozen pyright` - Requirements: - - Explicit None checks for Optional - Type narrowing for strings - Version warnings can be ignored if checks pass @@ -106,10 +120,6 @@ This document contains critical information about working with this codebase. Fo - Add None checks - Narrow string types - Match existing patterns - - Pytest: - - If the tests aren't finding the anyio pytest mark, try adding PYTEST_DISABLE_PLUGIN_AUTOLOAD="" - to the start of the pytest run command eg: - `PYTEST_DISABLE_PLUGIN_AUTOLOAD="" uv run --frozen pytest` 3. Best Practices - Check git status before commits @@ -127,6 +137,4 @@ This document contains critical information about working with this codebase. Fo - File ops: `except (OSError, PermissionError):` - JSON: `except json.JSONDecodeError:` - Network: `except (ConnectionError, TimeoutError):` -- **Only catch `Exception` for**: - - Top-level handlers that must not crash - - Cleanup blocks (log at debug level) +- **FORBIDDEN** `except Exception:` - unless in top-level handlers diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index c18937f5b3..64187086f3 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -2,6 +2,43 @@ Thank you for your interest in contributing to the MCP Python SDK! This document provides guidelines and instructions for contributing. +## Before You Start + +We welcome contributions! These guidelines exist to save everyone time, yours included. 
Following them means your work is more likely to be accepted. + +**All pull requests require a corresponding issue.** Unless your change is trivial (typo, docs tweak, broken link), create an issue first. Every merged feature becomes ongoing maintenance, so we need to agree something is worth doing before reviewing code. PRs without a linked issue will be closed. + +Having an issue doesn't guarantee acceptance. Wait for maintainer feedback or a `ready for work` label before starting. PRs for issues without buy-in may also be closed. + +Use issues to validate your idea before investing time in code. PRs are for execution, not exploration. + +### The SDK is Opinionated + +Not every contribution will be accepted, even with a working implementation. We prioritize maintainability and consistency over adding capabilities. This is at maintainers' discretion. + +### What Needs Discussion + +These always require an issue first: + +- New public APIs or decorators +- Architectural changes or refactoring +- Changes that touch multiple modules +- Features that might require spec changes (these need a [SEP](https://github.com/modelcontextprotocol/modelcontextprotocol) first) + +Bug fixes for clear, reproducible issues are welcome—but still create an issue to track the fix. + +### Finding Issues to Work On + +| Label | For | Description | +|-------|-----|-------------| +| [`good first issue`](https://github.com/modelcontextprotocol/python-sdk/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) | Newcomers | Can tackle without deep codebase knowledge | +| [`help wanted`](https://github.com/modelcontextprotocol/python-sdk/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) | Experienced contributors | Maintainers probably won't get to this | +| [`ready for work`](https://github.com/modelcontextprotocol/python-sdk/issues?q=is%3Aopen+is%3Aissue+label%3A%22ready+for+work%22) | Maintainers | Triaged and ready for a maintainer to pick up | + +Issues labeled `needs confirmation` or `needs maintainer action` are **not** ready for work—wait for maintainer input first. + +Before starting, comment on the issue so we can assign it to you. This prevents duplicate effort. + ## Development Setup 1. Make sure you have Python 3.10+ installed @@ -23,9 +60,14 @@ uv tool install pre-commit --with pre-commit-uv --force-reinstall ## Development Workflow 1. Choose the correct branch for your changes: - - For bug fixes to a released version: use the latest release branch (e.g. v1.1.x for 1.1.3) - - For new features: use the main branch (which will become the next minor/major version) - - If unsure, ask in an issue first + + | Change Type | Target Branch | Example | + |-------------|---------------|---------| + | New features, breaking changes | `main` | New APIs, refactors | + | Security fixes for v1 | `v1.x` | Critical patches | + | Bug fixes for v1 | `v1.x` | Non-breaking fixes | + + > **Note:** `main` is the v2 development branch. Breaking changes are welcome on `main`. The `v1.x` branch receives only security and critical bug fixes. 2. Create a new branch from your chosen base branch @@ -71,13 +113,29 @@ pre-commit run --all-files - Add type hints to all functions - Include docstrings for public APIs -## Pull Request Process +## Pull Requests + +By the time you open a PR, the "what" and "why" should already be settled in an issue. This keeps reviews focused on implementation. + +### Scope + +Small PRs get reviewed fast. Large PRs sit in the queue. + +A few dozen lines can be reviewed in minutes. 
Hundreds of lines across many files takes real effort and things slip through. If your change is big, break it into smaller PRs or get alignment from a maintainer first. + +### What Gets Rejected + +- **No prior discussion**: Features or significant changes without an approved issue +- **Scope creep**: Changes that go beyond what was discussed +- **Misalignment**: Even well-implemented features may be rejected if they don't fit the SDK's direction +- **Overengineering**: Unnecessary complexity for simple problems + +### Checklist 1. Update documentation as needed 2. Add tests for new functionality 3. Ensure CI passes -4. Maintainers will review your code -5. Address review feedback +4. Address review feedback ## Code of Conduct diff --git a/README.md b/README.md index e7a6e955b9..468e1d85da 100644 --- a/README.md +++ b/README.md @@ -13,6 +13,13 @@ +> [!IMPORTANT] +> **This is the `main` branch which contains v2 of the SDK (currently in development, pre-alpha).** +> +> We anticipate a stable v2 release in Q1 2026. Until then, **v1.x remains the recommended version** for production use. v1.x will continue to receive bug fixes and security updates for at least 6 months after v2 ships to give people time to upgrade. +> +> For v1 documentation and code, see the [`v1.x` branch](https://github.com/modelcontextprotocol/python-sdk/tree/v1.x). + ## Table of Contents @@ -129,8 +136,7 @@ Let's create a simple MCP server that exposes a calculator tool and some data: ```python -""" -FastMCP quickstart example. +"""FastMCP quickstart example. Run from the repository root: uv run examples/snippets/servers/fastmcp_quickstart.py @@ -139,7 +145,7 @@ Run from the repository root: from mcp.server.fastmcp import FastMCP # Create an MCP server -mcp = FastMCP("Demo", json_response=True) +mcp = FastMCP("Demo") # Add an addition tool @@ -171,7 +177,7 @@ def greet_user(name: str, style: str = "friendly") -> str: # Run with streamable HTTP transport if __name__ == "__main__": - mcp.run(transport="streamable-http") + mcp.run(transport="streamable-http", json_response=True) ``` _Full example: [examples/snippets/servers/fastmcp_quickstart.py](https://github.com/modelcontextprotocol/python-sdk/blob/main/examples/snippets/servers/fastmcp_quickstart.py)_ @@ -435,7 +441,7 @@ def validated_tool() -> Annotated[CallToolResult, ValidationModel]: """Return CallToolResult with structured output validation.""" return CallToolResult( content=[TextContent(type="text", text="Validated response")], - structuredContent={"status": "success", "data": {"result": 42}}, + structured_content={"status": "success", "data": {"result": 42}}, _meta={"internal": "metadata"}, ) @@ -720,9 +726,8 @@ Client usage: ```python -""" -cd to the `examples/snippets` directory and run: - uv run completion-client +"""cd to the `examples/snippets` directory and run: +uv run completion-client """ import asyncio @@ -750,8 +755,8 @@ async def run(): # List available resource templates templates = await session.list_resource_templates() print("Available resource templates:") - for template in templates.resourceTemplates: - print(f" - {template.uriTemplate}") + for template in templates.resource_templates: + print(f" - {template.uri_template}") # List available prompts prompts = await session.list_prompts() @@ -760,20 +765,20 @@ async def run(): print(f" - {prompt.name}") # Complete resource template arguments - if templates.resourceTemplates: - template = templates.resourceTemplates[0] - print(f"\nCompleting arguments for resource template: {template.uriTemplate}") 
+ if templates.resource_templates: + template = templates.resource_templates[0] + print(f"\nCompleting arguments for resource template: {template.uri_template}") # Complete without context result = await session.complete( - ref=ResourceTemplateReference(type="ref/resource", uri=template.uriTemplate), + ref=ResourceTemplateReference(type="ref/resource", uri=template.uri_template), argument={"name": "owner", "value": "model"}, ) print(f"Completions for 'owner' starting with 'model': {result.completion.values}") # Complete with context - repo suggestions based on owner result = await session.complete( - ref=ResourceTemplateReference(type="ref/resource", uri=template.uriTemplate), + ref=ResourceTemplateReference(type="ref/resource", uri=template.uri_template), argument={"name": "repo", "value": ""}, context_arguments={"owner": "modelcontextprotocol"}, ) @@ -903,7 +908,7 @@ async def connect_service(service_name: str, ctx: Context[ServerSession, None]) mode="url", message=f"Authorization required to connect to {service_name}", url=f"https://{service_name}.example.com/oauth/authorize?elicit={elicitation_id}", - elicitationId=elicitation_id, + elicitation_id=elicitation_id, ) ] ) @@ -997,9 +1002,8 @@ MCP servers can use authentication by providing an implementation of the `TokenV ```python -""" -Run from the repository root: - uv run examples/snippets/servers/oauth_server.py +"""Run from the repository root: +uv run examples/snippets/servers/oauth_server.py """ from pydantic import AnyHttpUrl @@ -1019,7 +1023,6 @@ class SimpleTokenVerifier(TokenVerifier): # Create FastMCP instance as a Resource Server mcp = FastMCP( "Weather Service", - json_response=True, # Token verifier for authentication token_verifier=SimpleTokenVerifier(), # Auth settings for RFC 9728 Protected Resource Metadata @@ -1043,7 +1046,7 @@ async def get_weather(city: str = "London") -> dict[str, str]: if __name__ == "__main__": - mcp.run(transport="streamable-http") + mcp.run(transport="streamable-http", json_response=True) ``` _Full example: [examples/snippets/servers/oauth_server.py](https://github.com/modelcontextprotocol/python-sdk/blob/main/examples/snippets/servers/oauth_server.py)_ @@ -1071,7 +1074,7 @@ The FastMCP server instance accessible via `ctx.fastmcp` provides access to serv - `debug` - Debug mode flag - `log_level` - Current logging level - `host` and `port` - Server network configuration - - `mount_path`, `sse_path`, `streamable_http_path` - Transport paths + - `sse_path`, `streamable_http_path` - Transport paths - `stateless_http` - Whether the server operates in stateless mode - And other configuration options @@ -1239,22 +1242,13 @@ Note that `uv run mcp run` or `uv run mcp dev` only supports server using FastMC ```python -""" -Run from the repository root: - uv run examples/snippets/servers/streamable_config.py +"""Run from the repository root: +uv run examples/snippets/servers/streamable_config.py """ from mcp.server.fastmcp import FastMCP -# Stateless server with JSON responses (recommended) -mcp = FastMCP("StatelessServer", stateless_http=True, json_response=True) - -# Other configuration options: -# Stateless server with SSE streaming responses -# mcp = FastMCP("StatelessServer", stateless_http=True) - -# Stateful server with session persistence -# mcp = FastMCP("StatefulServer") +mcp = FastMCP("StatelessServer") # Add a simple tool to demonstrate the server @@ -1265,8 +1259,17 @@ def greet(name: str = "World") -> str: # Run server with streamable_http transport +# Transport-specific options 
(stateless_http, json_response) are passed to run() if __name__ == "__main__": - mcp.run(transport="streamable-http") + # Stateless server with JSON responses (recommended) + mcp.run(transport="streamable-http", stateless_http=True, json_response=True) + + # Other configuration options: + # Stateless server with SSE streaming responses + # mcp.run(transport="streamable-http", stateless_http=True) + + # Stateful server with session persistence + # mcp.run(transport="streamable-http") ``` _Full example: [examples/snippets/servers/streamable_config.py](https://github.com/modelcontextprotocol/python-sdk/blob/main/examples/snippets/servers/streamable_config.py)_ @@ -1276,9 +1279,8 @@ You can mount multiple FastMCP servers in a Starlette application: ```python -""" -Run from the repository root: - uvicorn examples.snippets.servers.streamable_starlette_mount:app --reload +"""Run from the repository root: +uvicorn examples.snippets.servers.streamable_starlette_mount:app --reload """ import contextlib @@ -1289,7 +1291,7 @@ from starlette.routing import Mount from mcp.server.fastmcp import FastMCP # Create the Echo server -echo_mcp = FastMCP(name="EchoServer", stateless_http=True, json_response=True) +echo_mcp = FastMCP(name="EchoServer") @echo_mcp.tool() @@ -1299,7 +1301,7 @@ def echo(message: str) -> str: # Create the Math server -math_mcp = FastMCP(name="MathServer", stateless_http=True, json_response=True) +math_mcp = FastMCP(name="MathServer") @math_mcp.tool() @@ -1320,16 +1322,16 @@ async def lifespan(app: Starlette): # Create the Starlette app and mount the MCP servers app = Starlette( routes=[ - Mount("/echo", echo_mcp.streamable_http_app()), - Mount("/math", math_mcp.streamable_http_app()), + Mount("/echo", echo_mcp.streamable_http_app(stateless_http=True, json_response=True)), + Mount("/math", math_mcp.streamable_http_app(stateless_http=True, json_response=True)), ], lifespan=lifespan, ) # Note: Clients connect to http://localhost:8000/echo/mcp and http://localhost:8000/math/mcp # To mount at the root of each path (e.g., /echo instead of /echo/mcp): -# echo_mcp.settings.streamable_http_path = "/" -# math_mcp.settings.streamable_http_path = "/" +# echo_mcp.streamable_http_app(streamable_http_path="/", stateless_http=True, json_response=True) +# math_mcp.streamable_http_app(streamable_http_path="/", stateless_http=True, json_response=True) ``` _Full example: [examples/snippets/servers/streamable_starlette_mount.py](https://github.com/modelcontextprotocol/python-sdk/blob/main/examples/snippets/servers/streamable_starlette_mount.py)_ @@ -1387,8 +1389,7 @@ You can mount the StreamableHTTP server to an existing ASGI server using the `st ```python -""" -Basic example showing how to mount StreamableHTTP server in Starlette. +"""Basic example showing how to mount StreamableHTTP server in Starlette. 
Run from the repository root: uvicorn examples.snippets.servers.streamable_http_basic_mounting:app --reload @@ -1402,7 +1403,7 @@ from starlette.routing import Mount from mcp.server.fastmcp import FastMCP # Create MCP server -mcp = FastMCP("My App", json_response=True) +mcp = FastMCP("My App") @mcp.tool() @@ -1419,9 +1420,10 @@ async def lifespan(app: Starlette): # Mount the StreamableHTTP server to the existing ASGI server +# Transport-specific options are passed to streamable_http_app() app = Starlette( routes=[ - Mount("/", app=mcp.streamable_http_app()), + Mount("/", app=mcp.streamable_http_app(json_response=True)), ], lifespan=lifespan, ) @@ -1434,8 +1436,7 @@ _Full example: [examples/snippets/servers/streamable_http_basic_mounting.py](htt ```python -""" -Example showing how to mount StreamableHTTP server using Host-based routing. +"""Example showing how to mount StreamableHTTP server using Host-based routing. Run from the repository root: uvicorn examples.snippets.servers.streamable_http_host_mounting:app --reload @@ -1449,7 +1450,7 @@ from starlette.routing import Host from mcp.server.fastmcp import FastMCP # Create MCP server -mcp = FastMCP("MCP Host App", json_response=True) +mcp = FastMCP("MCP Host App") @mcp.tool() @@ -1466,9 +1467,10 @@ async def lifespan(app: Starlette): # Mount using Host-based routing +# Transport-specific options are passed to streamable_http_app() app = Starlette( routes=[ - Host("mcp.acme.corp", app=mcp.streamable_http_app()), + Host("mcp.acme.corp", app=mcp.streamable_http_app(json_response=True)), ], lifespan=lifespan, ) @@ -1481,8 +1483,7 @@ _Full example: [examples/snippets/servers/streamable_http_host_mounting.py](http ```python -""" -Example showing how to mount multiple StreamableHTTP servers with path configuration. +"""Example showing how to mount multiple StreamableHTTP servers with path configuration. 
Run from the repository root: uvicorn examples.snippets.servers.streamable_http_multiple_servers:app --reload @@ -1496,8 +1497,8 @@ from starlette.routing import Mount from mcp.server.fastmcp import FastMCP # Create multiple MCP servers -api_mcp = FastMCP("API Server", json_response=True) -chat_mcp = FastMCP("Chat Server", json_response=True) +api_mcp = FastMCP("API Server") +chat_mcp = FastMCP("Chat Server") @api_mcp.tool() @@ -1512,12 +1513,6 @@ def send_message(message: str) -> str: return f"Message sent: {message}" -# Configure servers to mount at the root of each path -# This means endpoints will be at /api and /chat instead of /api/mcp and /chat/mcp -api_mcp.settings.streamable_http_path = "/" -chat_mcp.settings.streamable_http_path = "/" - - # Create a combined lifespan to manage both session managers @contextlib.asynccontextmanager async def lifespan(app: Starlette): @@ -1527,11 +1522,12 @@ async def lifespan(app: Starlette): yield -# Mount the servers +# Mount the servers with transport-specific options passed to streamable_http_app() +# streamable_http_path="/" means endpoints will be at /api and /chat instead of /api/mcp and /chat/mcp app = Starlette( routes=[ - Mount("/api", app=api_mcp.streamable_http_app()), - Mount("/chat", app=chat_mcp.streamable_http_app()), + Mount("/api", app=api_mcp.streamable_http_app(json_response=True, streamable_http_path="/")), + Mount("/chat", app=chat_mcp.streamable_http_app(json_response=True, streamable_http_path="/")), ], lifespan=lifespan, ) @@ -1544,8 +1540,7 @@ _Full example: [examples/snippets/servers/streamable_http_multiple_servers.py](h ```python -""" -Example showing path configuration during FastMCP initialization. +"""Example showing path configuration when mounting FastMCP. Run from the repository root: uvicorn examples.snippets.servers.streamable_http_path_config:app --reload @@ -1556,13 +1551,8 @@ from starlette.routing import Mount from mcp.server.fastmcp import FastMCP -# Configure streamable_http_path during initialization -# This server will mount at the root of wherever it's mounted -mcp_at_root = FastMCP( - "My Server", - json_response=True, - streamable_http_path="/", -) +# Create a simple FastMCP server +mcp_at_root = FastMCP("My Server") @mcp_at_root.tool() @@ -1571,10 +1561,14 @@ def process_data(data: str) -> str: return f"Processed: {data}" -# Mount at /process - endpoints will be at /process instead of /process/mcp +# Mount at /process with streamable_http_path="/" so the endpoint is /process (not /process/mcp) +# Transport-specific options like json_response are passed to streamable_http_app() app = Starlette( routes=[ - Mount("/process", app=mcp_at_root.streamable_http_app()), + Mount( + "/process", + app=mcp_at_root.streamable_http_app(json_response=True, streamable_http_path="/"), + ), ] ) ``` @@ -1607,7 +1601,7 @@ app = Starlette( app.router.routes.append(Host('mcp.acme.corp', app=mcp.sse_app())) ``` -When mounting multiple MCP servers under different paths, you can configure the mount path in several ways: +You can also mount multiple MCP servers at different sub-paths. 
The SSE transport automatically detects the mount path via ASGI's `root_path` mechanism, so message endpoints are correctly routed: ```python from starlette.applications import Starlette @@ -1617,31 +1611,18 @@ from mcp.server.fastmcp import FastMCP # Create multiple MCP servers github_mcp = FastMCP("GitHub API") browser_mcp = FastMCP("Browser") -curl_mcp = FastMCP("Curl") search_mcp = FastMCP("Search") -# Method 1: Configure mount paths via settings (recommended for persistent configuration) -github_mcp.settings.mount_path = "/github" -browser_mcp.settings.mount_path = "/browser" - -# Method 2: Pass mount path directly to sse_app (preferred for ad-hoc mounting) -# This approach doesn't modify the server's settings permanently - -# Create Starlette app with multiple mounted servers +# Mount each server at its own sub-path +# The SSE transport automatically uses ASGI's root_path to construct +# the correct message endpoint (e.g., /github/messages/, /browser/messages/) app = Starlette( routes=[ - # Using settings-based configuration Mount("/github", app=github_mcp.sse_app()), Mount("/browser", app=browser_mcp.sse_app()), - # Using direct mount path parameter - Mount("/curl", app=curl_mcp.sse_app("/curl")), - Mount("/search", app=search_mcp.sse_app("/search")), + Mount("/search", app=search_mcp.sse_app()), ] ) - -# Method 3: For direct execution, you can also pass the mount path to run() -if __name__ == "__main__": - search_mcp.run(transport="sse", mount_path="/search") ``` For more information on mounting applications in Starlette, see the [Starlette documentation](https://www.starlette.io/routing/#submounting-routes). @@ -1654,9 +1635,8 @@ For more control, you can use the low-level server implementation directly. This ```python -""" -Run from the repository root: - uv run examples/snippets/servers/lowlevel/lifespan.py +"""Run from the repository root: +uv run examples/snippets/servers/lowlevel/lifespan.py """ from collections.abc import AsyncIterator @@ -1712,7 +1692,7 @@ async def handle_list_tools() -> list[types.Tool]: types.Tool( name="query_db", description="Query the database", - inputSchema={ + input_schema={ "type": "object", "properties": {"query": {"type": "string", "description": "SQL query to execute"}}, "required": ["query"], @@ -1771,8 +1751,7 @@ The lifespan API provides: ```python -""" -Run from the repository root: +"""Run from the repository root: uv run examples/snippets/servers/lowlevel/basic.py """ @@ -1850,9 +1829,8 @@ The low-level server supports structured output for tools, allowing you to retur ```python -""" -Run from the repository root: - uv run examples/snippets/servers/lowlevel/structured_output.py +"""Run from the repository root: +uv run examples/snippets/servers/lowlevel/structured_output.py """ import asyncio @@ -1873,12 +1851,12 @@ async def list_tools() -> list[types.Tool]: types.Tool( name="get_weather", description="Get current weather for a city", - inputSchema={ + input_schema={ "type": "object", "properties": {"city": {"type": "string", "description": "City name"}}, "required": ["city"], }, - outputSchema={ + output_schema={ "type": "object", "properties": { "temperature": {"type": "number", "description": "Temperature in Celsius"}, @@ -1953,9 +1931,8 @@ For full control over the response including the `_meta` field (for passing data ```python -""" -Run from the repository root: - uv run examples/snippets/servers/lowlevel/direct_call_tool_result.py +"""Run from the repository root: +uv run 
examples/snippets/servers/lowlevel/direct_call_tool_result.py """ import asyncio @@ -1976,7 +1953,7 @@ async def list_tools() -> list[types.Tool]: types.Tool( name="advanced_tool", description="Tool with full control including _meta field", - inputSchema={ + input_schema={ "type": "object", "properties": {"message": {"type": "string"}}, "required": ["message"], @@ -1992,7 +1969,7 @@ async def handle_call_tool(name: str, arguments: dict[str, Any]) -> types.CallTo message = str(arguments.get("message", "")) return types.CallToolResult( content=[types.TextContent(type="text", text=f"Processed: {message}")], - structuredContent={"result": "success", "message": message}, + structured_content={"result": "success", "message": message}, _meta={"hidden": "data for client applications only"}, ) @@ -2033,11 +2010,7 @@ For servers that need to handle large datasets, the low-level server provides pa ```python -""" -Example of implementing pagination with MCP server decorators. -""" - -from pydantic import AnyUrl +"""Example of implementing pagination with MCP server decorators.""" import mcp.types as types from mcp.server.lowlevel import Server @@ -2063,14 +2036,14 @@ async def list_resources_paginated(request: types.ListResourcesRequest) -> types # Get page of resources page_items = [ - types.Resource(uri=AnyUrl(f"resource://items/{item}"), name=item, description=f"Description for {item}") + types.Resource(uri=f"resource://items/{item}", name=item, description=f"Description for {item}") for item in ITEMS[start:end] ] # Determine next cursor next_cursor = str(end) if end < len(ITEMS) else None - return types.ListResourcesResult(resources=page_items, nextCursor=next_cursor) + return types.ListResourcesResult(resources=page_items, next_cursor=next_cursor) ``` _Full example: [examples/snippets/servers/pagination_example.py](https://github.com/modelcontextprotocol/python-sdk/blob/main/examples/snippets/servers/pagination_example.py)_ @@ -2080,9 +2053,7 @@ _Full example: [examples/snippets/servers/pagination_example.py](https://github. ```python -""" -Example of consuming paginated MCP endpoints from a client. -""" +"""Example of consuming paginated MCP endpoints from a client.""" import asyncio @@ -2111,8 +2082,8 @@ async def list_all_resources() -> None: print(f"Fetched {len(result.resources)} resources") # Check if there are more pages - if result.nextCursor: - cursor = result.nextCursor + if result.next_cursor: + cursor = result.next_cursor else: break @@ -2141,9 +2112,8 @@ The SDK provides a high-level client interface for connecting to MCP servers usi ```python -""" -cd to the `examples/snippets/clients` directory and run: - uv run client +"""cd to the `examples/snippets/clients` directory and run: +uv run client """ import asyncio @@ -2175,7 +2145,7 @@ async def handle_sampling_message( text="Hello, world! 
from model", ), model="gpt-3.5-turbo", - stopReason="endTurn", + stop_reason="endTurn", ) @@ -2213,7 +2183,7 @@ async def run(): result_unstructured = result.content[0] if isinstance(result_unstructured, types.TextContent): print(f"Tool result: {result_unstructured.text}") - result_structured = result.structuredContent + result_structured = result.structured_content print(f"Structured tool result: {result_structured}") @@ -2233,9 +2203,8 @@ Clients can also connect using [Streamable HTTP transport](https://modelcontextp ```python -""" -Run from the repository root: - uv run examples/snippets/clients/streamable_basic.py +"""Run from the repository root: +uv run examples/snippets/clients/streamable_basic.py """ import asyncio @@ -2273,9 +2242,8 @@ When building MCP clients, the SDK provides utilities to help display human-read ```python -""" -cd to the `examples/snippets` directory and run: - uv run display-utilities-client +"""cd to the `examples/snippets` directory and run: +uv run display-utilities-client """ import asyncio @@ -2314,7 +2282,7 @@ async def display_resources(session: ClientSession): print(f"Resource: {display_name} ({resource.uri})") templates_response = await session.list_resource_templates() - for template in templates_response.resourceTemplates: + for template in templates_response.resource_templates: display_name = get_display_name(template) print(f"Resource Template: {display_name}") @@ -2358,8 +2326,7 @@ The SDK includes [authorization support](https://modelcontextprotocol.io/specifi ```python -""" -Before running, specify running MCP RS server URL. +"""Before running, specify running MCP RS server URL. To spin up RS server locally, see examples/servers/simple-auth/README.md diff --git a/docs/migration.md b/docs/migration.md new file mode 100644 index 0000000000..19dc9326d8 --- /dev/null +++ b/docs/migration.md @@ -0,0 +1,315 @@ +# Migration Guide: v1 to v2 + +This guide covers the breaking changes introduced in v2 of the MCP Python SDK and how to update your code. + +## Overview + +Version 2 of the MCP Python SDK introduces several breaking changes to improve the API, align with the MCP specification, and provide better type safety. + +## Breaking Changes + +### `streamablehttp_client` removed + +The deprecated `streamablehttp_client` function has been removed. Use `streamable_http_client` instead. + +**Before (v1):** + +```python +from mcp.client.streamable_http import streamablehttp_client + +async with streamablehttp_client( + url="http://localhost:8000/mcp", + headers={"Authorization": "Bearer token"}, + timeout=30, + sse_read_timeout=300, + auth=my_auth, +) as (read_stream, write_stream, get_session_id): + ... +``` + +**After (v2):** + +```python +import httpx +from mcp.client.streamable_http import streamable_http_client + +# Configure headers, timeout, and auth on the httpx.AsyncClient +http_client = httpx.AsyncClient( + headers={"Authorization": "Bearer token"}, + timeout=httpx.Timeout(30, read=300), + auth=my_auth, +) + +async with http_client: + async with streamable_http_client( + url="http://localhost:8000/mcp", + http_client=http_client, + ) as (read_stream, write_stream, get_session_id): + ... +``` + +### `StreamableHTTPTransport` parameters removed + +The `headers`, `timeout`, `sse_read_timeout`, and `auth` parameters have been removed from `StreamableHTTPTransport`. Configure these on the `httpx.AsyncClient` instead (see example above). 
+ +### Removed type aliases and classes + +The following deprecated type aliases and classes have been removed from `mcp.types`: + +| Removed | Replacement | +|---------|-------------| +| `Content` | `ContentBlock` | +| `ResourceReference` | `ResourceTemplateReference` | + +**Before (v1):** + +```python +from mcp.types import Content, ResourceReference +``` + +**After (v2):** + +```python +from mcp.types import ContentBlock, ResourceTemplateReference +``` + +### `args` parameter removed from `ClientSessionGroup.call_tool()` + +The deprecated `args` parameter has been removed from `ClientSessionGroup.call_tool()`. Use `arguments` instead. + +**Before (v1):** + +```python +result = await session_group.call_tool("my_tool", args={"key": "value"}) +``` + +**After (v2):** + +```python +result = await session_group.call_tool("my_tool", arguments={"key": "value"}) +``` + +### `cursor` parameter removed from `ClientSession` list methods + +The deprecated `cursor` parameter has been removed from the following `ClientSession` methods: + +- `list_resources()` +- `list_resource_templates()` +- `list_prompts()` +- `list_tools()` + +Use `params=PaginatedRequestParams(cursor=...)` instead. + +**Before (v1):** + +```python +result = await session.list_resources(cursor="next_page_token") +result = await session.list_tools(cursor="next_page_token") +``` + +**After (v2):** + +```python +from mcp.types import PaginatedRequestParams + +result = await session.list_resources(params=PaginatedRequestParams(cursor="next_page_token")) +result = await session.list_tools(params=PaginatedRequestParams(cursor="next_page_token")) +``` + +### `mount_path` parameter removed from FastMCP + +The `mount_path` parameter has been removed from `FastMCP.__init__()`, `FastMCP.run()`, `FastMCP.run_sse_async()`, and `FastMCP.sse_app()`. It was also removed from the `Settings` class. + +This parameter was redundant because the SSE transport already handles sub-path mounting via ASGI's standard `root_path` mechanism. When using Starlette's `Mount("/path", app=mcp.sse_app())`, Starlette automatically sets `root_path` in the ASGI scope, and the `SseServerTransport` uses this to construct the correct message endpoint path. + +### Transport-specific parameters moved from FastMCP constructor to run()/app methods + +Transport-specific parameters have been moved from the `FastMCP` constructor to the `run()`, `sse_app()`, and `streamable_http_app()` methods. This provides better separation of concerns - the constructor now only handles server identity and authentication, while transport configuration is passed when starting the server. 
+ +**Parameters moved:** + +- `host`, `port` - HTTP server binding +- `sse_path`, `message_path` - SSE transport paths +- `streamable_http_path` - StreamableHTTP endpoint path +- `json_response`, `stateless_http` - StreamableHTTP behavior +- `event_store`, `retry_interval` - StreamableHTTP event handling +- `transport_security` - DNS rebinding protection + +**Before (v1):** + +```python +from mcp.server.fastmcp import FastMCP + +# Transport params in constructor +mcp = FastMCP("Demo", json_response=True, stateless_http=True) +mcp.run(transport="streamable-http") + +# Or for SSE +mcp = FastMCP("Server", host="0.0.0.0", port=9000, sse_path="/events") +mcp.run(transport="sse") +``` + +**After (v2):** + +```python +from mcp.server.fastmcp import FastMCP + +# Transport params passed to run() +mcp = FastMCP("Demo") +mcp.run(transport="streamable-http", json_response=True, stateless_http=True) + +# Or for SSE +mcp = FastMCP("Server") +mcp.run(transport="sse", host="0.0.0.0", port=9000, sse_path="/events") +``` + +**For mounted apps:** + +When mounting FastMCP in a Starlette app, pass transport params to the app methods: + +```python +# Before (v1) +mcp = FastMCP("App", json_response=True) +app = Starlette(routes=[Mount("/", app=mcp.streamable_http_app())]) + +# After (v2) +mcp = FastMCP("App") +app = Starlette(routes=[Mount("/", app=mcp.streamable_http_app(json_response=True))]) +``` + +**Note:** DNS rebinding protection is automatically enabled when `host` is `127.0.0.1`, `localhost`, or `::1`. This now happens in `sse_app()` and `streamable_http_app()` instead of the constructor. + +### Replace `RootModel` by union types with `TypeAdapter` validation + +The following union types are no longer `RootModel` subclasses: + +- `ClientRequest` +- `ServerRequest` +- `ClientNotification` +- `ServerNotification` +- `ClientResult` +- `ServerResult` +- `JSONRPCMessage` + +This means you can no longer access `.root` on these types or use `model_validate()` directly on them. Instead, use the provided `TypeAdapter` instances for validation. + +**Before (v1):** + +```python +from mcp.types import ClientRequest, ServerNotification + +# Using RootModel.model_validate() +request = ClientRequest.model_validate(data) +actual_request = request.root # Accessing the wrapped value + +notification = ServerNotification.model_validate(data) +actual_notification = notification.root +``` + +**After (v2):** + +```python +from mcp.types import client_request_adapter, server_notification_adapter + +# Using TypeAdapter.validate_python() +request = client_request_adapter.validate_python(data) +# No .root access needed - request is the actual type + +notification = server_notification_adapter.validate_python(data) +# No .root access needed - notification is the actual type +``` + +**Available adapters:** + +| Union Type | Adapter | +|------------|---------| +| `ClientRequest` | `client_request_adapter` | +| `ServerRequest` | `server_request_adapter` | +| `ClientNotification` | `client_notification_adapter` | +| `ServerNotification` | `server_notification_adapter` | +| `ClientResult` | `client_result_adapter` | +| `ServerResult` | `server_result_adapter` | +| `JSONRPCMessage` | `jsonrpc_message_adapter` | + +All adapters are exported from `mcp.types`. + +### Resource URI type changed from `AnyUrl` to `str` + +The `uri` field on resource-related types now uses `str` instead of Pydantic's `AnyUrl`. 
This aligns with the [MCP specification schema](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/main/schema/draft/schema.ts) which defines URIs as plain strings (`uri: string`) without strict URL validation. This change allows relative paths like `users/me` that were previously rejected. + +**Before (v1):** + +```python +from pydantic import AnyUrl +from mcp.types import Resource + +# Required wrapping in AnyUrl +resource = Resource(name="test", uri=AnyUrl("users/me")) # Would fail validation +``` + +**After (v2):** + +```python +from mcp.types import Resource + +# Plain strings accepted +resource = Resource(name="test", uri="users/me") # Works +resource = Resource(name="test", uri="custom://scheme") # Works +resource = Resource(name="test", uri="https://example.com") # Works +``` + +If your code passes `AnyUrl` objects to URI fields, convert them to strings: + +```python +# If you have an AnyUrl from elsewhere +uri = str(my_any_url) # Convert to string +``` + +Affected types: + +- `Resource.uri` +- `ReadResourceRequestParams.uri` +- `ResourceContents.uri` (and subclasses `TextResourceContents`, `BlobResourceContents`) +- `SubscribeRequestParams.uri` +- `UnsubscribeRequestParams.uri` +- `ResourceUpdatedNotificationParams.uri` + +The `ClientSession.read_resource()`, `subscribe_resource()`, and `unsubscribe_resource()` methods now accept both `str` and `AnyUrl` for backwards compatibility. + +## Deprecations + + + +## New Features + +### `streamable_http_app()` available on lowlevel Server + +The `streamable_http_app()` method is now available directly on the lowlevel `Server` class, not just `FastMCP`. This allows using the streamable HTTP transport without the FastMCP wrapper. + +```python +from mcp.server.lowlevel.server import Server + +server = Server("my-server") + +# Register handlers... +@server.list_tools() +async def list_tools(): + return [...] + +# Create a Starlette app for streamable HTTP +app = server.streamable_http_app( + streamable_http_path="/mcp", + json_response=False, + stateless_http=False, +) +``` + +The lowlevel `Server` also now exposes a `session_manager` property to access the `StreamableHTTPSessionManager` after calling `streamable_http_app()`. + +## Need Help? + +If you encounter issues during migration: + +1. Check the [API Reference](api.md) for updated method signatures +2. Review the [examples](https://github.com/modelcontextprotocol/python-sdk/tree/main/examples) for updated usage patterns +3. Open an issue on [GitHub](https://github.com/modelcontextprotocol/python-sdk/issues) if you find a bug or need further assistance diff --git a/docs/testing.md b/docs/testing.md index 8d84449893..f869873608 100644 --- a/docs/testing.md +++ b/docs/testing.md @@ -1,10 +1,11 @@ # Testing MCP Servers -If you call yourself a developer, you will want to test your MCP server. -The Python SDK offers the `create_connected_server_and_client_session` function to create a session -using an in-memory transport. I know, I know, the name is too long... We are working on improving it. +The Python SDK provides a `Client` class for testing MCP servers with an in-memory transport. +This makes it easy to write tests without network overhead. 
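+
+As a quick preview (a minimal sketch, assuming the `server.py` example defined in the next section), the same `Client` can also be driven directly outside a test runner:
+
+```python
+import asyncio
+
+from mcp import Client
+
+from server import app  # the FastMCP server shown below
+
+
+async def main() -> None:
+    async with Client(app) as client:
+        result = await client.call_tool("add", {"a": 1, "b": 2})
+        print(result.content)
+
+
+asyncio.run(main())
+```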
-Anyway, let's assume you have a simple server with a single tool: +## Basic Usage + +Let's assume you have a simple server with a single tool: ```python title="server.py" from mcp.server import FastMCP @@ -40,12 +41,9 @@ To run the below test, you'll need to install the following dependencies: server - you don't need to use it, but we are spreading the word for best practices. ```python title="test_server.py" -from collections.abc import AsyncGenerator - import pytest from inline_snapshot import snapshot -from mcp.client.session import ClientSession -from mcp.shared.memory import create_connected_server_and_client_session +from mcp import Client from mcp.types import CallToolResult, TextContent from server import app @@ -57,14 +55,14 @@ def anyio_backend(): # (1)! @pytest.fixture -async def client_session() -> AsyncGenerator[ClientSession]: - async with create_connected_server_and_client_session(app, raise_exceptions=True) as _session: - yield _session +async def client(): # (2)! + async with Client(app, raise_exceptions=True) as c: + yield c @pytest.mark.anyio -async def test_call_add_tool(client_session: ClientSession): - result = await client_session.call_tool("add", {"a": 1, "b": 2}) +async def test_call_add_tool(client: Client): + result = await client.call_tool("add", {"a": 1, "b": 2}) assert result == snapshot( CallToolResult( content=[TextContent(type="text", text="3")], @@ -74,5 +72,6 @@ async def test_call_add_tool(client_session: ClientSession): ``` 1. If you are using `trio`, you should set `"trio"` as the `anyio_backend`. Check more information in the [anyio documentation](https://anyio.readthedocs.io/en/stable/testing.html#specifying-the-backends-to-run-on). +2. The `client` fixture creates a connected client that can be reused across multiple tests. There you go! You can now extend your tests to cover more scenarios. diff --git a/examples/clients/conformance-auth-client/README.md b/examples/clients/conformance-auth-client/README.md deleted file mode 100644 index 312a992d0a..0000000000 --- a/examples/clients/conformance-auth-client/README.md +++ /dev/null @@ -1,49 +0,0 @@ -# MCP Conformance Auth Client - -A Python OAuth client designed for use with the MCP conformance test framework. - -## Overview - -This client implements OAuth authentication for MCP and is designed to work automatically with the conformance test framework without requiring user interaction. It programmatically fetches authorization URLs and extracts auth codes from redirects. - -## Installation - -```bash -cd examples/clients/conformance-auth-client -uv sync -``` - -## Usage with Conformance Tests - -Run the auth conformance tests against this Python client: - -```bash -# From the conformance repository -npx @modelcontextprotocol/conformance client \ - --command "uv run --directory /path/to/python-sdk/examples/clients/conformance-auth-client python -m mcp_conformance_auth_client" \ - --scenario auth/basic-dcr -``` - -Available auth test scenarios: - -- `auth/basic-dcr` - Tests OAuth Dynamic Client Registration flow -- `auth/basic-metadata-var1` - Tests OAuth with authorization metadata - -## How It Works - -Unlike interactive OAuth clients that open a browser for user authentication, this client: - -1. Receives the authorization URL from the OAuth provider -2. Makes an HTTP request to that URL directly (without following redirects) -3. Extracts the authorization code from the redirect response -4. 
Uses the code to complete the OAuth token exchange - -This allows the conformance test framework's mock OAuth server to automatically provide auth codes without human interaction. - -## Direct Usage - -You can also run the client directly: - -```bash -uv run python -m mcp_conformance_auth_client http://localhost:3000/mcp -``` diff --git a/examples/clients/conformance-auth-client/mcp_conformance_auth_client/__init__.py b/examples/clients/conformance-auth-client/mcp_conformance_auth_client/__init__.py deleted file mode 100644 index eecd92409a..0000000000 --- a/examples/clients/conformance-auth-client/mcp_conformance_auth_client/__init__.py +++ /dev/null @@ -1,316 +0,0 @@ -#!/usr/bin/env python3 -""" -MCP OAuth conformance test client. - -This client is designed to work with the MCP conformance test framework. -It automatically handles OAuth flows without user interaction by programmatically -fetching the authorization URL and extracting the auth code from the redirect. - -Usage: - python -m mcp_conformance_auth_client - -Environment Variables: - MCP_CONFORMANCE_CONTEXT - JSON object containing test credentials: - { - "client_id": "...", - "client_secret": "...", # For client_secret_basic flow - "private_key_pem": "...", # For private_key_jwt flow - "signing_algorithm": "ES256" # Optional, defaults to ES256 - } - -Scenarios: - auth/* - Authorization code flow scenarios (default behavior) - auth/client-credentials-jwt - Client credentials with JWT authentication (SEP-1046) - auth/client-credentials-basic - Client credentials with client_secret_basic -""" - -import asyncio -import json -import logging -import os -import sys -from datetime import timedelta -from urllib.parse import ParseResult, parse_qs, urlparse - -import httpx -from mcp import ClientSession -from mcp.client.auth import OAuthClientProvider, TokenStorage -from mcp.client.auth.extensions.client_credentials import ( - ClientCredentialsOAuthProvider, - PrivateKeyJWTOAuthProvider, - SignedJWTParameters, -) -from mcp.client.streamable_http import streamablehttp_client -from mcp.shared.auth import OAuthClientInformationFull, OAuthClientMetadata, OAuthToken -from pydantic import AnyUrl - - -def get_conformance_context() -> dict: - """Load conformance test context from MCP_CONFORMANCE_CONTEXT environment variable.""" - context_json = os.environ.get("MCP_CONFORMANCE_CONTEXT") - if not context_json: - raise RuntimeError( - "MCP_CONFORMANCE_CONTEXT environment variable not set. " - "Expected JSON with client_id, client_secret, and/or private_key_pem." 
- ) - try: - return json.loads(context_json) - except json.JSONDecodeError as e: - raise RuntimeError(f"Failed to parse MCP_CONFORMANCE_CONTEXT as JSON: {e}") from e - - -# Set up logging to stderr (stdout is for conformance test output) -logging.basicConfig( - level=logging.DEBUG, - format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", - stream=sys.stderr, -) -logger = logging.getLogger(__name__) - - -class InMemoryTokenStorage(TokenStorage): - """Simple in-memory token storage for conformance testing.""" - - def __init__(self): - self._tokens: OAuthToken | None = None - self._client_info: OAuthClientInformationFull | None = None - - async def get_tokens(self) -> OAuthToken | None: - return self._tokens - - async def set_tokens(self, tokens: OAuthToken) -> None: - self._tokens = tokens - - async def get_client_info(self) -> OAuthClientInformationFull | None: - return self._client_info - - async def set_client_info(self, client_info: OAuthClientInformationFull) -> None: - self._client_info = client_info - - -class ConformanceOAuthCallbackHandler: - """ - OAuth callback handler that automatically fetches the authorization URL - and extracts the auth code, without requiring user interaction. - - This mimics the behavior of the TypeScript ConformanceOAuthProvider. - """ - - def __init__(self): - self._auth_code: str | None = None - self._state: str | None = None - - async def handle_redirect(self, authorization_url: str) -> None: - """ - Fetch the authorization URL and extract the auth code from the redirect. - - The conformance test server returns a redirect with the auth code, - so we can capture it programmatically. - """ - logger.debug(f"Fetching authorization URL: {authorization_url}") - - async with httpx.AsyncClient() as client: - response = await client.get( - authorization_url, - follow_redirects=False, # Don't follow redirects automatically - ) - - # Check for redirect response - if response.status_code in (301, 302, 303, 307, 308): - location = response.headers.get("location") - if location: - redirect_url: ParseResult = urlparse(location) - query_params: dict[str, list[str]] = parse_qs(redirect_url.query) - - if "code" in query_params: - self._auth_code = query_params["code"][0] - state_values = query_params.get("state") - self._state = state_values[0] if state_values else None - logger.debug(f"Got auth code from redirect: {self._auth_code[:10]}...") - return - else: - raise RuntimeError(f"No auth code in redirect URL: {location}") - else: - raise RuntimeError(f"No redirect location received from {authorization_url}") - else: - raise RuntimeError(f"Expected redirect response, got {response.status_code} from {authorization_url}") - - async def handle_callback(self) -> tuple[str, str | None]: - """Return the captured auth code and state, then clear them for potential reuse.""" - if self._auth_code is None: - raise RuntimeError("No authorization code available - was handle_redirect called?") - auth_code = self._auth_code - state = self._state - # Clear the stored values so the next auth flow gets fresh ones - self._auth_code = None - self._state = None - return auth_code, state - - -async def run_authorization_code_client(server_url: str) -> None: - """ - Run the conformance test client with authorization code flow. - - This function: - 1. Connects to the MCP server with OAuth authorization code flow - 2. Initializes the session - 3. Lists available tools - 4. 
Calls a test tool - """ - logger.debug(f"Starting conformance auth client (authorization_code) for {server_url}") - - # Create callback handler that will automatically fetch auth codes - callback_handler = ConformanceOAuthCallbackHandler() - - # Create OAuth authentication handler - oauth_auth = OAuthClientProvider( - server_url=server_url, - client_metadata=OAuthClientMetadata( - client_name="conformance-auth-client", - redirect_uris=[AnyUrl("http://localhost:3000/callback")], - grant_types=["authorization_code", "refresh_token"], - response_types=["code"], - ), - storage=InMemoryTokenStorage(), - redirect_handler=callback_handler.handle_redirect, - callback_handler=callback_handler.handle_callback, - ) - - await _run_session(server_url, oauth_auth) - - -async def run_client_credentials_jwt_client(server_url: str) -> None: - """ - Run the conformance test client with client credentials flow using private_key_jwt (SEP-1046). - - This function: - 1. Connects to the MCP server with OAuth client_credentials grant - 2. Uses private_key_jwt authentication with credentials from MCP_CONFORMANCE_CONTEXT - 3. Initializes the session - 4. Lists available tools - 5. Calls a test tool - """ - logger.debug(f"Starting conformance auth client (client_credentials_jwt) for {server_url}") - - # Load credentials from environment - context = get_conformance_context() - client_id = context.get("client_id") - private_key_pem = context.get("private_key_pem") - signing_algorithm = context.get("signing_algorithm", "ES256") - - if not client_id: - raise RuntimeError("MCP_CONFORMANCE_CONTEXT missing 'client_id'") - if not private_key_pem: - raise RuntimeError("MCP_CONFORMANCE_CONTEXT missing 'private_key_pem'") - - # Create JWT parameters for SDK-signed assertions - jwt_params = SignedJWTParameters( - issuer=client_id, - subject=client_id, - signing_algorithm=signing_algorithm, - signing_key=private_key_pem, - ) - - # Create OAuth provider for client_credentials with private_key_jwt - oauth_auth = PrivateKeyJWTOAuthProvider( - server_url=server_url, - storage=InMemoryTokenStorage(), - client_id=client_id, - assertion_provider=jwt_params.create_assertion_provider(), - ) - - await _run_session(server_url, oauth_auth) - - -async def run_client_credentials_basic_client(server_url: str) -> None: - """ - Run the conformance test client with client credentials flow using client_secret_basic. - - This function: - 1. Connects to the MCP server with OAuth client_credentials grant - 2. Uses client_secret_basic authentication with credentials from MCP_CONFORMANCE_CONTEXT - 3. Initializes the session - 4. Lists available tools - 5. 
Calls a test tool - """ - logger.debug(f"Starting conformance auth client (client_credentials_basic) for {server_url}") - - # Load credentials from environment - context = get_conformance_context() - client_id = context.get("client_id") - client_secret = context.get("client_secret") - - if not client_id: - raise RuntimeError("MCP_CONFORMANCE_CONTEXT missing 'client_id'") - if not client_secret: - raise RuntimeError("MCP_CONFORMANCE_CONTEXT missing 'client_secret'") - - # Create OAuth provider for client_credentials with client_secret_basic - oauth_auth = ClientCredentialsOAuthProvider( - server_url=server_url, - storage=InMemoryTokenStorage(), - client_id=client_id, - client_secret=client_secret, - token_endpoint_auth_method="client_secret_basic", - ) - - await _run_session(server_url, oauth_auth) - - -async def _run_session(server_url: str, oauth_auth: OAuthClientProvider) -> None: - """Common session logic for all OAuth flows.""" - # Connect using streamable HTTP transport with OAuth - async with streamablehttp_client( - url=server_url, - auth=oauth_auth, - timeout=timedelta(seconds=30), - sse_read_timeout=timedelta(seconds=60), - ) as (read_stream, write_stream, _): - async with ClientSession(read_stream, write_stream) as session: - # Initialize the session - await session.initialize() - logger.debug("Successfully connected and initialized MCP session") - - # List tools - tools_result = await session.list_tools() - logger.debug(f"Listed tools: {[t.name for t in tools_result.tools]}") - - # Call test tool (expected by conformance tests) - try: - result = await session.call_tool("test-tool", {}) - logger.debug(f"Called test-tool, result: {result}") - except Exception as e: - logger.debug(f"Tool call result/error: {e}") - - logger.debug("Connection closed successfully") - - -def main() -> None: - """Main entry point for the conformance auth client.""" - if len(sys.argv) != 3: - print(f"Usage: {sys.argv[0]} ", file=sys.stderr) - print("", file=sys.stderr) - print("Scenarios:", file=sys.stderr) - print(" auth/* - Authorization code flow (default)", file=sys.stderr) - print(" auth/client-credentials-jwt - Client credentials with JWT auth (SEP-1046)", file=sys.stderr) - print(" auth/client-credentials-basic - Client credentials with client_secret_basic", file=sys.stderr) - sys.exit(1) - - scenario = sys.argv[1] - server_url = sys.argv[2] - - try: - if scenario == "auth/client-credentials-jwt": - asyncio.run(run_client_credentials_jwt_client(server_url)) - elif scenario == "auth/client-credentials-basic": - asyncio.run(run_client_credentials_basic_client(server_url)) - else: - # Default to authorization code flow for all other auth/* scenarios - asyncio.run(run_authorization_code_client(server_url)) - except Exception: - logger.exception("Client failed") - sys.exit(1) - - -if __name__ == "__main__": - main() diff --git a/examples/clients/conformance-auth-client/mcp_conformance_auth_client/__main__.py b/examples/clients/conformance-auth-client/mcp_conformance_auth_client/__main__.py deleted file mode 100644 index 1b8f8acb09..0000000000 --- a/examples/clients/conformance-auth-client/mcp_conformance_auth_client/__main__.py +++ /dev/null @@ -1,6 +0,0 @@ -"""Allow running the module with python -m.""" - -from . 
import main - -if __name__ == "__main__": - main() diff --git a/examples/clients/conformance-auth-client/pyproject.toml b/examples/clients/conformance-auth-client/pyproject.toml deleted file mode 100644 index 3d03b4d4a1..0000000000 --- a/examples/clients/conformance-auth-client/pyproject.toml +++ /dev/null @@ -1,43 +0,0 @@ -[project] -name = "mcp-conformance-auth-client" -version = "0.1.0" -description = "OAuth conformance test client for MCP" -readme = "README.md" -requires-python = ">=3.10" -authors = [{ name = "Anthropic" }] -keywords = ["mcp", "oauth", "client", "auth", "conformance", "testing"] -license = { text = "MIT" } -classifiers = [ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.10", -] -dependencies = ["mcp", "httpx>=0.28.1"] - -[project.scripts] -mcp-conformance-auth-client = "mcp_conformance_auth_client:main" - -[build-system] -requires = ["hatchling"] -build-backend = "hatchling.build" - -[tool.hatch.build.targets.wheel] -packages = ["mcp_conformance_auth_client"] - -[tool.pyright] -include = ["mcp_conformance_auth_client"] -venvPath = "." -venv = ".venv" - -[tool.ruff.lint] -select = ["E", "F", "I"] -ignore = [] - -[tool.ruff] -line-length = 120 -target-version = "py310" - -[dependency-groups] -dev = ["pyright>=1.1.379", "pytest>=8.3.3", "ruff>=0.6.9"] diff --git a/examples/clients/simple-auth-client/README.md b/examples/clients/simple-auth-client/README.md index 3e92f29479..708c0371b8 100644 --- a/examples/clients/simple-auth-client/README.md +++ b/examples/clients/simple-auth-client/README.md @@ -12,29 +12,48 @@ A demonstration of how to use the MCP Python SDK with OAuth authentication over ```bash cd examples/clients/simple-auth-client -uv sync --reinstall +uv sync --reinstall ``` ## Usage ### 1. Start an MCP server with OAuth support +The simple-auth server example provides three server configurations. See [examples/servers/simple-auth/README.md](../../servers/simple-auth/README.md) for full details. + +#### Option A: New Architecture (Recommended) + +Separate Authorization Server and Resource Server: + +```bash +# Terminal 1: Start Authorization Server on port 9000 +cd examples/servers/simple-auth +uv run mcp-simple-auth-as --port=9000 + +# Terminal 2: Start Resource Server on port 8001 +cd examples/servers/simple-auth +uv run mcp-simple-auth-rs --port=8001 --auth-server=http://localhost:9000 --transport=streamable-http +``` + +#### Option B: Legacy Server (Backwards Compatibility) + ```bash -# Example with mcp-simple-auth -cd path/to/mcp-simple-auth -uv run mcp-simple-auth --transport streamable-http --port 3001 +# Single server that acts as both AS and RS (port 8000) +cd examples/servers/simple-auth +uv run mcp-simple-auth-legacy --port=8000 --transport=streamable-http ``` ### 2. Run the client ```bash -uv run mcp-simple-auth-client +# Connect to Resource Server (new architecture, default port 8001) +MCP_SERVER_PORT=8001 uv run mcp-simple-auth-client -# Or with custom server URL -MCP_SERVER_PORT=3001 uv run mcp-simple-auth-client +# Connect to Legacy Server (port 8000) +uv run mcp-simple-auth-client # Use SSE transport -MCP_TRANSPORT_TYPE=sse uv run mcp-simple-auth-client +MCP_SERVER_PORT=8001 MCP_TRANSPORT_TYPE=sse uv run mcp-simple-auth-client ``` ### 3. Complete OAuth flow @@ -42,33 +61,38 @@ MCP_TRANSPORT_TYPE=sse uv run mcp-simple-auth-client The client will open your browser for authentication. 
After completing OAuth, you can use commands: - `list` - List available tools -- `call [args]` - Call a tool with optional JSON arguments +- `call [args]` - Call a tool with optional JSON arguments - `quit` - Exit ## Example ```markdown -🔐 Simple MCP Auth Client -Connecting to: http://localhost:3001 +🚀 Simple MCP Auth Client +Connecting to: http://localhost:8001/mcp +Transport type: streamable-http -Please visit the following URL to authorize the application: -http://localhost:3001/authorize?response_type=code&client_id=... +🔗 Attempting to connect to http://localhost:8001/mcp... +📡 Opening StreamableHTTP transport connection with auth... +Opening browser for authorization: http://localhost:9000/authorize?... -✅ Connected to MCP server at http://localhost:3001 +✅ Connected to MCP server at http://localhost:8001/mcp mcp> list 📋 Available tools: -1. echo - Echo back the input text +1. get_time + Description: Get the current server time. -mcp> call echo {"text": "Hello, world!"} -🔧 Tool 'echo' result: -Hello, world! +mcp> call get_time +🔧 Tool 'get_time' result: +{"current_time": "2024-01-15T10:30:00", "timezone": "UTC", ...} mcp> quit -👋 Goodbye! ``` ## Configuration -- `MCP_SERVER_PORT` - Server URL (default: 8000) -- `MCP_TRANSPORT_TYPE` - Transport type: `streamable-http` (default) or `sse` +| Environment Variable | Description | Default | +|---------------------|-------------|---------| +| `MCP_SERVER_PORT` | Port number of the MCP server | `8000` | +| `MCP_TRANSPORT_TYPE` | Transport type: `streamable-http` or `sse` | `streamable-http` | +| `MCP_CLIENT_METADATA_URL` | Optional URL for client metadata (CIMD) | None | diff --git a/examples/clients/simple-auth-client/mcp_simple_auth_client/main.py b/examples/clients/simple-auth-client/mcp_simple_auth_client/main.py index a88c4ea6b6..684222dec1 100644 --- a/examples/clients/simple-auth-client/mcp_simple_auth_client/main.py +++ b/examples/clients/simple-auth-client/mcp_simple_auth_client/main.py @@ -1,26 +1,30 @@ #!/usr/bin/env python3 -""" -Simple MCP client example with OAuth authentication support. +"""Simple MCP client example with OAuth authentication support. This client connects to an MCP server using streamable HTTP transport with OAuth. 
""" +from __future__ import annotations as _annotations + import asyncio import os +import socketserver import threading import time import webbrowser from http.server import BaseHTTPRequestHandler, HTTPServer -from typing import Any +from typing import Any, Callable from urllib.parse import parse_qs, urlparse import httpx +from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream from mcp.client.auth import OAuthClientProvider, TokenStorage from mcp.client.session import ClientSession from mcp.client.sse import sse_client from mcp.client.streamable_http import streamable_http_client from mcp.shared.auth import OAuthClientInformationFull, OAuthClientMetadata, OAuthToken +from mcp.shared.message import SessionMessage class InMemoryTokenStorage(TokenStorage): @@ -46,7 +50,13 @@ async def set_client_info(self, client_info: OAuthClientInformationFull) -> None class CallbackHandler(BaseHTTPRequestHandler): """Simple HTTP handler to capture OAuth callback.""" - def __init__(self, request, client_address, server, callback_data): + def __init__( + self, + request: Any, + client_address: tuple[str, int], + server: socketserver.BaseServer, + callback_data: dict[str, Any], + ): """Initialize with callback data storage.""" self.callback_data = callback_data super().__init__(request, client_address, server) @@ -91,15 +101,14 @@ def do_GET(self): self.send_response(404) self.end_headers() - def log_message(self, format, *args): + def log_message(self, format: str, *args: Any): """Suppress default logging.""" - pass class CallbackServer: """Simple server to handle OAuth callbacks.""" - def __init__(self, port=3000): + def __init__(self, port: int = 3000): self.port = port self.server = None self.thread = None @@ -110,7 +119,12 @@ def _create_handler_with_data(self): callback_data = self.callback_data class DataCallbackHandler(CallbackHandler): - def __init__(self, request, client_address, server): + def __init__( + self, + request: BaseHTTPRequestHandler, + client_address: tuple[str, int], + server: socketserver.BaseServer, + ): super().__init__(request, client_address, server, callback_data) return DataCallbackHandler @@ -131,7 +145,7 @@ def stop(self): if self.thread: self.thread.join(timeout=1) - def wait_for_callback(self, timeout=300): + def wait_for_callback(self, timeout: int = 300): """Wait for OAuth callback with timeout.""" start_time = time.time() while time.time() - start_time < timeout: @@ -207,7 +221,7 @@ async def _default_redirect_handler(authorization_url: str) -> None: async with sse_client( url=self.server_url, auth=oauth_auth, - timeout=60, + timeout=60.0, ) as (read_stream, write_stream): await self._run_session(read_stream, write_stream, None) else: @@ -225,7 +239,12 @@ async def _default_redirect_handler(authorization_url: str) -> None: traceback.print_exc() - async def _run_session(self, read_stream, write_stream, get_session_id): + async def _run_session( + self, + read_stream: MemoryObjectReceiveStream[SessionMessage | Exception], + write_stream: MemoryObjectSendStream[SessionMessage], + get_session_id: Callable[[], str | None] | None = None, + ): """Run the MCP session with the given streams.""" print("🤝 Initializing MCP session...") async with ClientSession(read_stream, write_stream) as session: @@ -314,7 +333,7 @@ async def interactive_loop(self): continue # Parse arguments (simple JSON-like format) - arguments = {} + arguments: dict[str, Any] = {} if len(parts) > 2: import json diff --git a/examples/clients/simple-chatbot/mcp_simple_chatbot/main.py 
b/examples/clients/simple-chatbot/mcp_simple_chatbot/main.py index 78a81a4d9f..72b1a6f204 100644 --- a/examples/clients/simple-chatbot/mcp_simple_chatbot/main.py +++ b/examples/clients/simple-chatbot/mcp_simple_chatbot/main.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import asyncio import json import logging @@ -93,7 +95,7 @@ async def initialize(self) -> None: await self.cleanup() raise - async def list_tools(self) -> list[Any]: + async def list_tools(self) -> list[Tool]: """List available tools from the server. Returns: @@ -106,11 +108,11 @@ async def list_tools(self) -> list[Any]: raise RuntimeError(f"Server {self.name} not initialized") tools_response = await self.session.list_tools() - tools = [] + tools: list[Tool] = [] for item in tools_response: - if isinstance(item, tuple) and item[0] == "tools": - tools.extend(Tool(tool.name, tool.description, tool.inputSchema, tool.title) for tool in item[1]) + if item[0] == "tools": + tools.extend(Tool(tool.name, tool.description, tool.input_schema, tool.title) for tool in item[1]) return tools @@ -189,7 +191,7 @@ def format_for_llm(self) -> str: Returns: A formatted string describing the tool. """ - args_desc = [] + args_desc: list[str] = [] if "properties" in self.input_schema: for param_name, param_info in self.input_schema["properties"].items(): arg_desc = f"- {param_name}: {param_info.get('description', 'No description')}" @@ -311,9 +313,9 @@ def _clean_json_string(json_string: str) -> str: result = await server.execute_tool(tool_call["tool"], tool_call["arguments"]) if isinstance(result, dict) and "progress" in result: - progress = result["progress"] - total = result["total"] - percentage = (progress / total) * 100 + progress = result["progress"] # type: ignore + total = result["total"] # type: ignore + percentage = (progress / total) * 100 # type: ignore logging.info(f"Progress: {progress}/{total} ({percentage:.1f}%)") return f"Tool execution result: {result}" @@ -338,7 +340,7 @@ async def start(self) -> None: await self.cleanup_servers() return - all_tools = [] + all_tools: list[Tool] = [] for server in self.servers: tools = await server.list_tools() all_tools.extend(tools) diff --git a/examples/clients/simple-task-client/mcp_simple_task_client/main.py b/examples/clients/simple-task-client/mcp_simple_task_client/main.py index 12691162ab..1e653d58e4 100644 --- a/examples/clients/simple-task-client/mcp_simple_task_client/main.py +++ b/examples/clients/simple-task-client/mcp_simple_task_client/main.py @@ -4,12 +4,12 @@ import click from mcp import ClientSession -from mcp.client.streamable_http import streamablehttp_client +from mcp.client.streamable_http import streamable_http_client from mcp.types import CallToolResult, TextContent async def run(url: str) -> None: - async with streamablehttp_client(url) as (read, write, _): + async with streamable_http_client(url) as (read, write, _): async with ClientSession(read, write) as session: await session.initialize() @@ -25,15 +25,16 @@ async def run(url: str) -> None: arguments={}, ttl=60000, ) - task_id = result.task.taskId + task_id = result.task.task_id print(f"Task created: {task_id}") + status = None # Poll until done (respects server's pollInterval hint) async for status in session.experimental.poll_task(task_id): - print(f" Status: {status.status} - {status.statusMessage or ''}") + print(f" Status: {status.status} - {status.status_message or ''}") # Check final status - if status.status != "completed": + if status and status.status != "completed": print(f"Task ended with 
status: {status.status}") return diff --git a/examples/clients/simple-task-interactive-client/README.md b/examples/clients/simple-task-interactive-client/README.md index ac73d2bc12..3397d3b5d7 100644 --- a/examples/clients/simple-task-interactive-client/README.md +++ b/examples/clients/simple-task-interactive-client/README.md @@ -49,7 +49,7 @@ async def sampling_callback(context, params) -> CreateMessageResult: ```python # Call a tool as a task (returns immediately with task reference) result = await session.experimental.call_tool_as_task("tool_name", {"arg": "value"}) -task_id = result.task.taskId +task_id = result.task.task_id # Get result - this delivers elicitation/sampling requests and blocks until complete final = await session.experimental.get_task_result(task_id, CallToolResult) diff --git a/examples/clients/simple-task-interactive-client/mcp_simple_task_interactive_client/main.py b/examples/clients/simple-task-interactive-client/mcp_simple_task_interactive_client/main.py index a8a47dc57c..5f34eb9491 100644 --- a/examples/clients/simple-task-interactive-client/mcp_simple_task_interactive_client/main.py +++ b/examples/clients/simple-task-interactive-client/mcp_simple_task_interactive_client/main.py @@ -11,7 +11,7 @@ import click from mcp import ClientSession -from mcp.client.streamable_http import streamablehttp_client +from mcp.client.streamable_http import streamable_http_client from mcp.shared.context import RequestContext from mcp.types import ( CallToolResult, @@ -73,7 +73,7 @@ def get_text(result: CallToolResult) -> str: async def run(url: str) -> None: - async with streamablehttp_client(url) as (read, write, _): + async with streamable_http_client(url) as (read, write, _): async with ClientSession( read, write, @@ -91,7 +91,7 @@ async def run(url: str) -> None: print("Calling confirm_delete tool...") elicit_task = await session.experimental.call_tool_as_task("confirm_delete", {"filename": "important.txt"}) - elicit_task_id = elicit_task.task.taskId + elicit_task_id = elicit_task.task.task_id print(f"Task created: {elicit_task_id}") # Poll until terminal, calling tasks/result on input_required @@ -112,7 +112,7 @@ async def run(url: str) -> None: print("Calling write_haiku tool...") sampling_task = await session.experimental.call_tool_as_task("write_haiku", {"topic": "autumn leaves"}) - sampling_task_id = sampling_task.task.taskId + sampling_task_id = sampling_task.task.task_id print(f"Task created: {sampling_task_id}") # Poll until terminal, calling tasks/result on input_required diff --git a/examples/clients/sse-polling-client/mcp_sse_polling_client/main.py b/examples/clients/sse-polling-client/mcp_sse_polling_client/main.py index 1defd8eaa4..533fce3789 100644 --- a/examples/clients/sse-polling-client/mcp_sse_polling_client/main.py +++ b/examples/clients/sse-polling-client/mcp_sse_polling_client/main.py @@ -1,5 +1,4 @@ -""" -SSE Polling Demo Client +"""SSE Polling Demo Client Demonstrates the client-side auto-reconnect for SSE polling pattern. 
@@ -20,7 +19,7 @@ import click from mcp import ClientSession -from mcp.client.streamable_http import streamablehttp_client +from mcp.client.streamable_http import streamable_http_client logger = logging.getLogger(__name__) @@ -34,7 +33,7 @@ async def run_demo(url: str, items: int, checkpoint_every: int) -> None: print(f"Processing {items} items with checkpoints every {checkpoint_every}") print(f"{'=' * 60}\n") - async with streamablehttp_client(url) as (read_stream, write_stream, _): + async with streamable_http_client(url) as (read_stream, write_stream, _): async with ClientSession(read_stream, write_stream) as session: # Initialize the connection print("Initializing connection...") diff --git a/examples/fastmcp/complex_inputs.py b/examples/fastmcp/complex_inputs.py index e859165a97..b55d4f725b 100644 --- a/examples/fastmcp/complex_inputs.py +++ b/examples/fastmcp/complex_inputs.py @@ -1,5 +1,4 @@ -""" -FastMCP Complex inputs Example +"""FastMCP Complex inputs Example Demonstrates validation via pydantic with complex models. """ diff --git a/examples/fastmcp/desktop.py b/examples/fastmcp/desktop.py index add7f515bc..8ea62c70f8 100644 --- a/examples/fastmcp/desktop.py +++ b/examples/fastmcp/desktop.py @@ -1,5 +1,4 @@ -""" -FastMCP Desktop Example +"""FastMCP Desktop Example A simple example that exposes the desktop directory as a resource. """ diff --git a/examples/fastmcp/direct_call_tool_result_return.py b/examples/fastmcp/direct_call_tool_result_return.py index a441769b2a..2218af49b4 100644 --- a/examples/fastmcp/direct_call_tool_result_return.py +++ b/examples/fastmcp/direct_call_tool_result_return.py @@ -1,6 +1,4 @@ -""" -FastMCP Echo Server with direct CallToolResult return -""" +"""FastMCP Echo Server with direct CallToolResult return""" from typing import Annotated @@ -20,5 +18,5 @@ class EchoResponse(BaseModel): def echo(text: str) -> Annotated[CallToolResult, EchoResponse]: """Echo the input text with structure and metadata""" return CallToolResult( - content=[TextContent(type="text", text=text)], structuredContent={"text": text}, _meta={"some": "metadata"} + content=[TextContent(type="text", text=text)], structured_content={"text": text}, _meta={"some": "metadata"} ) diff --git a/examples/fastmcp/echo.py b/examples/fastmcp/echo.py index 7bdbcdce6b..9f01e60ca2 100644 --- a/examples/fastmcp/echo.py +++ b/examples/fastmcp/echo.py @@ -1,6 +1,4 @@ -""" -FastMCP Echo Server -""" +"""FastMCP Echo Server""" from mcp.server.fastmcp import FastMCP diff --git a/examples/fastmcp/icons_demo.py b/examples/fastmcp/icons_demo.py index c6cf48acd8..47601c0356 100644 --- a/examples/fastmcp/icons_demo.py +++ b/examples/fastmcp/icons_demo.py @@ -1,5 +1,4 @@ -""" -FastMCP Icons Demo Server +"""FastMCP Icons Demo Server Demonstrates using icons with tools, resources, prompts, and implementation. 
""" @@ -14,7 +13,7 @@ icon_data = base64.standard_b64encode(icon_path.read_bytes()).decode() icon_data_uri = f"data:image/png;base64,{icon_data}" -icon_data = Icon(src=icon_data_uri, mimeType="image/png", sizes=["64x64"]) +icon_data = Icon(src=icon_data_uri, mime_type="image/png", sizes=["64x64"]) # Create server with icons in implementation mcp = FastMCP("Icons Demo Server", website_url="https://github.com/modelcontextprotocol/python-sdk", icons=[icon_data]) @@ -40,9 +39,9 @@ def prompt_with_icon(text: str) -> str: @mcp.tool( icons=[ - Icon(src=icon_data_uri, mimeType="image/png", sizes=["16x16"]), - Icon(src=icon_data_uri, mimeType="image/png", sizes=["32x32"]), - Icon(src=icon_data_uri, mimeType="image/png", sizes=["64x64"]), + Icon(src=icon_data_uri, mime_type="image/png", sizes=["16x16"]), + Icon(src=icon_data_uri, mime_type="image/png", sizes=["32x32"]), + Icon(src=icon_data_uri, mime_type="image/png", sizes=["64x64"]), ] ) def multi_icon_tool(action: str) -> str: diff --git a/examples/fastmcp/logging_and_progress.py b/examples/fastmcp/logging_and_progress.py index 91c2b806dd..016155233a 100644 --- a/examples/fastmcp/logging_and_progress.py +++ b/examples/fastmcp/logging_and_progress.py @@ -1,6 +1,4 @@ -""" -FastMCP Echo Server that sends log messages and progress updates to the client -""" +"""FastMCP Echo Server that sends log messages and progress updates to the client""" import asyncio diff --git a/examples/fastmcp/memory.py b/examples/fastmcp/memory.py index 35094ec9c8..cc87ea930c 100644 --- a/examples/fastmcp/memory.py +++ b/examples/fastmcp/memory.py @@ -4,8 +4,7 @@ # uv pip install 'pydantic-ai-slim[openai]' asyncpg numpy pgvector -""" -Recursive memory system inspired by the human brain's clustering of memories. +"""Recursive memory system inspired by the human brain's clustering of memories. Uses OpenAI's 'text-embedding-3-small' model and pgvector for efficient similarity search. """ @@ -37,15 +36,7 @@ T = TypeVar("T") -mcp = FastMCP( - "memory", - dependencies=[ - "pydantic-ai-slim[openai]", - "asyncpg", - "numpy", - "pgvector", - ], -) +mcp = FastMCP("memory") DB_DSN = "postgresql://postgres:postgres@localhost:54320/memory_db" # reset memory with rm ~/.fastmcp/{USER}/memory/* diff --git a/examples/fastmcp/parameter_descriptions.py b/examples/fastmcp/parameter_descriptions.py index dc56e91821..307ae5cedd 100644 --- a/examples/fastmcp/parameter_descriptions.py +++ b/examples/fastmcp/parameter_descriptions.py @@ -1,6 +1,4 @@ -""" -FastMCP Example showing parameter descriptions -""" +"""FastMCP Example showing parameter descriptions""" from pydantic import Field diff --git a/examples/fastmcp/screenshot.py b/examples/fastmcp/screenshot.py index 694b49f2fa..2c73c9847c 100644 --- a/examples/fastmcp/screenshot.py +++ b/examples/fastmcp/screenshot.py @@ -1,5 +1,4 @@ -""" -FastMCP Screenshot Example +"""FastMCP Screenshot Example Give Claude a tool to capture and view screenshots. """ @@ -10,13 +9,12 @@ from mcp.server.fastmcp.utilities.types import Image # Create server -mcp = FastMCP("Screenshot Demo", dependencies=["pyautogui", "Pillow"]) +mcp = FastMCP("Screenshot Demo") @mcp.tool() def take_screenshot() -> Image: - """ - Take a screenshot of the user's screen and return it as an image. Use + """Take a screenshot of the user's screen and return it as an image. Use this tool anytime the user wants you to look at something they're doing. 
""" import pyautogui diff --git a/examples/fastmcp/simple_echo.py b/examples/fastmcp/simple_echo.py index c26152646f..d0fa597004 100644 --- a/examples/fastmcp/simple_echo.py +++ b/examples/fastmcp/simple_echo.py @@ -1,6 +1,4 @@ -""" -FastMCP Echo Server -""" +"""FastMCP Echo Server""" from mcp.server.fastmcp import FastMCP diff --git a/examples/fastmcp/text_me.py b/examples/fastmcp/text_me.py index 2434dcddd9..8a8dea351a 100644 --- a/examples/fastmcp/text_me.py +++ b/examples/fastmcp/text_me.py @@ -2,8 +2,7 @@ # dependencies = [] # /// -""" -FastMCP Text Me Server +"""FastMCP Text Me Server -------------------------------- This defines a simple FastMCP server that sends a text message to a phone number via https://surgemsg.com/. diff --git a/examples/fastmcp/unicode_example.py b/examples/fastmcp/unicode_example.py index bb487f6180..a598397845 100644 --- a/examples/fastmcp/unicode_example.py +++ b/examples/fastmcp/unicode_example.py @@ -1,5 +1,4 @@ -""" -Example FastMCP server that uses Unicode characters in various places to help test +"""Example FastMCP server that uses Unicode characters in various places to help test Unicode handling in tools and inspectors. """ @@ -10,8 +9,7 @@ @mcp.tool(description="🌟 A tool that uses various Unicode characters in its description: á é í ó ú ñ 漢字 🎉") def hello_unicode(name: str = "世界", greeting: str = "¡Hola") -> str: - """ - A simple tool that demonstrates Unicode handling in: + """A simple tool that demonstrates Unicode handling in: - Tool description (emojis, accents, CJK characters) - Parameter defaults (CJK characters) - Return values (Spanish punctuation, emojis) diff --git a/examples/fastmcp/weather_structured.py b/examples/fastmcp/weather_structured.py index 20cbf79578..af4e435dfd 100644 --- a/examples/fastmcp/weather_structured.py +++ b/examples/fastmcp/weather_structured.py @@ -1,5 +1,4 @@ -""" -FastMCP Weather Example with Structured Output +"""FastMCP Weather Example with Structured Output Demonstrates how to use structured output with tools to return well-typed, validated data that clients can easily process. 
@@ -14,8 +13,8 @@ from pydantic import BaseModel, Field +from mcp.client import Client from mcp.server.fastmcp import FastMCP -from mcp.shared.memory import create_connected_server_and_client_session as client_session # Create server mcp = FastMCP("Weather Service") @@ -157,36 +156,36 @@ async def test() -> None: print("Testing Weather Service Tools (via MCP protocol)\n") print("=" * 80) - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: # Test get_weather result = await client.call_tool("get_weather", {"city": "London"}) print("\nWeather in London:") - print(json.dumps(result.structuredContent, indent=2)) + print(json.dumps(result.structured_content, indent=2)) # Test get_weather_summary result = await client.call_tool("get_weather_summary", {"city": "Paris"}) print("\nWeather summary for Paris:") - print(json.dumps(result.structuredContent, indent=2)) + print(json.dumps(result.structured_content, indent=2)) # Test get_weather_metrics result = await client.call_tool("get_weather_metrics", {"cities": ["Tokyo", "Sydney", "Mumbai"]}) print("\nWeather metrics:") - print(json.dumps(result.structuredContent, indent=2)) + print(json.dumps(result.structured_content, indent=2)) # Test get_weather_alerts result = await client.call_tool("get_weather_alerts", {"region": "California"}) print("\nWeather alerts for California:") - print(json.dumps(result.structuredContent, indent=2)) + print(json.dumps(result.structured_content, indent=2)) # Test get_temperature result = await client.call_tool("get_temperature", {"city": "Berlin", "unit": "fahrenheit"}) print("\nTemperature in Berlin:") - print(json.dumps(result.structuredContent, indent=2)) + print(json.dumps(result.structured_content, indent=2)) # Test get_weather_stats result = await client.call_tool("get_weather_stats", {"city": "Seattle", "days": 30}) print("\nWeather stats for Seattle (30 days):") - print(json.dumps(result.structuredContent, indent=2)) + print(json.dumps(result.structured_content, indent=2)) # Also show the text content for comparison print("\nText content for last result:") @@ -204,11 +203,11 @@ async def print_schemas() -> None: print(f"\nTool: {tool.name}") print(f"Description: {tool.description}") print("Input Schema:") - print(json.dumps(tool.inputSchema, indent=2)) + print(json.dumps(tool.input_schema, indent=2)) - if tool.outputSchema: + if tool.output_schema: print("Output Schema:") - print(json.dumps(tool.outputSchema, indent=2)) + print(json.dumps(tool.output_schema, indent=2)) else: print("Output Schema: None (returns unstructured content)") diff --git a/examples/servers/everything-server/mcp_everything_server/server.py b/examples/servers/everything-server/mcp_everything_server/server.py index 1f1ee7ecc4..db6b09f3fb 100644 --- a/examples/servers/everything-server/mcp_everything_server/server.py +++ b/examples/servers/everything-server/mcp_everything_server/server.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -""" -MCP Everything Server - Conformance Test Server +"""MCP Everything Server - Conformance Test Server Server implementing all MCP features for conformance testing based on Conformance Server Specification. 
""" @@ -29,7 +28,7 @@ TextContent, TextResourceContents, ) -from pydantic import AnyUrl, BaseModel, Field +from pydantic import BaseModel, Field logger = logging.getLogger(__name__) @@ -83,8 +82,6 @@ async def replay_events_after(self, last_event_id: EventId, send_callback: Event mcp = FastMCP( name="mcp-conformance-test-server", - event_store=event_store, - retry_interval=100, # 100ms retry interval for SSE polling ) @@ -98,13 +95,13 @@ def test_simple_text() -> str: @mcp.tool() def test_image_content() -> list[ImageContent]: """Tests image content response""" - return [ImageContent(type="image", data=TEST_IMAGE_BASE64, mimeType="image/png")] + return [ImageContent(type="image", data=TEST_IMAGE_BASE64, mime_type="image/png")] @mcp.tool() def test_audio_content() -> list[AudioContent]: """Tests audio content response""" - return [AudioContent(type="audio", data=TEST_AUDIO_BASE64, mimeType="audio/wav")] + return [AudioContent(type="audio", data=TEST_AUDIO_BASE64, mime_type="audio/wav")] @mcp.tool() @@ -114,8 +111,8 @@ def test_embedded_resource() -> list[EmbeddedResource]: EmbeddedResource( type="resource", resource=TextResourceContents( - uri=AnyUrl("test://embedded-resource"), - mimeType="text/plain", + uri="test://embedded-resource", + mime_type="text/plain", text="This is an embedded resource content.", ), ) @@ -127,12 +124,12 @@ def test_multiple_content_types() -> list[TextContent | ImageContent | EmbeddedR """Tests response with multiple content types (text, image, resource)""" return [ TextContent(type="text", text="Multiple content types test:"), - ImageContent(type="image", data=TEST_IMAGE_BASE64, mimeType="image/png"), + ImageContent(type="image", data=TEST_IMAGE_BASE64, mime_type="image/png"), EmbeddedResource( type="resource", resource=TextResourceContents( - uri=AnyUrl("test://mixed-content-resource"), - mimeType="application/json", + uri="test://mixed-content-resource", + mime_type="application/json", text='{"test": "data", "value": 123}', ), ), @@ -164,7 +161,7 @@ async def test_tool_with_progress(ctx: Context[ServerSession, None]) -> str: await ctx.report_progress(progress=100, total=100, message="Completed step 100 of 100") # Return progress token as string - progress_token = ctx.request_context.meta.progressToken if ctx.request_context and ctx.request_context.meta else 0 + progress_token = ctx.request_context.meta.progress_token if ctx.request_context and ctx.request_context.meta else 0 return str(progress_token) @@ -372,8 +369,8 @@ def test_prompt_with_embedded_resource(resourceUri: str) -> list[UserMessage]: content=EmbeddedResource( type="resource", resource=TextResourceContents( - uri=AnyUrl(resourceUri), - mimeType="text/plain", + uri=resourceUri, + mime_type="text/plain", text="Embedded resource content for testing.", ), ), @@ -386,7 +383,7 @@ def test_prompt_with_embedded_resource(resourceUri: str) -> list[UserMessage]: def test_prompt_with_image() -> list[UserMessage]: """A prompt that includes image content""" return [ - UserMessage(role="user", content=ImageContent(type="image", data=TEST_IMAGE_BASE64, mimeType="image/png")), + UserMessage(role="user", content=ImageContent(type="image", data=TEST_IMAGE_BASE64, mime_type="image/png")), UserMessage(role="user", content=TextContent(type="text", text="Please analyze the image above.")), ] @@ -402,13 +399,13 @@ async def handle_set_logging_level(level: str) -> None: # For conformance testing, we just acknowledge the request -async def handle_subscribe(uri: AnyUrl) -> None: +async def handle_subscribe(uri: str) -> 
None: """Handle resource subscription""" resource_subscriptions.add(str(uri)) logger.info(f"Subscribed to resource: {uri}") -async def handle_unsubscribe(uri: AnyUrl) -> None: +async def handle_unsubscribe(uri: str) -> None: """Handle resource unsubscription""" resource_subscriptions.discard(str(uri)) logger.info(f"Unsubscribed from resource: {uri}") @@ -427,7 +424,7 @@ async def _handle_completion( """Handle completion requests""" # Basic completion support - returns empty array for conformance # Real implementations would provide contextual suggestions - return Completion(values=[], total=0, hasMore=False) + return Completion(values=[], total=0, has_more=False) # CLI @@ -448,8 +445,12 @@ def main(port: int, log_level: str) -> int: logger.info(f"Starting MCP Everything Server on port {port}") logger.info(f"Endpoint will be: http://localhost:{port}/mcp") - mcp.settings.port = port - mcp.run(transport="streamable-http") + mcp.run( + transport="streamable-http", + port=port, + event_store=event_store, + retry_interval=100, # 100ms retry interval for SSE polling + ) return 0 diff --git a/examples/servers/simple-auth/README.md b/examples/servers/simple-auth/README.md index b80e98a047..d4a10c43b0 100644 --- a/examples/servers/simple-auth/README.md +++ b/examples/servers/simple-auth/README.md @@ -31,10 +31,10 @@ uv run mcp-simple-auth-as --port=9000 cd examples/servers/simple-auth # Start Resource Server on port 8001, connected to Authorization Server -uv run mcp-simple-auth-rs --port=8001 --auth-server=http://localhost:9000 --transport=streamable-http +uv run mcp-simple-auth-rs --port=8001 --auth-server=http://localhost:9000 --transport=streamable-http # With RFC 8707 strict resource validation (recommended for production) -uv run mcp-simple-auth-rs --port=8001 --auth-server=http://localhost:9000 --transport=streamable-http --oauth-strict +uv run mcp-simple-auth-rs --port=8001 --auth-server=http://localhost:9000 --transport=streamable-http --oauth-strict ``` @@ -84,8 +84,9 @@ For backwards compatibility with older MCP implementations, a legacy server is p ### Running the Legacy Server ```bash -# Start legacy authorization server on port 8002 -uv run mcp-simple-auth-legacy --port=8002 +# Start legacy server on port 8000 (the default) +cd examples/servers/simple-auth +uv run mcp-simple-auth-legacy --port=8000 --transport=streamable-http ``` **Differences from the new architecture:** @@ -101,7 +102,7 @@ uv run mcp-simple-auth-legacy --port=8002 ```bash # Test with client (will automatically fall back to legacy discovery) cd examples/clients/simple-auth-client -MCP_SERVER_PORT=8002 MCP_TRANSPORT_TYPE=streamable-http uv run mcp-simple-auth-client +MCP_SERVER_PORT=8000 MCP_TRANSPORT_TYPE=streamable-http uv run mcp-simple-auth-client ``` The client will: diff --git a/examples/servers/simple-auth/mcp_simple_auth/auth_server.py b/examples/servers/simple-auth/mcp_simple_auth/auth_server.py index 80a2e8b8a3..9d13fffe42 100644 --- a/examples/servers/simple-auth/mcp_simple_auth/auth_server.py +++ b/examples/servers/simple-auth/mcp_simple_auth/auth_server.py @@ -1,5 +1,4 @@ -""" -Authorization Server for MCP Split Demo. +"""Authorization Server for MCP Split Demo. This server handles OAuth flows, client registration, and token issuance. Can be replaced with enterprise authorization servers like Auth0, Entra ID, etc. @@ -41,8 +40,7 @@ class AuthServerSettings(BaseModel): class SimpleAuthProvider(SimpleOAuthProvider): - """ - Authorization Server provider with simple demo authentication. 
+ """Authorization Server provider with simple demo authentication. This provider: 1. Issues MCP tokens after simple credential authentication @@ -98,8 +96,7 @@ async def login_callback_handler(request: Request) -> Response: # Add token introspection endpoint (RFC 7662) for Resource Servers async def introspect_handler(request: Request) -> Response: - """ - Token introspection endpoint for Resource Servers. + """Token introspection endpoint for Resource Servers. Resource Servers call this endpoint to validate tokens without needing direct access to token storage. @@ -157,8 +154,7 @@ async def run_server(server_settings: AuthServerSettings, auth_settings: SimpleA @click.command() @click.option("--port", default=9000, help="Port to listen on") def main(port: int) -> int: - """ - Run the MCP Authorization Server. + """Run the MCP Authorization Server. This server handles OAuth flows and can be used by multiple Resource Servers. diff --git a/examples/servers/simple-auth/mcp_simple_auth/legacy_as_server.py b/examples/servers/simple-auth/mcp_simple_auth/legacy_as_server.py index b0455c3e89..ac9dfcb571 100644 --- a/examples/servers/simple-auth/mcp_simple_auth/legacy_as_server.py +++ b/examples/servers/simple-auth/mcp_simple_auth/legacy_as_server.py @@ -1,5 +1,4 @@ -""" -Legacy Combined Authorization Server + Resource Server for MCP. +"""Legacy Combined Authorization Server + Resource Server for MCP. This server implements the old spec where MCP servers could act as both AS and RS. Used for backwards compatibility testing with the new split AS/RS architecture. @@ -66,11 +65,11 @@ def create_simple_mcp_server(server_settings: ServerSettings, auth_settings: Sim name="Simple Auth MCP Server", instructions="A simple MCP server with simple credential authentication", auth_server_provider=oauth_provider, - host=server_settings.host, - port=server_settings.port, debug=True, auth=mcp_auth_settings, ) + # Store server settings for later use in run() + app._server_settings = server_settings # type: ignore[attr-defined] @app.custom_route("/login", methods=["GET"]) async def login_page_handler(request: Request) -> Response: @@ -87,8 +86,7 @@ async def login_callback_handler(request: Request) -> Response: @app.tool() async def get_time() -> dict[str, Any]: - """ - Get the current server time. + """Get the current server time. This tool demonstrates that system information can be protected by OAuth authentication. User must be authenticated to access it. @@ -131,7 +129,7 @@ def main(port: int, transport: Literal["sse", "streamable-http"]) -> int: mcp_server = create_simple_mcp_server(server_settings, auth_settings) logger.info(f"🚀 MCP Legacy Server running on {server_url}") - mcp_server.run(transport=transport) + mcp_server.run(transport=transport, host=host, port=port) return 0 diff --git a/examples/servers/simple-auth/mcp_simple_auth/py.typed b/examples/servers/simple-auth/mcp_simple_auth/py.typed new file mode 100644 index 0000000000..e69de29bb2 diff --git a/examples/servers/simple-auth/mcp_simple_auth/server.py b/examples/servers/simple-auth/mcp_simple_auth/server.py index 5d88505708..28d2565429 100644 --- a/examples/servers/simple-auth/mcp_simple_auth/server.py +++ b/examples/servers/simple-auth/mcp_simple_auth/server.py @@ -1,5 +1,4 @@ -""" -MCP Resource Server with Token Introspection. +"""MCP Resource Server with Token Introspection. This server validates tokens via Authorization Server introspection and serves MCP resources. Demonstrates RFC 9728 Protected Resource Metadata for AS/RS separation. 
@@ -47,8 +46,7 @@ class ResourceServerSettings(BaseSettings): def create_resource_server(settings: ResourceServerSettings) -> FastMCP: - """ - Create MCP Resource Server with token introspection. + """Create MCP Resource Server with token introspection. This server: 1. Provides protected resource metadata (RFC 9728) @@ -66,8 +64,6 @@ def create_resource_server(settings: ResourceServerSettings) -> FastMCP: app = FastMCP( name="MCP Resource Server", instructions="Resource Server that validates tokens via Authorization Server introspection", - host=settings.host, - port=settings.port, debug=True, # Auth configuration for RS mode token_verifier=token_verifier, @@ -77,11 +73,12 @@ def create_resource_server(settings: ResourceServerSettings) -> FastMCP: resource_server_url=settings.server_url, ), ) + # Store settings for later use in run() + app._resource_server_settings = settings # type: ignore[attr-defined] @app.tool() async def get_time() -> dict[str, Any]: - """ - Get the current server time. + """Get the current server time. This tool demonstrates that system information can be protected by OAuth authentication. User must be authenticated to access it. @@ -114,8 +111,7 @@ async def get_time() -> dict[str, Any]: help="Enable RFC 8707 resource validation", ) def main(port: int, auth_server: str, transport: Literal["sse", "streamable-http"], oauth_strict: bool) -> int: - """ - Run the MCP Resource Server. + """Run the MCP Resource Server. This server: - Provides RFC 9728 Protected Resource Metadata @@ -153,7 +149,7 @@ def main(port: int, auth_server: str, transport: Literal["sse", "streamable-http logger.info(f"🔑 Using Authorization Server: {settings.auth_server_url}") # Run the server - this should block and keep running - mcp_server.run(transport=transport) + mcp_server.run(transport=transport, host=host, port=port) logger.info("Server stopped") return 0 except Exception: diff --git a/examples/servers/simple-auth/mcp_simple_auth/simple_auth_provider.py b/examples/servers/simple-auth/mcp_simple_auth/simple_auth_provider.py index e3a25d3e8c..e244e0fd94 100644 --- a/examples/servers/simple-auth/mcp_simple_auth/simple_auth_provider.py +++ b/examples/servers/simple-auth/mcp_simple_auth/simple_auth_provider.py @@ -1,5 +1,4 @@ -""" -Simple OAuth provider for MCP servers. +"""Simple OAuth provider for MCP servers. This module contains a basic OAuth implementation using hardcoded user credentials for demonstration purposes. No external authentication provider is required. @@ -47,8 +46,7 @@ class SimpleAuthSettings(BaseSettings): class SimpleOAuthProvider(OAuthAuthorizationServerProvider[AuthorizationCode, RefreshToken, AccessToken]): - """ - Simple OAuth provider for demo purposes. + """Simple OAuth provider for demo purposes. This provider handles the OAuth flow by: 1. Providing a simple login form for demo credentials diff --git a/examples/servers/simple-pagination/mcp_simple_pagination/server.py b/examples/servers/simple-pagination/mcp_simple_pagination/server.py index 360cbc3cff..74e9e3e82b 100644 --- a/examples/servers/simple-pagination/mcp_simple_pagination/server.py +++ b/examples/servers/simple-pagination/mcp_simple_pagination/server.py @@ -1,5 +1,4 @@ -""" -Simple MCP server demonstrating pagination for tools, resources, and prompts. +"""Simple MCP server demonstrating pagination for tools, resources, and prompts. This example shows how to use the paginated decorators to handle large lists of items that need to be split across multiple pages. 
@@ -11,7 +10,6 @@ import click import mcp.types as types from mcp.server.lowlevel import Server -from pydantic import AnyUrl from starlette.requests import Request # Sample data - in real scenarios, this might come from a database @@ -20,14 +18,14 @@ name=f"tool_{i}", title=f"Tool {i}", description=f"This is sample tool number {i}", - inputSchema={"type": "object", "properties": {"input": {"type": "string"}}}, + input_schema={"type": "object", "properties": {"input": {"type": "string"}}}, ) for i in range(1, 26) # 25 tools total ] SAMPLE_RESOURCES = [ types.Resource( - uri=AnyUrl(f"file:///path/to/resource_{i}.txt"), + uri=f"file:///path/to/resource_{i}.txt", name=f"resource_{i}", description=f"This is sample resource number {i}", ) @@ -72,7 +70,7 @@ async def list_tools_paginated(request: types.ListToolsRequest) -> types.ListToo start_idx = int(cursor) except (ValueError, TypeError): # Invalid cursor, return empty - return types.ListToolsResult(tools=[], nextCursor=None) + return types.ListToolsResult(tools=[], next_cursor=None) # Get the page of tools page_tools = SAMPLE_TOOLS[start_idx : start_idx + page_size] @@ -82,7 +80,7 @@ async def list_tools_paginated(request: types.ListToolsRequest) -> types.ListToo if start_idx + page_size < len(SAMPLE_TOOLS): next_cursor = str(start_idx + page_size) - return types.ListToolsResult(tools=page_tools, nextCursor=next_cursor) + return types.ListToolsResult(tools=page_tools, next_cursor=next_cursor) # Paginated list_resources - returns 10 resources per page @app.list_resources() @@ -101,7 +99,7 @@ async def list_resources_paginated( start_idx = int(cursor) except (ValueError, TypeError): # Invalid cursor, return empty - return types.ListResourcesResult(resources=[], nextCursor=None) + return types.ListResourcesResult(resources=[], next_cursor=None) # Get the page of resources page_resources = SAMPLE_RESOURCES[start_idx : start_idx + page_size] @@ -111,7 +109,7 @@ async def list_resources_paginated( if start_idx + page_size < len(SAMPLE_RESOURCES): next_cursor = str(start_idx + page_size) - return types.ListResourcesResult(resources=page_resources, nextCursor=next_cursor) + return types.ListResourcesResult(resources=page_resources, next_cursor=next_cursor) # Paginated list_prompts - returns 7 prompts per page @app.list_prompts() @@ -130,7 +128,7 @@ async def list_prompts_paginated( start_idx = int(cursor) except (ValueError, TypeError): # Invalid cursor, return empty - return types.ListPromptsResult(prompts=[], nextCursor=None) + return types.ListPromptsResult(prompts=[], next_cursor=None) # Get the page of prompts page_prompts = SAMPLE_PROMPTS[start_idx : start_idx + page_size] @@ -140,7 +138,7 @@ async def list_prompts_paginated( if start_idx + page_size < len(SAMPLE_PROMPTS): next_cursor = str(start_idx + page_size) - return types.ListPromptsResult(prompts=page_prompts, nextCursor=next_cursor) + return types.ListPromptsResult(prompts=page_prompts, next_cursor=next_cursor) # Implement call_tool handler @app.call_tool() @@ -160,7 +158,7 @@ async def call_tool(name: str, arguments: dict[str, Any]) -> list[types.ContentB # Implement read_resource handler @app.read_resource() - async def read_resource(uri: AnyUrl) -> str: + async def read_resource(uri: str) -> str: # Find the resource in our sample data resource = next((r for r in SAMPLE_RESOURCES if r.uri == uri), None) if not resource: diff --git a/examples/servers/simple-resource/mcp_simple_resource/server.py b/examples/servers/simple-resource/mcp_simple_resource/server.py index 
151a23eab4..f1ab4e4dcd 100644 --- a/examples/servers/simple-resource/mcp_simple_resource/server.py +++ b/examples/servers/simple-resource/mcp_simple_resource/server.py @@ -3,7 +3,6 @@ import mcp.types as types from mcp.server.lowlevel import Server from mcp.server.lowlevel.helper_types import ReadResourceContents -from pydantic import AnyUrl, FileUrl from starlette.requests import Request SAMPLE_RESOURCES = { @@ -37,20 +36,23 @@ def main(port: int, transport: str) -> int: async def list_resources() -> list[types.Resource]: return [ types.Resource( - uri=FileUrl(f"file:///{name}.txt"), + uri=f"file:///{name}.txt", name=name, title=SAMPLE_RESOURCES[name]["title"], description=f"A sample text resource named {name}", - mimeType="text/plain", + mime_type="text/plain", ) for name in SAMPLE_RESOURCES.keys() ] @app.read_resource() - async def read_resource(uri: AnyUrl): - if uri.path is None: + async def read_resource(uri: str): + from urllib.parse import urlparse + + parsed = urlparse(uri) + if not parsed.path: raise ValueError(f"Invalid resource path: {uri}") - name = uri.path.replace(".txt", "").lstrip("/") + name = parsed.path.replace(".txt", "").lstrip("/") if name not in SAMPLE_RESOURCES: raise ValueError(f"Unknown resource: {uri}") diff --git a/examples/servers/simple-streamablehttp-stateless/mcp_simple_streamablehttp_stateless/server.py b/examples/servers/simple-streamablehttp-stateless/mcp_simple_streamablehttp_stateless/server.py index f1b3987d28..1c31645249 100644 --- a/examples/servers/simple-streamablehttp-stateless/mcp_simple_streamablehttp_stateless/server.py +++ b/examples/servers/simple-streamablehttp-stateless/mcp_simple_streamablehttp_stateless/server.py @@ -6,6 +6,7 @@ import anyio import click import mcp.types as types +import uvicorn from mcp.server.lowlevel import Server from mcp.server.streamable_http_manager import StreamableHTTPSessionManager from starlette.applications import Starlette @@ -33,7 +34,7 @@ def main( port: int, log_level: str, json_response: bool, -) -> int: +) -> None: # Configure logging logging.basicConfig( level=getattr(logging, log_level.upper()), @@ -73,7 +74,7 @@ async def list_tools() -> list[types.Tool]: types.Tool( name="start-notification-stream", description=("Sends a stream of notifications with configurable count and interval"), - inputSchema={ + input_schema={ "type": "object", "required": ["interval", "count", "caller"], "properties": { @@ -118,9 +119,7 @@ async def lifespan(app: Starlette) -> AsyncIterator[None]: # Create an ASGI application using the transport starlette_app = Starlette( debug=True, - routes=[ - Mount("/mcp", app=handle_streamable_http), - ], + routes=[Mount("/mcp", app=handle_streamable_http)], lifespan=lifespan, ) @@ -133,8 +132,4 @@ async def lifespan(app: Starlette) -> AsyncIterator[None]: expose_headers=["Mcp-Session-Id"], ) - import uvicorn - uvicorn.run(starlette_app, host="127.0.0.1", port=port) - - return 0 diff --git a/examples/servers/simple-streamablehttp/mcp_simple_streamablehttp/event_store.py b/examples/servers/simple-streamablehttp/mcp_simple_streamablehttp/event_store.py index 0c3081ed64..3501fa47ce 100644 --- a/examples/servers/simple-streamablehttp/mcp_simple_streamablehttp/event_store.py +++ b/examples/servers/simple-streamablehttp/mcp_simple_streamablehttp/event_store.py @@ -1,5 +1,4 @@ -""" -In-memory event store for demonstrating resumability functionality. +"""In-memory event store for demonstrating resumability functionality. 
This is a simple implementation intended for examples and testing, not for production use where a persistent storage solution would be more appropriate. @@ -18,9 +17,7 @@ @dataclass class EventEntry: - """ - Represents an event entry in the event store. - """ + """Represents an event entry in the event store.""" event_id: EventId stream_id: StreamId @@ -28,8 +25,7 @@ class EventEntry: class InMemoryEventStore(EventStore): - """ - Simple in-memory implementation of the EventStore interface for resumability. + """Simple in-memory implementation of the EventStore interface for resumability. This is primarily intended for examples and testing, not for production use where a persistent storage solution would be more appropriate. diff --git a/examples/servers/simple-streamablehttp/mcp_simple_streamablehttp/server.py b/examples/servers/simple-streamablehttp/mcp_simple_streamablehttp/server.py index 4b2604b9af..bb09c119f0 100644 --- a/examples/servers/simple-streamablehttp/mcp_simple_streamablehttp/server.py +++ b/examples/servers/simple-streamablehttp/mcp_simple_streamablehttp/server.py @@ -8,7 +8,6 @@ import mcp.types as types from mcp.server.lowlevel import Server from mcp.server.streamable_http_manager import StreamableHTTPSessionManager -from pydantic import AnyUrl from starlette.applications import Starlette from starlette.middleware.cors import CORSMiddleware from starlette.routing import Mount @@ -74,7 +73,7 @@ async def call_tool(name: str, arguments: dict[str, Any]) -> list[types.ContentB # This will send a resource notificaiton though standalone SSE # established by GET request - await ctx.session.send_resource_updated(uri=AnyUrl("http:///test_resource")) + await ctx.session.send_resource_updated(uri="http:///test_resource") return [ types.TextContent( type="text", @@ -88,7 +87,7 @@ async def list_tools() -> list[types.Tool]: types.Tool( name="start-notification-stream", description=("Sends a stream of notifications with configurable count and interval"), - inputSchema={ + input_schema={ "type": "object", "required": ["interval", "count", "caller"], "properties": { diff --git a/examples/servers/simple-task-interactive/mcp_simple_task_interactive/server.py b/examples/servers/simple-task-interactive/mcp_simple_task_interactive/server.py index 4d35ca8094..9e8c86eaac 100644 --- a/examples/servers/simple-task-interactive/mcp_simple_task_interactive/server.py +++ b/examples/servers/simple-task-interactive/mcp_simple_task_interactive/server.py @@ -31,17 +31,17 @@ async def list_tools() -> list[types.Tool]: types.Tool( name="confirm_delete", description="Asks for confirmation before deleting (demonstrates elicitation)", - inputSchema={ + input_schema={ "type": "object", "properties": {"filename": {"type": "string"}}, }, - execution=types.ToolExecution(taskSupport=types.TASK_REQUIRED), + execution=types.ToolExecution(task_support=types.TASK_REQUIRED), ), types.Tool( name="write_haiku", description="Asks LLM to write a haiku (demonstrates sampling)", - inputSchema={"type": "object", "properties": {"topic": {"type": "string"}}}, - execution=types.ToolExecution(taskSupport=types.TASK_REQUIRED), + input_schema={"type": "object", "properties": {"topic": {"type": "string"}}}, + execution=types.ToolExecution(task_support=types.TASK_REQUIRED), ), ] @@ -59,7 +59,7 @@ async def work(task: ServerTaskContext) -> types.CallToolResult: result = await task.elicit( message=f"Are you sure you want to delete '{filename}'?", - requestedSchema={ + requested_schema={ "type": "object", "properties": {"confirm": 
{"type": "boolean"}}, "required": ["confirm"], @@ -121,7 +121,7 @@ async def handle_call_tool(name: str, arguments: dict[str, Any]) -> types.CallTo else: return types.CallToolResult( content=[types.TextContent(type="text", text=f"Unknown tool: {name}")], - isError=True, + is_error=True, ) diff --git a/examples/servers/simple-task/mcp_simple_task/server.py b/examples/servers/simple-task/mcp_simple_task/server.py index d0681b8423..ba0d962de1 100644 --- a/examples/servers/simple-task/mcp_simple_task/server.py +++ b/examples/servers/simple-task/mcp_simple_task/server.py @@ -26,8 +26,8 @@ async def list_tools() -> list[types.Tool]: types.Tool( name="long_running_task", description="A task that takes a few seconds to complete with status updates", - inputSchema={"type": "object", "properties": {}}, - execution=types.ToolExecution(taskSupport=types.TASK_REQUIRED), + input_schema={"type": "object", "properties": {}}, + execution=types.ToolExecution(task_support=types.TASK_REQUIRED), ) ] @@ -60,7 +60,7 @@ async def handle_call_tool(name: str, arguments: dict[str, Any]) -> types.CallTo else: return types.CallToolResult( content=[types.TextContent(type="text", text=f"Unknown tool: {name}")], - isError=True, + is_error=True, ) diff --git a/examples/servers/simple-tool/mcp_simple_tool/server.py b/examples/servers/simple-tool/mcp_simple_tool/server.py index 5b2b7d068d..a9a40f4d68 100644 --- a/examples/servers/simple-tool/mcp_simple_tool/server.py +++ b/examples/servers/simple-tool/mcp_simple_tool/server.py @@ -44,7 +44,7 @@ async def list_tools() -> list[types.Tool]: name="fetch", title="Website Fetcher", description="Fetches a website and returns its content", - inputSchema={ + input_schema={ "type": "object", "required": ["url"], "properties": { diff --git a/examples/servers/sse-polling-demo/mcp_sse_polling_demo/event_store.py b/examples/servers/sse-polling-demo/mcp_sse_polling_demo/event_store.py index 75f98cdd49..c77bddef36 100644 --- a/examples/servers/sse-polling-demo/mcp_sse_polling_demo/event_store.py +++ b/examples/servers/sse-polling-demo/mcp_sse_polling_demo/event_store.py @@ -1,5 +1,4 @@ -""" -In-memory event store for demonstrating resumability functionality. +"""In-memory event store for demonstrating resumability functionality. This is a simple implementation intended for examples and testing, not for production use where a persistent storage solution would be more appropriate. @@ -26,8 +25,7 @@ class EventEntry: class InMemoryEventStore(EventStore): - """ - Simple in-memory implementation of the EventStore interface for resumability. + """Simple in-memory implementation of the EventStore interface for resumability. This is primarily intended for examples and testing, not for production use where a persistent storage solution would be more appropriate. diff --git a/examples/servers/sse-polling-demo/mcp_sse_polling_demo/server.py b/examples/servers/sse-polling-demo/mcp_sse_polling_demo/server.py index e4bdcaa396..94a9320af0 100644 --- a/examples/servers/sse-polling-demo/mcp_sse_polling_demo/server.py +++ b/examples/servers/sse-polling-demo/mcp_sse_polling_demo/server.py @@ -1,5 +1,4 @@ -""" -SSE Polling Demo Server +"""SSE Polling Demo Server Demonstrates the SSE polling pattern with close_sse_stream() for long-running tasks. @@ -120,7 +119,7 @@ async def list_tools() -> list[types.Tool]: "Process a batch of items with periodic checkpoints. " "Demonstrates SSE polling where server closes stream periodically." 
), - inputSchema={ + input_schema={ "type": "object", "properties": { "items": { diff --git a/examples/servers/structured-output-lowlevel/mcp_structured_output_lowlevel/__main__.py b/examples/servers/structured-output-lowlevel/mcp_structured_output_lowlevel/__main__.py index 7f102ff8b5..49eba9464d 100644 --- a/examples/servers/structured-output-lowlevel/mcp_structured_output_lowlevel/__main__.py +++ b/examples/servers/structured-output-lowlevel/mcp_structured_output_lowlevel/__main__.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -""" -Example low-level MCP server demonstrating structured output support. +"""Example low-level MCP server demonstrating structured output support. This example shows how to use the low-level server API to return structured data from tools, with automatic validation against output @@ -27,12 +26,12 @@ async def list_tools() -> list[types.Tool]: types.Tool( name="get_weather", description="Get weather information (simulated)", - inputSchema={ + input_schema={ "type": "object", "properties": {"city": {"type": "string", "description": "City name"}}, "required": ["city"], }, - outputSchema={ + output_schema={ "type": "object", "properties": { "temperature": {"type": "number"}, @@ -49,9 +48,7 @@ async def list_tools() -> list[types.Tool]: @server.call_tool() async def call_tool(name: str, arguments: dict[str, Any]) -> Any: - """ - Handle tool call with structured output. - """ + """Handle tool call with structured output.""" if name == "get_weather": # city = arguments["city"] # Would be used with real weather API diff --git a/examples/snippets/clients/completion_client.py b/examples/snippets/clients/completion_client.py index 8c5615926e..dc0c1b4f72 100644 --- a/examples/snippets/clients/completion_client.py +++ b/examples/snippets/clients/completion_client.py @@ -1,6 +1,5 @@ -""" -cd to the `examples/snippets` directory and run: - uv run completion-client +"""cd to the `examples/snippets` directory and run: +uv run completion-client """ import asyncio @@ -28,8 +27,8 @@ async def run(): # List available resource templates templates = await session.list_resource_templates() print("Available resource templates:") - for template in templates.resourceTemplates: - print(f" - {template.uriTemplate}") + for template in templates.resource_templates: + print(f" - {template.uri_template}") # List available prompts prompts = await session.list_prompts() @@ -38,20 +37,20 @@ async def run(): print(f" - {prompt.name}") # Complete resource template arguments - if templates.resourceTemplates: - template = templates.resourceTemplates[0] - print(f"\nCompleting arguments for resource template: {template.uriTemplate}") + if templates.resource_templates: + template = templates.resource_templates[0] + print(f"\nCompleting arguments for resource template: {template.uri_template}") # Complete without context result = await session.complete( - ref=ResourceTemplateReference(type="ref/resource", uri=template.uriTemplate), + ref=ResourceTemplateReference(type="ref/resource", uri=template.uri_template), argument={"name": "owner", "value": "model"}, ) print(f"Completions for 'owner' starting with 'model': {result.completion.values}") # Complete with context - repo suggestions based on owner result = await session.complete( - ref=ResourceTemplateReference(type="ref/resource", uri=template.uriTemplate), + ref=ResourceTemplateReference(type="ref/resource", uri=template.uri_template), argument={"name": "repo", "value": ""}, context_arguments={"owner": "modelcontextprotocol"}, ) diff --git 
a/examples/snippets/clients/display_utilities.py b/examples/snippets/clients/display_utilities.py index 5f1d50510d..40e31cf2b1 100644 --- a/examples/snippets/clients/display_utilities.py +++ b/examples/snippets/clients/display_utilities.py @@ -1,6 +1,5 @@ -""" -cd to the `examples/snippets` directory and run: - uv run display-utilities-client +"""cd to the `examples/snippets` directory and run: +uv run display-utilities-client """ import asyncio @@ -39,7 +38,7 @@ async def display_resources(session: ClientSession): print(f"Resource: {display_name} ({resource.uri})") templates_response = await session.list_resource_templates() - for template in templates_response.resourceTemplates: + for template in templates_response.resource_templates: display_name = get_display_name(template) print(f"Resource Template: {display_name}") diff --git a/examples/snippets/clients/oauth_client.py b/examples/snippets/clients/oauth_client.py index 140b38aedb..6d605afa92 100644 --- a/examples/snippets/clients/oauth_client.py +++ b/examples/snippets/clients/oauth_client.py @@ -1,5 +1,4 @@ -""" -Before running, specify running MCP RS server URL. +"""Before running, specify running MCP RS server URL. To spin up RS server locally, see examples/servers/simple-auth/README.md diff --git a/examples/snippets/clients/pagination_client.py b/examples/snippets/clients/pagination_client.py index 1805d2d315..b9b8c23ae7 100644 --- a/examples/snippets/clients/pagination_client.py +++ b/examples/snippets/clients/pagination_client.py @@ -1,6 +1,4 @@ -""" -Example of consuming paginated MCP endpoints from a client. -""" +"""Example of consuming paginated MCP endpoints from a client.""" import asyncio @@ -29,8 +27,8 @@ async def list_all_resources() -> None: print(f"Fetched {len(result.resources)} resources") # Check if there are more pages - if result.nextCursor: - cursor = result.nextCursor + if result.next_cursor: + cursor = result.next_cursor else: break diff --git a/examples/snippets/clients/parsing_tool_results.py b/examples/snippets/clients/parsing_tool_results.py index 5158735461..b166406774 100644 --- a/examples/snippets/clients/parsing_tool_results.py +++ b/examples/snippets/clients/parsing_tool_results.py @@ -22,9 +22,9 @@ async def parse_tool_results(): # Example 2: Parsing structured content from JSON tools result = await session.call_tool("get_user", {"id": "123"}) - if hasattr(result, "structuredContent") and result.structuredContent: + if hasattr(result, "structured_content") and result.structured_content: # Access structured data directly - user_data = result.structuredContent + user_data = result.structured_content print(f"User: {user_data.get('name')}, Age: {user_data.get('age')}") # Example 3: Parsing embedded resources @@ -41,11 +41,11 @@ async def parse_tool_results(): result = await session.call_tool("generate_chart", {"data": [1, 2, 3]}) for content in result.content: if isinstance(content, types.ImageContent): - print(f"Image ({content.mimeType}): {len(content.data)} bytes") + print(f"Image ({content.mime_type}): {len(content.data)} bytes") # Example 5: Handling errors result = await session.call_tool("failing_tool", {}) - if result.isError: + if result.is_error: print("Tool execution failed!") for content in result.content: if isinstance(content, types.TextContent): diff --git a/examples/snippets/clients/stdio_client.py b/examples/snippets/clients/stdio_client.py index ac978035d4..b594a217b1 100644 --- a/examples/snippets/clients/stdio_client.py +++ b/examples/snippets/clients/stdio_client.py @@ -1,6 +1,5 @@ 
-""" -cd to the `examples/snippets/clients` directory and run: - uv run client +"""cd to the `examples/snippets/clients` directory and run: +uv run client """ import asyncio @@ -32,7 +31,7 @@ async def handle_sampling_message( text="Hello, world! from model", ), model="gpt-3.5-turbo", - stopReason="endTurn", + stop_reason="endTurn", ) @@ -70,7 +69,7 @@ async def run(): result_unstructured = result.content[0] if isinstance(result_unstructured, types.TextContent): print(f"Tool result: {result_unstructured.text}") - result_structured = result.structuredContent + result_structured = result.structured_content print(f"Structured tool result: {result_structured}") diff --git a/examples/snippets/clients/streamable_basic.py b/examples/snippets/clients/streamable_basic.py index 071ea81553..87e16f4ba9 100644 --- a/examples/snippets/clients/streamable_basic.py +++ b/examples/snippets/clients/streamable_basic.py @@ -1,6 +1,5 @@ -""" -Run from the repository root: - uv run examples/snippets/clients/streamable_basic.py +"""Run from the repository root: +uv run examples/snippets/clients/streamable_basic.py """ import asyncio diff --git a/examples/snippets/clients/url_elicitation_client.py b/examples/snippets/clients/url_elicitation_client.py index 56457512c6..300c38fa0c 100644 --- a/examples/snippets/clients/url_elicitation_client.py +++ b/examples/snippets/clients/url_elicitation_client.py @@ -154,7 +154,7 @@ async def call_tool_with_error_handling( result = await session.call_tool(tool_name, arguments) # Check if the tool returned an error in the result - if result.isError: + if result.is_error: print(f"Tool returned error: {result.content}") return None diff --git a/examples/snippets/servers/completion.py b/examples/snippets/servers/completion.py index 2a31541ddc..d7626f0b4b 100644 --- a/examples/snippets/servers/completion.py +++ b/examples/snippets/servers/completion.py @@ -36,7 +36,7 @@ async def handle_completion( languages = ["python", "javascript", "typescript", "go", "rust"] return Completion( values=[lang for lang in languages if lang.startswith(argument.value)], - hasMore=False, + has_more=False, ) # Complete repository names for GitHub resources @@ -44,6 +44,6 @@ async def handle_completion( if ref.uri == "github://repos/{owner}/{repo}" and argument.name == "repo": if context and context.arguments and context.arguments.get("owner") == "modelcontextprotocol": repos = ["python-sdk", "typescript-sdk", "specification"] - return Completion(values=repos, hasMore=False) + return Completion(values=repos, has_more=False) return None diff --git a/examples/snippets/servers/direct_call_tool_result.py b/examples/snippets/servers/direct_call_tool_result.py index 54d49b2f66..3dfff91f12 100644 --- a/examples/snippets/servers/direct_call_tool_result.py +++ b/examples/snippets/servers/direct_call_tool_result.py @@ -31,7 +31,7 @@ def validated_tool() -> Annotated[CallToolResult, ValidationModel]: """Return CallToolResult with structured output validation.""" return CallToolResult( content=[TextContent(type="text", text="Validated response")], - structuredContent={"status": "success", "data": {"result": 42}}, + structured_content={"status": "success", "data": {"result": 42}}, _meta={"internal": "metadata"}, ) diff --git a/examples/snippets/servers/elicitation.py b/examples/snippets/servers/elicitation.py index a1a65fb32c..34921aa4b3 100644 --- a/examples/snippets/servers/elicitation.py +++ b/examples/snippets/servers/elicitation.py @@ -93,7 +93,7 @@ async def connect_service(service_name: str, ctx: 
Context[ServerSession, None]) mode="url", message=f"Authorization required to connect to {service_name}", url=f"https://{service_name}.example.com/oauth/authorize?elicit={elicitation_id}", - elicitationId=elicitation_id, + elicitation_id=elicitation_id, ) ] ) diff --git a/examples/snippets/servers/fastmcp_quickstart.py b/examples/snippets/servers/fastmcp_quickstart.py index 931cd263f8..f762e908ab 100644 --- a/examples/snippets/servers/fastmcp_quickstart.py +++ b/examples/snippets/servers/fastmcp_quickstart.py @@ -1,5 +1,4 @@ -""" -FastMCP quickstart example. +"""FastMCP quickstart example. Run from the repository root: uv run examples/snippets/servers/fastmcp_quickstart.py @@ -8,7 +7,7 @@ from mcp.server.fastmcp import FastMCP # Create an MCP server -mcp = FastMCP("Demo", json_response=True) +mcp = FastMCP("Demo") # Add an addition tool @@ -40,4 +39,4 @@ def greet_user(name: str, style: str = "friendly") -> str: # Run with streamable HTTP transport if __name__ == "__main__": - mcp.run(transport="streamable-http") + mcp.run(transport="streamable-http", json_response=True) diff --git a/examples/snippets/servers/lowlevel/basic.py b/examples/snippets/servers/lowlevel/basic.py index a5c4149df7..ee01b84268 100644 --- a/examples/snippets/servers/lowlevel/basic.py +++ b/examples/snippets/servers/lowlevel/basic.py @@ -1,5 +1,4 @@ -""" -Run from the repository root: +"""Run from the repository root: uv run examples/snippets/servers/lowlevel/basic.py """ diff --git a/examples/snippets/servers/lowlevel/direct_call_tool_result.py b/examples/snippets/servers/lowlevel/direct_call_tool_result.py index 496eaad105..967dc0cbaa 100644 --- a/examples/snippets/servers/lowlevel/direct_call_tool_result.py +++ b/examples/snippets/servers/lowlevel/direct_call_tool_result.py @@ -1,6 +1,5 @@ -""" -Run from the repository root: - uv run examples/snippets/servers/lowlevel/direct_call_tool_result.py +"""Run from the repository root: +uv run examples/snippets/servers/lowlevel/direct_call_tool_result.py """ import asyncio @@ -21,7 +20,7 @@ async def list_tools() -> list[types.Tool]: types.Tool( name="advanced_tool", description="Tool with full control including _meta field", - inputSchema={ + input_schema={ "type": "object", "properties": {"message": {"type": "string"}}, "required": ["message"], @@ -37,7 +36,7 @@ async def handle_call_tool(name: str, arguments: dict[str, Any]) -> types.CallTo message = str(arguments.get("message", "")) return types.CallToolResult( content=[types.TextContent(type="text", text=f"Processed: {message}")], - structuredContent={"result": "success", "message": message}, + structured_content={"result": "success", "message": message}, _meta={"hidden": "data for client applications only"}, ) diff --git a/examples/snippets/servers/lowlevel/lifespan.py b/examples/snippets/servers/lowlevel/lifespan.py index ada3731224..89ef0385a2 100644 --- a/examples/snippets/servers/lowlevel/lifespan.py +++ b/examples/snippets/servers/lowlevel/lifespan.py @@ -1,6 +1,5 @@ -""" -Run from the repository root: - uv run examples/snippets/servers/lowlevel/lifespan.py +"""Run from the repository root: +uv run examples/snippets/servers/lowlevel/lifespan.py """ from collections.abc import AsyncIterator @@ -56,7 +55,7 @@ async def handle_list_tools() -> list[types.Tool]: types.Tool( name="query_db", description="Query the database", - inputSchema={ + input_schema={ "type": "object", "properties": {"query": {"type": "string", "description": "SQL query to execute"}}, "required": ["query"], diff --git 
a/examples/snippets/servers/lowlevel/structured_output.py b/examples/snippets/servers/lowlevel/structured_output.py index 0237c9ab31..a99a1ac635 100644 --- a/examples/snippets/servers/lowlevel/structured_output.py +++ b/examples/snippets/servers/lowlevel/structured_output.py @@ -1,6 +1,5 @@ -""" -Run from the repository root: - uv run examples/snippets/servers/lowlevel/structured_output.py +"""Run from the repository root: +uv run examples/snippets/servers/lowlevel/structured_output.py """ import asyncio @@ -21,12 +20,12 @@ async def list_tools() -> list[types.Tool]: types.Tool( name="get_weather", description="Get current weather for a city", - inputSchema={ + input_schema={ "type": "object", "properties": {"city": {"type": "string", "description": "City name"}}, "required": ["city"], }, - outputSchema={ + output_schema={ "type": "object", "properties": { "temperature": {"type": "number", "description": "Temperature in Celsius"}, diff --git a/examples/snippets/servers/oauth_server.py b/examples/snippets/servers/oauth_server.py index 3717c66de8..226a3ec6e1 100644 --- a/examples/snippets/servers/oauth_server.py +++ b/examples/snippets/servers/oauth_server.py @@ -1,6 +1,5 @@ -""" -Run from the repository root: - uv run examples/snippets/servers/oauth_server.py +"""Run from the repository root: +uv run examples/snippets/servers/oauth_server.py """ from pydantic import AnyHttpUrl @@ -20,7 +19,6 @@ async def verify_token(self, token: str) -> AccessToken | None: # Create FastMCP instance as a Resource Server mcp = FastMCP( "Weather Service", - json_response=True, # Token verifier for authentication token_verifier=SimpleTokenVerifier(), # Auth settings for RFC 9728 Protected Resource Metadata @@ -44,4 +42,4 @@ async def get_weather(city: str = "London") -> dict[str, str]: if __name__ == "__main__": - mcp.run(transport="streamable-http") + mcp.run(transport="streamable-http", json_response=True) diff --git a/examples/snippets/servers/pagination_example.py b/examples/snippets/servers/pagination_example.py index 70c3b3492c..7ed30365ce 100644 --- a/examples/snippets/servers/pagination_example.py +++ b/examples/snippets/servers/pagination_example.py @@ -1,8 +1,4 @@ -""" -Example of implementing pagination with MCP server decorators. 
-""" - -from pydantic import AnyUrl +"""Example of implementing pagination with MCP server decorators.""" import mcp.types as types from mcp.server.lowlevel import Server @@ -28,11 +24,11 @@ async def list_resources_paginated(request: types.ListResourcesRequest) -> types # Get page of resources page_items = [ - types.Resource(uri=AnyUrl(f"resource://items/{item}"), name=item, description=f"Description for {item}") + types.Resource(uri=f"resource://items/{item}", name=item, description=f"Description for {item}") for item in ITEMS[start:end] ] # Determine next cursor next_cursor = str(end) if end < len(ITEMS) else None - return types.ListResourcesResult(resources=page_items, nextCursor=next_cursor) + return types.ListResourcesResult(resources=page_items, next_cursor=next_cursor) diff --git a/examples/snippets/servers/streamable_config.py b/examples/snippets/servers/streamable_config.py index d351a45d86..ca68102242 100644 --- a/examples/snippets/servers/streamable_config.py +++ b/examples/snippets/servers/streamable_config.py @@ -1,19 +1,10 @@ -""" -Run from the repository root: - uv run examples/snippets/servers/streamable_config.py +"""Run from the repository root: +uv run examples/snippets/servers/streamable_config.py """ from mcp.server.fastmcp import FastMCP -# Stateless server with JSON responses (recommended) -mcp = FastMCP("StatelessServer", stateless_http=True, json_response=True) - -# Other configuration options: -# Stateless server with SSE streaming responses -# mcp = FastMCP("StatelessServer", stateless_http=True) - -# Stateful server with session persistence -# mcp = FastMCP("StatefulServer") +mcp = FastMCP("StatelessServer") # Add a simple tool to demonstrate the server @@ -24,5 +15,14 @@ def greet(name: str = "World") -> str: # Run server with streamable_http transport +# Transport-specific options (stateless_http, json_response) are passed to run() if __name__ == "__main__": - mcp.run(transport="streamable-http") + # Stateless server with JSON responses (recommended) + mcp.run(transport="streamable-http", stateless_http=True, json_response=True) + + # Other configuration options: + # Stateless server with SSE streaming responses + # mcp.run(transport="streamable-http", stateless_http=True) + + # Stateful server with session persistence + # mcp.run(transport="streamable-http") diff --git a/examples/snippets/servers/streamable_http_basic_mounting.py b/examples/snippets/servers/streamable_http_basic_mounting.py index 74aa36ed4f..7a32dbef96 100644 --- a/examples/snippets/servers/streamable_http_basic_mounting.py +++ b/examples/snippets/servers/streamable_http_basic_mounting.py @@ -1,5 +1,4 @@ -""" -Basic example showing how to mount StreamableHTTP server in Starlette. +"""Basic example showing how to mount StreamableHTTP server in Starlette. 
Run from the repository root: uvicorn examples.snippets.servers.streamable_http_basic_mounting:app --reload @@ -13,7 +12,7 @@ from mcp.server.fastmcp import FastMCP # Create MCP server -mcp = FastMCP("My App", json_response=True) +mcp = FastMCP("My App") @mcp.tool() @@ -30,9 +29,10 @@ async def lifespan(app: Starlette): # Mount the StreamableHTTP server to the existing ASGI server +# Transport-specific options are passed to streamable_http_app() app = Starlette( routes=[ - Mount("/", app=mcp.streamable_http_app()), + Mount("/", app=mcp.streamable_http_app(json_response=True)), ], lifespan=lifespan, ) diff --git a/examples/snippets/servers/streamable_http_host_mounting.py b/examples/snippets/servers/streamable_http_host_mounting.py index 3ae9d341e1..57da57f7b7 100644 --- a/examples/snippets/servers/streamable_http_host_mounting.py +++ b/examples/snippets/servers/streamable_http_host_mounting.py @@ -1,5 +1,4 @@ -""" -Example showing how to mount StreamableHTTP server using Host-based routing. +"""Example showing how to mount StreamableHTTP server using Host-based routing. Run from the repository root: uvicorn examples.snippets.servers.streamable_http_host_mounting:app --reload @@ -13,7 +12,7 @@ from mcp.server.fastmcp import FastMCP # Create MCP server -mcp = FastMCP("MCP Host App", json_response=True) +mcp = FastMCP("MCP Host App") @mcp.tool() @@ -30,9 +29,10 @@ async def lifespan(app: Starlette): # Mount using Host-based routing +# Transport-specific options are passed to streamable_http_app() app = Starlette( routes=[ - Host("mcp.acme.corp", app=mcp.streamable_http_app()), + Host("mcp.acme.corp", app=mcp.streamable_http_app(json_response=True)), ], lifespan=lifespan, ) diff --git a/examples/snippets/servers/streamable_http_multiple_servers.py b/examples/snippets/servers/streamable_http_multiple_servers.py index 8d0a1018d2..cf6c6985d2 100644 --- a/examples/snippets/servers/streamable_http_multiple_servers.py +++ b/examples/snippets/servers/streamable_http_multiple_servers.py @@ -1,5 +1,4 @@ -""" -Example showing how to mount multiple StreamableHTTP servers with path configuration. +"""Example showing how to mount multiple StreamableHTTP servers with path configuration. 
Run from the repository root: uvicorn examples.snippets.servers.streamable_http_multiple_servers:app --reload @@ -13,8 +12,8 @@ from mcp.server.fastmcp import FastMCP # Create multiple MCP servers -api_mcp = FastMCP("API Server", json_response=True) -chat_mcp = FastMCP("Chat Server", json_response=True) +api_mcp = FastMCP("API Server") +chat_mcp = FastMCP("Chat Server") @api_mcp.tool() @@ -29,12 +28,6 @@ def send_message(message: str) -> str: return f"Message sent: {message}" -# Configure servers to mount at the root of each path -# This means endpoints will be at /api and /chat instead of /api/mcp and /chat/mcp -api_mcp.settings.streamable_http_path = "/" -chat_mcp.settings.streamable_http_path = "/" - - # Create a combined lifespan to manage both session managers @contextlib.asynccontextmanager async def lifespan(app: Starlette): @@ -44,11 +37,12 @@ async def lifespan(app: Starlette): yield -# Mount the servers +# Mount the servers with transport-specific options passed to streamable_http_app() +# streamable_http_path="/" means endpoints will be at /api and /chat instead of /api/mcp and /chat/mcp app = Starlette( routes=[ - Mount("/api", app=api_mcp.streamable_http_app()), - Mount("/chat", app=chat_mcp.streamable_http_app()), + Mount("/api", app=api_mcp.streamable_http_app(json_response=True, streamable_http_path="/")), + Mount("/chat", app=chat_mcp.streamable_http_app(json_response=True, streamable_http_path="/")), ], lifespan=lifespan, ) diff --git a/examples/snippets/servers/streamable_http_path_config.py b/examples/snippets/servers/streamable_http_path_config.py index 9fabf12fa7..1dcceeeeb7 100644 --- a/examples/snippets/servers/streamable_http_path_config.py +++ b/examples/snippets/servers/streamable_http_path_config.py @@ -1,5 +1,4 @@ -""" -Example showing path configuration during FastMCP initialization. +"""Example showing path configuration when mounting FastMCP. 
Run from the repository root: uvicorn examples.snippets.servers.streamable_http_path_config:app --reload @@ -10,13 +9,8 @@ from mcp.server.fastmcp import FastMCP -# Configure streamable_http_path during initialization -# This server will mount at the root of wherever it's mounted -mcp_at_root = FastMCP( - "My Server", - json_response=True, - streamable_http_path="/", -) +# Create a simple FastMCP server +mcp_at_root = FastMCP("My Server") @mcp_at_root.tool() @@ -25,9 +19,13 @@ def process_data(data: str) -> str: return f"Processed: {data}" -# Mount at /process - endpoints will be at /process instead of /process/mcp +# Mount at /process with streamable_http_path="/" so the endpoint is /process (not /process/mcp) +# Transport-specific options like json_response are passed to streamable_http_app() app = Starlette( routes=[ - Mount("/process", app=mcp_at_root.streamable_http_app()), + Mount( + "/process", + app=mcp_at_root.streamable_http_app(json_response=True, streamable_http_path="/"), + ), ] ) diff --git a/examples/snippets/servers/streamable_starlette_mount.py b/examples/snippets/servers/streamable_starlette_mount.py index b3a630b0f5..c33dc71bc7 100644 --- a/examples/snippets/servers/streamable_starlette_mount.py +++ b/examples/snippets/servers/streamable_starlette_mount.py @@ -1,6 +1,5 @@ -""" -Run from the repository root: - uvicorn examples.snippets.servers.streamable_starlette_mount:app --reload +"""Run from the repository root: +uvicorn examples.snippets.servers.streamable_starlette_mount:app --reload """ import contextlib @@ -11,7 +10,7 @@ from mcp.server.fastmcp import FastMCP # Create the Echo server -echo_mcp = FastMCP(name="EchoServer", stateless_http=True, json_response=True) +echo_mcp = FastMCP(name="EchoServer") @echo_mcp.tool() @@ -21,7 +20,7 @@ def echo(message: str) -> str: # Create the Math server -math_mcp = FastMCP(name="MathServer", stateless_http=True, json_response=True) +math_mcp = FastMCP(name="MathServer") @math_mcp.tool() @@ -42,13 +41,13 @@ async def lifespan(app: Starlette): # Create the Starlette app and mount the MCP servers app = Starlette( routes=[ - Mount("/echo", echo_mcp.streamable_http_app()), - Mount("/math", math_mcp.streamable_http_app()), + Mount("/echo", echo_mcp.streamable_http_app(stateless_http=True, json_response=True)), + Mount("/math", math_mcp.streamable_http_app(stateless_http=True, json_response=True)), ], lifespan=lifespan, ) # Note: Clients connect to http://localhost:8000/echo/mcp and http://localhost:8000/math/mcp # To mount at the root of each path (e.g., /echo instead of /echo/mcp): -# echo_mcp.settings.streamable_http_path = "/" -# math_mcp.settings.streamable_http_path = "/" +# echo_mcp.streamable_http_app(streamable_http_path="/", stateless_http=True, json_response=True) +# math_mcp.streamable_http_app(streamable_http_path="/", stateless_http=True, json_response=True) diff --git a/mkdocs.yml b/mkdocs.yml index 22c323d9d4..3019f5214b 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -13,6 +13,7 @@ site_url: https://modelcontextprotocol.github.io/python-sdk nav: - Introduction: index.md - Installation: installation.md + - Migration Guide: migration.md - Documentation: - Concepts: concepts.md - Low-Level Server: low-level-server.md @@ -126,7 +127,7 @@ plugins: group_by_category: false # 3 because docs are in pages with an H2 just above them heading_level: 3 - import: + inventories: - url: https://docs.python.org/3/objects.inv - url: https://docs.pydantic.dev/latest/objects.inv - url: 
https://typing-extensions.readthedocs.io/en/latest/objects.inv
diff --git a/pyproject.toml b/pyproject.toml
index 078a1dfdcb..87eac72133 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -20,19 +20,22 @@ classifiers = [
     "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3.12",
     "Programming Language :: Python :: 3.13",
+    "Programming Language :: Python :: 3.14",
 ]
 dependencies = [
     "anyio>=4.5",
     "httpx>=0.27.1",
     "httpx-sse>=0.4",
-    "pydantic>=2.11.0,<3.0.0",
-    "starlette>=0.27",
+    "pydantic>=2.12.0; python_version >= '3.14'",
+    "pydantic>=2.11.0; python_version < '3.14'",
+    "starlette>=0.48.0; python_version >= '3.14'",
+    "starlette>=0.27; python_version < '3.14'",
     "python-multipart>=0.0.9",
     "sse-starlette>=1.6.1",
     "pydantic-settings>=2.5.2",
     "uvicorn>=0.31.1; sys_platform != 'emscripten'",
     "jsonschema>=4.20.0",
-    "pywin32>=310; sys_platform == 'win32'",
+    "pywin32>=311; sys_platform == 'win32'",
     "pyjwt[crypto]>=2.10.1",
     "typing-extensions>=4.9.0",
     "typing-inspection>=0.4.1",
@@ -52,6 +55,8 @@ required-version = ">=0.9.5"
 
 [dependency-groups]
 dev = [
+    # We add mcp[cli,ws] so `uv sync` considers the extras.
+    "mcp[cli,ws]",
     "pyright>=1.1.400",
     "pytest>=8.3.4",
     "ruff>=0.8.5",
@@ -62,13 +67,14 @@ dev = [
     "pytest-pretty>=1.2.0",
     "inline-snapshot>=0.23.0",
     "dirty-equals>=0.9.0",
-    "coverage[toml]==7.10.7",
+    "coverage[toml]>=7.13.1",
+    "pillow>=12.0",
 ]
 docs = [
     "mkdocs>=1.6.1",
     "mkdocs-glightbox>=0.4.0",
-    "mkdocs-material[imaging]>=9.5.45",
-    "mkdocstrings-python>=1.12.2",
+    "mkdocs-material>=9.5.45",
+    "mkdocstrings-python>=2.0.1",
 ]
 
 [build-system]
@@ -93,7 +99,13 @@ packages = ["src/mcp"]
 
 [tool.pyright]
 typeCheckingMode = "strict"
-include = ["src/mcp", "tests", "examples/servers", "examples/snippets"]
+include = [
+    "src/mcp",
+    "tests",
+    "examples/servers",
+    "examples/snippets",
+    "examples/clients",
+]
 venvPath = "."
 venv = ".venv"
 # The FastAPI style of using decorators in tests gives a `reportUnusedFunction` error.
@@ -102,7 +114,9 @@ venv = ".venv"
 # those private functions instead of testing the private functions directly. It makes it easier to maintain the code source
 # and refactor code that is not public.
 executionEnvironments = [
-    { root = "tests", extraPaths = ["."], reportUnusedFunction = false, reportPrivateUsage = false },
+    { root = "tests", extraPaths = [
+        ".",
+    ], reportUnusedFunction = false, reportPrivateUsage = false },
     { root = "examples/servers", reportUnusedFunction = false },
 ]
 
@@ -113,17 +127,25 @@ extend-exclude = ["README.md"]
 
 [tool.ruff.lint]
 select = [
-    "C4",  # flake8-comprehensions
-    "C90",  # mccabe
-    "E",  # pycodestyle
-    "F",  # pyflakes
-    "I",  # isort
-    "PERF",  # Perflint
-    "PL",  # Pylint
-    "UP",  # pyupgrade
+    "C4",  # flake8-comprehensions
+    "C90",  # mccabe
+    "D212",  # pydocstyle: multi-line docstring summary should start at the first line
+    "E",  # pycodestyle
+    "F",  # pyflakes
+    "I",  # isort
+    "PERF",  # Perflint
+    "PL",  # Pylint
+    "UP",  # pyupgrade
+    "TID251",  # https://docs.astral.sh/ruff/rules/banned-api/
 ]
 ignore = ["PERF203", "PLC0415", "PLR0402"]
-mccabe.max-complexity = 24  # Default is 10
+
+[tool.ruff.lint.flake8-tidy-imports.banned-api]
+"pydantic.RootModel".msg = "Use `pydantic.TypeAdapter` instead."
+
+
+[tool.ruff.lint.mccabe]
+max-complexity = 24  # Default is 10
 
 [tool.ruff.lint.per-file-ignores]
 "__init__.py" = ["F401"]
@@ -149,7 +171,6 @@ xfail_strict = true
 addopts = """
     --color=yes
     --capture=fd
-    --numprocesses auto
 """
 filterwarnings = [
     "error",
diff --git a/scripts/update_readme_snippets.py b/scripts/update_readme_snippets.py
index d325333fff..8d1f198230 100755
--- a/scripts/update_readme_snippets.py
+++ b/scripts/update_readme_snippets.py
@@ -1,6 +1,5 @@
 #!/usr/bin/env python3
-"""
-Update README.md with live code snippets from example files.
+"""Update README.md with live code snippets from example files.
 
 This script finds specially marked code blocks in README.md and updates them
 with the actual code from the referenced files.
diff --git a/src/mcp/__init__.py b/src/mcp/__init__.py
index fbec40d0a9..9823523148 100644
--- a/src/mcp/__init__.py
+++ b/src/mcp/__init__.py
@@ -1,3 +1,4 @@
+from .client.client import Client
 from .client.session import ClientSession
 from .client.session_group import ClientSessionGroup
 from .client.stdio import StdioServerParameters, stdio_client
@@ -62,12 +63,11 @@
     ToolUseContent,
     UnsubscribeRequest,
 )
-from .types import (
-    Role as SamplingRole,
-)
+from .types import Role as SamplingRole
 
 __all__ = [
     "CallToolRequest",
+    "Client",
     "ClientCapabilities",
     "ClientNotification",
     "ClientRequest",
diff --git a/src/mcp/client/__init__.py b/src/mcp/client/__init__.py
index e69de29bb2..7b94647102 100644
--- a/src/mcp/client/__init__.py
+++ b/src/mcp/client/__init__.py
@@ -0,0 +1,9 @@
+"""MCP Client module."""
+
+from mcp.client.client import Client
+from mcp.client.session import ClientSession
+
+__all__ = [
+    "Client",
+    "ClientSession",
+]
diff --git a/src/mcp/client/__main__.py b/src/mcp/client/__main__.py
index 2efe05d536..bef466b30e 100644
--- a/src/mcp/client/__main__.py
+++ b/src/mcp/client/__main__.py
@@ -1,6 +1,7 @@
 import argparse
 import logging
 import sys
+import warnings
 from functools import partial
 from urllib.parse import urlparse
 
@@ -15,8 +16,6 @@
 from mcp.shared.session import RequestResponder
 
 if not sys.warnoptions:
-    import warnings
-
     warnings.simplefilter("ignore")
 
 logging.basicConfig(level=logging.INFO)
diff --git a/src/mcp/client/_memory.py b/src/mcp/client/_memory.py
new file mode 100644
index 0000000000..3589d0da75
--- /dev/null
+++ b/src/mcp/client/_memory.py
@@ -0,0 +1,89 @@
+"""In-memory transport for testing MCP servers without network overhead."""
+
+from __future__ import annotations
+
+from collections.abc import AsyncGenerator
+from contextlib import asynccontextmanager
+from typing import Any
+
+import anyio
+from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream
+
+from mcp.server import Server
+from mcp.server.fastmcp import FastMCP
+from mcp.shared.memory import create_client_server_memory_streams
+from mcp.shared.message import SessionMessage
+
+
+class InMemoryTransport:
+    """In-memory transport for testing MCP servers without network overhead.
+
+    This transport starts the server in a background task and provides
+    streams for client-side communication. The server is automatically
+    stopped when the context manager exits.
+
+    Example:
+        server = FastMCP("test")
+        transport = InMemoryTransport(server)
+
+        async with transport.connect() as (read_stream, write_stream):
+            async with ClientSession(read_stream, write_stream) as session:
+                await session.initialize()
+                # Use the session...
+ + Or more commonly, use with Client: + async with Client(server) as client: + result = await client.call_tool("my_tool", {...}) + """ + + def __init__(self, server: Server[Any] | FastMCP, *, raise_exceptions: bool = False) -> None: + """Initialize the in-memory transport. + + Args: + server: The MCP server to connect to (Server or FastMCP instance) + raise_exceptions: Whether to raise exceptions from the server + """ + self._server = server + self._raise_exceptions = raise_exceptions + + @asynccontextmanager + async def connect( + self, + ) -> AsyncGenerator[ + tuple[ + MemoryObjectReceiveStream[SessionMessage | Exception], + MemoryObjectSendStream[SessionMessage], + ], + None, + ]: + """Connect to the server and return streams for communication. + + Yields: + A tuple of (read_stream, write_stream) for bidirectional communication + """ + # Unwrap FastMCP to get underlying Server + actual_server: Server[Any] + if isinstance(self._server, FastMCP): + actual_server = self._server._mcp_server # type: ignore[reportPrivateUsage] + else: + actual_server = self._server + + async with create_client_server_memory_streams() as (client_streams, server_streams): + client_read, client_write = client_streams + server_read, server_write = server_streams + + async with anyio.create_task_group() as tg: + # Start server in background + tg.start_soon( + lambda: actual_server.run( + server_read, + server_write, + actual_server.create_initialization_options(), + raise_exceptions=self._raise_exceptions, + ) + ) + + try: + yield client_read, client_write + finally: + tg.cancel_scope.cancel() diff --git a/src/mcp/client/auth/__init__.py b/src/mcp/client/auth/__init__.py index 252dfd9e4c..ab3179ecb9 100644 --- a/src/mcp/client/auth/__init__.py +++ b/src/mcp/client/auth/__init__.py @@ -1,5 +1,4 @@ -""" -OAuth2 Authentication implementation for HTTPX. +"""OAuth2 Authentication implementation for HTTPX. Implements authorization code flow with PKCE and automatic token refresh. """ diff --git a/src/mcp/client/auth/extensions/client_credentials.py b/src/mcp/client/auth/extensions/client_credentials.py index e2f3f08a4d..07f6180bf1 100644 --- a/src/mcp/client/auth/extensions/client_credentials.py +++ b/src/mcp/client/auth/extensions/client_credentials.py @@ -1,5 +1,4 @@ -""" -OAuth client credential extensions for MCP. +"""OAuth client credential extensions for MCP. Provides OAuth providers for machine-to-machine authentication flows: - ClientCredentialsOAuthProvider: For client_credentials with client_id + client_secret @@ -9,6 +8,7 @@ """ import time +import warnings from collections.abc import Awaitable, Callable from typing import Any, Literal from uuid import uuid4 @@ -409,8 +409,6 @@ def __init__( timeout: float = 300.0, jwt_parameters: JWTParameters | None = None, ) -> None: - import warnings - warnings.warn( "RFC7523OAuthClientProvider is deprecated. Use ClientCredentialsOAuthProvider " "or PrivateKeyJWTOAuthProvider instead.", diff --git a/src/mcp/client/auth/oauth2.py b/src/mcp/client/auth/oauth2.py index cd96a7566d..8e699b57a0 100644 --- a/src/mcp/client/auth/oauth2.py +++ b/src/mcp/client/auth/oauth2.py @@ -1,5 +1,4 @@ -""" -OAuth2 Authentication implementation for HTTPX. +"""OAuth2 Authentication implementation for HTTPX. Implements authorization code flow with PKCE and automatic token refresh. """ @@ -215,8 +214,7 @@ def prepare_token_auth( class OAuthClientProvider(httpx.Auth): - """ - OAuth2 authentication for httpx. + """OAuth2 authentication for httpx. 
Handles OAuth flow with automatic client registration and token storage. """ @@ -268,8 +266,7 @@ def __init__( self._initialized = False async def _handle_protected_resource_response(self, response: httpx.Response) -> bool: - """ - Handle protected resource metadata discovery response. + """Handle protected resource metadata discovery response. Per SEP-985, supports fallback when discovery fails at one URL. @@ -399,7 +396,7 @@ async def _exchange_token_authorization_code( async def _handle_token_response(self, response: httpx.Response) -> None: """Handle token exchange response.""" - if response.status_code != 200: + if response.status_code not in {200, 201}: body = await response.aread() # pragma: no cover body_text = body.decode("utf-8") # pragma: no cover raise OAuthTokenError(f"Token exchange failed ({response.status_code}): {body_text}") # pragma: no cover diff --git a/src/mcp/client/auth/utils.py b/src/mcp/client/auth/utils.py index b4426be7f8..1ce57818dc 100644 --- a/src/mcp/client/auth/utils.py +++ b/src/mcp/client/auth/utils.py @@ -20,8 +20,7 @@ def extract_field_from_www_auth(response: Response, field_name: str) -> str | None: - """ - Extract field from WWW-Authenticate header. + """Extract field from WWW-Authenticate header. Returns: Field value if found in WWW-Authenticate header, None otherwise @@ -42,8 +41,7 @@ def extract_field_from_www_auth(response: Response, field_name: str) -> str | No def extract_scope_from_www_auth(response: Response) -> str | None: - """ - Extract scope parameter from WWW-Authenticate header as per RFC6750. + """Extract scope parameter from WWW-Authenticate header as per RFC6750. Returns: Scope string if found in WWW-Authenticate header, None otherwise @@ -52,8 +50,7 @@ def extract_scope_from_www_auth(response: Response) -> str | None: def extract_resource_metadata_from_www_auth(response: Response) -> str | None: - """ - Extract protected resource metadata URL from WWW-Authenticate header as per RFC9728. + """Extract protected resource metadata URL from WWW-Authenticate header as per RFC9728. Returns: Resource metadata URL if found in WWW-Authenticate header, None otherwise @@ -65,8 +62,7 @@ def extract_resource_metadata_from_www_auth(response: Response) -> str | None: def build_protected_resource_metadata_discovery_urls(www_auth_url: str | None, server_url: str) -> list[str]: - """ - Build ordered list of URLs to try for protected resource metadata discovery. + """Build ordered list of URLs to try for protected resource metadata discovery. Per SEP-985, the client MUST: 1. Try resource_metadata from WWW-Authenticate header (if present) @@ -127,8 +123,7 @@ def get_client_metadata_scopes( def build_oauth_authorization_server_metadata_discovery_urls(auth_server_url: str | None, server_url: str) -> list[str]: - """ - Generate ordered list of (url, type) tuples for discovery attempts. + """Generate ordered list of (url, type) tuples for discovery attempts. Args: auth_server_url: URL for the OAuth Authorization Metadata URL if found, otherwise None @@ -173,8 +168,7 @@ def build_oauth_authorization_server_metadata_discovery_urls(auth_server_url: st async def handle_protected_resource_response( response: Response, ) -> ProtectedResourceMetadata | None: - """ - Handle protected resource metadata discovery response. + """Handle protected resource metadata discovery response. Per SEP-985, supports fallback when discovery fails at one URL. 
diff --git a/src/mcp/client/client.py b/src/mcp/client/client.py new file mode 100644 index 0000000000..6eafb794a3 --- /dev/null +++ b/src/mcp/client/client.py @@ -0,0 +1,273 @@ +"""Unified MCP Client that wraps ClientSession with transport management.""" + +from __future__ import annotations + +import logging +from contextlib import AsyncExitStack +from typing import Any + +from pydantic import AnyUrl + +import mcp.types as types +from mcp.client._memory import InMemoryTransport +from mcp.client.session import ( + ClientSession, + ElicitationFnT, + ListRootsFnT, + LoggingFnT, + MessageHandlerFnT, + SamplingFnT, +) +from mcp.server import Server +from mcp.server.fastmcp import FastMCP +from mcp.shared.session import ProgressFnT + +logger = logging.getLogger(__name__) + + +class Client: + """A high-level MCP client for connecting to MCP servers. + + Currently supports in-memory transport for testing. Pass a Server or + FastMCP instance directly to the constructor. + + Example: + ```python + from mcp.client import Client + from mcp.server.fastmcp import FastMCP + + server = FastMCP("test") + + @server.tool() + def add(a: int, b: int) -> int: + return a + b + + async with Client(server) as client: + result = await client.call_tool("add", {"a": 1, "b": 2}) + ``` + """ + + # TODO(felixweinberger): Expand to support all transport types (like FastMCP 2): + # - Add ClientTransport base class with connect_session() method + # - Add StreamableHttpTransport, SSETransport, StdioTransport + # - Add infer_transport() to auto-detect transport from input type + # - Accept URL strings, Path objects, config dicts in constructor + # - Add auth support (OAuth, bearer tokens) + + def __init__( + self, + server: Server[Any] | FastMCP, + *, + # TODO(Marcelo): When do `raise_exceptions=True` actually raises? + raise_exceptions: bool = False, + read_timeout_seconds: float | None = None, + sampling_callback: SamplingFnT | None = None, + list_roots_callback: ListRootsFnT | None = None, + logging_callback: LoggingFnT | None = None, + message_handler: MessageHandlerFnT | None = None, + client_info: types.Implementation | None = None, + elicitation_callback: ElicitationFnT | None = None, + ) -> None: + """Initialize the client with a server. 
+ + Args: + server: The MCP server to connect to (Server or FastMCP instance) + raise_exceptions: Whether to raise exceptions from the server + read_timeout_seconds: Timeout for read operations + sampling_callback: Callback for handling sampling requests + list_roots_callback: Callback for handling list roots requests + logging_callback: Callback for handling logging notifications + message_handler: Callback for handling raw messages + client_info: Client implementation info to send to server + elicitation_callback: Callback for handling elicitation requests + """ + self._server = server + self._raise_exceptions = raise_exceptions + self._read_timeout_seconds = read_timeout_seconds + self._sampling_callback = sampling_callback + self._list_roots_callback = list_roots_callback + self._logging_callback = logging_callback + self._message_handler = message_handler + self._client_info = client_info + self._elicitation_callback = elicitation_callback + + self._session: ClientSession | None = None + self._exit_stack: AsyncExitStack | None = None + + async def __aenter__(self) -> Client: + """Enter the async context manager.""" + if self._session is not None: + raise RuntimeError("Client is already entered; cannot reenter") + + async with AsyncExitStack() as exit_stack: + # Create transport and connect + transport = InMemoryTransport(self._server, raise_exceptions=self._raise_exceptions) + read_stream, write_stream = await exit_stack.enter_async_context(transport.connect()) + + # Create session + self._session = await exit_stack.enter_async_context( + ClientSession( + read_stream=read_stream, + write_stream=write_stream, + read_timeout_seconds=self._read_timeout_seconds, + sampling_callback=self._sampling_callback, + list_roots_callback=self._list_roots_callback, + logging_callback=self._logging_callback, + message_handler=self._message_handler, + client_info=self._client_info, + elicitation_callback=self._elicitation_callback, + ) + ) + + # Initialize the session + await self._session.initialize() + + # Transfer ownership to self for __aexit__ to handle + self._exit_stack = exit_stack.pop_all() + return self + + async def __aexit__(self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: Any) -> None: + """Exit the async context manager.""" + if self._exit_stack: # pragma: no branch + await self._exit_stack.__aexit__(exc_type, exc_val, exc_tb) + self._session = None + + @property + def session(self) -> ClientSession: + """Get the underlying ClientSession. + + This provides access to the full ClientSession API for advanced use cases. + + Raises: + RuntimeError: If accessed before entering the context manager. 
+ """ + if self._session is None: + raise RuntimeError("Client must be used within an async context manager") + return self._session + + @property + def server_capabilities(self) -> types.ServerCapabilities | None: + """The server capabilities received during initialization, or None if not yet initialized.""" + return self.session.get_server_capabilities() + + async def send_ping(self) -> types.EmptyResult: + """Send a ping request to the server.""" + return await self.session.send_ping() + + async def send_progress_notification( + self, + progress_token: str | int, + progress: float, + total: float | None = None, + message: str | None = None, + ) -> None: + """Send a progress notification to the server.""" + await self.session.send_progress_notification( + progress_token=progress_token, + progress=progress, + total=total, + message=message, + ) + + async def set_logging_level(self, level: types.LoggingLevel) -> types.EmptyResult: + """Set the logging level on the server.""" + return await self.session.set_logging_level(level) + + async def list_resources(self, *, cursor: str | None = None) -> types.ListResourcesResult: + """List available resources from the server.""" + return await self.session.list_resources(params=types.PaginatedRequestParams(cursor=cursor)) + + async def list_resource_templates(self, *, cursor: str | None = None) -> types.ListResourceTemplatesResult: + """List available resource templates from the server.""" + return await self.session.list_resource_templates(params=types.PaginatedRequestParams(cursor=cursor)) + + async def read_resource(self, uri: str | AnyUrl) -> types.ReadResourceResult: + """Read a resource from the server. + + Args: + uri: The URI of the resource to read. + + Returns: + The resource content. + """ + return await self.session.read_resource(uri) + + async def subscribe_resource(self, uri: str | AnyUrl) -> types.EmptyResult: + """Subscribe to resource updates.""" + return await self.session.subscribe_resource(uri) + + async def unsubscribe_resource(self, uri: str | AnyUrl) -> types.EmptyResult: + """Unsubscribe from resource updates.""" + return await self.session.unsubscribe_resource(uri) + + async def call_tool( + self, + name: str, + arguments: dict[str, Any] | None = None, + read_timeout_seconds: float | None = None, + progress_callback: ProgressFnT | None = None, + *, + meta: dict[str, Any] | None = None, + ) -> types.CallToolResult: + """Call a tool on the server. + + Args: + name: The name of the tool to call + arguments: Arguments to pass to the tool + read_timeout_seconds: Timeout for the tool call + progress_callback: Callback for progress updates + meta: Additional metadata for the request + + Returns: + The tool result + """ + return await self.session.call_tool( + name=name, + arguments=arguments, + read_timeout_seconds=read_timeout_seconds, + progress_callback=progress_callback, + meta=meta, + ) + + async def list_prompts(self, *, cursor: str | None = None) -> types.ListPromptsResult: + """List available prompts from the server.""" + return await self.session.list_prompts(params=types.PaginatedRequestParams(cursor=cursor)) + + async def get_prompt(self, name: str, arguments: dict[str, str] | None = None) -> types.GetPromptResult: + """Get a prompt from the server. + + Args: + name: The name of the prompt + arguments: Arguments to pass to the prompt + + Returns: + The prompt content. 
+ """ + return await self.session.get_prompt(name=name, arguments=arguments) + + async def complete( + self, + ref: types.ResourceTemplateReference | types.PromptReference, + argument: dict[str, str], + context_arguments: dict[str, str] | None = None, + ) -> types.CompleteResult: + """Get completions for a prompt or resource template argument. + + Args: + ref: Reference to the prompt or resource template + argument: The argument to complete + context_arguments: Additional context arguments + + Returns: + Completion suggestions. + """ + return await self.session.complete(ref=ref, argument=argument, context_arguments=context_arguments) + + async def list_tools(self, *, cursor: str | None = None) -> types.ListToolsResult: + """List available tools from the server.""" + return await self.session.list_tools(params=types.PaginatedRequestParams(cursor=cursor)) + + async def send_roots_list_changed(self) -> None: + """Send a notification that the roots list has changed.""" + # TODO(Marcelo): Currently, there is no way for the server to handle this. We should add support. + await self.session.send_roots_list_changed() # pragma: no cover diff --git a/src/mcp/client/experimental/__init__.py b/src/mcp/client/experimental/__init__.py index b6579b191e..8d74cb3044 100644 --- a/src/mcp/client/experimental/__init__.py +++ b/src/mcp/client/experimental/__init__.py @@ -1,5 +1,4 @@ -""" -Experimental client features. +"""Experimental client features. WARNING: These APIs are experimental and may change without notice. """ diff --git a/src/mcp/client/experimental/task_handlers.py b/src/mcp/client/experimental/task_handlers.py index a47508674b..d6cde09faf 100644 --- a/src/mcp/client/experimental/task_handlers.py +++ b/src/mcp/client/experimental/task_handlers.py @@ -1,5 +1,4 @@ -""" -Experimental task handler protocols for server -> client requests. +"""Experimental task handler protocols for server -> client requests. This module provides Protocol types and default handlers for when servers send task-related requests to clients (the reverse of normal client -> server flow). 
@@ -225,7 +224,7 @@ def build_capability(self) -> types.ClientTasksCapability | None: requests_capability: types.ClientTasksRequestsCapability | None = None if has_sampling or has_elicitation: requests_capability = types.ClientTasksRequestsCapability( - sampling=types.TasksSamplingCapability(createMessage=types.TasksCreateMessageCapability()) + sampling=types.TasksSamplingCapability(create_message=types.TasksCreateMessageCapability()) if has_sampling else None, elicitation=types.TasksElicitationCapability(create=types.TasksCreateElicitationCapability()) @@ -243,7 +242,7 @@ def build_capability(self) -> types.ClientTasksCapability | None: def handles_request(request: types.ServerRequest) -> bool: """Check if this handler handles the given request type.""" return isinstance( - request.root, + request, types.GetTaskRequest | types.GetTaskPayloadRequest | types.ListTasksRequest | types.CancelTaskRequest, ) @@ -260,7 +259,7 @@ async def handle_request( types.ClientResult | types.ErrorData ) - match responder.request.root: + match responder.request: case types.GetTaskRequest(params=params): response = await self.get_task(ctx, params) client_response = client_response_type.validate_python(response) @@ -282,7 +281,7 @@ async def handle_request( await responder.respond(client_response) case _: # pragma: no cover - raise ValueError(f"Unhandled request type: {type(responder.request.root)}") + raise ValueError(f"Unhandled request type: {type(responder.request)}") # Backwards compatibility aliases diff --git a/src/mcp/client/experimental/tasks.py b/src/mcp/client/experimental/tasks.py index ce9c387462..2f890245c4 100644 --- a/src/mcp/client/experimental/tasks.py +++ b/src/mcp/client/experimental/tasks.py @@ -1,5 +1,4 @@ -""" -Experimental client-side task support. +"""Experimental client-side task support. This module provides client methods for interacting with MCP tasks. @@ -8,7 +7,7 @@ Example: # Call a tool as a task result = await session.experimental.call_tool_as_task("tool_name", {"arg": "value"}) - task_id = result.task.taskId + task_id = result.task.task_id # Get task status status = await session.experimental.get_task(task_id) @@ -37,8 +36,7 @@ class ExperimentalClientFeatures: - """ - Experimental client features for tasks and other experimental APIs. + """Experimental client features for tasks and other experimental APIs. WARNING: These APIs are experimental and may change without notice. @@ -77,7 +75,7 @@ async def call_tool_as_task( result = await session.experimental.call_tool_as_task( "long_running_tool", {"input": "data"} ) - task_id = result.task.taskId + task_id = result.task.task_id # Poll for completion while True: @@ -94,22 +92,19 @@ async def call_tool_as_task( _meta = types.RequestParams.Meta(**meta) return await self._session.send_request( - types.ClientRequest( - types.CallToolRequest( - params=types.CallToolRequestParams( - name=name, - arguments=arguments, - task=types.TaskMetadata(ttl=ttl), - _meta=_meta, - ), - ) + types.CallToolRequest( + params=types.CallToolRequestParams( + name=name, + arguments=arguments, + task=types.TaskMetadata(ttl=ttl), + _meta=_meta, + ), ), types.CreateTaskResult, ) async def get_task(self, task_id: str) -> types.GetTaskResult: - """ - Get the current status of a task. + """Get the current status of a task. 
Args: task_id: The task identifier @@ -118,10 +113,8 @@ async def get_task(self, task_id: str) -> types.GetTaskResult: GetTaskResult containing the task status and metadata """ return await self._session.send_request( - types.ClientRequest( - types.GetTaskRequest( - params=types.GetTaskRequestParams(taskId=task_id), - ) + types.GetTaskRequest( + params=types.GetTaskRequestParams(task_id=task_id), ), types.GetTaskResult, ) @@ -131,8 +124,7 @@ async def get_task_result( task_id: str, result_type: type[ResultT], ) -> ResultT: - """ - Get the result of a completed task. + """Get the result of a completed task. The result type depends on the original request type: - tools/call tasks return CallToolResult @@ -146,10 +138,8 @@ async def get_task_result( The task result, validated against result_type """ return await self._session.send_request( - types.ClientRequest( - types.GetTaskPayloadRequest( - params=types.GetTaskPayloadRequestParams(taskId=task_id), - ) + types.GetTaskPayloadRequest( + params=types.GetTaskPayloadRequestParams(task_id=task_id), ), result_type, ) @@ -158,8 +148,7 @@ async def list_tasks( self, cursor: str | None = None, ) -> types.ListTasksResult: - """ - List all tasks. + """List all tasks. Args: cursor: Optional pagination cursor @@ -169,15 +158,12 @@ async def list_tasks( """ params = types.PaginatedRequestParams(cursor=cursor) if cursor else None return await self._session.send_request( - types.ClientRequest( - types.ListTasksRequest(params=params), - ), + types.ListTasksRequest(params=params), types.ListTasksResult, ) async def cancel_task(self, task_id: str) -> types.CancelTaskResult: - """ - Cancel a running task. + """Cancel a running task. Args: task_id: The task identifier @@ -186,17 +172,14 @@ async def cancel_task(self, task_id: str) -> types.CancelTaskResult: CancelTaskResult with the updated task state """ return await self._session.send_request( - types.ClientRequest( - types.CancelTaskRequest( - params=types.CancelTaskRequestParams(taskId=task_id), - ) + types.CancelTaskRequest( + params=types.CancelTaskRequestParams(task_id=task_id), ), types.CancelTaskResult, ) async def poll_task(self, task_id: str) -> AsyncIterator[types.GetTaskResult]: - """ - Poll a task until it reaches a terminal status. + """Poll a task until it reaches a terminal status. Yields GetTaskResult for each poll, allowing the caller to react to status changes (e.g., handle input_required). 
Exits when task reaches diff --git a/src/mcp/client/session.py b/src/mcp/client/session.py index 8519f15cec..7151d57cd6 100644 --- a/src/mcp/client/session.py +++ b/src/mcp/client/session.py @@ -1,11 +1,9 @@ import logging -from datetime import timedelta -from typing import Any, Protocol, overload +from typing import Any, Protocol import anyio.lowlevel from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream from pydantic import AnyUrl, TypeAdapter -from typing_extensions import deprecated import mcp.types as types from mcp.client.experimental import ExperimentalClientFeatures @@ -113,7 +111,7 @@ def __init__( self, read_stream: MemoryObjectReceiveStream[SessionMessage | Exception], write_stream: MemoryObjectSendStream[SessionMessage], - read_timeout_seconds: timedelta | None = None, + read_timeout_seconds: float | None = None, sampling_callback: SamplingFnT | None = None, elicitation_callback: ElicitationFnT | None = None, list_roots_callback: ListRootsFnT | None = None, @@ -124,13 +122,7 @@ def __init__( sampling_capabilities: types.SamplingCapability | None = None, experimental_task_handlers: ExperimentalTaskHandlers | None = None, ) -> None: - super().__init__( - read_stream, - write_stream, - types.ServerRequest, - types.ServerNotification, - read_timeout_seconds=read_timeout_seconds, - ) + super().__init__(read_stream, write_stream, read_timeout_seconds=read_timeout_seconds) self._client_info = client_info or DEFAULT_CLIENT_INFO self._sampling_callback = sampling_callback or _default_sampling_callback self._sampling_capabilities = sampling_capabilities @@ -145,6 +137,14 @@ def __init__( # Experimental: Task handlers (use defaults if not provided) self._task_handlers = experimental_task_handlers or ExperimentalTaskHandlers() + @property + def _receive_request_adapter(self) -> TypeAdapter[types.ServerRequest]: + return types.server_request_adapter + + @property + def _receive_notification_adapter(self) -> TypeAdapter[types.ServerNotification]: + return types.server_notification_adapter + async def initialize(self) -> types.InitializeResult: sampling = ( (self._sampling_capabilities or types.SamplingCapability()) @@ -163,36 +163,34 @@ async def initialize(self) -> types.InitializeResult: # TODO: Should this be based on whether we # _will_ send notifications, or only whether # they're supported? 
- types.RootsCapability(listChanged=True) + types.RootsCapability(list_changed=True) if self._list_roots_callback is not _default_list_roots_callback else None ) result = await self.send_request( - types.ClientRequest( - types.InitializeRequest( - params=types.InitializeRequestParams( - protocolVersion=types.LATEST_PROTOCOL_VERSION, - capabilities=types.ClientCapabilities( - sampling=sampling, - elicitation=elicitation, - experimental=None, - roots=roots, - tasks=self._task_handlers.build_capability(), - ), - clientInfo=self._client_info, + types.InitializeRequest( + params=types.InitializeRequestParams( + protocol_version=types.LATEST_PROTOCOL_VERSION, + capabilities=types.ClientCapabilities( + sampling=sampling, + elicitation=elicitation, + experimental=None, + roots=roots, + tasks=self._task_handlers.build_capability(), ), - ) + client_info=self._client_info, + ), ), types.InitializeResult, ) - if result.protocolVersion not in SUPPORTED_PROTOCOL_VERSIONS: - raise RuntimeError(f"Unsupported protocol version from the server: {result.protocolVersion}") + if result.protocol_version not in SUPPORTED_PROTOCOL_VERSIONS: + raise RuntimeError(f"Unsupported protocol version from the server: {result.protocol_version}") self._server_capabilities = result.capabilities - await self.send_notification(types.ClientNotification(types.InitializedNotification())) + await self.send_notification(types.InitializedNotification()) return result @@ -207,7 +205,8 @@ def get_server_capabilities(self) -> types.ServerCapabilities | None: def experimental(self) -> ExperimentalClientFeatures: """Experimental APIs for tasks and other features. - WARNING: These APIs are experimental and may change without notice. + !!! warning + These APIs are experimental and may change without notice. Example: status = await session.experimental.get_task(task_id) @@ -219,10 +218,7 @@ def experimental(self) -> ExperimentalClientFeatures: async def send_ping(self) -> types.EmptyResult: """Send a ping request.""" - return await self.send_request( - types.ClientRequest(types.PingRequest()), - types.EmptyResult, - ) + return await self.send_request(types.PingRequest(), types.EmptyResult) async def send_progress_notification( self, @@ -233,14 +229,12 @@ async def send_progress_notification( ) -> None: """Send a progress notification.""" await self.send_notification( - types.ClientNotification( - types.ProgressNotification( - params=types.ProgressNotificationParams( - progressToken=progress_token, - progress=progress, - total=total, - message=message, - ), + types.ProgressNotification( + params=types.ProgressNotificationParams( + progress_token=progress_token, + progress=progress, + total=total, + message=message, ), ) ) @@ -248,120 +242,49 @@ async def send_progress_notification( async def set_logging_level(self, level: types.LoggingLevel) -> types.EmptyResult: """Send a logging/setLevel request.""" return await self.send_request( # pragma: no cover - types.ClientRequest( - types.SetLevelRequest( - params=types.SetLevelRequestParams(level=level), - ) - ), + types.SetLevelRequest(params=types.SetLevelRequestParams(level=level)), types.EmptyResult, ) - @overload - @deprecated("Use list_resources(params=PaginatedRequestParams(...)) instead") - async def list_resources(self, cursor: str | None) -> types.ListResourcesResult: ... - - @overload - async def list_resources(self, *, params: types.PaginatedRequestParams | None) -> types.ListResourcesResult: ... - - @overload - async def list_resources(self) -> types.ListResourcesResult: ... 
- - async def list_resources( - self, - cursor: str | None = None, - *, - params: types.PaginatedRequestParams | None = None, - ) -> types.ListResourcesResult: + async def list_resources(self, *, params: types.PaginatedRequestParams | None = None) -> types.ListResourcesResult: """Send a resources/list request. Args: - cursor: Simple cursor string for pagination (deprecated, use params instead) params: Full pagination parameters including cursor and any future fields """ - if params is not None and cursor is not None: - raise ValueError("Cannot specify both cursor and params") - - if params is not None: - request_params = params - elif cursor is not None: - request_params = types.PaginatedRequestParams(cursor=cursor) - else: - request_params = None - - return await self.send_request( - types.ClientRequest(types.ListResourcesRequest(params=request_params)), - types.ListResourcesResult, - ) - - @overload - @deprecated("Use list_resource_templates(params=PaginatedRequestParams(...)) instead") - async def list_resource_templates(self, cursor: str | None) -> types.ListResourceTemplatesResult: ... - - @overload - async def list_resource_templates( - self, *, params: types.PaginatedRequestParams | None - ) -> types.ListResourceTemplatesResult: ... - - @overload - async def list_resource_templates(self) -> types.ListResourceTemplatesResult: ... + return await self.send_request(types.ListResourcesRequest(params=params), types.ListResourcesResult) async def list_resource_templates( - self, - cursor: str | None = None, - *, - params: types.PaginatedRequestParams | None = None, + self, *, params: types.PaginatedRequestParams | None = None ) -> types.ListResourceTemplatesResult: """Send a resources/templates/list request. Args: - cursor: Simple cursor string for pagination (deprecated, use params instead) params: Full pagination parameters including cursor and any future fields """ - if params is not None and cursor is not None: - raise ValueError("Cannot specify both cursor and params") - - if params is not None: - request_params = params - elif cursor is not None: - request_params = types.PaginatedRequestParams(cursor=cursor) - else: - request_params = None - return await self.send_request( - types.ClientRequest(types.ListResourceTemplatesRequest(params=request_params)), + types.ListResourceTemplatesRequest(params=params), types.ListResourceTemplatesResult, ) - async def read_resource(self, uri: AnyUrl) -> types.ReadResourceResult: + async def read_resource(self, uri: str | AnyUrl) -> types.ReadResourceResult: """Send a resources/read request.""" return await self.send_request( - types.ClientRequest( - types.ReadResourceRequest( - params=types.ReadResourceRequestParams(uri=uri), - ) - ), + types.ReadResourceRequest(params=types.ReadResourceRequestParams(uri=str(uri))), types.ReadResourceResult, ) - async def subscribe_resource(self, uri: AnyUrl) -> types.EmptyResult: + async def subscribe_resource(self, uri: str | AnyUrl) -> types.EmptyResult: """Send a resources/subscribe request.""" return await self.send_request( # pragma: no cover - types.ClientRequest( - types.SubscribeRequest( - params=types.SubscribeRequestParams(uri=uri), - ) - ), + types.SubscribeRequest(params=types.SubscribeRequestParams(uri=str(uri))), types.EmptyResult, ) - async def unsubscribe_resource(self, uri: AnyUrl) -> types.EmptyResult: + async def unsubscribe_resource(self, uri: str | AnyUrl) -> types.EmptyResult: """Send a resources/unsubscribe request.""" return await self.send_request( # pragma: no cover - types.ClientRequest( 
- types.UnsubscribeRequest( - params=types.UnsubscribeRequestParams(uri=uri), - ) - ), + types.UnsubscribeRequest(params=types.UnsubscribeRequestParams(uri=str(uri))), types.EmptyResult, ) @@ -369,7 +292,7 @@ async def call_tool( self, name: str, arguments: dict[str, Any] | None = None, - read_timeout_seconds: timedelta | None = None, + read_timeout_seconds: float | None = None, progress_callback: ProgressFnT | None = None, *, meta: dict[str, Any] | None = None, @@ -381,17 +304,15 @@ async def call_tool( _meta = types.RequestParams.Meta(**meta) result = await self.send_request( - types.ClientRequest( - types.CallToolRequest( - params=types.CallToolRequestParams(name=name, arguments=arguments, _meta=_meta), - ) + types.CallToolRequest( + params=types.CallToolRequestParams(name=name, arguments=arguments, _meta=_meta), ), types.CallToolResult, request_read_timeout_seconds=read_timeout_seconds, progress_callback=progress_callback, ) - if not result.isError: + if not result.is_error: await self._validate_tool_result(name, result) return result @@ -411,61 +332,30 @@ async def _validate_tool_result(self, name: str, result: types.CallToolResult) - if output_schema is not None: from jsonschema import SchemaError, ValidationError, validate - if result.structuredContent is None: + if result.structured_content is None: raise RuntimeError( f"Tool {name} has an output schema but did not return structured content" ) # pragma: no cover try: - validate(result.structuredContent, output_schema) + validate(result.structured_content, output_schema) except ValidationError as e: raise RuntimeError(f"Invalid structured content returned by tool {name}: {e}") # pragma: no cover except SchemaError as e: # pragma: no cover raise RuntimeError(f"Invalid schema for tool {name}: {e}") # pragma: no cover - @overload - @deprecated("Use list_prompts(params=PaginatedRequestParams(...)) instead") - async def list_prompts(self, cursor: str | None) -> types.ListPromptsResult: ... - - @overload - async def list_prompts(self, *, params: types.PaginatedRequestParams | None) -> types.ListPromptsResult: ... - - @overload - async def list_prompts(self) -> types.ListPromptsResult: ... - - async def list_prompts( - self, - cursor: str | None = None, - *, - params: types.PaginatedRequestParams | None = None, - ) -> types.ListPromptsResult: + async def list_prompts(self, *, params: types.PaginatedRequestParams | None = None) -> types.ListPromptsResult: """Send a prompts/list request. 
Args: - cursor: Simple cursor string for pagination (deprecated, use params instead) params: Full pagination parameters including cursor and any future fields """ - if params is not None and cursor is not None: - raise ValueError("Cannot specify both cursor and params") - - if params is not None: - request_params = params - elif cursor is not None: - request_params = types.PaginatedRequestParams(cursor=cursor) - else: - request_params = None - - return await self.send_request( - types.ClientRequest(types.ListPromptsRequest(params=request_params)), - types.ListPromptsResult, - ) + return await self.send_request(types.ListPromptsRequest(params=params), types.ListPromptsResult) async def get_prompt(self, name: str, arguments: dict[str, str] | None = None) -> types.GetPromptResult: """Send a prompts/get request.""" return await self.send_request( - types.ClientRequest( - types.GetPromptRequest( - params=types.GetPromptRequestParams(name=name, arguments=arguments), - ) + types.GetPromptRequest( + params=types.GetPromptRequestParams(name=name, arguments=arguments), ), types.GetPromptResult, ) @@ -482,65 +372,37 @@ async def complete( context = types.CompletionContext(arguments=context_arguments) return await self.send_request( - types.ClientRequest( - types.CompleteRequest( - params=types.CompleteRequestParams( - ref=ref, - argument=types.CompletionArgument(**argument), - context=context, - ), - ) + types.CompleteRequest( + params=types.CompleteRequestParams( + ref=ref, + argument=types.CompletionArgument(**argument), + context=context, + ), ), types.CompleteResult, ) - @overload - @deprecated("Use list_tools(params=PaginatedRequestParams(...)) instead") - async def list_tools(self, cursor: str | None) -> types.ListToolsResult: ... - - @overload - async def list_tools(self, *, params: types.PaginatedRequestParams | None) -> types.ListToolsResult: ... - - @overload - async def list_tools(self) -> types.ListToolsResult: ... - - async def list_tools( - self, - cursor: str | None = None, - *, - params: types.PaginatedRequestParams | None = None, - ) -> types.ListToolsResult: + async def list_tools(self, *, params: types.PaginatedRequestParams | None = None) -> types.ListToolsResult: """Send a tools/list request. 
Args: - cursor: Simple cursor string for pagination (deprecated, use params instead) params: Full pagination parameters including cursor and any future fields """ - if params is not None and cursor is not None: - raise ValueError("Cannot specify both cursor and params") - - if params is not None: - request_params = params - elif cursor is not None: - request_params = types.PaginatedRequestParams(cursor=cursor) - else: - request_params = None - result = await self.send_request( - types.ClientRequest(types.ListToolsRequest(params=request_params)), + types.ListToolsRequest(params=params), types.ListToolsResult, ) # Cache tool output schemas for future validation # Note: don't clear the cache, as we may be using a cursor for tool in result.tools: - self._tool_output_schemas[tool.name] = tool.outputSchema + self._tool_output_schemas[tool.name] = tool.output_schema return result async def send_roots_list_changed(self) -> None: # pragma: no cover """Send a roots/list_changed notification.""" - await self.send_notification(types.ClientNotification(types.RootsListChangedNotification())) + await self.send_notification(types.RootsListChangedNotification()) async def _received_request(self, responder: RequestResponder[types.ServerRequest, types.ClientResult]) -> None: ctx = RequestContext[ClientSession, Any]( @@ -557,7 +419,7 @@ async def _received_request(self, responder: RequestResponder[types.ServerReques return None # Core request handling - match responder.request.root: + match responder.request: case types.CreateMessageRequest(params=params): with responder: # Check if this is a task-augmented request @@ -586,7 +448,7 @@ async def _received_request(self, responder: RequestResponder[types.ServerReques case types.PingRequest(): # pragma: no cover with responder: - return await responder.respond(types.ClientResult(root=types.EmptyResult())) + return await responder.respond(types.EmptyResult()) case _: # pragma: no cover pass # Task requests handled above by _task_handlers @@ -603,7 +465,7 @@ async def _handle_incoming( async def _received_notification(self, notification: types.ServerNotification) -> None: """Handle notifications from the server.""" # Process specific notification types - match notification.root: + match notification: case types.LoggingMessageNotification(params=params): await self._logging_callback(params) case types.ElicitCompleteNotification(params=params): diff --git a/src/mcp/client/session_group.py b/src/mcp/client/session_group.py index f82677d27c..31b9d475d6 100644 --- a/src/mcp/client/session_group.py +++ b/src/mcp/client/session_group.py @@ -1,25 +1,22 @@ -""" -SessionGroup concurrently manages multiple MCP session connections. +"""SessionGroup concurrently manages multiple MCP session connections. Tools, resources, and prompts are aggregated across servers. Servers may be connected to or disconnected from at any point after initialization. -This abstractions can handle naming collisions using a custom user-provided -hook. +This abstractions can handle naming collisions using a custom user-provided hook. 
""" import contextlib import logging from collections.abc import Callable from dataclasses import dataclass -from datetime import timedelta from types import TracebackType -from typing import Any, TypeAlias, overload +from typing import Any, TypeAlias import anyio import httpx from pydantic import BaseModel -from typing_extensions import Self, deprecated +from typing_extensions import Self import mcp from mcp import types @@ -41,11 +38,11 @@ class SseServerParameters(BaseModel): # Optional headers to include in requests. headers: dict[str, Any] | None = None - # HTTP timeout for regular operations. - timeout: float = 5 + # HTTP timeout for regular operations (in seconds). + timeout: float = 5.0 - # Timeout for SSE read operations. - sse_read_timeout: float = 60 * 5 + # Timeout for SSE read operations (in seconds). + sse_read_timeout: float = 300.0 class StreamableHttpParameters(BaseModel): @@ -57,11 +54,11 @@ class StreamableHttpParameters(BaseModel): # Optional headers to include in requests. headers: dict[str, Any] | None = None - # HTTP timeout for regular operations. - timeout: timedelta = timedelta(seconds=30) + # HTTP timeout for regular operations (in seconds). + timeout: float = 30.0 - # Timeout for SSE read operations. - sse_read_timeout: timedelta = timedelta(seconds=60 * 5) + # Timeout for SSE read operations (in seconds). + sse_read_timeout: float = 300.0 # Close the client session when the transport closes. terminate_on_close: bool = True @@ -76,7 +73,7 @@ class StreamableHttpParameters(BaseModel): class ClientSessionParameters: """Parameters for establishing a client session to an MCP server.""" - read_timeout_seconds: timedelta | None = None + read_timeout_seconds: float | None = None sampling_callback: SamplingFnT | None = None elicitation_callback: ElicitationFnT | None = None list_roots_callback: ListRootsFnT | None = None @@ -121,9 +118,9 @@ class _ComponentNames(BaseModel): _exit_stack: contextlib.AsyncExitStack _session_exit_stacks: dict[mcp.ClientSession, contextlib.AsyncExitStack] - # Optional fn consuming (component_name, serverInfo) for custom names. + # Optional fn consuming (component_name, server_info) for custom names. # This is provide a means to mitigate naming conflicts across servers. - # Example: (tool_name, serverInfo) => "{result.serverInfo.name}.{tool_name}" + # Example: (tool_name, server_info) => "{result.server_info.name}.{tool_name}" _ComponentNameHook: TypeAlias = Callable[[str, types.Implementation], str] _component_name_hook: _ComponentNameHook | None @@ -192,45 +189,21 @@ def tools(self) -> dict[str, types.Tool]: """Returns the tools as a dictionary of names to tools.""" return self._tools - @overload - async def call_tool( - self, - name: str, - arguments: dict[str, Any], - read_timeout_seconds: timedelta | None = None, - progress_callback: ProgressFnT | None = None, - *, - meta: dict[str, Any] | None = None, - ) -> types.CallToolResult: ... - - @overload - @deprecated("The 'args' parameter is deprecated. Use 'arguments' instead.") - async def call_tool( - self, - name: str, - *, - args: dict[str, Any], - read_timeout_seconds: timedelta | None = None, - progress_callback: ProgressFnT | None = None, - meta: dict[str, Any] | None = None, - ) -> types.CallToolResult: ... 
- async def call_tool( self, name: str, arguments: dict[str, Any] | None = None, - read_timeout_seconds: timedelta | None = None, + read_timeout_seconds: float | None = None, progress_callback: ProgressFnT | None = None, *, meta: dict[str, Any] | None = None, - args: dict[str, Any] | None = None, ) -> types.CallToolResult: """Executes a tool given its name and arguments.""" session = self._tool_to_session[name] session_tool_name = self.tools[name].name return await session.call_tool( session_tool_name, - arguments if args is None else args, + arguments=arguments, read_timeout_seconds=read_timeout_seconds, progress_callback=progress_callback, meta=meta, @@ -314,8 +287,8 @@ async def _establish_session( httpx_client = create_mcp_http_client( headers=server_params.headers, timeout=httpx.Timeout( - server_params.timeout.total_seconds(), - read=server_params.sse_read_timeout.total_seconds(), + server_params.timeout, + read=server_params.sse_read_timeout, ), ) await session_stack.enter_async_context(httpx_client) @@ -350,7 +323,7 @@ async def _establish_session( # main _exit_stack. await self._exit_stack.enter_async_context(session_stack) - return result.serverInfo, session + return result.server_info, session except Exception: # pragma: no cover # If anything during this setup fails, ensure the session-specific # stack is closed. diff --git a/src/mcp/client/sse.py b/src/mcp/client/sse.py index b2ac67744e..47e5b845a9 100644 --- a/src/mcp/client/sse.py +++ b/src/mcp/client/sse.py @@ -31,14 +31,13 @@ def _extract_session_id_from_endpoint(endpoint_url: str) -> str | None: async def sse_client( url: str, headers: dict[str, Any] | None = None, - timeout: float = 5, - sse_read_timeout: float = 60 * 5, + timeout: float = 5.0, + sse_read_timeout: float = 300.0, httpx_client_factory: McpHttpClientFactory = create_mcp_http_client, auth: httpx.Auth | None = None, on_session_created: Callable[[str], None] | None = None, ): - """ - Client transport for SSE. + """Client transport for SSE. `sse_read_timeout` determines how long (in seconds) the client will wait for a new event before disconnecting. All other HTTP operations are controlled by `timeout`. @@ -46,8 +45,8 @@ async def sse_client( Args: url: The SSE endpoint URL. headers: Optional headers to include in requests. - timeout: HTTP timeout for regular operations. - sse_read_timeout: Timeout for SSE read operations. + timeout: HTTP timeout for regular operations (in seconds). + sse_read_timeout: Timeout for SSE read operations (in seconds). auth: Optional HTTPX authentication handler. on_session_created: Optional callback invoked with the session ID when received. 
""" @@ -74,9 +73,7 @@ async def sse_client( event_source.response.raise_for_status() logger.debug("SSE connection established") - async def sse_reader( - task_status: TaskStatus[str] = anyio.TASK_STATUS_IGNORED, - ): + async def sse_reader(task_status: TaskStatus[str] = anyio.TASK_STATUS_IGNORED): try: async for sse in event_source.aiter_sse(): # pragma: no branch logger.debug(f"Received SSE event: {sse.event}") @@ -109,8 +106,8 @@ async def sse_reader( if not sse.data: continue try: - message = types.JSONRPCMessage.model_validate_json( # noqa: E501 - sse.data + message = types.jsonrpc_message_adapter.validate_json( + sse.data, by_name=False ) logger.debug(f"Received server message: {message}") except Exception as exc: # pragma: no cover diff --git a/src/mcp/client/stdio/__init__.py b/src/mcp/client/stdio/__init__.py index 0d76bb958b..19fdec5a38 100644 --- a/src/mcp/client/stdio/__init__.py +++ b/src/mcp/client/stdio/__init__.py @@ -49,8 +49,7 @@ def get_default_environment() -> dict[str, str]: - """ - Returns a default environment object including only environment variables deemed + """Returns a default environment object including only environment variables deemed safe to inherit. """ env: dict[str, str] = {} @@ -104,8 +103,7 @@ class StdioServerParameters(BaseModel): @asynccontextmanager async def stdio_client(server: StdioServerParameters, errlog: TextIO = sys.stderr): - """ - Client transport for stdio: this will connect to a server by spawning a + """Client transport for stdio: this will connect to a server by spawning a process and communicating with it over stdin/stdout. """ read_stream: MemoryObjectReceiveStream[SessionMessage | Exception] @@ -152,7 +150,7 @@ async def stdout_reader(): for line in lines: try: - message = types.JSONRPCMessage.model_validate_json(line) + message = types.jsonrpc_message_adapter.validate_json(line, by_name=False) except Exception as exc: # pragma: no cover logger.exception("Failed to parse JSONRPC message from server") await read_stream_writer.send(exc) @@ -217,8 +215,7 @@ async def stdin_writer(): def _get_executable_command(command: str) -> str: - """ - Get the correct executable command normalized for the current platform. + """Get the correct executable command normalized for the current platform. Args: command: Base command (e.g., 'uvx', 'npx') @@ -239,8 +236,7 @@ async def _create_platform_compatible_process( errlog: TextIO = sys.stderr, cwd: Path | str | None = None, ): - """ - Creates a subprocess in a platform-compatible way. + """Creates a subprocess in a platform-compatible way. Unix: Creates process in a new session/process group for killpg support Windows: Creates process in a Job Object for reliable child termination @@ -260,8 +256,7 @@ async def _create_platform_compatible_process( async def _terminate_process_tree(process: Process | FallbackProcess, timeout_seconds: float = 2.0) -> None: - """ - Terminate a process and all its children using platform-specific methods. + """Terminate a process and all its children using platform-specific methods. 
Unix: Uses os.killpg() for atomic process group termination Windows: Uses Job Objects via pywin32 for reliable child process cleanup diff --git a/src/mcp/client/streamable_http.py b/src/mcp/client/streamable_http.py index ed28fcc275..555dd1290c 100644 --- a/src/mcp/client/streamable_http.py +++ b/src/mcp/client/streamable_http.py @@ -1,31 +1,20 @@ -""" -StreamableHTTP Client Transport Module +"""Implements StreamableHTTP transport for MCP clients.""" -This module implements the StreamableHTTP transport for MCP clients, -providing support for HTTP POST requests with optional SSE streaming responses -and session management. -""" +from __future__ import annotations as _annotations import contextlib import logging from collections.abc import AsyncGenerator, Awaitable, Callable from contextlib import asynccontextmanager from dataclasses import dataclass -from datetime import timedelta -from typing import Any, overload -from warnings import warn import anyio import httpx from anyio.abc import TaskGroup from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream from httpx_sse import EventSource, ServerSentEvent, aconnect_sse -from typing_extensions import deprecated -from mcp.shared._httpx_utils import ( - McpHttpClientFactory, - create_mcp_http_client, -) +from mcp.shared._httpx_utils import create_mcp_http_client from mcp.shared.message import ClientMessageMetadata, SessionMessage from mcp.types import ( ErrorData, @@ -36,6 +25,7 @@ JSONRPCRequest, JSONRPCResponse, RequestId, + jsonrpc_message_adapter, ) logger = logging.getLogger(__name__) @@ -53,15 +43,6 @@ # Reconnection defaults DEFAULT_RECONNECTION_DELAY_MS = 1000 # 1 second fallback when server doesn't provide retry MAX_RECONNECTION_ATTEMPTS = 2 # Max retry attempts before giving up -CONTENT_TYPE = "content-type" -ACCEPT = "accept" - - -JSON = "application/json" -SSE = "text/event-stream" - -# Sentinel value for detecting unset optional parameters -_UNSET = object() class StreamableHTTPError(Exception): @@ -81,69 +62,20 @@ class RequestContext: session_message: SessionMessage metadata: ClientMessageMetadata | None read_stream_writer: StreamWriter - headers: dict[str, str] | None = None # Deprecated - no longer used - sse_read_timeout: float | None = None # Deprecated - no longer used class StreamableHTTPTransport: """StreamableHTTP client transport implementation.""" - @overload - def __init__(self, url: str) -> None: ... - - @overload - @deprecated( - "Parameters headers, timeout, sse_read_timeout, and auth are deprecated. " - "Configure these on the httpx.AsyncClient instead." - ) - def __init__( - self, - url: str, - headers: dict[str, str] | None = None, - timeout: float | timedelta = 30, - sse_read_timeout: float | timedelta = 60 * 5, - auth: httpx.Auth | None = None, - ) -> None: ... - - def __init__( - self, - url: str, - headers: Any = _UNSET, - timeout: Any = _UNSET, - sse_read_timeout: Any = _UNSET, - auth: Any = _UNSET, - ) -> None: + def __init__(self, url: str) -> None: """Initialize the StreamableHTTP transport. Args: url: The endpoint URL. - headers: Optional headers to include in requests. - timeout: HTTP timeout for regular operations. - sse_read_timeout: Timeout for SSE read operations. - auth: Optional HTTPX authentication handler. 
""" - # Check for deprecated parameters and issue runtime warning - deprecated_params: list[str] = [] - if headers is not _UNSET: - deprecated_params.append("headers") - if timeout is not _UNSET: - deprecated_params.append("timeout") - if sse_read_timeout is not _UNSET: - deprecated_params.append("sse_read_timeout") - if auth is not _UNSET: - deprecated_params.append("auth") - - if deprecated_params: - warn( - f"Parameters {', '.join(deprecated_params)} are deprecated and will be ignored. " - "Configure these on the httpx.AsyncClient instead.", - DeprecationWarning, - stacklevel=2, - ) - self.url = url - self.session_id = None - self.protocol_version = None + self.session_id: str | None = None + self.protocol_version: str | None = None def _prepare_headers(self) -> dict[str, str]: """Build MCP-specific request headers. @@ -151,10 +83,10 @@ def _prepare_headers(self) -> dict[str, str]: These headers will be merged with the httpx.AsyncClient's default headers, with these MCP-specific headers taking precedence. """ - headers: dict[str, str] = {} - # Add MCP protocol headers - headers[ACCEPT] = f"{JSON}, {SSE}" - headers[CONTENT_TYPE] = JSON + headers: dict[str, str] = { + "accept": "application/json, text/event-stream", + "content-type": "application/json", + } # Add session headers if available if self.session_id: headers[MCP_SESSION_ID] = self.session_id @@ -164,38 +96,30 @@ def _prepare_headers(self) -> dict[str, str]: def _is_initialization_request(self, message: JSONRPCMessage) -> bool: """Check if the message is an initialization request.""" - return isinstance(message.root, JSONRPCRequest) and message.root.method == "initialize" + return isinstance(message, JSONRPCRequest) and message.method == "initialize" def _is_initialized_notification(self, message: JSONRPCMessage) -> bool: """Check if the message is an initialized notification.""" - return isinstance(message.root, JSONRPCNotification) and message.root.method == "notifications/initialized" + return isinstance(message, JSONRPCNotification) and message.method == "notifications/initialized" - def _maybe_extract_session_id_from_response( - self, - response: httpx.Response, - ) -> None: + def _maybe_extract_session_id_from_response(self, response: httpx.Response) -> None: """Extract and store session ID from response headers.""" new_session_id = response.headers.get(MCP_SESSION_ID) if new_session_id: self.session_id = new_session_id logger.info(f"Received session ID: {self.session_id}") - def _maybe_extract_protocol_version_from_message( - self, - message: JSONRPCMessage, - ) -> None: + def _maybe_extract_protocol_version_from_message(self, message: JSONRPCMessage) -> None: """Extract protocol version from initialization response message.""" - if isinstance(message.root, JSONRPCResponse) and message.root.result: # pragma: no branch + if isinstance(message, JSONRPCResponse) and message.result: # pragma: no branch try: # Parse the result as InitializeResult for type safety - init_result = InitializeResult.model_validate(message.root.result) - self.protocol_version = str(init_result.protocolVersion) + init_result = InitializeResult.model_validate(message.result, by_name=False) + self.protocol_version = str(init_result.protocol_version) logger.info(f"Negotiated protocol version: {self.protocol_version}") - except Exception as exc: # pragma: no cover - logger.warning( - f"Failed to parse initialization response as InitializeResult: {exc}" - ) # pragma: no cover - logger.warning(f"Raw result: {message.root.result}") + except Exception: # 
pragma: no cover + logger.warning("Failed to parse initialization response as InitializeResult", exc_info=True) + logger.warning(f"Raw result: {message.result}") async def _handle_sse_event( self, @@ -214,7 +138,7 @@ async def _handle_sse_event( await resumption_callback(sse.id) return False try: - message = JSONRPCMessage.model_validate_json(sse.data) + message = jsonrpc_message_adapter.validate_json(sse.data, by_name=False) logger.debug(f"SSE message: {message}") # Extract protocol version from initialization response @@ -222,8 +146,8 @@ async def _handle_sse_event( self._maybe_extract_protocol_version_from_message(message) # If this is a response and we have original_request_id, replace it - if original_request_id is not None and isinstance(message.root, JSONRPCResponse | JSONRPCError): - message.root.id = original_request_id + if original_request_id is not None and isinstance(message, JSONRPCResponse | JSONRPCError): + message.id = original_request_id session_message = SessionMessage(message) await read_stream_writer.send(session_message) @@ -234,7 +158,7 @@ async def _handle_sse_event( # If this is a response or error return True indicating completion # Otherwise, return False to continue listening - return isinstance(message.root, JSONRPCResponse | JSONRPCError) + return isinstance(message, JSONRPCResponse | JSONRPCError) except Exception as exc: # pragma: no cover logger.exception("Error parsing SSE message") @@ -244,11 +168,7 @@ async def _handle_sse_event( logger.warning(f"Unknown SSE event: {sse.event}") return False - async def handle_get_stream( - self, - client: httpx.AsyncClient, - read_stream_writer: StreamWriter, - ) -> None: + async def handle_get_stream(self, client: httpx.AsyncClient, read_stream_writer: StreamWriter) -> None: """Handle GET stream for server-initiated messages with auto-reconnect.""" last_event_id: str | None = None retry_interval_ms: int | None = None @@ -263,12 +183,7 @@ async def handle_get_stream( if last_event_id: headers[LAST_EVENT_ID] = last_event_id # pragma: no cover - async with aconnect_sse( - client, - "GET", - self.url, - headers=headers, - ) as event_source: + async with aconnect_sse(client, "GET", self.url, headers=headers) as event_source: event_source.response.raise_for_status() logger.debug("GET SSE connection established") @@ -308,15 +223,10 @@ async def _handle_resumption_request(self, ctx: RequestContext) -> None: # Extract original request ID to map responses original_request_id = None - if isinstance(ctx.session_message.message.root, JSONRPCRequest): # pragma: no branch - original_request_id = ctx.session_message.message.root.id + if isinstance(ctx.session_message.message, JSONRPCRequest): # pragma: no branch + original_request_id = ctx.session_message.message.id - async with aconnect_sse( - ctx.client, - "GET", - self.url, - headers=headers, - ) as event_source: + async with aconnect_sse(ctx.client, "GET", self.url, headers=headers) as event_source: event_source.response.raise_for_status() logger.debug("Resumption GET SSE connection established") @@ -348,12 +258,9 @@ async def _handle_post_request(self, ctx: RequestContext) -> None: return if response.status_code == 404: # pragma: no branch - if isinstance(message.root, JSONRPCRequest): - await self._send_session_terminated_error( # pragma: no cover - ctx.read_stream_writer, # pragma: no cover - message.root.id, # pragma: no cover - ) # pragma: no cover - return # pragma: no cover + if isinstance(message, JSONRPCRequest): # pragma: no branch + await 
self._send_session_terminated_error(ctx.read_stream_writer, message.id) + return response.raise_for_status() if is_initialization: @@ -361,11 +268,11 @@ async def _handle_post_request(self, ctx: RequestContext) -> None: # Per https://modelcontextprotocol.io/specification/2025-06-18/basic#notifications: # The server MUST NOT send a response to notifications. - if isinstance(message.root, JSONRPCRequest): - content_type = response.headers.get(CONTENT_TYPE, "").lower() - if content_type.startswith(JSON): + if isinstance(message, JSONRPCRequest): + content_type = response.headers.get("content-type", "").lower() + if content_type.startswith("application/json"): await self._handle_json_response(response, ctx.read_stream_writer, is_initialization) - elif content_type.startswith(SSE): + elif content_type.startswith("text/event-stream"): await self._handle_sse_response(response, ctx, is_initialization) else: await self._handle_unexpected_content_type( # pragma: no cover @@ -382,7 +289,7 @@ async def _handle_json_response( """Handle JSON response from the server.""" try: content = await response.aread() - message = JSONRPCMessage.model_validate_json(content) + message = jsonrpc_message_adapter.validate_json(content, by_name=False) # Extract protocol version from initialization response if is_initialization: @@ -456,16 +363,11 @@ async def _handle_reconnection( # Extract original request ID to map responses original_request_id = None - if isinstance(ctx.session_message.message.root, JSONRPCRequest): # pragma: no branch - original_request_id = ctx.session_message.message.root.id + if isinstance(ctx.session_message.message, JSONRPCRequest): # pragma: no branch + original_request_id = ctx.session_message.message.id try: - async with aconnect_sse( - ctx.client, - "GET", - self.url, - headers=headers, - ) as event_source: + async with aconnect_sse(ctx.client, "GET", self.url, headers=headers) as event_source: event_source.response.raise_for_status() logger.info("Reconnected to SSE stream") @@ -498,27 +400,21 @@ async def _handle_reconnection( await self._handle_reconnection(ctx, last_event_id, retry_interval_ms, attempt + 1) async def _handle_unexpected_content_type( - self, - content_type: str, - read_stream_writer: StreamWriter, + self, content_type: str, read_stream_writer: StreamWriter ) -> None: # pragma: no cover """Handle unexpected content type in response.""" error_msg = f"Unexpected content type: {content_type}" # pragma: no cover logger.error(error_msg) # pragma: no cover await read_stream_writer.send(ValueError(error_msg)) # pragma: no cover - async def _send_session_terminated_error( - self, - read_stream_writer: StreamWriter, - request_id: RequestId, - ) -> None: + async def _send_session_terminated_error(self, read_stream_writer: StreamWriter, request_id: RequestId) -> None: """Send a session terminated error response.""" jsonrpc_error = JSONRPCError( jsonrpc="2.0", id=request_id, error=ErrorData(code=32600, message="Session terminated"), ) - session_message = SessionMessage(JSONRPCMessage(jsonrpc_error)) + session_message = SessionMessage(jsonrpc_error) await read_stream_writer.send(session_message) async def post_writer( @@ -565,7 +461,7 @@ async def handle_request_async(): await self._handle_post_request(ctx) # If this is a request, start a new task to handle it - if isinstance(message.root, JSONRPCRequest): + if isinstance(message, JSONRPCRequest): tg.start_soon(handle_request_async) else: await handle_request_async() @@ -611,16 +507,14 @@ async def streamable_http_client( ], None, ]: - 
""" - Client transport for StreamableHTTP. + """Client transport for StreamableHTTP. Args: url: The MCP server endpoint URL. http_client: Optional pre-configured httpx.AsyncClient. If None, a default client with recommended MCP timeouts will be created. To configure headers, authentication, or other HTTP settings, create an httpx.AsyncClient and pass it here. - terminate_on_close: If True, send a DELETE request to terminate the session - when the context exits. + terminate_on_close: If True, send a DELETE request to terminate the session when the context exits. Yields: Tuple containing: @@ -667,11 +561,7 @@ def start_get_stream() -> None: ) try: - yield ( - read_stream, - write_stream, - transport.get_session_id, - ) + yield (read_stream, write_stream, transport.get_session_id) finally: if transport.session_id and terminate_on_close: await transport.terminate_session(client) @@ -679,44 +569,3 @@ def start_get_stream() -> None: finally: await read_stream_writer.aclose() await write_stream.aclose() - - -@asynccontextmanager -@deprecated("Use `streamable_http_client` instead.") -async def streamablehttp_client( - url: str, - headers: dict[str, str] | None = None, - timeout: float | timedelta = 30, - sse_read_timeout: float | timedelta = 60 * 5, - terminate_on_close: bool = True, - httpx_client_factory: McpHttpClientFactory = create_mcp_http_client, - auth: httpx.Auth | None = None, -) -> AsyncGenerator[ - tuple[ - MemoryObjectReceiveStream[SessionMessage | Exception], - MemoryObjectSendStream[SessionMessage], - GetSessionIdCallback, - ], - None, -]: - # Convert timeout parameters - timeout_seconds = timeout.total_seconds() if isinstance(timeout, timedelta) else timeout - sse_read_timeout_seconds = ( - sse_read_timeout.total_seconds() if isinstance(sse_read_timeout, timedelta) else sse_read_timeout - ) - - # Create httpx client using the factory with old-style parameters - client = httpx_client_factory( - headers=headers, - timeout=httpx.Timeout(timeout_seconds, read=sse_read_timeout_seconds), - auth=auth, - ) - - # Manage client lifecycle since we created it - async with client: - async with streamable_http_client( - url, - http_client=client, - terminate_on_close=terminate_on_close, - ) as streams: - yield streams diff --git a/src/mcp/client/websocket.py b/src/mcp/client/websocket.py index e8c8d9af87..d9d0aa4975 100644 --- a/src/mcp/client/websocket.py +++ b/src/mcp/client/websocket.py @@ -1,5 +1,4 @@ import json -import logging from collections.abc import AsyncGenerator from contextlib import asynccontextmanager @@ -12,8 +11,6 @@ import mcp.types as types from mcp.shared.message import SessionMessage -logger = logging.getLogger(__name__) - @asynccontextmanager async def websocket_client( @@ -22,8 +19,7 @@ async def websocket_client( tuple[MemoryObjectReceiveStream[SessionMessage | Exception], MemoryObjectSendStream[SessionMessage]], None, ]: - """ - WebSocket client transport for MCP, symmetrical to the server version. + """WebSocket client transport for MCP, symmetrical to the server version. Connects to 'url' using the 'mcp' subprotocol, then yields: (read_stream, write_stream) @@ -49,14 +45,13 @@ async def websocket_client( async with ws_connect(url, subprotocols=[Subprotocol("mcp")]) as ws: async def ws_reader(): - """ - Reads text messages from the WebSocket, parses them as JSON-RPC messages, + """Reads text messages from the WebSocket, parses them as JSON-RPC messages, and sends them into read_stream_writer. 
""" async with read_stream_writer: async for raw_text in ws: try: - message = types.JSONRPCMessage.model_validate_json(raw_text) + message = types.jsonrpc_message_adapter.validate_json(raw_text, by_name=False) session_message = SessionMessage(message) await read_stream_writer.send(session_message) except ValidationError as exc: # pragma: no cover @@ -64,8 +59,7 @@ async def ws_reader(): await read_stream_writer.send(exc) async def ws_writer(): - """ - Reads JSON-RPC messages from write_stream_reader and + """Reads JSON-RPC messages from write_stream_reader and sends them to the server. """ async with write_stream_reader: diff --git a/src/mcp/os/posix/utilities.py b/src/mcp/os/posix/utilities.py index dd1aea363a..0e9d74cf3c 100644 --- a/src/mcp/os/posix/utilities.py +++ b/src/mcp/os/posix/utilities.py @@ -1,6 +1,4 @@ -""" -POSIX-specific functionality for stdio client operations. -""" +"""POSIX-specific functionality for stdio client operations.""" import logging import os @@ -13,8 +11,7 @@ async def terminate_posix_process_tree(process: Process, timeout_seconds: float = 2.0) -> None: - """ - Terminate a process and all its children on POSIX systems. + """Terminate a process and all its children on POSIX systems. Uses os.killpg() for atomic process group termination. diff --git a/src/mcp/os/win32/utilities.py b/src/mcp/os/win32/utilities.py index 962be0229b..fa4e4b399b 100644 --- a/src/mcp/os/win32/utilities.py +++ b/src/mcp/os/win32/utilities.py @@ -1,6 +1,4 @@ -""" -Windows-specific functionality for stdio client operations. -""" +"""Windows-specific functionality for stdio client operations.""" import logging import shutil @@ -34,8 +32,7 @@ def get_windows_executable_command(command: str) -> str: - """ - Get the correct executable command normalized for Windows. + """Get the correct executable command normalized for Windows. On Windows, commands might exist with specific extensions (.exe, .cmd, etc.) that need to be located for proper execution. @@ -66,8 +63,7 @@ def get_windows_executable_command(command: str) -> str: class FallbackProcess: - """ - A fallback process wrapper for Windows to handle async I/O + """A fallback process wrapper for Windows to handle async I/O when using subprocess.Popen, which provides sync-only FileIO objects. This wraps stdin and stdout into async-compatible @@ -140,8 +136,7 @@ async def create_windows_process( errlog: TextIO | None = sys.stderr, cwd: Path | str | None = None, ) -> Process | FallbackProcess: - """ - Creates a subprocess in a Windows-compatible way with Job Object support. + """Creates a subprocess in a Windows-compatible way with Job Object support. Attempt to use anyio's open_process for async subprocess creation. In some cases this will throw NotImplementedError on Windows, e.g. @@ -199,8 +194,7 @@ async def _create_windows_fallback_process( errlog: TextIO | None = sys.stderr, cwd: Path | str | None = None, ) -> FallbackProcess: - """ - Create a subprocess using subprocess.Popen as a fallback when anyio fails. + """Create a subprocess using subprocess.Popen as a fallback when anyio fails. This function wraps the sync subprocess.Popen in an async-compatible interface. """ @@ -231,9 +225,7 @@ async def _create_windows_fallback_process( def _create_job_object() -> int | None: - """ - Create a Windows Job Object configured to terminate all processes when closed. 
- """ + """Create a Windows Job Object configured to terminate all processes when closed.""" if sys.platform != "win32" or not win32job: return None @@ -250,8 +242,7 @@ def _create_job_object() -> int | None: def _maybe_assign_process_to_job(process: Process | FallbackProcess, job: JobHandle | None) -> None: - """ - Try to assign a process to a job object. If assignment fails + """Try to assign a process to a job object. If assignment fails for any reason, the job handle is closed. """ if not job: @@ -279,8 +270,7 @@ def _maybe_assign_process_to_job(process: Process | FallbackProcess, job: JobHan async def terminate_windows_process_tree(process: Process | FallbackProcess, timeout_seconds: float = 2.0) -> None: - """ - Terminate a process and all its children on Windows. + """Terminate a process and all its children on Windows. If the process has an associated job object, it will be terminated. Otherwise, falls back to basic process termination. @@ -318,8 +308,7 @@ async def terminate_windows_process_tree(process: Process | FallbackProcess, tim "Process termination is now handled internally by the stdio_client context manager." ) async def terminate_windows_process(process: Process | FallbackProcess): - """ - Terminate a Windows process. + """Terminate a Windows process. Note: On Windows, terminating a process with process.terminate() doesn't always guarantee immediate process termination. diff --git a/src/mcp/server/__main__.py b/src/mcp/server/__main__.py index 1970eca7d3..dbc50b8a79 100644 --- a/src/mcp/server/__main__.py +++ b/src/mcp/server/__main__.py @@ -1,6 +1,7 @@ import importlib.metadata import logging import sys +import warnings import anyio @@ -10,8 +11,6 @@ from mcp.types import ServerCapabilities if not sys.warnoptions: - import warnings - warnings.simplefilter("ignore") logging.basicConfig(level=logging.INFO) diff --git a/src/mcp/server/auth/__init__.py b/src/mcp/server/auth/__init__.py index 6888ffe8d9..61b60e3487 100644 --- a/src/mcp/server/auth/__init__.py +++ b/src/mcp/server/auth/__init__.py @@ -1,3 +1 @@ -""" -MCP OAuth server authorization components. -""" +"""MCP OAuth server authorization components.""" diff --git a/src/mcp/server/auth/handlers/__init__.py b/src/mcp/server/auth/handlers/__init__.py index e99a62de1a..fd8a462b37 100644 --- a/src/mcp/server/auth/handlers/__init__.py +++ b/src/mcp/server/auth/handlers/__init__.py @@ -1,3 +1 @@ -""" -Request handlers for MCP authorization endpoints. -""" +"""Request handlers for MCP authorization endpoints.""" diff --git a/src/mcp/server/auth/handlers/authorize.py b/src/mcp/server/auth/handlers/authorize.py index 3570d28c2a..dec6713b13 100644 --- a/src/mcp/server/auth/handlers/authorize.py +++ b/src/mcp/server/auth/handlers/authorize.py @@ -2,7 +2,8 @@ from dataclasses import dataclass from typing import Any, Literal -from pydantic import AnyUrl, BaseModel, Field, RootModel, ValidationError +# TODO(Marcelo): We should drop the `RootModel`. 
+from pydantic import AnyUrl, BaseModel, Field, RootModel, ValidationError # noqa: TID251 from starlette.datastructures import FormData, QueryParams from starlette.requests import Request from starlette.responses import RedirectResponse, Response diff --git a/src/mcp/server/auth/handlers/register.py b/src/mcp/server/auth/handlers/register.py index c65473d1fc..79eb0fb0c1 100644 --- a/src/mcp/server/auth/handlers/register.py +++ b/src/mcp/server/auth/handlers/register.py @@ -4,7 +4,7 @@ from typing import Any from uuid import uuid4 -from pydantic import BaseModel, RootModel, ValidationError +from pydantic import BaseModel, ValidationError from starlette.requests import Request from starlette.responses import Response @@ -14,11 +14,9 @@ from mcp.server.auth.settings import ClientRegistrationOptions from mcp.shared.auth import OAuthClientInformationFull, OAuthClientMetadata - -class RegistrationRequest(RootModel[OAuthClientMetadata]): - # this wrapper is a no-op; it's just to separate out the types exposed to the - # provider from what we use in the HTTP handler - root: OAuthClientMetadata +# this alias is a no-op; it's just to separate out the types exposed to the +# provider from what we use in the HTTP handler +RegistrationRequest = OAuthClientMetadata class RegistrationErrorResponse(BaseModel): @@ -34,9 +32,8 @@ class RegistrationHandler: async def handle(self, request: Request) -> Response: # Implements dynamic client registration as defined in https://datatracker.ietf.org/doc/html/rfc7591#section-3.1 try: - # Parse request body as JSON - body = await request.json() - client_metadata = OAuthClientMetadata.model_validate(body) + body = await request.body() + client_metadata = OAuthClientMetadata.model_validate_json(body) # Scope validation is handled below except ValidationError as validation_error: @@ -73,11 +70,11 @@ async def handle(self, request: Request) -> Response: ), status_code=400, ) - if not {"authorization_code", "refresh_token"}.issubset(set(client_metadata.grant_types)): + if "authorization_code" not in client_metadata.grant_types: return PydanticJSONResponse( content=RegistrationErrorResponse( error="invalid_client_metadata", - error_description="grant_types must be authorization_code and refresh_token", + error_description="grant_types must include 'authorization_code'", ), status_code=400, ) diff --git a/src/mcp/server/auth/handlers/revoke.py b/src/mcp/server/auth/handlers/revoke.py index fa8cfc99d0..68a3392b4f 100644 --- a/src/mcp/server/auth/handlers/revoke.py +++ b/src/mcp/server/auth/handlers/revoke.py @@ -15,9 +15,7 @@ class RevocationRequest(BaseModel): - """ - # See https://datatracker.ietf.org/doc/html/rfc7009#section-2.1 - """ + """# See https://datatracker.ietf.org/doc/html/rfc7009#section-2.1""" token: str token_type_hint: Literal["access_token", "refresh_token"] | None = None @@ -36,9 +34,7 @@ class RevocationHandler: client_authenticator: ClientAuthenticator async def handle(self, request: Request) -> Response: - """ - Handler for the OAuth 2.0 Token Revocation endpoint. 
- """ + """Handler for the OAuth 2.0 Token Revocation endpoint.""" try: client = await self.client_authenticator.authenticate_request(request) except AuthenticationError as e: # pragma: no cover diff --git a/src/mcp/server/auth/handlers/token.py b/src/mcp/server/auth/handlers/token.py index 4467da6172..14f6f68720 100644 --- a/src/mcp/server/auth/handlers/token.py +++ b/src/mcp/server/auth/handlers/token.py @@ -4,7 +4,7 @@ from dataclasses import dataclass from typing import Annotated, Any, Literal -from pydantic import AnyHttpUrl, AnyUrl, BaseModel, Field, RootModel, ValidationError +from pydantic import AnyHttpUrl, AnyUrl, BaseModel, Field, TypeAdapter, ValidationError from starlette.requests import Request from mcp.server.auth.errors import stringify_pydantic_error @@ -40,35 +40,22 @@ class RefreshTokenRequest(BaseModel): resource: str | None = Field(None, description="Resource indicator for the token") -class TokenRequest( - RootModel[ - Annotated[ - AuthorizationCodeRequest | RefreshTokenRequest, - Field(discriminator="grant_type"), - ] - ] -): - root: Annotated[ - AuthorizationCodeRequest | RefreshTokenRequest, - Field(discriminator="grant_type"), - ] +TokenRequest = Annotated[AuthorizationCodeRequest | RefreshTokenRequest, Field(discriminator="grant_type")] +token_request_adapter = TypeAdapter[TokenRequest](TokenRequest) class TokenErrorResponse(BaseModel): - """ - See https://datatracker.ietf.org/doc/html/rfc6749#section-5.2 - """ + """See https://datatracker.ietf.org/doc/html/rfc6749#section-5.2""" error: TokenErrorCode error_description: str | None = None error_uri: AnyHttpUrl | None = None -class TokenSuccessResponse(RootModel[OAuthToken]): - # this is just a wrapper over OAuthToken; the only reason we do this - # is to have some separation between the HTTP response type, and the - # type returned by the provider - root: OAuthToken +# this is just an alias over OAuthToken; the only reason we do this +# is to have some separation between the HTTP response type, and the +# type returned by the provider +TokenSuccessResponse = OAuthToken @dataclass @@ -97,7 +84,7 @@ async def handle(self, request: Request): # Authentication failures should return 401 return PydanticJSONResponse( content=TokenErrorResponse( - error="unauthorized_client", + error="invalid_client", error_description=e.message, ), status_code=401, @@ -109,7 +96,8 @@ async def handle(self, request: Request): try: form_data = await request.form() - token_request = TokenRequest.model_validate(dict(form_data)).root + # TODO(Marcelo): Can someone check if this `dict()` wrapper is necessary? 
+ token_request = token_request_adapter.validate_python(dict(form_data)) except ValidationError as validation_error: # pragma: no cover return self.response( TokenErrorResponse( @@ -188,12 +176,7 @@ async def handle(self, request: Request): # Exchange authorization code for tokens tokens = await self.provider.exchange_authorization_code(client_info, auth_code) except TokenError as e: - return self.response( - TokenErrorResponse( - error=e.error, - error_description=e.error_description, - ) - ) + return self.response(TokenErrorResponse(error=e.error, error_description=e.error_description)) case RefreshTokenRequest(): # pragma: no cover refresh_token = await self.provider.load_refresh_token(client_info, token_request.refresh_token) @@ -231,11 +214,6 @@ async def handle(self, request: Request): # Exchange refresh token for new tokens tokens = await self.provider.exchange_refresh_token(client_info, refresh_token, scopes) except TokenError as e: - return self.response( - TokenErrorResponse( - error=e.error, - error_description=e.error_description, - ) - ) + return self.response(TokenErrorResponse(error=e.error, error_description=e.error_description)) - return self.response(TokenSuccessResponse(root=tokens)) + return self.response(tokens) diff --git a/src/mcp/server/auth/middleware/__init__.py b/src/mcp/server/auth/middleware/__init__.py index ba3ff63c34..ab07d84161 100644 --- a/src/mcp/server/auth/middleware/__init__.py +++ b/src/mcp/server/auth/middleware/__init__.py @@ -1,3 +1 @@ -""" -Middleware for MCP authorization. -""" +"""Middleware for MCP authorization.""" diff --git a/src/mcp/server/auth/middleware/auth_context.py b/src/mcp/server/auth/middleware/auth_context.py index e2116c3bfd..1d34a5546b 100644 --- a/src/mcp/server/auth/middleware/auth_context.py +++ b/src/mcp/server/auth/middleware/auth_context.py @@ -11,8 +11,7 @@ def get_access_token() -> AccessToken | None: - """ - Get the access token from the current context. + """Get the access token from the current context. Returns: The access token if an authenticated user is available, None otherwise. @@ -22,8 +21,7 @@ def get_access_token() -> AccessToken | None: class AuthContextMiddleware: - """ - Middleware that extracts the authenticated user from the request + """Middleware that extracts the authenticated user from the request and sets it in a contextvar for easy access throughout the request lifecycle. This middleware should be added after the AuthenticationMiddleware in the diff --git a/src/mcp/server/auth/middleware/bearer_auth.py b/src/mcp/server/auth/middleware/bearer_auth.py index 64c9b8841f..6825c00b9e 100644 --- a/src/mcp/server/auth/middleware/bearer_auth.py +++ b/src/mcp/server/auth/middleware/bearer_auth.py @@ -20,9 +20,7 @@ def __init__(self, auth_info: AccessToken): class BearerAuthBackend(AuthenticationBackend): - """ - Authentication backend that validates Bearer tokens using a TokenVerifier. - """ + """Authentication backend that validates Bearer tokens using a TokenVerifier.""" def __init__(self, token_verifier: TokenVerifier): self.token_verifier = token_verifier @@ -50,8 +48,7 @@ async def authenticate(self, conn: HTTPConnection): class RequireAuthMiddleware: - """ - Middleware that requires a valid Bearer token in the Authorization header. + """Middleware that requires a valid Bearer token in the Authorization header. This will validate the token with the auth provider and store the resulting auth info in the request state. 
@@ -63,8 +60,7 @@ def __init__( required_scopes: list[str], resource_metadata_url: AnyHttpUrl | None = None, ): - """ - Initialize the middleware. + """Initialize the middleware. Args: app: ASGI application diff --git a/src/mcp/server/auth/middleware/client_auth.py b/src/mcp/server/auth/middleware/client_auth.py index 6126c6e4f9..4e4d9be2f6 100644 --- a/src/mcp/server/auth/middleware/client_auth.py +++ b/src/mcp/server/auth/middleware/client_auth.py @@ -17,8 +17,7 @@ def __init__(self, message: str): class ClientAuthenticator: - """ - ClientAuthenticator is a callable which validates requests from a client + """ClientAuthenticator is a callable which validates requests from a client application, used to verify /token calls. If, during registration, the client requested to be issued a secret, the authenticator asserts that /token calls must be authenticated with @@ -28,8 +27,7 @@ class ClientAuthenticator: """ def __init__(self, provider: OAuthAuthorizationServerProvider[Any, Any, Any]): - """ - Initialize the dependency. + """Initialize the dependency. Args: provider: Provider to look up client information @@ -37,8 +35,7 @@ def __init__(self, provider: OAuthAuthorizationServerProvider[Any, Any, Any]): self.provider = provider async def authenticate_request(self, request: Request) -> OAuthClientInformationFull: - """ - Authenticate a client from an HTTP request. + """Authenticate a client from an HTTP request. Extracts client credentials from the appropriate location based on the client's registered authentication method and validates them. diff --git a/src/mcp/server/auth/provider.py b/src/mcp/server/auth/provider.py index 96296c148e..9fb30c1406 100644 --- a/src/mcp/server/auth/provider.py +++ b/src/mcp/server/auth/provider.py @@ -105,8 +105,7 @@ async def verify_token(self, token: str) -> AccessToken | None: class OAuthAuthorizationServerProvider(Protocol, Generic[AuthorizationCodeT, RefreshTokenT, AccessTokenT]): async def get_client(self, client_id: str) -> OAuthClientInformationFull | None: - """ - Retrieves client information by client ID. + """Retrieves client information by client ID. Implementors MAY raise NotImplementedError if dynamic client registration is disabled in ClientRegistrationOptions. @@ -119,8 +118,7 @@ async def get_client(self, client_id: str) -> OAuthClientInformationFull | None: """ async def register_client(self, client_info: OAuthClientInformationFull) -> None: - """ - Saves client information as part of registering it. + """Saves client information as part of registering it. Implementors MAY raise NotImplementedError if dynamic client registration is disabled in ClientRegistrationOptions. @@ -133,8 +131,7 @@ async def register_client(self, client_info: OAuthClientInformationFull) -> None """ async def authorize(self, client: OAuthClientInformationFull, params: AuthorizationParams) -> str: - """ - Called as part of the /authorize endpoint, and returns a URL that the client + """Called as part of the /authorize endpoint, and returns a URL that the client will be redirected to. Many MCP implementations will redirect to a third-party provider to perform a second OAuth exchange with that provider. In this sort of setup, the client @@ -178,8 +175,7 @@ async def authorize(self, client: OAuthClientInformationFull, params: Authorizat async def load_authorization_code( self, client: OAuthClientInformationFull, authorization_code: str ) -> AuthorizationCodeT | None: - """ - Loads an AuthorizationCode by its code. + """Loads an AuthorizationCode by its code. 
Args: client: The client that requested the authorization code. @@ -193,8 +189,7 @@ async def load_authorization_code( async def exchange_authorization_code( self, client: OAuthClientInformationFull, authorization_code: AuthorizationCodeT ) -> OAuthToken: - """ - Exchanges an authorization code for an access token and refresh token. + """Exchanges an authorization code for an access token and refresh token. Args: client: The client exchanging the authorization code. @@ -209,8 +204,7 @@ async def exchange_authorization_code( ... async def load_refresh_token(self, client: OAuthClientInformationFull, refresh_token: str) -> RefreshTokenT | None: - """ - Loads a RefreshToken by its token string. + """Loads a RefreshToken by its token string. Args: client: The client that is requesting to load the refresh token. @@ -227,8 +221,7 @@ async def exchange_refresh_token( refresh_token: RefreshTokenT, scopes: list[str], ) -> OAuthToken: - """ - Exchanges a refresh token for an access token and refresh token. + """Exchanges a refresh token for an access token and refresh token. Implementations SHOULD rotate both the access token and refresh token. @@ -246,8 +239,7 @@ async def exchange_refresh_token( ... async def load_access_token(self, token: str) -> AccessTokenT | None: - """ - Loads an access token by its token. + """Loads an access token by its token. Args: token: The access token to verify. @@ -260,8 +252,7 @@ async def revoke_token( self, token: AccessTokenT | RefreshTokenT, ) -> None: - """ - Revokes an access or refresh token. + """Revokes an access or refresh token. If the given token is invalid or already revoked, this method should do nothing. diff --git a/src/mcp/server/auth/routes.py b/src/mcp/server/auth/routes.py index 71a9c8b165..08f735f362 100644 --- a/src/mcp/server/auth/routes.py +++ b/src/mcp/server/auth/routes.py @@ -10,7 +10,7 @@ from starlette.types import ASGIApp from mcp.server.auth.handlers.authorize import AuthorizationHandler -from mcp.server.auth.handlers.metadata import MetadataHandler +from mcp.server.auth.handlers.metadata import MetadataHandler, ProtectedResourceMetadataHandler from mcp.server.auth.handlers.register import RegistrationHandler from mcp.server.auth.handlers.revoke import RevocationHandler from mcp.server.auth.handlers.token import TokenHandler @@ -18,12 +18,11 @@ from mcp.server.auth.provider import OAuthAuthorizationServerProvider from mcp.server.auth.settings import ClientRegistrationOptions, RevocationOptions from mcp.server.streamable_http import MCP_PROTOCOL_VERSION_HEADER -from mcp.shared.auth import OAuthMetadata +from mcp.shared.auth import OAuthMetadata, ProtectedResourceMetadata def validate_issuer_url(url: AnyHttpUrl): - """ - Validate that the issuer URL meets OAuth 2.0 requirements. + """Validate that the issuer URL meets OAuth 2.0 requirements. Args: url: The issuer URL to validate @@ -188,8 +187,7 @@ def build_metadata( def build_resource_metadata_url(resource_server_url: AnyHttpUrl) -> AnyHttpUrl: - """ - Build RFC 9728 compliant protected resource metadata URL. + """Build RFC 9728 compliant protected resource metadata URL. Inserts /.well-known/oauth-protected-resource between host and resource path as specified in RFC 9728 §3.1. @@ -213,8 +211,7 @@ def create_protected_resource_routes( resource_name: str | None = None, resource_documentation: AnyHttpUrl | None = None, ) -> list[Route]: - """ - Create routes for OAuth 2.0 Protected Resource Metadata (RFC 9728). + """Create routes for OAuth 2.0 Protected Resource Metadata (RFC 9728). 
Args: resource_url: The URL of this resource server @@ -224,9 +221,6 @@ def create_protected_resource_routes( Returns: List of Starlette routes for protected resource metadata """ - from mcp.server.auth.handlers.metadata import ProtectedResourceMetadataHandler - from mcp.shared.auth import ProtectedResourceMetadata - metadata = ProtectedResourceMetadata( resource=resource_url, authorization_servers=authorization_servers, diff --git a/src/mcp/server/elicitation.py b/src/mcp/server/elicitation.py index 49195415bf..58e9fe4485 100644 --- a/src/mcp/server/elicitation.py +++ b/src/mcp/server/elicitation.py @@ -125,7 +125,7 @@ async def elicit_with_validation( result = await session.elicit_form( message=message, - requestedSchema=json_schema, + requested_schema=json_schema, related_request_id=related_request_id, ) diff --git a/src/mcp/server/experimental/__init__.py b/src/mcp/server/experimental/__init__.py index 824bb8b8be..fd1db623f2 100644 --- a/src/mcp/server/experimental/__init__.py +++ b/src/mcp/server/experimental/__init__.py @@ -1,5 +1,4 @@ -""" -Server-side experimental features. +"""Server-side experimental features. WARNING: These APIs are experimental and may change without notice. diff --git a/src/mcp/server/experimental/request_context.py b/src/mcp/server/experimental/request_context.py index 78e75beb6a..14059f7f3f 100644 --- a/src/mcp/server/experimental/request_context.py +++ b/src/mcp/server/experimental/request_context.py @@ -1,5 +1,4 @@ -""" -Experimental request context features. +"""Experimental request context features. This module provides the Experimental class which gives access to experimental features within a request context, such as task-augmented request handling. @@ -32,8 +31,7 @@ @dataclass class Experimental: - """ - Experimental features context for task-augmented requests. + """Experimental features context for task-augmented requests. Provides helpers for validating task execution compatibility and running tasks with automatic lifecycle management. @@ -64,8 +62,7 @@ def validate_task_mode( *, raise_error: bool = True, ) -> ErrorData | None: - """ - Validate that the request is compatible with the tool's task execution mode. + """Validate that the request is compatible with the tool's task execution mode. Per MCP spec: - "required": Clients MUST invoke as task. Server returns -32601 if not. @@ -110,8 +107,7 @@ def validate_for_tool( *, raise_error: bool = True, ) -> ErrorData | None: - """ - Validate that the request is compatible with the given tool. + """Validate that the request is compatible with the given tool. Convenience wrapper around validate_task_mode that extracts the mode from a Tool. @@ -122,12 +118,11 @@ def validate_for_tool( Returns: None if valid, ErrorData if invalid and raise_error=False """ - mode = tool.execution.taskSupport if tool.execution else None + mode = tool.execution.task_support if tool.execution else None return self.validate_task_mode(mode, raise_error=raise_error) def can_use_tool(self, tool_task_mode: TaskExecutionMode | None) -> bool: - """ - Check if this client can use a tool with the given task mode. + """Check if this client can use a tool with the given task mode. Useful for filtering tool lists or providing warnings. Returns False if tool requires "required" but client doesn't support tasks. @@ -150,8 +145,7 @@ async def run_task( task_id: str | None = None, model_immediate_response: str | None = None, ) -> CreateTaskResult: - """ - Create a task, spawn background work, and return CreateTaskResult immediately. 
+ """Create a task, spawn background work, and return CreateTaskResult immediately. This is the recommended way to handle task-augmented tool calls. It: 1. Creates a task in the store diff --git a/src/mcp/server/experimental/session_features.py b/src/mcp/server/experimental/session_features.py index 4842da5175..a189c3cbca 100644 --- a/src/mcp/server/experimental/session_features.py +++ b/src/mcp/server/experimental/session_features.py @@ -1,5 +1,4 @@ -""" -Experimental server session features for server→client task operations. +"""Experimental server session features for server→client task operations. This module provides the server-side equivalent of ExperimentalClientFeatures, allowing the server to send task-augmented requests to the client and poll for results. @@ -25,8 +24,7 @@ class ExperimentalServerSessionFeatures: - """ - Experimental server session features for server→client task operations. + """Experimental server session features for server→client task operations. This provides the server-side equivalent of ExperimentalClientFeatures, allowing the server to send task-augmented requests to the client and @@ -42,8 +40,7 @@ def __init__(self, session: "ServerSession") -> None: self._session = session async def get_task(self, task_id: str) -> types.GetTaskResult: - """ - Send tasks/get to the client to get task status. + """Send tasks/get to the client to get task status. Args: task_id: The task identifier @@ -52,7 +49,7 @@ async def get_task(self, task_id: str) -> types.GetTaskResult: GetTaskResult containing the task status """ return await self._session.send_request( - types.ServerRequest(types.GetTaskRequest(params=types.GetTaskRequestParams(taskId=task_id))), + types.GetTaskRequest(params=types.GetTaskRequestParams(task_id=task_id)), types.GetTaskResult, ) @@ -61,8 +58,7 @@ async def get_task_result( task_id: str, result_type: type[ResultT], ) -> ResultT: - """ - Send tasks/result to the client to retrieve the final result. + """Send tasks/result to the client to retrieve the final result. Args: task_id: The task identifier @@ -72,13 +68,12 @@ async def get_task_result( The task result, validated against result_type """ return await self._session.send_request( - types.ServerRequest(types.GetTaskPayloadRequest(params=types.GetTaskPayloadRequestParams(taskId=task_id))), + types.GetTaskPayloadRequest(params=types.GetTaskPayloadRequestParams(task_id=task_id)), result_type, ) async def poll_task(self, task_id: str) -> AsyncIterator[types.GetTaskResult]: - """ - Poll a client task until it reaches terminal status. + """Poll a client task until it reaches terminal status. Yields GetTaskResult for each poll, allowing the caller to react to status changes. Exits when task reaches a terminal status. @@ -97,12 +92,11 @@ async def poll_task(self, task_id: str) -> AsyncIterator[types.GetTaskResult]: async def elicit_as_task( self, message: str, - requestedSchema: types.ElicitRequestedSchema, + requested_schema: types.ElicitRequestedSchema, *, ttl: int = 60000, ) -> types.ElicitResult: - """ - Send a task-augmented elicitation to the client and poll until complete. + """Send a task-augmented elicitation to the client and poll until complete. The client will create a local task, process the elicitation asynchronously, and return the result when ready. 
This method handles the full flow: @@ -113,7 +107,7 @@ async def elicit_as_task( Args: message: The message to present to the user - requestedSchema: Schema defining the expected response + requested_schema: Schema defining the expected response ttl: Task time-to-live in milliseconds Returns: @@ -126,19 +120,17 @@ async def elicit_as_task( require_task_augmented_elicitation(client_caps) create_result = await self._session.send_request( - types.ServerRequest( - types.ElicitRequest( - params=types.ElicitRequestFormParams( - message=message, - requestedSchema=requestedSchema, - task=types.TaskMetadata(ttl=ttl), - ) + types.ElicitRequest( + params=types.ElicitRequestFormParams( + message=message, + requested_schema=requested_schema, + task=types.TaskMetadata(ttl=ttl), ) ), types.CreateTaskResult, ) - task_id = create_result.task.taskId + task_id = create_result.task.task_id async for _ in self.poll_task(task_id): pass @@ -160,8 +152,7 @@ async def create_message_as_task( tools: list[types.Tool] | None = None, tool_choice: types.ToolChoice | None = None, ) -> types.CreateMessageResult: - """ - Send a task-augmented sampling request and poll until complete. + """Send a task-augmented sampling request and poll until complete. The client will create a local task, process the sampling request asynchronously, and return the result when ready. @@ -192,27 +183,25 @@ async def create_message_as_task( validate_tool_use_result_messages(messages) create_result = await self._session.send_request( - types.ServerRequest( - types.CreateMessageRequest( - params=types.CreateMessageRequestParams( - messages=messages, - maxTokens=max_tokens, - systemPrompt=system_prompt, - includeContext=include_context, - temperature=temperature, - stopSequences=stop_sequences, - metadata=metadata, - modelPreferences=model_preferences, - tools=tools, - toolChoice=tool_choice, - task=types.TaskMetadata(ttl=ttl), - ) + types.CreateMessageRequest( + params=types.CreateMessageRequestParams( + messages=messages, + max_tokens=max_tokens, + system_prompt=system_prompt, + include_context=include_context, + temperature=temperature, + stop_sequences=stop_sequences, + metadata=metadata, + model_preferences=model_preferences, + tools=tools, + tool_choice=tool_choice, + task=types.TaskMetadata(ttl=ttl), ) ), types.CreateTaskResult, ) - task_id = create_result.task.taskId + task_id = create_result.task.task_id async for _ in self.poll_task(task_id): pass diff --git a/src/mcp/server/experimental/task_context.py b/src/mcp/server/experimental/task_context.py index e6e14fc938..871cefd9f5 100644 --- a/src/mcp/server/experimental/task_context.py +++ b/src/mcp/server/experimental/task_context.py @@ -1,5 +1,4 @@ -""" -ServerTaskContext - Server-integrated task context with elicitation and sampling. +"""ServerTaskContext - Server-integrated task context with elicitation and sampling. This wraps the pure TaskContext and adds server-specific functionality: - Elicitation (task.elicit()) @@ -40,7 +39,6 @@ Result, SamplingCapability, SamplingMessage, - ServerNotification, Task, TaskMetadata, TaskStatusNotification, @@ -51,8 +49,7 @@ class ServerTaskContext: - """ - Server-integrated task context with elicitation and sampling. + """Server-integrated task context with elicitation and sampling. 
This wraps a pure TaskContext and adds server-specific functionality: - elicit() for sending elicitation requests to the client @@ -65,7 +62,7 @@ async def my_task_work(task: ServerTaskContext) -> CallToolResult: result = await task.elicit( message="Continue?", - requestedSchema={"type": "object", "properties": {"ok": {"type": "boolean"}}} + requested_schema={"type": "object", "properties": {"ok": {"type": "boolean"}}} ) if result.content.get("ok"): @@ -83,8 +80,7 @@ def __init__( queue: TaskMessageQueue, handler: TaskResultHandler | None = None, ): - """ - Create a ServerTaskContext. + """Create a ServerTaskContext. Args: task: The Task object @@ -123,8 +119,7 @@ def request_cancellation(self) -> None: # Enhanced methods with notifications async def update_status(self, message: str, *, notify: bool = True) -> None: - """ - Update the task's status message. + """Update the task's status message. Args: message: The new status message @@ -135,8 +130,7 @@ async def update_status(self, message: str, *, notify: bool = True) -> None: await self._send_notification() async def complete(self, result: Result, *, notify: bool = True) -> None: - """ - Mark the task as completed with the given result. + """Mark the task as completed with the given result. Args: result: The task result @@ -147,8 +141,7 @@ async def complete(self, result: Result, *, notify: bool = True) -> None: await self._send_notification() async def fail(self, error: str, *, notify: bool = True) -> None: - """ - Mark the task as failed with an error message. + """Mark the task as failed with an error message. Args: error: The error message @@ -162,17 +155,15 @@ async def _send_notification(self) -> None: """Send a task status notification to the client.""" task = self._ctx.task await self._session.send_notification( - ServerNotification( - TaskStatusNotification( - params=TaskStatusNotificationParams( - taskId=task.taskId, - status=task.status, - statusMessage=task.statusMessage, - createdAt=task.createdAt, - lastUpdatedAt=task.lastUpdatedAt, - ttl=task.ttl, - pollInterval=task.pollInterval, - ) + TaskStatusNotification( + params=TaskStatusNotificationParams( + task_id=task.task_id, + status=task.status, + status_message=task.status_message, + created_at=task.created_at, + last_updated_at=task.last_updated_at, + ttl=task.ttl, + poll_interval=task.poll_interval, ) ) ) @@ -202,10 +193,9 @@ def _check_sampling_capability(self) -> None: async def elicit( self, message: str, - requestedSchema: ElicitRequestedSchema, + requested_schema: ElicitRequestedSchema, ) -> ElicitResult: - """ - Send an elicitation request via the task message queue. + """Send an elicitation request via the task message queue. This method: 1. Checks client capability @@ -217,7 +207,7 @@ async def elicit( Args: message: The message to present to the user - requestedSchema: Schema defining the expected response structure + requested_schema: Schema defining the expected response structure Returns: The client's response @@ -236,7 +226,7 @@ async def elicit( # Build the request using session's helper request = self._session._build_elicit_form_request( # pyright: ignore[reportPrivateUsage] message=message, - requestedSchema=requestedSchema, + requested_schema=requested_schema, related_task_id=self.task_id, ) request_id: RequestId = request.id @@ -270,8 +260,7 @@ async def elicit_url( url: str, elicitation_id: str, ) -> ElicitResult: - """ - Send a URL mode elicitation request via the task message queue. + """Send a URL mode elicitation request via the task message queue. 
This directs the user to an external URL for out-of-band interactions like OAuth flows, credential collection, or payment processing. @@ -347,8 +336,7 @@ async def create_message( tools: list[Tool] | None = None, tool_choice: ToolChoice | None = None, ) -> CreateMessageResult: - """ - Send a sampling request via the task message queue. + """Send a sampling request via the task message queue. This method: 1. Checks client capability @@ -430,12 +418,11 @@ async def create_message( async def elicit_as_task( self, message: str, - requestedSchema: ElicitRequestedSchema, + requested_schema: ElicitRequestedSchema, *, ttl: int = 60000, ) -> ElicitResult: - """ - Send a task-augmented elicitation via the queue, then poll client. + """Send a task-augmented elicitation via the queue, then poll client. This is for use inside a task-augmented tool call when you want the client to handle the elicitation as its own task. The elicitation request is queued @@ -444,7 +431,7 @@ async def elicit_as_task( Args: message: The message to present to the user - requestedSchema: Schema defining the expected response structure + requested_schema: Schema defining the expected response structure ttl: Task time-to-live in milliseconds for the client's task Returns: @@ -465,7 +452,7 @@ async def elicit_as_task( request = self._session._build_elicit_form_request( # pyright: ignore[reportPrivateUsage] message=message, - requestedSchema=requestedSchema, + requested_schema=requested_schema, related_task_id=self.task_id, task=TaskMetadata(ttl=ttl), ) @@ -486,7 +473,7 @@ async def elicit_as_task( # Wait for initial response (CreateTaskResult from client) response_data = await resolver.wait() create_result = CreateTaskResult.model_validate(response_data) - client_task_id = create_result.task.taskId + client_task_id = create_result.task.task_id # Poll the client's task using session.experimental async for _ in self._session.experimental.poll_task(client_task_id): @@ -520,8 +507,7 @@ async def create_message_as_task( tools: list[Tool] | None = None, tool_choice: ToolChoice | None = None, ) -> CreateMessageResult: - """ - Send a task-augmented sampling request via the queue, then poll client. + """Send a task-augmented sampling request via the queue, then poll client. This is for use inside a task-augmented tool call when you want the client to handle the sampling as its own task. The request is queued and delivered @@ -592,7 +578,7 @@ async def create_message_as_task( # Wait for initial response (CreateTaskResult from client) response_data = await resolver.wait() create_result = CreateTaskResult.model_validate(response_data) - client_task_id = create_result.task.taskId + client_task_id = create_result.task.task_id # Poll the client's task using session.experimental async for _ in self._session.experimental.poll_task(client_task_id): diff --git a/src/mcp/server/experimental/task_result_handler.py b/src/mcp/server/experimental/task_result_handler.py index 0b869216e8..4d763ef0e6 100644 --- a/src/mcp/server/experimental/task_result_handler.py +++ b/src/mcp/server/experimental/task_result_handler.py @@ -1,5 +1,4 @@ -""" -TaskResultHandler - Integrated handler for tasks/result endpoint. +"""TaskResultHandler - Integrated handler for tasks/result endpoint. This implements the dequeue-send-wait pattern from the MCP Tasks spec: 1. 
Dequeue all pending messages for the task @@ -27,7 +26,6 @@ ErrorData, GetTaskPayloadRequest, GetTaskPayloadResult, - JSONRPCMessage, RelatedTaskMetadata, RequestId, ) @@ -36,8 +34,7 @@ class TaskResultHandler: - """ - Handler for tasks/result that implements the message queue pattern. + """Handler for tasks/result that implements the message queue pattern. This handler: 1. Dequeues pending messages (elicitations, notifications) for the task @@ -75,8 +72,7 @@ async def send_message( session: ServerSession, message: SessionMessage, ) -> None: - """ - Send a message via the session. + """Send a message via the session. This is a helper for delivering queued task messages. """ @@ -88,8 +84,7 @@ async def handle( session: ServerSession, request_id: RequestId, ) -> GetTaskPayloadResult: - """ - Handle a tasks/result request. + """Handle a tasks/result request. This implements the dequeue-send-wait loop: 1. Dequeue all pending messages @@ -106,17 +101,12 @@ async def handle( Returns: GetTaskPayloadResult with the task's final payload """ - task_id = request.params.taskId + task_id = request.params.task_id while True: task = await self._store.get_task(task_id) if task is None: - raise McpError( - ErrorData( - code=INVALID_PARAMS, - message=f"Task not found: {task_id}", - ) - ) + raise McpError(ErrorData(code=INVALID_PARAMS, message=f"Task not found: {task_id}")) await self._deliver_queued_messages(task_id, session, request_id) @@ -126,7 +116,7 @@ async def handle( # GetTaskPayloadResult is a Result with extra="allow" # The stored result contains the actual payload data # Per spec: tasks/result MUST include _meta with related-task metadata - related_task = RelatedTaskMetadata(taskId=task_id) + related_task = RelatedTaskMetadata(task_id=task_id) related_task_meta: dict[str, Any] = {RELATED_TASK_METADATA_KEY: related_task.model_dump(by_alias=True)} if result is not None: result_data = result.model_dump(by_alias=True) @@ -144,8 +134,7 @@ async def _deliver_queued_messages( session: ServerSession, request_id: RequestId, ) -> None: - """ - Dequeue and send all pending messages for a task. + """Dequeue and send all pending messages for a task. Each message is sent via the session's write stream with relatedRequestId set so responses route back to this stream. @@ -166,14 +155,13 @@ async def _deliver_queued_messages( # Send the message with relatedRequestId for routing session_message = SessionMessage( - message=JSONRPCMessage(message.message), + message=message.message, metadata=ServerMessageMetadata(related_request_id=request_id), ) await self.send_message(session, session_message) async def _wait_for_task_update(self, task_id: str) -> None: - """ - Wait for task to be updated (status change or new message). + """Wait for task to be updated (status change or new message). Races between store update and queue message - first one wins. """ @@ -199,8 +187,7 @@ async def wait_for_queue() -> None: tg.start_soon(wait_for_queue) def route_response(self, request_id: RequestId, response: dict[str, Any]) -> bool: - """ - Route a response back to the waiting resolver. + """Route a response back to the waiting resolver. This is called when a response arrives for a queued request. @@ -218,8 +205,7 @@ def route_response(self, request_id: RequestId, response: dict[str, Any]) -> boo return False def route_error(self, request_id: RequestId, error: ErrorData) -> bool: - """ - Route an error back to the waiting resolver. + """Route an error back to the waiting resolver. 
Args: request_id: The request ID from the error response diff --git a/src/mcp/server/experimental/task_support.py b/src/mcp/server/experimental/task_support.py index dbb2ed6d2b..23b5d9cc89 100644 --- a/src/mcp/server/experimental/task_support.py +++ b/src/mcp/server/experimental/task_support.py @@ -1,5 +1,4 @@ -""" -TaskSupport - Configuration for experimental task support. +"""TaskSupport - Configuration for experimental task support. This module provides the TaskSupport class which encapsulates all the infrastructure needed for task-augmented requests: store, queue, and handler. @@ -21,8 +20,7 @@ @dataclass class TaskSupport: - """ - Configuration for experimental task support. + """Configuration for experimental task support. Encapsulates the task store, message queue, result handler, and task group for spawning background work. @@ -65,8 +63,7 @@ def task_group(self) -> TaskGroup: @asynccontextmanager async def run(self) -> AsyncIterator[None]: - """ - Run the task support lifecycle. + """Run the task support lifecycle. This creates a task group for spawning background task work. Called automatically by Server.run(). @@ -84,8 +81,7 @@ async def run(self) -> AsyncIterator[None]: self._task_group = None def configure_session(self, session: ServerSession) -> None: - """ - Configure a session for task support. + """Configure a session for task support. This registers the result handler as a response router so that responses to queued requests (elicitation, sampling) are routed @@ -100,8 +96,7 @@ def configure_session(self, session: ServerSession) -> None: @classmethod def in_memory(cls) -> "TaskSupport": - """ - Create in-memory task support. + """Create in-memory task support. Suitable for development, testing, and single-process servers. For distributed systems, provide custom store and queue implementations. 
diff --git a/src/mcp/server/fastmcp/__init__.py b/src/mcp/server/fastmcp/__init__.py index a89902cfd7..2feecf1e9b 100644 --- a/src/mcp/server/fastmcp/__init__.py +++ b/src/mcp/server/fastmcp/__init__.py @@ -1,11 +1,8 @@ """FastMCP - A more ergonomic interface for MCP servers.""" -from importlib.metadata import version - from mcp.types import Icon from .server import Context, FastMCP from .utilities.types import Audio, Image -__version__ = version("mcp") __all__ = ["FastMCP", "Context", "Image", "Audio", "Icon"] diff --git a/src/mcp/server/fastmcp/resources/base.py b/src/mcp/server/fastmcp/resources/base.py index 557775eab5..b91a0e1203 100644 --- a/src/mcp/server/fastmcp/resources/base.py +++ b/src/mcp/server/fastmcp/resources/base.py @@ -1,14 +1,12 @@ """Base classes and interfaces for FastMCP resources.""" import abc -from typing import Annotated +from typing import Any from pydantic import ( - AnyUrl, BaseModel, ConfigDict, Field, - UrlConstraints, ValidationInfo, field_validator, ) @@ -21,7 +19,7 @@ class Resource(BaseModel, abc.ABC): model_config = ConfigDict(validate_default=True) - uri: Annotated[AnyUrl, UrlConstraints(host_required=False)] = Field(default=..., description="URI of the resource") + uri: str = Field(default=..., description="URI of the resource") name: str | None = Field(description="Name of the resource", default=None) title: str | None = Field(description="Human-readable title of the resource", default=None) description: str | None = Field(description="Description of the resource", default=None) @@ -32,6 +30,7 @@ class Resource(BaseModel, abc.ABC): ) icons: list[Icon] | None = Field(default=None, description="Optional list of icons for this resource") annotations: Annotations | None = Field(default=None, description="Optional annotations for the resource") + meta: dict[str, Any] | None = Field(default=None, description="Optional metadata for this resource") @field_validator("name", mode="before") @classmethod diff --git a/src/mcp/server/fastmcp/resources/resource_manager.py b/src/mcp/server/fastmcp/resources/resource_manager.py index 2e7dc171bc..20f67bbe42 100644 --- a/src/mcp/server/fastmcp/resources/resource_manager.py +++ b/src/mcp/server/fastmcp/resources/resource_manager.py @@ -64,6 +64,7 @@ def add_template( mime_type: str | None = None, icons: list[Icon] | None = None, annotations: Annotations | None = None, + meta: dict[str, Any] | None = None, ) -> ResourceTemplate: """Add a template from a function.""" template = ResourceTemplate.from_function( @@ -75,6 +76,7 @@ def add_template( mime_type=mime_type, icons=icons, annotations=annotations, + meta=meta, ) self._templates[template.uri_template] = template return template diff --git a/src/mcp/server/fastmcp/resources/templates.py b/src/mcp/server/fastmcp/resources/templates.py index a98d37f0ac..14e2ca4bc5 100644 --- a/src/mcp/server/fastmcp/resources/templates.py +++ b/src/mcp/server/fastmcp/resources/templates.py @@ -6,6 +6,7 @@ import re from collections.abc import Callable from typing import TYPE_CHECKING, Any +from urllib.parse import unquote from pydantic import BaseModel, Field, validate_call @@ -30,6 +31,7 @@ class ResourceTemplate(BaseModel): mime_type: str = Field(default="text/plain", description="MIME type of the resource content") icons: list[Icon] | None = Field(default=None, description="Optional list of icons for the resource template") annotations: Annotations | None = Field(default=None, description="Optional annotations for the resource template") + meta: dict[str, Any] | None = 
Field(default=None, description="Optional metadata for this resource template") fn: Callable[..., Any] = Field(exclude=True) parameters: dict[str, Any] = Field(description="JSON schema for function parameters") context_kwarg: str | None = Field(None, description="Name of the kwarg that should receive context") @@ -45,6 +47,7 @@ def from_function( mime_type: str | None = None, icons: list[Icon] | None = None, annotations: Annotations | None = None, + meta: dict[str, Any] | None = None, context_kwarg: str | None = None, ) -> ResourceTemplate: """Create a template from a function.""" @@ -74,18 +77,23 @@ def from_function( mime_type=mime_type or "text/plain", icons=icons, annotations=annotations, + meta=meta, fn=fn, parameters=parameters, context_kwarg=context_kwarg, ) def matches(self, uri: str) -> dict[str, Any] | None: - """Check if URI matches template and extract parameters.""" + """Check if URI matches template and extract parameters. + + Extracted parameters are URL-decoded to handle percent-encoded characters. + """ # Convert template to regex pattern pattern = self.uri_template.replace("{", "(?P<").replace("}", ">[^/]+)") match = re.match(f"^{pattern}$", uri) if match: - return match.groupdict() + # URL-decode all extracted parameter values + return {key: unquote(value) for key, value in match.groupdict().items()} return None async def create_resource( @@ -112,6 +120,7 @@ async def create_resource( mime_type=self.mime_type, icons=self.icons, annotations=self.annotations, + meta=self.meta, fn=lambda: result, # Capture result in closure ) except Exception as e: diff --git a/src/mcp/server/fastmcp/resources/types.py b/src/mcp/server/fastmcp/resources/types.py index 680e72dc09..791442f87e 100644 --- a/src/mcp/server/fastmcp/resources/types.py +++ b/src/mcp/server/fastmcp/resources/types.py @@ -11,7 +11,7 @@ import httpx import pydantic import pydantic_core -from pydantic import AnyUrl, Field, ValidationInfo, validate_call +from pydantic import Field, ValidationInfo, validate_call from mcp.server.fastmcp.resources.base import Resource from mcp.types import Annotations, Icon @@ -83,6 +83,7 @@ def from_function( mime_type: str | None = None, icons: list[Icon] | None = None, annotations: Annotations | None = None, + meta: dict[str, Any] | None = None, ) -> "FunctionResource": """Create a FunctionResource from a function.""" func_name = name or fn.__name__ @@ -93,7 +94,7 @@ def from_function( fn = validate_call(fn) return cls( - uri=AnyUrl(uri), + uri=uri, name=func_name, title=title, description=description or fn.__doc__ or "", @@ -101,6 +102,7 @@ def from_function( fn=fn, icons=icons, annotations=annotations, + meta=meta, ) diff --git a/src/mcp/server/fastmcp/server.py b/src/mcp/server/fastmcp/server.py index f74b65557f..27295e8bf5 100644 --- a/src/mcp/server/fastmcp/server.py +++ b/src/mcp/server/fastmcp/server.py @@ -1,19 +1,12 @@ """FastMCP - A more ergonomic interface for MCP servers.""" -from __future__ import annotations as _annotations +from __future__ import annotations import inspect import re -from collections.abc import ( - AsyncIterator, - Awaitable, - Callable, - Collection, - Iterable, - Sequence, -) +from collections.abc import AsyncIterator, Awaitable, Callable, Iterable, Sequence from contextlib import AbstractAsyncContextManager, asynccontextmanager -from typing import Any, Generic, Literal +from typing import Any, Generic, Literal, TypeVar, overload import anyio import pydantic_core @@ -29,25 +22,11 @@ from starlette.types import Receive, Scope, Send from 
mcp.server.auth.middleware.auth_context import AuthContextMiddleware -from mcp.server.auth.middleware.bearer_auth import ( - BearerAuthBackend, - RequireAuthMiddleware, -) -from mcp.server.auth.provider import ( - OAuthAuthorizationServerProvider, - ProviderTokenVerifier, - TokenVerifier, -) +from mcp.server.auth.middleware.bearer_auth import BearerAuthBackend, RequireAuthMiddleware +from mcp.server.auth.provider import OAuthAuthorizationServerProvider, ProviderTokenVerifier, TokenVerifier from mcp.server.auth.settings import AuthSettings -from mcp.server.elicitation import ( - ElicitationResult, - ElicitSchemaModelT, - UrlElicitationResult, - elicit_with_validation, -) -from mcp.server.elicitation import ( - elicit_url as _elicit_url, -) +from mcp.server.elicitation import ElicitationResult, ElicitSchemaModelT, UrlElicitationResult, elicit_with_validation +from mcp.server.elicitation import elicit_url as _elicit_url from mcp.server.fastmcp.exceptions import ResourceError from mcp.server.fastmcp.prompts import Prompt, PromptManager from mcp.server.fastmcp.resources import FunctionResource, Resource, ResourceManager @@ -65,7 +44,7 @@ from mcp.server.streamable_http_manager import StreamableHTTPSessionManager from mcp.server.transport_security import TransportSecuritySettings from mcp.shared.context import LifespanContextT, RequestContext, RequestT -from mcp.types import Annotations, AnyFunction, ContentBlock, GetPromptResult, Icon, ToolAnnotations +from mcp.types import Annotations, ContentBlock, GetPromptResult, Icon, ToolAnnotations from mcp.types import Prompt as MCPPrompt from mcp.types import PromptArgument as MCPPromptArgument from mcp.types import Resource as MCPResource @@ -74,6 +53,8 @@ logger = get_logger(__name__) +_CallableT = TypeVar("_CallableT", bound=Callable[..., Any]) + class Settings(BaseSettings, Generic[LifespanResultT]): """FastMCP server settings. @@ -94,19 +75,6 @@ class Settings(BaseSettings, Generic[LifespanResultT]): debug: bool log_level: Literal["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] - # HTTP settings - host: str - port: int - mount_path: str - sse_path: str - message_path: str - streamable_http_path: str - - # StreamableHTTP settings - json_response: bool - stateless_http: bool - """Define if the server should create a new transport per request.""" - # resource settings warn_on_duplicate_resources: bool @@ -116,18 +84,11 @@ class Settings(BaseSettings, Generic[LifespanResultT]): # prompt settings warn_on_duplicate_prompts: bool - # TODO(Marcelo): Investigate if this is used. If it is, it's probably a good idea to remove it. 
- dependencies: list[str] - """A list of dependencies to install in the server environment.""" - lifespan: Callable[[FastMCP[LifespanResultT]], AbstractAsyncContextManager[LifespanResultT]] | None """A async context manager that will be called when the server is started.""" auth: AuthSettings | None - # Transport security settings (DNS rebinding protection) - transport_security: TransportSecuritySettings | None - def lifespan_wrapper( app: FastMCP[LifespanResultT], @@ -144,69 +105,45 @@ async def wrap( class FastMCP(Generic[LifespanResultT]): - def __init__( # noqa: PLR0913 + def __init__( self, name: str | None = None, + title: str | None = None, + description: str | None = None, instructions: str | None = None, website_url: str | None = None, icons: list[Icon] | None = None, - auth_server_provider: (OAuthAuthorizationServerProvider[Any, Any, Any] | None) = None, + version: str | None = None, + auth_server_provider: OAuthAuthorizationServerProvider[Any, Any, Any] | None = None, token_verifier: TokenVerifier | None = None, - event_store: EventStore | None = None, - retry_interval: int | None = None, *, tools: list[Tool] | None = None, debug: bool = False, log_level: Literal["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] = "INFO", - host: str = "127.0.0.1", - port: int = 8000, - mount_path: str = "/", - sse_path: str = "/sse", - message_path: str = "/messages/", - streamable_http_path: str = "/mcp", - json_response: bool = False, - stateless_http: bool = False, warn_on_duplicate_resources: bool = True, warn_on_duplicate_tools: bool = True, warn_on_duplicate_prompts: bool = True, - dependencies: Collection[str] = (), - lifespan: (Callable[[FastMCP[LifespanResultT]], AbstractAsyncContextManager[LifespanResultT]] | None) = None, + lifespan: Callable[[FastMCP[LifespanResultT]], AbstractAsyncContextManager[LifespanResultT]] | None = None, auth: AuthSettings | None = None, - transport_security: TransportSecuritySettings | None = None, ): - # Auto-enable DNS rebinding protection for localhost (IPv4 and IPv6) - if transport_security is None and host in ("127.0.0.1", "localhost", "::1"): - transport_security = TransportSecuritySettings( - enable_dns_rebinding_protection=True, - allowed_hosts=["127.0.0.1:*", "localhost:*", "[::1]:*"], - allowed_origins=["http://127.0.0.1:*", "http://localhost:*", "http://[::1]:*"], - ) - self.settings = Settings( debug=debug, log_level=log_level, - host=host, - port=port, - mount_path=mount_path, - sse_path=sse_path, - message_path=message_path, - streamable_http_path=streamable_http_path, - json_response=json_response, - stateless_http=stateless_http, warn_on_duplicate_resources=warn_on_duplicate_resources, warn_on_duplicate_tools=warn_on_duplicate_tools, warn_on_duplicate_prompts=warn_on_duplicate_prompts, - dependencies=list(dependencies), lifespan=lifespan, auth=auth, - transport_security=transport_security, ) self._mcp_server = MCPServer( name=name or "FastMCP", + title=title, + description=description, instructions=instructions, website_url=website_url, icons=icons, + version=version, # TODO(Marcelo): It seems there's a type mismatch between the lifespan type from an FastMCP and Server. # We need to create a Lifespan type that is a generic on the server type, like Starlette does. 
lifespan=(lifespan_wrapper(self, self.settings.lifespan) if self.settings.lifespan else default_lifespan), # type: ignore @@ -229,11 +166,7 @@ def __init__( # noqa: PLR0913 # Create token verifier from provider if needed (backwards compatibility) if auth_server_provider and not token_verifier: # pragma: no cover self._token_verifier = ProviderTokenVerifier(auth_server_provider) - self._event_store = event_store - self._retry_interval = retry_interval self._custom_starlette_routes: list[Route] = [] - self.dependencies = self.settings.dependencies - self._session_manager: StreamableHTTPSessionManager | None = None # Set up MCP protocol handlers self._setup_handlers() @@ -245,6 +178,14 @@ def __init__( # noqa: PLR0913 def name(self) -> str: return self._mcp_server.name + @property + def title(self) -> str | None: + return self._mcp_server.title + + @property + def description(self) -> str | None: + return self._mcp_server.description + @property def instructions(self) -> str | None: return self._mcp_server.instructions @@ -257,6 +198,10 @@ def website_url(self) -> str | None: def icons(self) -> list[Icon] | None: return self._mcp_server.icons + @property + def version(self) -> str | None: + return self._mcp_server.version + @property def session_manager(self) -> StreamableHTTPSessionManager: """Get the StreamableHTTP session manager. @@ -267,25 +212,48 @@ def session_manager(self) -> StreamableHTTPSessionManager: Raises: RuntimeError: If called before streamable_http_app() has been called. """ - if self._session_manager is None: # pragma: no cover - raise RuntimeError( - "Session manager can only be accessed after" - "calling streamable_http_app()." - "The session manager is created lazily" - "to avoid unnecessary initialization." - ) - return self._session_manager # pragma: no cover + return self._mcp_server.session_manager # pragma: no cover + + @overload + def run(self, transport: Literal["stdio"] = ...) -> None: ... + + @overload + def run( + self, + transport: Literal["sse"], + *, + host: str = ..., + port: int = ..., + sse_path: str = ..., + message_path: str = ..., + transport_security: TransportSecuritySettings | None = ..., + ) -> None: ... + + @overload + def run( + self, + transport: Literal["streamable-http"], + *, + host: str = ..., + port: int = ..., + streamable_http_path: str = ..., + json_response: bool = ..., + stateless_http: bool = ..., + event_store: EventStore | None = ..., + retry_interval: int | None = ..., + transport_security: TransportSecuritySettings | None = ..., + ) -> None: ... def run( self, transport: Literal["stdio", "sse", "streamable-http"] = "stdio", - mount_path: str | None = None, + **kwargs: Any, ) -> None: """Run the FastMCP server. Note this is a synchronous function. 
Args: transport: Transport protocol to use ("stdio", "sse", or "streamable-http") - mount_path: Optional mount path for SSE transport + **kwargs: Transport-specific options (see overloads for details) """ TRANSPORTS = Literal["stdio", "sse", "streamable-http"] if transport not in TRANSPORTS.__args__: # type: ignore # pragma: no cover @@ -295,9 +263,9 @@ def run( case "stdio": anyio.run(self.run_stdio_async) case "sse": # pragma: no cover - anyio.run(lambda: self.run_sse_async(mount_path)) + anyio.run(lambda: self.run_sse_async(**kwargs)) case "streamable-http": # pragma: no cover - anyio.run(self.run_streamable_http_async) + anyio.run(lambda: self.run_streamable_http_async(**kwargs)) def _setup_handlers(self) -> None: """Set up core MCP protocol handlers.""" @@ -320,8 +288,8 @@ async def list_tools(self) -> list[MCPTool]: name=info.name, title=info.title, description=info.description, - inputSchema=info.parameters, - outputSchema=info.output_schema, + input_schema=info.parameters, + output_schema=info.output_schema, annotations=info.annotations, icons=info.icons, _meta=info.meta, @@ -330,8 +298,7 @@ async def list_tools(self) -> list[MCPTool]: ] def get_context(self) -> Context[ServerSession, LifespanResultT, Request]: - """ - Returns a Context object. Note that the context will only be valid + """Returns a Context object. Note that the context will only be valid during a request; outside a request, most methods will error. """ try: @@ -355,9 +322,10 @@ async def list_resources(self) -> list[MCPResource]: name=resource.name or "", title=resource.title, description=resource.description, - mimeType=resource.mime_type, + mime_type=resource.mime_type, icons=resource.icons, annotations=resource.annotations, + _meta=resource.meta, ) for resource in resources ] @@ -366,13 +334,14 @@ async def list_resource_templates(self) -> list[MCPResourceTemplate]: templates = self._resource_manager.list_templates() return [ MCPResourceTemplate( - uriTemplate=template.uri_template, + uri_template=template.uri_template, name=template.name, title=template.title, description=template.description, - mimeType=template.mime_type, + mime_type=template.mime_type, icons=template.icons, annotations=template.annotations, + _meta=template.meta, ) for template in templates ] @@ -387,14 +356,14 @@ async def read_resource(self, uri: AnyUrl | str) -> Iterable[ReadResourceContent try: content = await resource.read() - return [ReadResourceContents(content=content, mime_type=resource.mime_type)] + return [ReadResourceContents(content=content, mime_type=resource.mime_type, meta=resource.meta)] except Exception as e: # pragma: no cover logger.exception(f"Error reading resource {uri}") raise ResourceError(str(e)) def add_tool( self, - fn: AnyFunction, + fn: Callable[..., Any], name: str | None = None, title: str | None = None, description: str | None = None, @@ -450,7 +419,7 @@ def tool( icons: list[Icon] | None = None, meta: dict[str, Any] | None = None, structured_output: bool | None = None, - ) -> Callable[[AnyFunction], AnyFunction]: + ) -> Callable[[_CallableT], _CallableT]: """Decorator to register a tool. Tools can optionally request a Context object by adding a parameter with the @@ -488,7 +457,7 @@ async def async_tool(x: int, context: Context) -> str: "The @tool decorator was used incorrectly. Did you forget to call it? 
Use @tool() instead of @tool" ) - def decorator(fn: AnyFunction) -> AnyFunction: + def decorator(fn: _CallableT) -> _CallableT: self.add_tool( fn, name=name, @@ -539,7 +508,8 @@ def resource( mime_type: str | None = None, icons: list[Icon] | None = None, annotations: Annotations | None = None, - ) -> Callable[[AnyFunction], AnyFunction]: + meta: dict[str, Any] | None = None, + ) -> Callable[[_CallableT], _CallableT]: """Decorator to register a function as a resource. The function will be called when the resource is read to generate its content. @@ -557,6 +527,7 @@ def resource( title: Optional human-readable title for the resource description: Optional description of the resource mime_type: Optional MIME type for the resource + meta: Optional metadata dictionary for the resource Example: @server.resource("resource://my-resource") @@ -584,7 +555,7 @@ async def get_weather(city: str) -> str: "Did you forget to call it? Use @resource('uri') instead of @resource" ) - def decorator(fn: AnyFunction) -> AnyFunction: + def decorator(fn: _CallableT) -> _CallableT: # Check if this should be a template sig = inspect.signature(fn) has_uri_params = "{" in uri and "}" in uri @@ -615,6 +586,7 @@ def decorator(fn: AnyFunction) -> AnyFunction: mime_type=mime_type, icons=icons, annotations=annotations, + meta=meta, ) else: # Register as regular resource @@ -627,6 +599,7 @@ def decorator(fn: AnyFunction) -> AnyFunction: mime_type=mime_type, icons=icons, annotations=annotations, + meta=meta, ) self.add_resource(resource) return fn @@ -647,7 +620,7 @@ def prompt( title: str | None = None, description: str | None = None, icons: list[Icon] | None = None, - ) -> Callable[[AnyFunction], AnyFunction]: + ) -> Callable[[_CallableT], _CallableT]: """Decorator to register a prompt. Args: @@ -689,7 +662,7 @@ async def analyze_file(path: str) -> list[Message]: "Did you forget to call it? Use @prompt() instead of @prompt" ) - def decorator(func: AnyFunction) -> AnyFunction: + def decorator(func: _CallableT) -> _CallableT: prompt = Prompt.from_function(func, name=name, title=title, description=description, icons=icons) self.add_prompt(prompt) return func @@ -703,8 +676,7 @@ def custom_route( name: str | None = None, include_in_schema: bool = True, ): - """ - Decorator to register a custom HTTP route on the FastMCP server. + """Decorator to register a custom HTTP route on the FastMCP server. Allows adding arbitrary HTTP endpoints outside the standard MCP protocol, which can be useful for OAuth callbacks, health checks, or admin APIs. 
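# --- Usage sketch (reviewer note, not part of the patch) ---
# With this change, HTTP settings move off the FastMCP constructor and onto run();
# the keyword arguments below mirror the "streamable-http" overload added above.
# The server name and the tool are illustrative only.
from mcp.server.fastmcp import FastMCP

mcp = FastMCP("demo")

@mcp.tool()
def add(a: int, b: int) -> int:
    """Add two numbers."""
    return a + b

if __name__ == "__main__":
    # stdio needs no extra options; streamable-http takes its transport options here.
    mcp.run("streamable-http", host="127.0.0.1", port=8000, streamable_http_path="/mcp")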
@@ -753,100 +725,98 @@ async def run_stdio_async(self) -> None: self._mcp_server.create_initialization_options(), ) - async def run_sse_async(self, mount_path: str | None = None) -> None: # pragma: no cover + async def run_sse_async( # pragma: no cover + self, + *, + host: str = "127.0.0.1", + port: int = 8000, + sse_path: str = "/sse", + message_path: str = "/messages/", + transport_security: TransportSecuritySettings | None = None, + ) -> None: """Run the server using SSE transport.""" import uvicorn - starlette_app = self.sse_app(mount_path) + starlette_app = self.sse_app( + sse_path=sse_path, + message_path=message_path, + transport_security=transport_security, + host=host, + ) config = uvicorn.Config( starlette_app, - host=self.settings.host, - port=self.settings.port, + host=host, + port=port, log_level=self.settings.log_level.lower(), ) server = uvicorn.Server(config) await server.serve() - async def run_streamable_http_async(self) -> None: # pragma: no cover + async def run_streamable_http_async( # pragma: no cover + self, + *, + host: str = "127.0.0.1", + port: int = 8000, + streamable_http_path: str = "/mcp", + json_response: bool = False, + stateless_http: bool = False, + event_store: EventStore | None = None, + retry_interval: int | None = None, + transport_security: TransportSecuritySettings | None = None, + ) -> None: """Run the server using StreamableHTTP transport.""" import uvicorn - starlette_app = self.streamable_http_app() + starlette_app = self.streamable_http_app( + streamable_http_path=streamable_http_path, + json_response=json_response, + stateless_http=stateless_http, + event_store=event_store, + retry_interval=retry_interval, + transport_security=transport_security, + host=host, + ) config = uvicorn.Config( starlette_app, - host=self.settings.host, - port=self.settings.port, + host=host, + port=port, log_level=self.settings.log_level.lower(), ) server = uvicorn.Server(config) await server.serve() - def _normalize_path(self, mount_path: str, endpoint: str) -> str: - """ - Combine mount path and endpoint to return a normalized path. - - Args: - mount_path: The mount path (e.g. "/github" or "/") - endpoint: The endpoint path (e.g. "/messages/") - - Returns: - Normalized path (e.g. 
"/github/messages/") - """ - # Special case: root path - if mount_path == "/": - return endpoint - - # Remove trailing slash from mount path - if mount_path.endswith("/"): - mount_path = mount_path[:-1] - - # Ensure endpoint starts with slash - if not endpoint.startswith("/"): - endpoint = "/" + endpoint - - # Combine paths - return mount_path + endpoint - - def sse_app(self, mount_path: str | None = None) -> Starlette: + def sse_app( + self, + *, + sse_path: str = "/sse", + message_path: str = "/messages/", + transport_security: TransportSecuritySettings | None = None, + host: str = "127.0.0.1", + ) -> Starlette: """Return an instance of the SSE server app.""" - from starlette.middleware import Middleware - from starlette.routing import Mount, Route - - # Update mount_path in settings if provided - if mount_path is not None: - self.settings.mount_path = mount_path - - # Create normalized endpoint considering the mount path - normalized_message_endpoint = self._normalize_path(self.settings.mount_path, self.settings.message_path) - - # Set up auth context and dependencies + # Auto-enable DNS rebinding protection for localhost (IPv4 and IPv6) + if transport_security is None and host in ("127.0.0.1", "localhost", "::1"): + transport_security = TransportSecuritySettings( + enable_dns_rebinding_protection=True, + allowed_hosts=["127.0.0.1:*", "localhost:*", "[::1]:*"], + allowed_origins=["http://127.0.0.1:*", "http://localhost:*", "http://[::1]:*"], + ) - sse = SseServerTransport( - normalized_message_endpoint, - security_settings=self.settings.transport_security, - ) + sse = SseServerTransport(message_path, security_settings=transport_security) async def handle_sse(scope: Scope, receive: Receive, send: Send): # pragma: no cover # Add client ID from auth context into request context if available - async with sse.connect_sse( - scope, - receive, - send, - ) as streams: - await self._mcp_server.run( - streams[0], - streams[1], - self._mcp_server.create_initialization_options(), - ) + async with sse.connect_sse(scope, receive, send) as streams: + await self._mcp_server.run(streams[0], streams[1], self._mcp_server.create_initialization_options()) return Response() # Create routes routes: list[Route | Mount] = [] middleware: list[Middleware] = [] - required_scopes = [] + required_scopes: list[str] = [] # Set up auth if configured if self.settings.auth: # pragma: no cover @@ -892,14 +862,14 @@ async def handle_sse(scope: Scope, receive: Receive, send: Send): # pragma: no # Auth is enabled, wrap the endpoints with RequireAuthMiddleware routes.append( Route( - self.settings.sse_path, + sse_path, endpoint=RequireAuthMiddleware(handle_sse, required_scopes, resource_metadata_url), methods=["GET"], ) ) routes.append( Mount( - self.settings.message_path, + message_path, app=RequireAuthMiddleware(sse.handle_post_message, required_scopes, resource_metadata_url), ) ) @@ -912,14 +882,14 @@ async def sse_endpoint(request: Request) -> Response: routes.append( Route( - self.settings.sse_path, + sse_path, endpoint=sse_endpoint, methods=["GET"], ) ) routes.append( Mount( - self.settings.message_path, + message_path, app=sse.handle_post_message, ) ) @@ -941,101 +911,31 @@ async def sse_endpoint(request: Request) -> Response: # Create Starlette app with routes and middleware return Starlette(debug=self.settings.debug, routes=routes, middleware=middleware) - def streamable_http_app(self) -> Starlette: + def streamable_http_app( + self, + *, + streamable_http_path: str = "/mcp", + json_response: bool = False, + 
stateless_http: bool = False, + event_store: EventStore | None = None, + retry_interval: int | None = None, + transport_security: TransportSecuritySettings | None = None, + host: str = "127.0.0.1", + ) -> Starlette: """Return an instance of the StreamableHTTP server app.""" - from starlette.middleware import Middleware - - # Create session manager on first call (lazy initialization) - if self._session_manager is None: # pragma: no branch - self._session_manager = StreamableHTTPSessionManager( - app=self._mcp_server, - event_store=self._event_store, - retry_interval=self._retry_interval, - json_response=self.settings.json_response, - stateless=self.settings.stateless_http, # Use the stateless setting - security_settings=self.settings.transport_security, - ) - - # Create the ASGI handler - streamable_http_app = StreamableHTTPASGIApp(self._session_manager) - - # Create routes - routes: list[Route | Mount] = [] - middleware: list[Middleware] = [] - required_scopes = [] - - # Set up auth if configured - if self.settings.auth: # pragma: no cover - required_scopes = self.settings.auth.required_scopes or [] - - # Add auth middleware if token verifier is available - if self._token_verifier: - middleware = [ - Middleware( - AuthenticationMiddleware, - backend=BearerAuthBackend(self._token_verifier), - ), - Middleware(AuthContextMiddleware), - ] - - # Add auth endpoints if auth server provider is configured - if self._auth_server_provider: - from mcp.server.auth.routes import create_auth_routes - - routes.extend( - create_auth_routes( - provider=self._auth_server_provider, - issuer_url=self.settings.auth.issuer_url, - service_documentation_url=self.settings.auth.service_documentation_url, - client_registration_options=self.settings.auth.client_registration_options, - revocation_options=self.settings.auth.revocation_options, - ) - ) - - # Set up routes with or without auth - if self._token_verifier: # pragma: no cover - # Determine resource metadata URL - resource_metadata_url = None - if self.settings.auth and self.settings.auth.resource_server_url: - from mcp.server.auth.routes import build_resource_metadata_url - - # Build compliant metadata URL for WWW-Authenticate header - resource_metadata_url = build_resource_metadata_url(self.settings.auth.resource_server_url) - - routes.append( - Route( - self.settings.streamable_http_path, - endpoint=RequireAuthMiddleware(streamable_http_app, required_scopes, resource_metadata_url), - ) - ) - else: - # Auth is disabled, no wrapper needed - routes.append( - Route( - self.settings.streamable_http_path, - endpoint=streamable_http_app, - ) - ) - - # Add protected resource metadata endpoint if configured as RS - if self.settings.auth and self.settings.auth.resource_server_url: # pragma: no cover - from mcp.server.auth.routes import create_protected_resource_routes - - routes.extend( - create_protected_resource_routes( - resource_url=self.settings.auth.resource_server_url, - authorization_servers=[self.settings.auth.issuer_url], - scopes_supported=self.settings.auth.required_scopes, - ) - ) - - routes.extend(self._custom_starlette_routes) - - return Starlette( + return self._mcp_server.streamable_http_app( + streamable_http_path=streamable_http_path, + json_response=json_response, + stateless_http=stateless_http, + event_store=event_store, + retry_interval=retry_interval, + transport_security=transport_security, + host=host, + auth=self.settings.auth, + token_verifier=self._token_verifier, + auth_server_provider=self._auth_server_provider, + 
custom_starlette_routes=self._custom_starlette_routes, debug=self.settings.debug, - routes=routes, - middleware=middleware, - lifespan=lambda app: self.session_manager.run(), ) async def list_prompts(self) -> list[MCPPrompt]: @@ -1077,18 +977,6 @@ async def get_prompt(self, name: str, arguments: dict[str, Any] | None = None) - raise ValueError(str(e)) -class StreamableHTTPASGIApp: - """ - ASGI application for Streamable HTTP server transport. - """ - - def __init__(self, session_manager: StreamableHTTPSessionManager): - self.session_manager = session_manager - - async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: # pragma: no cover - await self.session_manager.handle_request(scope, receive, send) - - class Context(BaseModel, Generic[ServerSessionT, LifespanContextT, RequestT]): """Context object providing access to MCP capabilities. @@ -1161,7 +1049,7 @@ async def report_progress(self, progress: float, total: float | None = None, mes total: Optional total value e.g. 100 message: Optional message e.g. Starting render... """ - progress_token = self.request_context.meta.progressToken if self.request_context.meta else None + progress_token = self.request_context.meta.progress_token if self.request_context.meta else None if progress_token is None: # pragma: no cover return @@ -1200,7 +1088,7 @@ async def elicit( Args: schema: A Pydantic model class defining the expected response structure, according to the specification, - only primive types are allowed. + only primitive types are allowed. message: Optional message to present to the user. If not provided, will use a default message based on the schema @@ -1260,6 +1148,7 @@ async def log( message: str, *, logger_name: str | None = None, + extra: dict[str, Any] | None = None, ) -> None: """Send a log message to the client. 
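# --- Usage sketch (reviewer note, not part of the patch) ---
# Context.log and the debug/info/warning/error helpers (updated in the next hunk) now
# take structured data as an explicit `extra` dict instead of **kwargs keyword
# arguments. The tool and field names below are illustrative only.
from mcp.server.fastmcp import Context, FastMCP

mcp = FastMCP("logging-demo")

@mcp.tool()
async def process(item_id: str, ctx: Context) -> str:
    await ctx.info("processing item", logger_name="demo", extra={"item_id": item_id})
    return "done"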
@@ -1267,11 +1156,17 @@ async def log( level: Log level (debug, info, warning, error) message: Log message logger_name: Optional logger name - **extra: Additional structured data to include + extra: Optional dictionary with additional structured data to include """ + + if extra: + log_data = {"message": message, **extra} + else: + log_data = message + await self.request_context.session.send_log_message( level=level, - data=message, + data=log_data, logger=logger_name, related_request_id=self.request_id, ) @@ -1326,18 +1221,20 @@ async def close_standalone_sse_stream(self) -> None: await self._request_context.close_standalone_sse_stream() # Convenience methods for common log levels - async def debug(self, message: str, **extra: Any) -> None: + async def debug(self, message: str, *, logger_name: str | None = None, extra: dict[str, Any] | None = None) -> None: """Send a debug log message.""" - await self.log("debug", message, **extra) + await self.log("debug", message, logger_name=logger_name, extra=extra) - async def info(self, message: str, **extra: Any) -> None: + async def info(self, message: str, *, logger_name: str | None = None, extra: dict[str, Any] | None = None) -> None: """Send an info log message.""" - await self.log("info", message, **extra) + await self.log("info", message, logger_name=logger_name, extra=extra) - async def warning(self, message: str, **extra: Any) -> None: + async def warning( + self, message: str, *, logger_name: str | None = None, extra: dict[str, Any] | None = None + ) -> None: """Send a warning log message.""" - await self.log("warning", message, **extra) + await self.log("warning", message, logger_name=logger_name, extra=extra) - async def error(self, message: str, **extra: Any) -> None: + async def error(self, message: str, *, logger_name: str | None = None, extra: dict[str, Any] | None = None) -> None: """Send an error log message.""" - await self.log("error", message, **extra) + await self.log("error", message, logger_name=logger_name, extra=extra) diff --git a/src/mcp/server/fastmcp/tools/base.py b/src/mcp/server/fastmcp/tools/base.py index 1ae6d90d19..b784d0f53d 100644 --- a/src/mcp/server/fastmcp/tools/base.py +++ b/src/mcp/server/fastmcp/tools/base.py @@ -1,4 +1,4 @@ -from __future__ import annotations as _annotations +from __future__ import annotations import functools import inspect diff --git a/src/mcp/server/fastmcp/tools/tool_manager.py b/src/mcp/server/fastmcp/tools/tool_manager.py index 095753de69..0d3d9d52a4 100644 --- a/src/mcp/server/fastmcp/tools/tool_manager.py +++ b/src/mcp/server/fastmcp/tools/tool_manager.py @@ -1,4 +1,4 @@ -from __future__ import annotations as _annotations +from __future__ import annotations from collections.abc import Callable from typing import TYPE_CHECKING, Any diff --git a/src/mcp/server/fastmcp/utilities/context_injection.py b/src/mcp/server/fastmcp/utilities/context_injection.py index 66d0cbaa0c..f1aeda39ec 100644 --- a/src/mcp/server/fastmcp/utilities/context_injection.py +++ b/src/mcp/server/fastmcp/utilities/context_injection.py @@ -25,7 +25,8 @@ def find_context_parameter(fn: Callable[..., Any]) -> str | None: # Get type hints to properly resolve string annotations try: hints = typing.get_type_hints(fn) - except Exception: + # TODO(Marcelo): Drop the pragma once https://github.com/coveragepy/coveragepy/issues/1987 is fixed. 
+ except Exception: # pragma: no cover # If we can't resolve type hints, we can't find the context parameter return None diff --git a/src/mcp/server/fastmcp/utilities/func_metadata.py b/src/mcp/server/fastmcp/utilities/func_metadata.py index fa443d2fcb..95b3a3274b 100644 --- a/src/mcp/server/fastmcp/utilities/func_metadata.py +++ b/src/mcp/server/fastmcp/utilities/func_metadata.py @@ -1,3 +1,4 @@ +import functools import inspect import json from collections.abc import Awaitable, Callable, Sequence @@ -5,15 +6,10 @@ from types import GenericAlias from typing import Annotated, Any, cast, get_args, get_origin, get_type_hints +import anyio +import anyio.to_thread import pydantic_core -from pydantic import ( - BaseModel, - ConfigDict, - Field, - RootModel, - WithJsonSchema, - create_model, -) +from pydantic import BaseModel, ConfigDict, Field, WithJsonSchema, create_model from pydantic.fields import FieldInfo from pydantic.json_schema import GenerateJsonSchema, JsonSchemaWarningKind from typing_extensions import is_typeddict @@ -60,9 +56,7 @@ def model_dump_one_level(self) -> dict[str, Any]: kwargs[output_name] = value return kwargs - model_config = ConfigDict( - arbitrary_types_allowed=True, - ) + model_config = ConfigDict(arbitrary_types_allowed=True) class FuncMetadata(BaseModel): @@ -92,11 +86,10 @@ async def call_fn_with_arg_validation( if fn_is_async: return await fn(**arguments_parsed_dict) else: - return fn(**arguments_parsed_dict) + return await anyio.to_thread.run_sync(functools.partial(fn, **arguments_parsed_dict)) def convert_result(self, result: Any) -> Any: - """ - Convert the result of a function call to the appropriate format for + """Convert the result of a function call to the appropriate format for the lowlevel server tool call handler: - If output_model is None, return the unstructured content directly. @@ -113,7 +106,7 @@ def convert_result(self, result: Any) -> Any: if isinstance(result, CallToolResult): if self.output_schema is not None: assert self.output_model is not None, "Output model must be set if output schema is defined" - self.output_model.model_validate(result.structuredContent) + self.output_model.model_validate(result.structured_content) return result unstructured_content = _convert_to_content(result) @@ -485,6 +478,8 @@ def _create_wrapped_model(func_name: str, annotation: Any) -> type[BaseModel]: def _create_dict_model(func_name: str, dict_annotation: Any) -> type[BaseModel]: """Create a RootModel for dict[str, T] types.""" + # TODO(Marcelo): We should not rely on RootModel for this. + from pydantic import RootModel # noqa: TID251 class DictModel(RootModel[dict_annotation]): pass @@ -496,11 +491,8 @@ class DictModel(RootModel[dict_annotation]): return DictModel -def _convert_to_content( - result: Any, -) -> Sequence[ContentBlock]: - """ - Convert a result to a sequence of content objects. +def _convert_to_content(result: Any) -> Sequence[ContentBlock]: + """Convert a result to a sequence of content objects. Note: This conversion logic comes from previous versions of FastMCP and is being retained for purposes of backwards compatibility. 
It produces different unstructured diff --git a/src/mcp/server/fastmcp/utilities/logging.py b/src/mcp/server/fastmcp/utilities/logging.py index 4b47d3b882..2da0cab32c 100644 --- a/src/mcp/server/fastmcp/utilities/logging.py +++ b/src/mcp/server/fastmcp/utilities/logging.py @@ -36,8 +36,4 @@ def configure_logging( if not handlers: # pragma: no cover handlers.append(logging.StreamHandler()) - logging.basicConfig( - level=level, - format="%(message)s", - handlers=handlers, - ) + logging.basicConfig(level=level, format="%(message)s", handlers=handlers) diff --git a/src/mcp/server/fastmcp/utilities/types.py b/src/mcp/server/fastmcp/utilities/types.py index d6928ca3f8..a1445de196 100644 --- a/src/mcp/server/fastmcp/utilities/types.py +++ b/src/mcp/server/fastmcp/utilities/types.py @@ -51,7 +51,7 @@ def to_image_content(self) -> ImageContent: else: # pragma: no cover raise ValueError("No image data available") - return ImageContent(type="image", data=data, mimeType=self._mime_type) + return ImageContent(type="image", data=data, mime_type=self._mime_type) class Audio: @@ -98,4 +98,4 @@ def to_audio_content(self) -> AudioContent: else: # pragma: no cover raise ValueError("No audio data available") - return AudioContent(type="audio", data=data, mimeType=self._mime_type) + return AudioContent(type="audio", data=data, mime_type=self._mime_type) diff --git a/src/mcp/server/lowlevel/experimental.py b/src/mcp/server/lowlevel/experimental.py index 0e6655b3de..49387daad7 100644 --- a/src/mcp/server/lowlevel/experimental.py +++ b/src/mcp/server/lowlevel/experimental.py @@ -31,6 +31,7 @@ ServerResult, ServerTasksCapability, ServerTasksRequestsCapability, + TasksCallCapability, TasksCancelCapability, TasksListCapability, TasksToolsCapability, @@ -79,7 +80,7 @@ def update_capabilities(self, capabilities: ServerCapabilities) -> None: capabilities.tasks.cancel = TasksCancelCapability() capabilities.tasks.requests = ServerTasksRequestsCapability( - tools=TasksToolsCapability() + tools=TasksToolsCapability(call=TasksCallCapability()) ) # assuming always supported for now def enable_tasks( @@ -87,8 +88,7 @@ def enable_tasks( store: TaskStore | None = None, queue: TaskMessageQueue | None = None, ) -> TaskSupport: - """ - Enable experimental task support. + """Enable experimental task support. This sets up the task infrastructure and auto-registers default handlers for tasks/get, tasks/result, tasks/list, and tasks/cancel. 
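# --- Usage sketch (reviewer note, not part of the patch) ---
# A minimal way to enable experimental task support, assuming it is reached through the
# lowlevel server's `experimental` property shown later in this patch; `store` and
# `queue` are optional keyword arguments per the signature in this hunk.
from mcp.server.lowlevel import Server

server = Server("tasks-demo")
support = server.experimental.enable_tasks()
# Per the docstring above, default handlers for tasks/get, tasks/result, tasks/list and
# tasks/cancel are registered unless the application has already provided its own.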
@@ -133,24 +133,22 @@ def _register_default_task_handlers(self) -> None: if GetTaskRequest not in self._request_handlers: async def _default_get_task(req: GetTaskRequest) -> ServerResult: - task = await support.store.get_task(req.params.taskId) + task = await support.store.get_task(req.params.task_id) if task is None: raise McpError( ErrorData( code=INVALID_PARAMS, - message=f"Task not found: {req.params.taskId}", + message=f"Task not found: {req.params.task_id}", ) ) - return ServerResult( - GetTaskResult( - taskId=task.taskId, - status=task.status, - statusMessage=task.statusMessage, - createdAt=task.createdAt, - lastUpdatedAt=task.lastUpdatedAt, - ttl=task.ttl, - pollInterval=task.pollInterval, - ) + return GetTaskResult( + task_id=task.task_id, + status=task.status, + status_message=task.status_message, + created_at=task.created_at, + last_updated_at=task.last_updated_at, + ttl=task.ttl, + poll_interval=task.poll_interval, ) self._request_handlers[GetTaskRequest] = _default_get_task @@ -158,29 +156,29 @@ async def _default_get_task(req: GetTaskRequest) -> ServerResult: # Register get_task_result handler if not already registered if GetTaskPayloadRequest not in self._request_handlers: - async def _default_get_task_result(req: GetTaskPayloadRequest) -> ServerResult: + async def _default_get_task_result(req: GetTaskPayloadRequest) -> GetTaskPayloadResult: ctx = self._server.request_context result = await support.handler.handle(req, ctx.session, ctx.request_id) - return ServerResult(result) + return result self._request_handlers[GetTaskPayloadRequest] = _default_get_task_result # Register list_tasks handler if not already registered if ListTasksRequest not in self._request_handlers: - async def _default_list_tasks(req: ListTasksRequest) -> ServerResult: + async def _default_list_tasks(req: ListTasksRequest) -> ListTasksResult: cursor = req.params.cursor if req.params else None tasks, next_cursor = await support.store.list_tasks(cursor) - return ServerResult(ListTasksResult(tasks=tasks, nextCursor=next_cursor)) + return ListTasksResult(tasks=tasks, next_cursor=next_cursor) self._request_handlers[ListTasksRequest] = _default_list_tasks # Register cancel_task handler if not already registered if CancelTaskRequest not in self._request_handlers: - async def _default_cancel_task(req: CancelTaskRequest) -> ServerResult: - result = await cancel_task(support.store, req.params.taskId) - return ServerResult(result) + async def _default_cancel_task(req: CancelTaskRequest) -> CancelTaskResult: + result = await cancel_task(support.store, req.params.task_id) + return result self._request_handlers[CancelTaskRequest] = _default_cancel_task @@ -201,9 +199,9 @@ def decorator( logger.debug("Registering handler for ListTasksRequest") wrapper = create_call_wrapper(func, ListTasksRequest) - async def handler(req: ListTasksRequest) -> ServerResult: + async def handler(req: ListTasksRequest) -> ListTasksResult: result = await wrapper(req) - return ServerResult(result) + return result self._request_handlers[ListTasksRequest] = handler return func @@ -226,9 +224,9 @@ def decorator( logger.debug("Registering handler for GetTaskRequest") wrapper = create_call_wrapper(func, GetTaskRequest) - async def handler(req: GetTaskRequest) -> ServerResult: + async def handler(req: GetTaskRequest) -> GetTaskResult: result = await wrapper(req) - return ServerResult(result) + return result self._request_handlers[GetTaskRequest] = handler return func @@ -252,9 +250,9 @@ def decorator( logger.debug("Registering handler for 
GetTaskPayloadRequest") wrapper = create_call_wrapper(func, GetTaskPayloadRequest) - async def handler(req: GetTaskPayloadRequest) -> ServerResult: + async def handler(req: GetTaskPayloadRequest) -> GetTaskPayloadResult: result = await wrapper(req) - return ServerResult(result) + return result self._request_handlers[GetTaskPayloadRequest] = handler return func @@ -278,9 +276,9 @@ def decorator( logger.debug("Registering handler for CancelTaskRequest") wrapper = create_call_wrapper(func, CancelTaskRequest) - async def handler(req: CancelTaskRequest) -> ServerResult: + async def handler(req: CancelTaskRequest) -> CancelTaskResult: result = await wrapper(req) - return ServerResult(result) + return result self._request_handlers[CancelTaskRequest] = handler return func diff --git a/src/mcp/server/lowlevel/func_inspection.py b/src/mcp/server/lowlevel/func_inspection.py index 6231aa8954..d176970902 100644 --- a/src/mcp/server/lowlevel/func_inspection.py +++ b/src/mcp/server/lowlevel/func_inspection.py @@ -7,8 +7,7 @@ def create_call_wrapper(func: Callable[..., R], request_type: type[T]) -> Callable[[T], R]: - """ - Create a wrapper function that knows how to call func with the request object. + """Create a wrapper function that knows how to call func with the request object. Returns a wrapper function that takes the request and calls func appropriately. diff --git a/src/mcp/server/lowlevel/helper_types.py b/src/mcp/server/lowlevel/helper_types.py index 3d09b25056..fecc716db6 100644 --- a/src/mcp/server/lowlevel/helper_types.py +++ b/src/mcp/server/lowlevel/helper_types.py @@ -1,4 +1,5 @@ from dataclasses import dataclass +from typing import Any @dataclass @@ -7,3 +8,4 @@ class ReadResourceContents: content: str | bytes mime_type: str | None = None + meta: dict[str, Any] | None = None diff --git a/src/mcp/server/lowlevel/server.py b/src/mcp/server/lowlevel/server.py index 3fc2d497d1..6bea4126ff 100644 --- a/src/mcp/server/lowlevel/server.py +++ b/src/mcp/server/lowlevel/server.py @@ -1,5 +1,4 @@ -""" -MCP Server Module +"""MCP Server Module This module provides a framework for creating an MCP (Model Context Protocol) server. It allows you to easily define and handle various types of requests and notifications @@ -65,7 +64,7 @@ async def main(): messages from the client. 
""" -from __future__ import annotations as _annotations +from __future__ import annotations import base64 import contextvars @@ -74,21 +73,33 @@ async def main(): import warnings from collections.abc import AsyncIterator, Awaitable, Callable, Iterable from contextlib import AbstractAsyncContextManager, AsyncExitStack, asynccontextmanager +from importlib.metadata import version as importlib_version from typing import Any, Generic, TypeAlias, cast import anyio import jsonschema from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream -from pydantic import AnyUrl +from starlette.applications import Starlette +from starlette.middleware import Middleware +from starlette.middleware.authentication import AuthenticationMiddleware +from starlette.routing import Mount, Route from typing_extensions import TypeVar import mcp.types as types +from mcp.server.auth.middleware.auth_context import AuthContextMiddleware +from mcp.server.auth.middleware.bearer_auth import BearerAuthBackend, RequireAuthMiddleware +from mcp.server.auth.provider import OAuthAuthorizationServerProvider, TokenVerifier +from mcp.server.auth.routes import build_resource_metadata_url, create_auth_routes, create_protected_resource_routes +from mcp.server.auth.settings import AuthSettings from mcp.server.experimental.request_context import Experimental from mcp.server.lowlevel.experimental import ExperimentalHandlers from mcp.server.lowlevel.func_inspection import create_call_wrapper from mcp.server.lowlevel.helper_types import ReadResourceContents from mcp.server.models import InitializationOptions from mcp.server.session import ServerSession +from mcp.server.streamable_http import EventStore +from mcp.server.streamable_http_manager import StreamableHTTPASGIApp, StreamableHTTPSessionManager +from mcp.server.transport_security import TransportSecuritySettings from mcp.shared.context import RequestContext from mcp.shared.exceptions import McpError, UrlElicitationRequiredError from mcp.shared.message import ServerMessageMetadata, SessionMessage @@ -139,6 +150,8 @@ def __init__( self, name: str, version: str | None = None, + title: str | None = None, + description: str | None = None, instructions: str | None = None, website_url: str | None = None, icons: list[types.Icon] | None = None, @@ -149,6 +162,8 @@ def __init__( ): self.name = name self.version = version + self.title = title + self.description = description self.instructions = instructions self.website_url = website_url self.icons = icons @@ -159,6 +174,7 @@ def __init__( self.notification_handlers: dict[type, Callable[..., Awaitable[None]]] = {} self._tool_cache: dict[str, types.Tool] = {} self._experimental_handlers: ExperimentalHandlers | None = None + self._session_manager: StreamableHTTPSessionManager | None = None logger.debug("Initializing server %r", name) def create_initialization_options( @@ -170,9 +186,7 @@ def create_initialization_options( def pkg_version(package: str) -> str: try: - from importlib.metadata import version - - return version(package) + return importlib_version(package) except Exception: # pragma: no cover pass @@ -181,6 +195,8 @@ def pkg_version(package: str) -> str: return InitializationOptions( server_name=self.name, server_version=self.version if self.version else pkg_version("mcp"), + title=self.title, + description=self.description, capabilities=self.get_capabilities( notification_options or NotificationOptions(), experimental_capabilities or {}, @@ -204,17 +220,17 @@ def get_capabilities( # Set prompt capabilities if 
handler exists if types.ListPromptsRequest in self.request_handlers: - prompts_capability = types.PromptsCapability(listChanged=notification_options.prompts_changed) + prompts_capability = types.PromptsCapability(list_changed=notification_options.prompts_changed) # Set resource capabilities if handler exists if types.ListResourcesRequest in self.request_handlers: resources_capability = types.ResourcesCapability( - subscribe=False, listChanged=notification_options.resources_changed + subscribe=False, list_changed=notification_options.resources_changed ) # Set tool capabilities if handler exists if types.ListToolsRequest in self.request_handlers: - tools_capability = types.ToolsCapability(listChanged=notification_options.tools_changed) + tools_capability = types.ToolsCapability(list_changed=notification_options.tools_changed) # Set logging capabilities if handler exists if types.SetLevelRequest in self.request_handlers: # pragma: no cover @@ -255,6 +271,20 @@ def experimental(self) -> ExperimentalHandlers: self._experimental_handlers = ExperimentalHandlers(self, self.request_handlers, self.notification_handlers) return self._experimental_handlers + @property + def session_manager(self) -> StreamableHTTPSessionManager: + """Get the StreamableHTTP session manager. + + Raises: + RuntimeError: If called before streamable_http_app() has been called. + """ + if self._session_manager is None: # pragma: no cover + raise RuntimeError( + "Session manager can only be accessed after calling streamable_http_app(). " + "The session manager is created lazily to avoid unnecessary initialization." + ) + return self._session_manager # pragma: no cover + def list_prompts(self): def decorator( func: Callable[[], Awaitable[list[types.Prompt]]] @@ -268,10 +298,10 @@ async def handler(req: types.ListPromptsRequest): result = await wrapper(req) # Handle both old style (list[Prompt]) and new style (ListPromptsResult) if isinstance(result, types.ListPromptsResult): - return types.ServerResult(result) + return result else: # Old style returns list[Prompt] - return types.ServerResult(types.ListPromptsResult(prompts=result)) + return types.ListPromptsResult(prompts=result) self.request_handlers[types.ListPromptsRequest] = handler return func @@ -286,7 +316,7 @@ def decorator( async def handler(req: types.GetPromptRequest): prompt_get = await func(req.params.name, req.params.arguments) - return types.ServerResult(prompt_get) + return prompt_get self.request_handlers[types.GetPromptRequest] = handler return func @@ -306,10 +336,10 @@ async def handler(req: types.ListResourcesRequest): result = await wrapper(req) # Handle both old style (list[Resource]) and new style (ListResourcesResult) if isinstance(result, types.ListResourcesResult): - return types.ServerResult(result) + return result else: # Old style returns list[Resource] - return types.ServerResult(types.ListResourcesResult(resources=result)) + return types.ListResourcesResult(resources=result) self.request_handlers[types.ListResourcesRequest] = handler return func @@ -322,7 +352,7 @@ def decorator(func: Callable[[], Awaitable[list[types.ResourceTemplate]]]): async def handler(_: Any): templates = await func() - return types.ServerResult(types.ListResourceTemplatesResult(resourceTemplates=templates)) + return types.ListResourceTemplatesResult(resource_templates=templates) self.request_handlers[types.ListResourceTemplatesRequest] = handler return func @@ -331,26 +361,30 @@ async def handler(_: Any): def read_resource(self): def decorator( - func: Callable[[AnyUrl], 
Awaitable[str | bytes | Iterable[ReadResourceContents]]], + func: Callable[[str], Awaitable[str | bytes | Iterable[ReadResourceContents]]], ): logger.debug("Registering handler for ReadResourceRequest") async def handler(req: types.ReadResourceRequest): result = await func(req.params.uri) - def create_content(data: str | bytes, mime_type: str | None): + def create_content(data: str | bytes, mime_type: str | None, meta: dict[str, Any] | None = None): + # Note: ResourceContents uses Field(alias="_meta"), so we must use the alias key + meta_kwargs: dict[str, Any] = {"_meta": meta} if meta is not None else {} match data: case str() as data: return types.TextResourceContents( uri=req.params.uri, text=data, - mimeType=mime_type or "text/plain", + mime_type=mime_type or "text/plain", + **meta_kwargs, ) case bytes() as data: # pragma: no cover return types.BlobResourceContents( uri=req.params.uri, blob=base64.b64encode(data).decode(), - mimeType=mime_type or "application/octet-stream", + mime_type=mime_type or "application/octet-stream", + **meta_kwargs, ) match result: @@ -364,20 +398,17 @@ def create_content(data: str | bytes, mime_type: str | None): content = create_content(data, None) case Iterable() as contents: contents_list = [ - create_content(content_item.content, content_item.mime_type) for content_item in contents - ] - return types.ServerResult( - types.ReadResourceResult( - contents=contents_list, + create_content( + content_item.content, content_item.mime_type, getattr(content_item, "meta", None) ) - ) + for content_item in contents + ] + return types.ReadResourceResult(contents=contents_list) case _: # pragma: no cover raise ValueError(f"Unexpected return type from read_resource: {type(result)}") - return types.ServerResult( # pragma: no cover - types.ReadResourceResult( - contents=[content], - ) + return types.ReadResourceResult( # pragma: no cover + contents=[content], ) self.request_handlers[types.ReadResourceRequest] = handler @@ -391,7 +422,7 @@ def decorator(func: Callable[[types.LoggingLevel], Awaitable[None]]): async def handler(req: types.SetLevelRequest): await func(req.params.level) - return types.ServerResult(types.EmptyResult()) + return types.EmptyResult() self.request_handlers[types.SetLevelRequest] = handler return func @@ -399,12 +430,12 @@ async def handler(req: types.SetLevelRequest): return decorator def subscribe_resource(self): # pragma: no cover - def decorator(func: Callable[[AnyUrl], Awaitable[None]]): + def decorator(func: Callable[[str], Awaitable[None]]): logger.debug("Registering handler for SubscribeRequest") async def handler(req: types.SubscribeRequest): await func(req.params.uri) - return types.ServerResult(types.EmptyResult()) + return types.EmptyResult() self.request_handlers[types.SubscribeRequest] = handler return func @@ -412,12 +443,12 @@ async def handler(req: types.SubscribeRequest): return decorator def unsubscribe_resource(self): # pragma: no cover - def decorator(func: Callable[[AnyUrl], Awaitable[None]]): + def decorator(func: Callable[[str], Awaitable[None]]): logger.debug("Registering handler for UnsubscribeRequest") async def handler(req: types.UnsubscribeRequest): await func(req.params.uri) - return types.ServerResult(types.EmptyResult()) + return types.EmptyResult() self.request_handlers[types.UnsubscribeRequest] = handler return func @@ -442,7 +473,7 @@ async def handler(req: types.ListToolsRequest): for tool in result.tools: validate_and_warn_tool_name(tool.name) self._tool_cache[tool.name] = tool - return 
types.ServerResult(result) + return result else: # Old style returns list[Tool] # Clear and refresh the entire tool cache @@ -450,20 +481,18 @@ async def handler(req: types.ListToolsRequest): for tool in result: validate_and_warn_tool_name(tool.name) self._tool_cache[tool.name] = tool - return types.ServerResult(types.ListToolsResult(tools=result)) + return types.ListToolsResult(tools=result) self.request_handlers[types.ListToolsRequest] = handler return func return decorator - def _make_error_result(self, error_message: str) -> types.ServerResult: - """Create a ServerResult with an error CallToolResult.""" - return types.ServerResult( - types.CallToolResult( - content=[types.TextContent(type="text", text=error_message)], - isError=True, - ) + def _make_error_result(self, error_message: str) -> types.CallToolResult: + """Create a CallToolResult with an error.""" + return types.CallToolResult( + content=[types.TextContent(type="text", text=error_message)], + is_error=True, ) async def _get_cached_tool_definition(self, tool_name: str) -> types.Tool | None: @@ -499,7 +528,7 @@ def call_tool(self, *, validate_input: bool = True): def decorator( func: Callable[ - ..., + [str, dict[str, Any]], Awaitable[ UnstructuredContent | StructuredContent @@ -520,7 +549,7 @@ async def handler(req: types.CallToolRequest): # input validation if validate_input and tool: try: - jsonschema.validate(instance=arguments, schema=tool.inputSchema) + jsonschema.validate(instance=arguments, schema=tool.input_schema) except jsonschema.ValidationError as e: return self._make_error_result(f"Input validation error: {e.message}") @@ -531,10 +560,10 @@ async def handler(req: types.CallToolRequest): unstructured_content: UnstructuredContent maybe_structured_content: StructuredContent | None if isinstance(results, types.CallToolResult): - return types.ServerResult(results) + return results elif isinstance(results, types.CreateTaskResult): # Task-augmented execution returns task info instead of result - return types.ServerResult(results) + return results elif isinstance(results, tuple) and len(results) == 2: # tool returned both structured and unstructured content unstructured_content, maybe_structured_content = cast(CombinationContent, results) @@ -550,24 +579,22 @@ async def handler(req: types.CallToolRequest): return self._make_error_result(f"Unexpected return type from tool: {type(results).__name__}") # output validation - if tool and tool.outputSchema is not None: + if tool and tool.output_schema is not None: if maybe_structured_content is None: return self._make_error_result( "Output validation error: outputSchema defined but no structured output returned" ) else: try: - jsonschema.validate(instance=maybe_structured_content, schema=tool.outputSchema) + jsonschema.validate(instance=maybe_structured_content, schema=tool.output_schema) except jsonschema.ValidationError as e: return self._make_error_result(f"Output validation error: {e.message}") # result - return types.ServerResult( - types.CallToolResult( - content=list(unstructured_content), - structuredContent=maybe_structured_content, - isError=False, - ) + return types.CallToolResult( + content=list(unstructured_content), + structured_content=maybe_structured_content, + is_error=False, ) except UrlElicitationRequiredError: # Re-raise UrlElicitationRequiredError so it can be properly handled @@ -589,7 +616,7 @@ def decorator( async def handler(req: types.ProgressNotification): await func( - req.params.progressToken, + req.params.progress_token, req.params.progress, 
req.params.total, req.params.message, @@ -617,12 +644,10 @@ def decorator( async def handler(req: types.CompleteRequest): completion = await func(req.params.ref, req.params.argument, req.params.context) - return types.ServerResult( - types.CompleteResult( - completion=completion - if completion is not None - else types.Completion(values=[], total=None, hasMore=None), - ) + return types.CompleteResult( + completion=completion + if completion is not None + else types.Completion(values=[], total=None, has_more=None), ) self.request_handlers[types.CompleteRequest] = handler @@ -684,11 +709,11 @@ async def _handle_message( ): with warnings.catch_warnings(record=True) as w: match message: - case RequestResponder(request=types.ClientRequest(root=req)) as responder: + case RequestResponder() as responder: with responder: - await self._handle_request(message, req, session, lifespan_context, raise_exceptions) - case types.ClientNotification(root=notify): - await self._handle_notification(notify) + await self._handle_request( + message, responder.request, session, lifespan_context, raise_exceptions + ) case Exception(): # pragma: no cover logger.error(f"Received exception from stream: {message}") await session.send_log_message( @@ -698,6 +723,8 @@ async def _handle_message( ) if raise_exceptions: raise message + case _: + await self._handle_notification(message) for warning in w: # pragma: no cover logger.info("Warning: %s: %s", warning.category.__name__, warning.message) @@ -705,7 +732,7 @@ async def _handle_message( async def _handle_request( self, message: RequestResponder[types.ClientRequest, types.ServerResult], - req: types.ClientRequestType, + req: types.ClientRequest, session: ServerSession, lifespan_context: LifespanResultT, raise_exceptions: bool, @@ -791,6 +818,118 @@ async def _handle_notification(self, notify: Any): except Exception: # pragma: no cover logger.exception("Uncaught exception in notification handler") + def streamable_http_app( + self, + *, + streamable_http_path: str = "/mcp", + json_response: bool = False, + stateless_http: bool = False, + event_store: EventStore | None = None, + retry_interval: int | None = None, + transport_security: TransportSecuritySettings | None = None, + host: str = "127.0.0.1", + auth: AuthSettings | None = None, + token_verifier: TokenVerifier | None = None, + auth_server_provider: (OAuthAuthorizationServerProvider[Any, Any, Any] | None) = None, + custom_starlette_routes: list[Route] | None = None, + debug: bool = False, + ) -> Starlette: + """Return an instance of the StreamableHTTP server app.""" + # Auto-enable DNS rebinding protection for localhost (IPv4 and IPv6) + if transport_security is None and host in ("127.0.0.1", "localhost", "::1"): + transport_security = TransportSecuritySettings( + enable_dns_rebinding_protection=True, + allowed_hosts=["127.0.0.1:*", "localhost:*", "[::1]:*"], + allowed_origins=["http://127.0.0.1:*", "http://localhost:*", "http://[::1]:*"], + ) + + session_manager = StreamableHTTPSessionManager( + app=self, + event_store=event_store, + retry_interval=retry_interval, + json_response=json_response, + stateless=stateless_http, + security_settings=transport_security, + ) + self._session_manager = session_manager + + # Create the ASGI handler + streamable_http_app = StreamableHTTPASGIApp(session_manager) + + # Create routes + routes: list[Route | Mount] = [] + middleware: list[Middleware] = [] + required_scopes: list[str] = [] + + # Set up auth if configured + if auth: # pragma: no cover + required_scopes = 
auth.required_scopes or [] + + # Add auth middleware if token verifier is available + if token_verifier: + middleware = [ + Middleware( + AuthenticationMiddleware, + backend=BearerAuthBackend(token_verifier), + ), + Middleware(AuthContextMiddleware), + ] + + # Add auth endpoints if auth server provider is configured + if auth_server_provider: + routes.extend( + create_auth_routes( + provider=auth_server_provider, + issuer_url=auth.issuer_url, + service_documentation_url=auth.service_documentation_url, + client_registration_options=auth.client_registration_options, + revocation_options=auth.revocation_options, + ) + ) + + # Set up routes with or without auth + if token_verifier: # pragma: no cover + # Determine resource metadata URL + resource_metadata_url = None + if auth and auth.resource_server_url: + # Build compliant metadata URL for WWW-Authenticate header + resource_metadata_url = build_resource_metadata_url(auth.resource_server_url) + + routes.append( + Route( + streamable_http_path, + endpoint=RequireAuthMiddleware(streamable_http_app, required_scopes, resource_metadata_url), + ) + ) + else: + # Auth is disabled, no wrapper needed + routes.append( + Route( + streamable_http_path, + endpoint=streamable_http_app, + ) + ) + + # Add protected resource metadata endpoint if configured as RS + if auth and auth.resource_server_url: # pragma: no cover + routes.extend( + create_protected_resource_routes( + resource_url=auth.resource_server_url, + authorization_servers=[auth.issuer_url], + scopes_supported=auth.required_scopes, + ) + ) + + if custom_starlette_routes: # pragma: no cover + routes.extend(custom_starlette_routes) + + return Starlette( + debug=debug, + routes=routes, + middleware=middleware, + lifespan=lambda app: session_manager.run(), + ) + async def _ping_handler(request: types.PingRequest) -> types.ServerResult: - return types.ServerResult(types.EmptyResult()) + return types.EmptyResult() diff --git a/src/mcp/server/models.py b/src/mcp/server/models.py index ddf716cb95..a6cd093d97 100644 --- a/src/mcp/server/models.py +++ b/src/mcp/server/models.py @@ -1,5 +1,4 @@ -""" -This module provides simpler types to use with the server for managing prompts +"""This module provides simpler types to use with the server for managing prompts and tools. """ @@ -14,6 +13,8 @@ class InitializationOptions(BaseModel): server_name: str server_version: str + title: str | None = None + description: str | None = None capabilities: ServerCapabilities instructions: str | None = None website_url: str | None = None diff --git a/src/mcp/server/session.py b/src/mcp/server/session.py index 8f0baa3e9c..5a70ee02e0 100644 --- a/src/mcp/server/session.py +++ b/src/mcp/server/session.py @@ -1,5 +1,4 @@ -""" -ServerSession Module +"""ServerSession Module This module provides the ServerSession class, which manages communication between the server and client in the MCP (Model Context Protocol) framework. 
It is most commonly @@ -43,12 +42,13 @@ async def handle_list_prompts(ctx: RequestContext) -> list[types.Prompt]: import anyio import anyio.lowlevel from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream -from pydantic import AnyUrl +from pydantic import AnyUrl, TypeAdapter import mcp.types as types from mcp.server.experimental.session_features import ExperimentalServerSessionFeatures from mcp.server.models import InitializationOptions from mcp.server.validation import validate_sampling_tools, validate_tool_use_result_messages +from mcp.shared.exceptions import StatelessModeNotSupported from mcp.shared.experimental.tasks.capabilities import check_tasks_capability from mcp.shared.experimental.tasks.helpers import RELATED_TASK_METADATA_KEY from mcp.shared.message import ServerMessageMetadata, SessionMessage @@ -92,7 +92,8 @@ def __init__( init_options: InitializationOptions, stateless: bool = False, ) -> None: - super().__init__(read_stream, write_stream, types.ClientRequest, types.ClientNotification) + super().__init__(read_stream, write_stream) + self._stateless = stateless self._initialization_state = ( InitializationState.Initialized if stateless else InitializationState.NotInitialized ) @@ -103,6 +104,14 @@ def __init__( ](0) self._exit_stack.push_async_callback(lambda: self._incoming_message_stream_reader.aclose()) + @property + def _receive_request_adapter(self) -> TypeAdapter[types.ClientRequest]: + return types.client_request_adapter + + @property + def _receive_notification_adapter(self) -> TypeAdapter[types.ClientNotification]: + return types.client_notification_adapter + @property def client_params(self) -> types.InitializeRequestParams | None: return self._client_params # pragma: no cover @@ -127,7 +136,7 @@ def check_client_capability(self, capability: types.ClientCapabilities) -> bool: if capability.roots is not None: if client_caps.roots is None: return False - if capability.roots.listChanged and not client_caps.roots.listChanged: + if capability.roots.list_changed and not client_caps.roots.list_changed: return False if capability.sampling is not None: @@ -161,27 +170,27 @@ async def _receive_loop(self) -> None: await super()._receive_loop() async def _received_request(self, responder: RequestResponder[types.ClientRequest, types.ServerResult]): - match responder.request.root: + match responder.request: case types.InitializeRequest(params=params): - requested_version = params.protocolVersion + requested_version = params.protocol_version self._initialization_state = InitializationState.Initializing self._client_params = params with responder: await responder.respond( - types.ServerResult( - types.InitializeResult( - protocolVersion=requested_version - if requested_version in SUPPORTED_PROTOCOL_VERSIONS - else types.LATEST_PROTOCOL_VERSION, - capabilities=self._init_options.capabilities, - serverInfo=types.Implementation( - name=self._init_options.server_name, - version=self._init_options.server_version, - websiteUrl=self._init_options.website_url, - icons=self._init_options.icons, - ), - instructions=self._init_options.instructions, - ) + types.InitializeResult( + protocol_version=requested_version + if requested_version in SUPPORTED_PROTOCOL_VERSIONS + else types.LATEST_PROTOCOL_VERSION, + capabilities=self._init_options.capabilities, + server_info=types.Implementation( + name=self._init_options.server_name, + title=self._init_options.title, + description=self._init_options.description, + version=self._init_options.server_version, + 
website_url=self._init_options.website_url, + icons=self._init_options.icons, + ), + instructions=self._init_options.instructions, ) ) self._initialization_state = InitializationState.Initialized @@ -195,7 +204,7 @@ async def _received_request(self, responder: RequestResponder[types.ClientReques async def _received_notification(self, notification: types.ClientNotification) -> None: # Need this to avoid ASYNC910 await anyio.lowlevel.checkpoint() - match notification.root: + match notification: case types.InitializedNotification(): self._initialization_state = InitializationState.Initialized case _: @@ -211,25 +220,21 @@ async def send_log_message( ) -> None: """Send a log message notification.""" await self.send_notification( - types.ServerNotification( - types.LoggingMessageNotification( - params=types.LoggingMessageNotificationParams( - level=level, - data=data, - logger=logger, - ), - ) + types.LoggingMessageNotification( + params=types.LoggingMessageNotificationParams( + level=level, + data=data, + logger=logger, + ), ), related_request_id, ) - async def send_resource_updated(self, uri: AnyUrl) -> None: # pragma: no cover + async def send_resource_updated(self, uri: str | AnyUrl) -> None: # pragma: no cover """Send a resource updated notification.""" await self.send_notification( - types.ServerNotification( - types.ResourceUpdatedNotification( - params=types.ResourceUpdatedNotificationParams(uri=uri), - ) + types.ResourceUpdatedNotification( + params=types.ResourceUpdatedNotificationParams(uri=str(uri)), ) ) @@ -311,26 +316,27 @@ async def create_message( Raises: McpError: If tools are provided but client doesn't support them. ValueError: If tool_use or tool_result message structure is invalid. + StatelessModeNotSupported: If called in stateless HTTP mode. """ + if self._stateless: + raise StatelessModeNotSupported(method="sampling") client_caps = self._client_params.capabilities if self._client_params else None validate_sampling_tools(client_caps, tools, tool_choice) validate_tool_use_result_messages(messages) - request = types.ServerRequest( - types.CreateMessageRequest( - params=types.CreateMessageRequestParams( - messages=messages, - systemPrompt=system_prompt, - includeContext=include_context, - temperature=temperature, - maxTokens=max_tokens, - stopSequences=stop_sequences, - metadata=metadata, - modelPreferences=model_preferences, - tools=tools, - toolChoice=tool_choice, - ), - ) + request = types.CreateMessageRequest( + params=types.CreateMessageRequestParams( + messages=messages, + system_prompt=system_prompt, + include_context=include_context, + temperature=temperature, + max_tokens=max_tokens, + stop_sequences=stop_sequences, + metadata=metadata, + model_preferences=model_preferences, + tools=tools, + tool_choice=tool_choice, + ), ) metadata_obj = ServerMessageMetadata(related_request_id=related_request_id) @@ -349,22 +355,24 @@ async def create_message( async def list_roots(self) -> types.ListRootsResult: """Send a roots/list request.""" + if self._stateless: + raise StatelessModeNotSupported(method="list_roots") return await self.send_request( - types.ServerRequest(types.ListRootsRequest()), + types.ListRootsRequest(), types.ListRootsResult, ) async def elicit( self, message: str, - requestedSchema: types.ElicitRequestedSchema, + requested_schema: types.ElicitRequestedSchema, related_request_id: types.RequestId | None = None, ) -> types.ElicitResult: """Send a form mode elicitation/create request. 
Args: message: The message to present to the user - requestedSchema: Schema defining the expected response structure + requested_schema: Schema defining the expected response structure related_request_id: Optional ID of the request that triggered this elicitation Returns: @@ -374,32 +382,35 @@ async def elicit( This method is deprecated in favor of elicit_form(). It remains for backward compatibility but new code should use elicit_form(). """ - return await self.elicit_form(message, requestedSchema, related_request_id) + return await self.elicit_form(message, requested_schema, related_request_id) async def elicit_form( self, message: str, - requestedSchema: types.ElicitRequestedSchema, + requested_schema: types.ElicitRequestedSchema, related_request_id: types.RequestId | None = None, ) -> types.ElicitResult: """Send a form mode elicitation/create request. Args: message: The message to present to the user - requestedSchema: Schema defining the expected response structure + requested_schema: Schema defining the expected response structure related_request_id: Optional ID of the request that triggered this elicitation Returns: The client's response with form data + + Raises: + StatelessModeNotSupported: If called in stateless HTTP mode. """ + if self._stateless: + raise StatelessModeNotSupported(method="elicitation") return await self.send_request( - types.ServerRequest( - types.ElicitRequest( - params=types.ElicitRequestFormParams( - message=message, - requestedSchema=requestedSchema, - ), - ) + types.ElicitRequest( + params=types.ElicitRequestFormParams( + message=message, + requested_schema=requested_schema, + ), ), types.ElicitResult, metadata=ServerMessageMetadata(related_request_id=related_request_id), @@ -425,16 +436,19 @@ async def elicit_url( Returns: The client's response indicating acceptance, decline, or cancellation + + Raises: + StatelessModeNotSupported: If called in stateless HTTP mode. 
""" + if self._stateless: + raise StatelessModeNotSupported(method="elicitation") return await self.send_request( - types.ServerRequest( - types.ElicitRequest( - params=types.ElicitRequestURLParams( - message=message, - url=url, - elicitationId=elicitation_id, - ), - ) + types.ElicitRequest( + params=types.ElicitRequestURLParams( + message=message, + url=url, + elicitation_id=elicitation_id, + ), ), types.ElicitResult, metadata=ServerMessageMetadata(related_request_id=related_request_id), @@ -443,7 +457,7 @@ async def elicit_url( async def send_ping(self) -> types.EmptyResult: # pragma: no cover """Send a ping request.""" return await self.send_request( - types.ServerRequest(types.PingRequest()), + types.PingRequest(), types.EmptyResult, ) @@ -457,30 +471,28 @@ async def send_progress_notification( ) -> None: """Send a progress notification.""" await self.send_notification( - types.ServerNotification( - types.ProgressNotification( - params=types.ProgressNotificationParams( - progressToken=progress_token, - progress=progress, - total=total, - message=message, - ), - ) + types.ProgressNotification( + params=types.ProgressNotificationParams( + progress_token=progress_token, + progress=progress, + total=total, + message=message, + ), ), related_request_id, ) async def send_resource_list_changed(self) -> None: # pragma: no cover """Send a resource list changed notification.""" - await self.send_notification(types.ServerNotification(types.ResourceListChangedNotification())) + await self.send_notification(types.ResourceListChangedNotification()) async def send_tool_list_changed(self) -> None: # pragma: no cover """Send a tool list changed notification.""" - await self.send_notification(types.ServerNotification(types.ToolListChangedNotification())) + await self.send_notification(types.ToolListChangedNotification()) async def send_prompt_list_changed(self) -> None: # pragma: no cover """Send a prompt list changed notification.""" - await self.send_notification(types.ServerNotification(types.PromptListChangedNotification())) + await self.send_notification(types.PromptListChangedNotification()) async def send_elicit_complete( self, @@ -498,10 +510,8 @@ async def send_elicit_complete( related_request_id: Optional ID of the request that triggered this """ await self.send_notification( - types.ServerNotification( - types.ElicitCompleteNotification( - params=types.ElicitCompleteNotificationParams(elicitationId=elicitation_id) - ) + types.ElicitCompleteNotification( + params=types.ElicitCompleteNotificationParams(elicitation_id=elicitation_id) ), related_request_id, ) @@ -509,7 +519,7 @@ async def send_elicit_complete( def _build_elicit_form_request( self, message: str, - requestedSchema: types.ElicitRequestedSchema, + requested_schema: types.ElicitRequestedSchema, related_task_id: str | None = None, task: types.TaskMetadata | None = None, ) -> types.JSONRPCRequest: @@ -517,7 +527,7 @@ def _build_elicit_form_request( Args: message: The message to present to the user - requestedSchema: Schema defining the expected response structure + requested_schema: Schema defining the expected response structure related_task_id: If provided, adds io.modelcontextprotocol/related-task metadata task: If provided, makes this a task-augmented request @@ -526,7 +536,7 @@ def _build_elicit_form_request( """ params = types.ElicitRequestFormParams( message=message, - requestedSchema=requestedSchema, + requested_schema=requested_schema, task=task, ) params_data = params.model_dump(by_alias=True, mode="json", exclude_none=True) 
@@ -537,7 +547,7 @@ def _build_elicit_form_request( if "_meta" not in params_data: # pragma: no cover params_data["_meta"] = {} params_data["_meta"][RELATED_TASK_METADATA_KEY] = types.RelatedTaskMetadata( - taskId=related_task_id + task_id=related_task_id ).model_dump(by_alias=True) request_id = f"task-{related_task_id}-{id(params)}" if related_task_id else self._request_id @@ -572,7 +582,7 @@ def _build_elicit_url_request( params = types.ElicitRequestURLParams( message=message, url=url, - elicitationId=elicitation_id, + elicitation_id=elicitation_id, ) params_data = params.model_dump(by_alias=True, mode="json", exclude_none=True) @@ -582,7 +592,7 @@ def _build_elicit_url_request( if "_meta" not in params_data: # pragma: no cover params_data["_meta"] = {} params_data["_meta"][RELATED_TASK_METADATA_KEY] = types.RelatedTaskMetadata( - taskId=related_task_id + task_id=related_task_id ).model_dump(by_alias=True) request_id = f"task-{related_task_id}-{id(params)}" if related_task_id else self._request_id @@ -633,15 +643,15 @@ def _build_create_message_request( """ params = types.CreateMessageRequestParams( messages=messages, - systemPrompt=system_prompt, - includeContext=include_context, + system_prompt=system_prompt, + include_context=include_context, temperature=temperature, - maxTokens=max_tokens, - stopSequences=stop_sequences, + max_tokens=max_tokens, + stop_sequences=stop_sequences, metadata=metadata, - modelPreferences=model_preferences, + model_preferences=model_preferences, tools=tools, - toolChoice=tool_choice, + tool_choice=tool_choice, task=task, ) params_data = params.model_dump(by_alias=True, mode="json", exclude_none=True) @@ -652,7 +662,7 @@ def _build_create_message_request( if "_meta" not in params_data: # pragma: no cover params_data["_meta"] = {} params_data["_meta"][RELATED_TASK_METADATA_KEY] = types.RelatedTaskMetadata( - taskId=related_task_id + task_id=related_task_id ).model_dump(by_alias=True) request_id = f"task-{related_task_id}-{id(params)}" if related_task_id else self._request_id @@ -685,7 +695,5 @@ async def _handle_incoming(self, req: ServerRequestResponder) -> None: await self._incoming_message_stream_writer.send(req) @property - def incoming_messages( - self, - ) -> MemoryObjectReceiveStream[ServerRequestResponder]: + def incoming_messages(self) -> MemoryObjectReceiveStream[ServerRequestResponder]: return self._incoming_message_stream_reader diff --git a/src/mcp/server/sse.py b/src/mcp/server/sse.py index 19af93fd16..ea0c8db4a5 100644 --- a/src/mcp/server/sse.py +++ b/src/mcp/server/sse.py @@ -1,5 +1,4 @@ -""" -SSE Server Transport Module +"""SSE Server Transport Module This module implements a Server-Sent Events (SSE) transport layer for MCP servers. @@ -62,8 +61,7 @@ async def handle_sse(request): class SseServerTransport: - """ - SSE server transport for MCP. This class provides _two_ ASGI applications, + """SSE server transport for MCP. This class provides _two_ ASGI applications, suitable to be used with a framework like Starlette and a server like Hypercorn: 1. connect_sse() is an ASGI application which receives incoming GET requests, @@ -78,8 +76,7 @@ class SseServerTransport: _security: TransportSecurityMiddleware def __init__(self, endpoint: str, security_settings: TransportSecuritySettings | None = None) -> None: - """ - Creates a new SSE server transport, which will direct the client to POST + """Creates a new SSE server transport, which will direct the client to POST messages to the relative path given. 
Args: @@ -180,8 +177,7 @@ async def sse_writer(): async with anyio.create_task_group() as tg: async def response_wrapper(scope: Scope, receive: Receive, send: Send): - """ - The EventSourceResponse returning signals a client close / disconnect. + """The EventSourceResponse returning signals a client close / disconnect. In this case we close our side of the streams to signal the client that the connection has been closed. """ @@ -231,7 +227,7 @@ async def handle_post_message(self, scope: Scope, receive: Receive, send: Send) logger.debug(f"Received JSON: {body}") try: - message = types.JSONRPCMessage.model_validate_json(body) + message = types.jsonrpc_message_adapter.validate_json(body, by_name=False) logger.debug(f"Validated client message: {message}") except ValidationError as err: logger.exception("Failed to parse message") diff --git a/src/mcp/server/stdio.py b/src/mcp/server/stdio.py index bcb9247abb..531404f21b 100644 --- a/src/mcp/server/stdio.py +++ b/src/mcp/server/stdio.py @@ -1,5 +1,4 @@ -""" -Stdio Server Transport Module +"""Stdio Server Transport Module This module provides functionality for creating an stdio-based transport layer that can be used to communicate with an MCP client through standard input/output @@ -35,8 +34,7 @@ async def stdio_server( stdin: anyio.AsyncFile[str] | None = None, stdout: anyio.AsyncFile[str] | None = None, ): - """ - Server transport for stdio: this communicates with an MCP client by reading + """Server transport for stdio: this communicates with an MCP client by reading from the current process' stdin and writing to stdout. """ # Purposely not using context managers for these, as we don't want to close @@ -62,7 +60,7 @@ async def stdin_reader(): async with read_stream_writer: async for line in stdin: try: - message = types.JSONRPCMessage.model_validate_json(line) + message = types.jsonrpc_message_adapter.validate_json(line, by_name=False) except Exception as exc: # pragma: no cover await read_stream_writer.send(exc) continue diff --git a/src/mcp/server/streamable_http.py b/src/mcp/server/streamable_http.py index 2613b530c4..b37a85746d 100644 --- a/src/mcp/server/streamable_http.py +++ b/src/mcp/server/streamable_http.py @@ -1,5 +1,4 @@ -""" -StreamableHTTP Server Transport Module +"""StreamableHTTP Server Transport Module This module implements an HTTP transport layer with Streamable HTTP. @@ -7,7 +6,6 @@ responses, with streaming support for long-running operations. """ -import json import logging import re from abc import ABC, abstractmethod @@ -18,6 +16,7 @@ from typing import Any import anyio +import pydantic_core from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream from pydantic import ValidationError from sse_starlette import EventSourceResponse @@ -25,10 +24,7 @@ from starlette.responses import Response from starlette.types import Receive, Scope, Send -from mcp.server.transport_security import ( - TransportSecurityMiddleware, - TransportSecuritySettings, -) +from mcp.server.transport_security import TransportSecurityMiddleware, TransportSecuritySettings from mcp.shared.message import ServerMessageMetadata, SessionMessage from mcp.shared.version import SUPPORTED_PROTOCOL_VERSIONS from mcp.types import ( @@ -43,6 +39,7 @@ JSONRPCRequest, JSONRPCResponse, RequestId, + jsonrpc_message_adapter, ) logger = logging.getLogger(__name__) @@ -71,9 +68,7 @@ @dataclass class EventMessage: - """ - A JSONRPCMessage with an optional event ID for stream resumability. 
- """ + """A JSONRPCMessage with an optional event ID for stream resumability.""" message: JSONRPCMessage event_id: str | None = None @@ -83,14 +78,11 @@ class EventMessage: class EventStore(ABC): - """ - Interface for resumability support via event storage. - """ + """Interface for resumability support via event storage.""" @abstractmethod async def store_event(self, stream_id: StreamId, message: JSONRPCMessage | None) -> EventId: - """ - Stores an event for later retrieval. + """Stores an event for later retrieval. Args: stream_id: ID of the stream the event belongs to @@ -107,8 +99,7 @@ async def replay_events_after( last_event_id: EventId, send_callback: EventCallback, ) -> StreamId | None: - """ - Replays events that occurred after the specified event ID. + """Replays events that occurred after the specified event ID. Args: last_event_id: The ID of the last event the client received @@ -121,8 +112,7 @@ async def replay_events_after( class StreamableHTTPServerTransport: - """ - HTTP server transport with event streaming support for MCP. + """HTTP server transport with event streaming support for MCP. Handles JSON-RPC messages in HTTP POST requests with SSE streaming. Supports optional JSON responses and session management. @@ -143,8 +133,7 @@ def __init__( security_settings: TransportSecuritySettings | None = None, retry_interval: int | None = None, ) -> None: - """ - Initialize a new StreamableHTTP server transport. + """Initialize a new StreamableHTTP server transport. Args: mcp_session_id: Optional session identifier for this connection. @@ -310,10 +299,7 @@ def _create_error_response( error_response = JSONRPCError( jsonrpc="2.0", id="server-error", # We don't have a request ID for general errors - error=ErrorData( - code=error_code, - message=error_message, - ), + error=ErrorData(code=error_code, message=error_message), ) return Response( @@ -464,14 +450,14 @@ async def _handle_post_request(self, scope: Scope, request: Request, receive: Re body = await request.body() try: - raw_message = json.loads(body) - except json.JSONDecodeError as e: + raw_message = pydantic_core.from_json(body) + except ValueError as e: response = self._create_error_response(f"Parse error: {str(e)}", HTTPStatus.BAD_REQUEST, PARSE_ERROR) await response(scope, receive, send) return try: # pragma: no cover - message = JSONRPCMessage.model_validate(raw_message) + message = jsonrpc_message_adapter.validate_python(raw_message, by_name=False) except ValidationError as e: # pragma: no cover response = self._create_error_response( f"Validation error: {str(e)}", @@ -482,9 +468,7 @@ async def _handle_post_request(self, scope: Scope, request: Request, receive: Re return # Check if this is an initialization request - is_initialization_request = ( - isinstance(message.root, JSONRPCRequest) and message.root.method == "initialize" - ) # pragma: no cover + is_initialization_request = isinstance(message, JSONRPCRequest) and message.method == "initialize" if is_initialization_request: # pragma: no cover # Check if the server already has an established session @@ -504,7 +488,7 @@ async def _handle_post_request(self, scope: Scope, request: Request, receive: Re return # For notifications and responses only, return 202 Accepted - if not isinstance(message.root, JSONRPCRequest): # pragma: no cover + if not isinstance(message, JSONRPCRequest): # pragma: no cover # Create response object and send it response = self._create_json_response( None, @@ -523,13 +507,13 @@ async def _handle_post_request(self, scope: Scope, request: Request, 
receive: Re # For initialize requests, get from request params. # For other requests, get from header (already validated). protocol_version = ( - str(message.root.params.get("protocolVersion", DEFAULT_NEGOTIATED_VERSION)) - if is_initialization_request and message.root.params + str(message.params.get("protocolVersion", DEFAULT_NEGOTIATED_VERSION)) + if is_initialization_request and message.params else request.headers.get(MCP_PROTOCOL_VERSION_HEADER, DEFAULT_NEGOTIATED_VERSION) ) # Extract the request ID outside the try block for proper scope - request_id = str(message.root.id) # pragma: no cover + request_id = str(message.id) # pragma: no cover # Register this stream for the request ID self._request_streams[request_id] = anyio.create_memory_object_stream[EventMessage](0) # pragma: no cover request_stream_reader = self._request_streams[request_id][1] # pragma: no cover @@ -547,12 +531,12 @@ async def _handle_post_request(self, scope: Scope, request: Request, receive: Re # Use similar approach to SSE writer for consistency async for event_message in request_stream_reader: # If it's a response, this is what we're waiting for - if isinstance(event_message.message.root, JSONRPCResponse | JSONRPCError): + if isinstance(event_message.message, JSONRPCResponse | JSONRPCError): response_message = event_message.message break # For notifications and request, keep waiting else: - logger.debug(f"received: {event_message.message.root.method}") + logger.debug(f"received: {event_message.message.method}") # At this point we should have a response if response_message: @@ -598,10 +582,7 @@ async def sse_writer(): await sse_stream_writer.send(event_data) # If response, remove from pending streams and close - if isinstance( - event_message.message.root, - JSONRPCResponse | JSONRPCError, - ): + if isinstance(event_message.message, JSONRPCResponse | JSONRPCError): break except anyio.ClosedResourceError: # Expected when close_sse_stream() is called @@ -655,8 +636,7 @@ async def sse_writer(): return async def _handle_get_request(self, request: Request, send: Send) -> None: # pragma: no cover - """ - Handle GET request to establish SSE. + """Handle GET request to establish SSE. This allows the server to communicate to the client without the client first sending data via HTTP POST. The server can send JSON-RPC requests @@ -875,8 +855,7 @@ async def _validate_protocol_version(self, request: Request, send: Send) -> bool return True async def _replay_events(self, last_event_id: str, request: Request, send: Send) -> None: # pragma: no cover - """ - Replays events that would have been sent after the specified event ID. + """Replays events that would have been sent after the specified event ID. Only used when resumability is enabled. """ event_store = self._event_store @@ -995,8 +974,8 @@ async def message_router(): # pragma: no cover message = session_message.message target_request_id = None # Check if this is a response - if isinstance(message.root, JSONRPCResponse | JSONRPCError): - response_id = str(message.root.id) + if isinstance(message, JSONRPCResponse | JSONRPCError): + response_id = str(message.id) # If this response is for an existing request stream, # send it there target_request_id = response_id @@ -1033,7 +1012,7 @@ async def message_router(): # pragma: no cover self._request_streams.pop(request_stream_id, None) else: logger.debug( - f"""Request stream {request_stream_id} not found + f"""Request stream {request_stream_id} not found for message. 
Still processing message as the client might reconnect and replay.""" ) diff --git a/src/mcp/server/streamable_http_manager.py b/src/mcp/server/streamable_http_manager.py index 50d2aefa29..964c52b6f5 100644 --- a/src/mcp/server/streamable_http_manager.py +++ b/src/mcp/server/streamable_http_manager.py @@ -6,7 +6,7 @@ import logging from collections.abc import AsyncIterator from http import HTTPStatus -from typing import Any +from typing import TYPE_CHECKING, Any from uuid import uuid4 import anyio @@ -15,20 +15,22 @@ from starlette.responses import Response from starlette.types import Receive, Scope, Send -from mcp.server.lowlevel.server import Server as MCPServer from mcp.server.streamable_http import ( MCP_SESSION_ID_HEADER, EventStore, StreamableHTTPServerTransport, ) from mcp.server.transport_security import TransportSecuritySettings +from mcp.types import INVALID_REQUEST, ErrorData, JSONRPCError + +if TYPE_CHECKING: + from mcp.server.lowlevel.server import Server as MCPServer logger = logging.getLogger(__name__) class StreamableHTTPSessionManager: - """ - Manages StreamableHTTP sessions with optional resumability via event store. + """Manages StreamableHTTP sessions with optional resumability via event store. This class abstracts away the complexity of session management, event storage, and request handling for StreamableHTTP transports. It handles: @@ -84,8 +86,7 @@ def __init__( @contextlib.asynccontextmanager async def run(self) -> AsyncIterator[None]: - """ - Run the session manager with proper lifecycle management. + """Run the session manager with proper lifecycle management. This creates and manages the task group for all session operations. @@ -129,8 +130,7 @@ async def handle_request( receive: Receive, send: Send, ) -> None: - """ - Process ASGI request with proper session handling and transport setup. + """Process ASGI request with proper session handling and transport setup. Dispatches to the appropriate handler based on stateless mode. @@ -154,8 +154,7 @@ async def _handle_stateless_request( receive: Receive, send: Send, ) -> None: - """ - Process request in stateless mode - creating a new transport for each request. + """Process request in stateless mode - creating a new transport for each request. Args: scope: ASGI scope @@ -203,8 +202,7 @@ async def _handle_stateful_request( receive: Receive, send: Send, ) -> None: - """ - Process request in stateful mode - maintaining session state between requests. + """Process request in stateful mode - maintaining session state between requests. 
Args: scope: ASGI scope @@ -276,10 +274,28 @@ async def run_server(*, task_status: TaskStatus[None] = anyio.TASK_STATUS_IGNORE # Handle the HTTP request and return the response await http_transport.handle_request(scope, receive, send) - else: # pragma: no cover - # Invalid session ID + else: + # Unknown or expired session ID - return 404 per MCP spec + # TODO: Align error code once spec clarifies + # See: https://github.com/modelcontextprotocol/python-sdk/issues/1821 + error_response = JSONRPCError( + jsonrpc="2.0", + id="server-error", + error=ErrorData(code=INVALID_REQUEST, message="Session not found"), + ) response = Response( - "Bad Request: No valid session ID provided", - status_code=HTTPStatus.BAD_REQUEST, + content=error_response.model_dump_json(by_alias=True, exclude_none=True), + status_code=HTTPStatus.NOT_FOUND, + media_type="application/json", ) await response(scope, receive, send) + + +class StreamableHTTPASGIApp: + """ASGI application for Streamable HTTP server transport.""" + + def __init__(self, session_manager: StreamableHTTPSessionManager): + self.session_manager = session_manager + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: # pragma: no cover + await self.session_manager.handle_request(scope, receive, send) diff --git a/src/mcp/server/validation.py b/src/mcp/server/validation.py index 2ccd7056bd..cfd663d43b 100644 --- a/src/mcp/server/validation.py +++ b/src/mcp/server/validation.py @@ -1,5 +1,4 @@ -""" -Shared validation functions for server requests. +"""Shared validation functions for server requests. This module provides validation logic for sampling and elicitation requests that is shared across normal and task-augmented code paths. @@ -17,8 +16,7 @@ def check_sampling_tools_capability(client_caps: ClientCapabilities | None) -> bool: - """ - Check if the client supports sampling tools capability. + """Check if the client supports sampling tools capability. Args: client_caps: The client's declared capabilities @@ -40,8 +38,7 @@ def validate_sampling_tools( tools: list[Tool] | None, tool_choice: ToolChoice | None, ) -> None: - """ - Validate that the client supports sampling tools if tools are being used. + """Validate that the client supports sampling tools if tools are being used. Args: client_caps: The client's declared capabilities @@ -53,17 +50,11 @@ def validate_sampling_tools( """ if tools is not None or tool_choice is not None: if not check_sampling_tools_capability(client_caps): - raise McpError( - ErrorData( - code=INVALID_PARAMS, - message="Client does not support sampling tools capability", - ) - ) + raise McpError(ErrorData(code=INVALID_PARAMS, message="Client does not support sampling tools capability")) def validate_tool_use_result_messages(messages: list[SamplingMessage]) -> None: - """ - Validate tool_use/tool_result message structure per SEP-1577. + """Validate tool_use/tool_result message structure per SEP-1577. This validation ensures: 1. 
Messages with tool_result content contain ONLY tool_result content @@ -99,6 +90,6 @@ def validate_tool_use_result_messages(messages: list[SamplingMessage]) -> None: if has_previous_tool_use and previous_content: tool_use_ids = {c.id for c in previous_content if c.type == "tool_use"} - tool_result_ids = {c.toolUseId for c in last_content if c.type == "tool_result"} + tool_result_ids = {c.tool_use_id for c in last_content if c.type == "tool_result"} if tool_use_ids != tool_result_ids: raise ValueError("ids of tool_result blocks and tool_use blocks from previous message do not match") diff --git a/src/mcp/server/websocket.py b/src/mcp/server/websocket.py index 5d5efd16e9..9df3e25c87 100644 --- a/src/mcp/server/websocket.py +++ b/src/mcp/server/websocket.py @@ -15,8 +15,7 @@ @asynccontextmanager # pragma: no cover async def websocket_server(scope: Scope, receive: Receive, send: Send): - """ - WebSocket server transport for MCP. This is an ASGI application, suitable to be + """WebSocket server transport for MCP. This is an ASGI application, suitable to be used with a framework like Starlette and a server like Hypercorn. """ @@ -37,7 +36,7 @@ async def ws_reader(): async with read_stream_writer: async for msg in websocket.iter_text(): try: - client_message = types.JSONRPCMessage.model_validate_json(msg) + client_message = types.jsonrpc_message_adapter.validate_json(msg, by_name=False) except ValidationError as exc: await read_stream_writer.send(exc) continue diff --git a/src/mcp/shared/auth.py b/src/mcp/shared/auth.py index d3290997e5..bf03a8b8dd 100644 --- a/src/mcp/shared/auth.py +++ b/src/mcp/shared/auth.py @@ -4,9 +4,7 @@ class OAuthToken(BaseModel): - """ - See https://datatracker.ietf.org/doc/html/rfc6749#section-5.1 - """ + """See https://datatracker.ietf.org/doc/html/rfc6749#section-5.1""" access_token: str token_type: Literal["Bearer"] = "Bearer" @@ -35,8 +33,7 @@ def __init__(self, message: str): class OAuthClientMetadata(BaseModel): - """ - RFC 7591 OAuth 2.0 Dynamic Client Registration metadata. + """RFC 7591 OAuth 2.0 Dynamic Client Registration metadata. See https://datatracker.ietf.org/doc/html/rfc7591#section-2 for the full specification. """ @@ -94,8 +91,7 @@ def validate_redirect_uri(self, redirect_uri: AnyUrl | None) -> AnyUrl: class OAuthClientInformationFull(OAuthClientMetadata): - """ - RFC 7591 OAuth 2.0 Dynamic Client Registration full response + """RFC 7591 OAuth 2.0 Dynamic Client Registration full response (client information plus metadata). """ @@ -106,8 +102,7 @@ class OAuthClientInformationFull(OAuthClientMetadata): class OAuthMetadata(BaseModel): - """ - RFC 8414 OAuth 2.0 Authorization Server Metadata. + """RFC 8414 OAuth 2.0 Authorization Server Metadata. See https://datatracker.ietf.org/doc/html/rfc8414#section-2 """ @@ -136,8 +131,7 @@ class OAuthMetadata(BaseModel): class ProtectedResourceMetadata(BaseModel): - """ - RFC 9728 OAuth 2.0 Protected Resource Metadata. + """RFC 9728 OAuth 2.0 Protected Resource Metadata. See https://datatracker.ietf.org/doc/html/rfc9728#section-2 """ diff --git a/src/mcp/shared/context.py b/src/mcp/shared/context.py index 5cf6588c9e..f54a2efabc 100644 --- a/src/mcp/shared/context.py +++ b/src/mcp/shared/context.py @@ -1,6 +1,4 @@ -""" -Request context for MCP handlers. 
-""" +"""Request context for MCP handlers.""" from dataclasses import dataclass, field from typing import Any, Generic diff --git a/src/mcp/shared/exceptions.py b/src/mcp/shared/exceptions.py index 4943114912..d8bc17b7ab 100644 --- a/src/mcp/shared/exceptions.py +++ b/src/mcp/shared/exceptions.py @@ -6,9 +6,7 @@ class McpError(Exception): - """ - Exception type raised when an error arrives over an MCP connection. - """ + """Exception type raised when an error arrives over an MCP connection.""" error: ErrorData @@ -18,9 +16,25 @@ def __init__(self, error: ErrorData): self.error = error -class UrlElicitationRequiredError(McpError): +class StatelessModeNotSupported(RuntimeError): + """Raised when attempting to use a method that is not supported in stateless mode. + + Server-to-client requests (sampling, elicitation, list_roots) are not + supported in stateless HTTP mode because there is no persistent connection + for bidirectional communication. """ - Specialized error for when a tool requires URL mode elicitation(s) before proceeding. + + def __init__(self, method: str): + super().__init__( + f"Cannot use {method} in stateless HTTP mode. " + "Stateless mode does not support server-to-client requests. " + "Use stateful mode (stateless_http=False) to enable this feature." + ) + self.method = method + + +class UrlElicitationRequiredError(McpError): + """Specialized error for when a tool requires URL mode elicitation(s) before proceeding. Servers can raise this error from tool handlers to indicate that the client must complete one or more URL elicitations before the request can be processed. @@ -31,7 +45,7 @@ class UrlElicitationRequiredError(McpError): mode="url", message="Authorization required for your files", url="https://example.com/oauth/authorize", - elicitationId="auth-001" + elicitation_id="auth-001" ) ]) """ diff --git a/src/mcp/shared/experimental/__init__.py b/src/mcp/shared/experimental/__init__.py index 9b1b1479cb..fa6940acc6 100644 --- a/src/mcp/shared/experimental/__init__.py +++ b/src/mcp/shared/experimental/__init__.py @@ -1,5 +1,4 @@ -""" -Pure experimental MCP features (no server dependencies). +"""Pure experimental MCP features (no server dependencies). WARNING: These APIs are experimental and may change without notice. diff --git a/src/mcp/shared/experimental/tasks/__init__.py b/src/mcp/shared/experimental/tasks/__init__.py index 37d81af50b..52793e408b 100644 --- a/src/mcp/shared/experimental/tasks/__init__.py +++ b/src/mcp/shared/experimental/tasks/__init__.py @@ -1,5 +1,4 @@ -""" -Pure task state management for MCP. +"""Pure task state management for MCP. WARNING: These APIs are experimental and may change without notice. diff --git a/src/mcp/shared/experimental/tasks/capabilities.py b/src/mcp/shared/experimental/tasks/capabilities.py index 307fcdd6e5..ec9e53e854 100644 --- a/src/mcp/shared/experimental/tasks/capabilities.py +++ b/src/mcp/shared/experimental/tasks/capabilities.py @@ -1,5 +1,4 @@ -""" -Tasks capability checking utilities. +"""Tasks capability checking utilities. This module provides functions for checking and requiring task-related capabilities. All tasks capability logic is centralized here to keep @@ -21,8 +20,7 @@ def check_tasks_capability( required: ClientTasksCapability, client: ClientTasksCapability, ) -> bool: - """ - Check if client's tasks capability matches the required capability. + """Check if client's tasks capability matches the required capability. 
Args: required: The capability being checked for @@ -48,8 +46,8 @@ def check_tasks_capability( if required.requests.sampling is not None: if client.requests.sampling is None: return False - if required.requests.sampling.createMessage is not None: - if client.requests.sampling.createMessage is None: + if required.requests.sampling.create_message is not None: + if client.requests.sampling.create_message is None: return False return True @@ -74,12 +72,11 @@ def has_task_augmented_sampling(caps: ClientCapabilities) -> bool: return False if caps.tasks.requests.sampling is None: return False - return caps.tasks.requests.sampling.createMessage is not None + return caps.tasks.requests.sampling.create_message is not None def require_task_augmented_elicitation(client_caps: ClientCapabilities | None) -> None: - """ - Raise McpError if client doesn't support task-augmented elicitation. + """Raise McpError if client doesn't support task-augmented elicitation. Args: client_caps: The client's declared capabilities, or None if not initialized @@ -97,8 +94,7 @@ def require_task_augmented_elicitation(client_caps: ClientCapabilities | None) - def require_task_augmented_sampling(client_caps: ClientCapabilities | None) -> None: - """ - Raise McpError if client doesn't support task-augmented sampling. + """Raise McpError if client doesn't support task-augmented sampling. Args: client_caps: The client's declared capabilities, or None if not initialized diff --git a/src/mcp/shared/experimental/tasks/context.py b/src/mcp/shared/experimental/tasks/context.py index 12d159515c..ed0d2b91b6 100644 --- a/src/mcp/shared/experimental/tasks/context.py +++ b/src/mcp/shared/experimental/tasks/context.py @@ -1,5 +1,4 @@ -""" -TaskContext - Pure task state management. +"""TaskContext - Pure task state management. This module provides TaskContext, which manages task state without any server/session dependencies. It can be used standalone for distributed @@ -11,8 +10,7 @@ class TaskContext: - """ - Pure task state management - no session dependencies. + """Pure task state management - no session dependencies. This class handles: - Task state (status, result) @@ -41,7 +39,7 @@ def __init__(self, task: Task, store: TaskStore): @property def task_id(self) -> str: """The task identifier.""" - return self._task.taskId + return self._task.task_id @property def task(self) -> Task: @@ -54,8 +52,7 @@ def is_cancelled(self) -> bool: return self._cancelled def request_cancellation(self) -> None: - """ - Request cancellation of this task. + """Request cancellation of this task. This sets is_cancelled=True. Task work should check this periodically and exit gracefully if set. @@ -63,8 +60,7 @@ def request_cancellation(self) -> None: self._cancelled = True async def update_status(self, message: str) -> None: - """ - Update the task's status message. + """Update the task's status message. Args: message: The new status message @@ -75,8 +71,7 @@ async def update_status(self, message: str) -> None: ) async def complete(self, result: Result) -> None: - """ - Mark the task as completed with the given result. + """Mark the task as completed with the given result. Args: result: The task result @@ -88,8 +83,7 @@ async def complete(self, result: Result) -> None: ) async def fail(self, error: str) -> None: - """ - Mark the task as failed with an error message. + """Mark the task as failed with an error message. 
Args: error: The error message diff --git a/src/mcp/shared/experimental/tasks/helpers.py b/src/mcp/shared/experimental/tasks/helpers.py index 5c87f9ef87..95055be828 100644 --- a/src/mcp/shared/experimental/tasks/helpers.py +++ b/src/mcp/shared/experimental/tasks/helpers.py @@ -1,5 +1,4 @@ -""" -Helper functions for pure task management. +"""Helper functions for pure task management. These helpers work with pure TaskContext and don't require server dependencies. For server-integrated task helpers, use mcp.server.experimental. @@ -36,8 +35,7 @@ def is_terminal(status: TaskStatus) -> bool: - """ - Check if a task status represents a terminal state. + """Check if a task status represents a terminal state. Terminal states are those where the task has finished and will not change. @@ -54,8 +52,7 @@ async def cancel_task( store: TaskStore, task_id: str, ) -> CancelTaskResult: - """ - Cancel a task with spec-compliant validation. + """Cancel a task with spec-compliant validation. Per spec: "Receivers MUST reject cancellation of terminal status tasks with -32602 (Invalid params)" @@ -111,8 +108,7 @@ def create_task_state( metadata: TaskMetadata, task_id: str | None = None, ) -> Task: - """ - Create a Task object with initial state. + """Create a Task object with initial state. This is a helper for TaskStore implementations. @@ -125,12 +121,12 @@ def create_task_state( """ now = datetime.now(timezone.utc) return Task( - taskId=task_id or generate_task_id(), + task_id=task_id or generate_task_id(), status=TASK_STATUS_WORKING, - createdAt=now, - lastUpdatedAt=now, + created_at=now, + last_updated_at=now, ttl=metadata.ttl, - pollInterval=500, # Default 500ms poll interval + poll_interval=500, # Default 500ms poll interval ) @@ -139,8 +135,7 @@ async def task_execution( task_id: str, store: TaskStore, ) -> AsyncIterator[TaskContext]: - """ - Context manager for safe task execution (pure, no server dependencies). + """Context manager for safe task execution (pure, no server dependencies). Loads a task from the store and provides a TaskContext for the work. If an unhandled exception occurs, the task is automatically marked as failed diff --git a/src/mcp/shared/experimental/tasks/in_memory_task_store.py b/src/mcp/shared/experimental/tasks/in_memory_task_store.py index 7b630ce6e2..42f4fb7035 100644 --- a/src/mcp/shared/experimental/tasks/in_memory_task_store.py +++ b/src/mcp/shared/experimental/tasks/in_memory_task_store.py @@ -1,5 +1,4 @@ -""" -In-memory implementation of TaskStore for demonstration purposes. +"""In-memory implementation of TaskStore for demonstration purposes. This implementation stores all tasks in memory and provides automatic cleanup based on the TTL duration specified in the task metadata using lazy expiration. @@ -29,8 +28,7 @@ class StoredTask: class InMemoryTaskStore(TaskStore): - """ - A simple in-memory implementation of TaskStore. + """A simple in-memory implementation of TaskStore. 
Features: - Automatic TTL-based cleanup (lazy expiration) @@ -79,14 +77,14 @@ async def create_task( task = create_task_state(metadata, task_id) - if task.taskId in self._tasks: - raise ValueError(f"Task with ID {task.taskId} already exists") + if task.task_id in self._tasks: + raise ValueError(f"Task with ID {task.task_id} already exists") stored = StoredTask( task=task, expires_at=self._calculate_expiry(metadata.ttl), ) - self._tasks[task.taskId] = stored + self._tasks[task.task_id] = stored # Return a copy to prevent external modification return Task(**task.model_dump()) @@ -124,10 +122,10 @@ async def update_task( status_changed = True if status_message is not None: - stored.task.statusMessage = status_message + stored.task.status_message = status_message - # Update lastUpdatedAt on any change - stored.task.lastUpdatedAt = datetime.now(timezone.utc) + # Update last_updated_at on any change + stored.task.last_updated_at = datetime.now(timezone.utc) # If task is now terminal and has TTL, reset expiry timer if status is not None and is_terminal(status) and stored.task.ttl is not None: diff --git a/src/mcp/shared/experimental/tasks/message_queue.py b/src/mcp/shared/experimental/tasks/message_queue.py index 69b6609887..018c2b7b26 100644 --- a/src/mcp/shared/experimental/tasks/message_queue.py +++ b/src/mcp/shared/experimental/tasks/message_queue.py @@ -1,5 +1,4 @@ -""" -TaskMessageQueue - FIFO queue for task-related messages. +"""TaskMessageQueue - FIFO queue for task-related messages. This implements the core message queue pattern from the MCP Tasks spec. When a handler needs to send a request (like elicitation) during a task-augmented @@ -25,8 +24,7 @@ @dataclass class QueuedMessage: - """ - A message queued for delivery via tasks/result. + """A message queued for delivery via tasks/result. Messages are stored with their type and a resolver for requests that expect responses. @@ -49,8 +47,7 @@ class QueuedMessage: class TaskMessageQueue(ABC): - """ - Abstract interface for task message queuing. + """Abstract interface for task message queuing. This is a FIFO queue that stores messages to be delivered via `tasks/result`. When a task-augmented handler calls elicit() or sends a notification, the @@ -65,8 +62,7 @@ class TaskMessageQueue(ABC): @abstractmethod async def enqueue(self, task_id: str, message: QueuedMessage) -> None: - """ - Add a message to the queue for a task. + """Add a message to the queue for a task. Args: task_id: The task identifier @@ -75,8 +71,7 @@ async def enqueue(self, task_id: str, message: QueuedMessage) -> None: @abstractmethod async def dequeue(self, task_id: str) -> QueuedMessage | None: - """ - Remove and return the next message from the queue. + """Remove and return the next message from the queue. Args: task_id: The task identifier @@ -87,8 +82,7 @@ async def dequeue(self, task_id: str) -> QueuedMessage | None: @abstractmethod async def peek(self, task_id: str) -> QueuedMessage | None: - """ - Return the next message without removing it. + """Return the next message without removing it. Args: task_id: The task identifier @@ -99,8 +93,7 @@ async def peek(self, task_id: str) -> QueuedMessage | None: @abstractmethod async def is_empty(self, task_id: str) -> bool: - """ - Check if the queue is empty for a task. + """Check if the queue is empty for a task. 
Args: task_id: The task identifier @@ -111,8 +104,7 @@ async def is_empty(self, task_id: str) -> bool: @abstractmethod async def clear(self, task_id: str) -> list[QueuedMessage]: - """ - Remove and return all messages from the queue. + """Remove and return all messages from the queue. This is useful for cleanup when a task is cancelled or completed. @@ -125,8 +117,7 @@ async def clear(self, task_id: str) -> list[QueuedMessage]: @abstractmethod async def wait_for_message(self, task_id: str) -> None: - """ - Wait until a message is available in the queue. + """Wait until a message is available in the queue. This blocks until either: 1. A message is enqueued for this task @@ -138,8 +129,7 @@ async def wait_for_message(self, task_id: str) -> None: @abstractmethod async def notify_message_available(self, task_id: str) -> None: - """ - Signal that a message is available for a task. + """Signal that a message is available for a task. This wakes up any coroutines waiting in wait_for_message(). @@ -149,8 +139,7 @@ async def notify_message_available(self, task_id: str) -> None: class InMemoryTaskMessageQueue(TaskMessageQueue): - """ - In-memory implementation of TaskMessageQueue. + """In-memory implementation of TaskMessageQueue. This is suitable for single-process servers. For distributed systems, implement TaskMessageQueue with Redis, RabbitMQ, etc. @@ -227,8 +216,7 @@ async def notify_message_available(self, task_id: str) -> None: self._events[task_id].set() def cleanup(self, task_id: str | None = None) -> None: - """ - Clean up queues and events. + """Clean up queues and events. Args: task_id: If provided, clean up only this task. Otherwise clean up all. diff --git a/src/mcp/shared/experimental/tasks/polling.py b/src/mcp/shared/experimental/tasks/polling.py index 39db2e6b68..e4e13b6640 100644 --- a/src/mcp/shared/experimental/tasks/polling.py +++ b/src/mcp/shared/experimental/tasks/polling.py @@ -1,5 +1,4 @@ -""" -Shared polling utilities for task operations. +"""Shared polling utilities for task operations. This module provides generic polling logic that works for both client→server and server→client task polling. @@ -20,8 +19,7 @@ async def poll_until_terminal( task_id: str, default_interval_ms: int = 500, ) -> AsyncIterator[GetTaskResult]: - """ - Poll a task until it reaches terminal status. + """Poll a task until it reaches terminal status. This is a generic utility that works for both client→server and server→client polling. The caller provides the get_task function appropriate for their direction. @@ -41,5 +39,5 @@ async def poll_until_terminal( if is_terminal(status.status): break - interval_ms = status.pollInterval if status.pollInterval is not None else default_interval_ms + interval_ms = status.poll_interval if status.poll_interval is not None else default_interval_ms await anyio.sleep(interval_ms / 1000) diff --git a/src/mcp/shared/experimental/tasks/resolver.py b/src/mcp/shared/experimental/tasks/resolver.py index f27425b2c6..1d233a9309 100644 --- a/src/mcp/shared/experimental/tasks/resolver.py +++ b/src/mcp/shared/experimental/tasks/resolver.py @@ -1,5 +1,4 @@ -""" -Resolver - An anyio-compatible future-like object for async result passing. +"""Resolver - An anyio-compatible future-like object for async result passing. This provides a simple way to pass a result (or exception) from one coroutine to another without depending on asyncio.Future. @@ -13,8 +12,7 @@ class Resolver(Generic[T]): - """ - A simple resolver for passing results between coroutines. 
+ """A simple resolver for passing results between coroutines. Unlike asyncio.Future, this works with any anyio-compatible async backend. diff --git a/src/mcp/shared/experimental/tasks/store.py b/src/mcp/shared/experimental/tasks/store.py index 71fb4511b8..7de97d40ca 100644 --- a/src/mcp/shared/experimental/tasks/store.py +++ b/src/mcp/shared/experimental/tasks/store.py @@ -1,6 +1,4 @@ -""" -TaskStore - Abstract interface for task state storage. -""" +"""TaskStore - Abstract interface for task state storage.""" from abc import ABC, abstractmethod @@ -8,8 +6,7 @@ class TaskStore(ABC): - """ - Abstract interface for task state storage. + """Abstract interface for task state storage. This is a pure storage interface - it doesn't manage execution. Implementations can use in-memory storage, databases, Redis, etc. @@ -23,8 +20,7 @@ async def create_task( metadata: TaskMetadata, task_id: str | None = None, ) -> Task: - """ - Create a new task. + """Create a new task. Args: metadata: Task metadata (ttl, etc.) @@ -39,8 +35,7 @@ async def create_task( @abstractmethod async def get_task(self, task_id: str) -> Task | None: - """ - Get a task by ID. + """Get a task by ID. Args: task_id: The task identifier @@ -56,8 +51,7 @@ async def update_task( status: TaskStatus | None = None, status_message: str | None = None, ) -> Task: - """ - Update a task's status and/or message. + """Update a task's status and/or message. Args: task_id: The task identifier @@ -76,8 +70,7 @@ async def update_task( @abstractmethod async def store_result(self, task_id: str, result: Result) -> None: - """ - Store the result for a task. + """Store the result for a task. Args: task_id: The task identifier @@ -89,8 +82,7 @@ async def store_result(self, task_id: str, result: Result) -> None: @abstractmethod async def get_result(self, task_id: str) -> Result | None: - """ - Get the stored result for a task. + """Get the stored result for a task. Args: task_id: The task identifier @@ -104,8 +96,7 @@ async def list_tasks( self, cursor: str | None = None, ) -> tuple[list[Task], str | None]: - """ - List tasks with pagination. + """List tasks with pagination. Args: cursor: Optional cursor for pagination @@ -116,8 +107,7 @@ async def list_tasks( @abstractmethod async def delete_task(self, task_id: str) -> bool: - """ - Delete a task. + """Delete a task. Args: task_id: The task identifier @@ -128,8 +118,7 @@ async def delete_task(self, task_id: str) -> bool: @abstractmethod async def wait_for_update(self, task_id: str) -> None: - """ - Wait until the task status changes. + """Wait until the task status changes. This blocks until either: 1. The task status changes @@ -146,8 +135,7 @@ async def wait_for_update(self, task_id: str) -> None: @abstractmethod async def notify_update(self, task_id: str) -> None: - """ - Signal that a task has been updated. + """Signal that a task has been updated. This wakes up any coroutines waiting in wait_for_update(). 
diff --git a/src/mcp/shared/memory.py b/src/mcp/shared/memory.py index 06d404e311..7be607fe1f 100644 --- a/src/mcp/shared/memory.py +++ b/src/mcp/shared/memory.py @@ -1,21 +1,13 @@ -""" -In-memory transports -""" +"""In-memory transports""" from __future__ import annotations from collections.abc import AsyncGenerator from contextlib import asynccontextmanager -from datetime import timedelta -from typing import Any import anyio from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream -import mcp.types as types -from mcp.client.session import ClientSession, ElicitationFnT, ListRootsFnT, LoggingFnT, MessageHandlerFnT, SamplingFnT -from mcp.server import Server -from mcp.server.fastmcp import FastMCP from mcp.shared.message import SessionMessage MessageStream = tuple[MemoryObjectReceiveStream[SessionMessage | Exception], MemoryObjectSendStream[SessionMessage]] @@ -23,8 +15,7 @@ @asynccontextmanager async def create_client_server_memory_streams() -> AsyncGenerator[tuple[MessageStream, MessageStream], None]: - """ - Creates a pair of bidirectional memory streams for client-server communication. + """Creates a pair of bidirectional memory streams for client-server communication. Returns: A tuple of (client_streams, server_streams) where each is a tuple of @@ -44,55 +35,3 @@ async def create_client_server_memory_streams() -> AsyncGenerator[tuple[MessageS server_to_client_send, ): yield client_streams, server_streams - - -@asynccontextmanager -async def create_connected_server_and_client_session( - server: Server[Any] | FastMCP, - read_timeout_seconds: timedelta | None = None, - sampling_callback: SamplingFnT | None = None, - list_roots_callback: ListRootsFnT | None = None, - logging_callback: LoggingFnT | None = None, - message_handler: MessageHandlerFnT | None = None, - client_info: types.Implementation | None = None, - raise_exceptions: bool = False, - elicitation_callback: ElicitationFnT | None = None, -) -> AsyncGenerator[ClientSession, None]: - """Creates a ClientSession that is connected to a running MCP server.""" - - # TODO(Marcelo): we should have a proper `Client` that can use this "in-memory transport", - # and we should expose a method in the `FastMCP` so we don't access a private attribute. - if isinstance(server, FastMCP): # pragma: no cover - server = server._mcp_server # type: ignore[reportPrivateUsage] - - async with create_client_server_memory_streams() as (client_streams, server_streams): - client_read, client_write = client_streams - server_read, server_write = server_streams - - # Create a cancel scope for the server task - async with anyio.create_task_group() as tg: - tg.start_soon( - lambda: server.run( - server_read, - server_write, - server.create_initialization_options(), - raise_exceptions=raise_exceptions, - ) - ) - - try: - async with ClientSession( - read_stream=client_read, - write_stream=client_write, - read_timeout_seconds=read_timeout_seconds, - sampling_callback=sampling_callback, - list_roots_callback=list_roots_callback, - logging_callback=logging_callback, - message_handler=message_handler, - client_info=client_info, - elicitation_callback=elicitation_callback, - ) as client_session: - await client_session.initialize() - yield client_session - finally: # pragma: no cover - tg.cancel_scope.cancel() diff --git a/src/mcp/shared/message.py b/src/mcp/shared/message.py index 81503eaaa7..9dedd2e5d3 100644 --- a/src/mcp/shared/message.py +++ b/src/mcp/shared/message.py @@ -1,5 +1,4 @@ -""" -Message wrapper with metadata support. 
+"""Message wrapper with metadata support. This module defines a wrapper type that combines JSONRPCMessage with metadata to support transport-specific features like resumability. diff --git a/src/mcp/shared/metadata_utils.py b/src/mcp/shared/metadata_utils.py index e3f49daf48..2b66996bde 100644 --- a/src/mcp/shared/metadata_utils.py +++ b/src/mcp/shared/metadata_utils.py @@ -8,8 +8,7 @@ def get_display_name(obj: Tool | Resource | Prompt | ResourceTemplate | Implementation) -> str: - """ - Get the display name for an MCP object with proper precedence. + """Get the display name for an MCP object with proper precedence. This is a client-side utility function designed to help MCP clients display human-readable names in their user interfaces. When servers provide a 'title' diff --git a/src/mcp/shared/progress.py b/src/mcp/shared/progress.py index a230c58b45..245654d109 100644 --- a/src/mcp/shared/progress.py +++ b/src/mcp/shared/progress.py @@ -48,10 +48,10 @@ def progress( ProgressContext[SendRequestT, SendNotificationT, SendResultT, ReceiveRequestT, ReceiveNotificationT], None, ]: - if ctx.meta is None or ctx.meta.progressToken is None: # pragma: no cover + if ctx.meta is None or ctx.meta.progress_token is None: # pragma: no cover raise ValueError("No progress token provided") - progress_ctx = ProgressContext(ctx.session, ctx.meta.progressToken, total) + progress_ctx = ProgressContext(ctx.session, ctx.meta.progress_token, total) try: yield progress_ctx finally: diff --git a/src/mcp/shared/response_router.py b/src/mcp/shared/response_router.py index 31796157fe..7ec4a443c1 100644 --- a/src/mcp/shared/response_router.py +++ b/src/mcp/shared/response_router.py @@ -1,5 +1,4 @@ -""" -ResponseRouter - Protocol for pluggable response routing. +"""ResponseRouter - Protocol for pluggable response routing. This module defines a protocol for routing JSON-RPC responses to alternative handlers before falling back to the default response stream mechanism. @@ -20,8 +19,7 @@ class ResponseRouter(Protocol): - """ - Protocol for routing responses to alternative handlers. + """Protocol for routing responses to alternative handlers. Implementations check if they have a pending request for the given ID and deliver the response/error to the appropriate handler. @@ -37,8 +35,7 @@ def route_response(self, request_id, response): """ def route_response(self, request_id: RequestId, response: dict[str, Any]) -> bool: - """ - Try to route a response to a pending request handler. + """Try to route a response to a pending request handler. Args: request_id: The JSON-RPC request ID from the response @@ -50,8 +47,7 @@ def route_response(self, request_id: RequestId, response: dict[str, Any]) -> boo ... # pragma: no cover def route_error(self, request_id: RequestId, error: ErrorData) -> bool: - """ - Try to route an error to a pending request handler. + """Try to route an error to a pending request handler. 
Args: request_id: The JSON-RPC request ID from the error response diff --git a/src/mcp/shared/session.py b/src/mcp/shared/session.py index 3033acd0eb..d00fd764cd 100644 --- a/src/mcp/shared/session.py +++ b/src/mcp/shared/session.py @@ -1,14 +1,14 @@ +from __future__ import annotations + import logging from collections.abc import Callable from contextlib import AsyncExitStack -from datetime import timedelta from types import TracebackType from typing import Any, Generic, Protocol, TypeVar import anyio -import httpx from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream -from pydantic import BaseModel +from pydantic import BaseModel, TypeAdapter from typing_extensions import Self from mcp.shared.exceptions import McpError @@ -17,13 +17,13 @@ from mcp.types import ( CONNECTION_CLOSED, INVALID_PARAMS, + REQUEST_TIMEOUT, CancelledNotification, ClientNotification, ClientRequest, ClientResult, ErrorData, JSONRPCError, - JSONRPCMessage, JSONRPCNotification, JSONRPCRequest, JSONRPCResponse, @@ -73,14 +73,8 @@ def __init__( request_id: RequestId, request_meta: RequestParams.Meta | None, request: ReceiveRequestT, - session: """BaseSession[ - SendRequestT, - SendNotificationT, - SendResultT, - ReceiveRequestT, - ReceiveNotificationT - ]""", - on_complete: Callable[["RequestResponder[ReceiveRequestT, SendResultT]"], Any], + session: BaseSession[SendRequestT, SendNotificationT, SendResultT, ReceiveRequestT, ReceiveNotificationT], + on_complete: Callable[[RequestResponder[ReceiveRequestT, SendResultT]], Any], message_metadata: MessageMetadata = None, ) -> None: self.request_id = request_id @@ -93,7 +87,7 @@ def __init__( self._on_complete = on_complete self._entered = False # Track if we're in a context manager - def __enter__(self) -> "RequestResponder[ReceiveRequestT, SendResultT]": + def __enter__(self) -> RequestResponder[ReceiveRequestT, SendResultT]: """Enter the context manager, enabling request cancellation tracking.""" self._entered = True self._cancel_scope = anyio.CancelScope() @@ -168,8 +162,7 @@ class BaseSession( ReceiveNotificationT, ], ): - """ - Implements an MCP "session" on top of read/write streams, including features + """Implements an MCP "session" on top of read/write streams, including features like request/response linking, notifications, and progress. 
This class is an async context manager that automatically starts processing @@ -180,23 +173,19 @@ class BaseSession( _request_id: int _in_flight: dict[RequestId, RequestResponder[ReceiveRequestT, SendResultT]] _progress_callbacks: dict[RequestId, ProgressFnT] - _response_routers: list["ResponseRouter"] + _response_routers: list[ResponseRouter] def __init__( self, read_stream: MemoryObjectReceiveStream[SessionMessage | Exception], write_stream: MemoryObjectSendStream[SessionMessage], - receive_request_type: type[ReceiveRequestT], - receive_notification_type: type[ReceiveNotificationT], # If none, reading will never time out - read_timeout_seconds: timedelta | None = None, + read_timeout_seconds: float | None = None, ) -> None: self._read_stream = read_stream self._write_stream = write_stream self._response_streams = {} self._request_id = 0 - self._receive_request_type = receive_request_type - self._receive_notification_type = receive_notification_type self._session_read_timeout_seconds = read_timeout_seconds self._in_flight = {} self._progress_callbacks = {} @@ -204,14 +193,14 @@ def __init__( self._exit_stack = AsyncExitStack() def add_response_router(self, router: ResponseRouter) -> None: - """ - Register a response router to handle responses for non-standard requests. + """Register a response router to handle responses for non-standard requests. Response routers are checked in order before falling back to the default response stream mechanism. This is used by TaskResultHandler to route responses for queued task requests back to their resolvers. - WARNING: This is an experimental API that may change without notice. + !!! warning + This is an experimental API that may change without notice. Args: router: A ResponseRouter implementation @@ -241,17 +230,16 @@ async def send_request( self, request: SendRequestT, result_type: type[ReceiveResultT], - request_read_timeout_seconds: timedelta | None = None, + request_read_timeout_seconds: float | None = None, metadata: MessageMetadata = None, progress_callback: ProgressFnT | None = None, ) -> ReceiveResultT: - """ - Sends a request and wait for a response. Raises an McpError if the - response contains an error. If a request read timeout is provided, it - will take precedence over the session read timeout. + """Sends a request and wait for a response. + + Raises an McpError if the response contains an error. If a request read timeout is provided, it will take + precedence over the session read timeout. - Do not use this method to emit notifications! Use send_notification() - instead. + Do not use this method to emit notifications! Use send_notification() instead. 
""" request_id = self._request_id self._request_id = request_id + 1 @@ -272,20 +260,11 @@ async def send_request( self._progress_callbacks[request_id] = progress_callback try: - jsonrpc_request = JSONRPCRequest( - jsonrpc="2.0", - id=request_id, - **request_data, - ) - - await self._write_stream.send(SessionMessage(message=JSONRPCMessage(jsonrpc_request), metadata=metadata)) + jsonrpc_request = JSONRPCRequest(jsonrpc="2.0", id=request_id, **request_data) + await self._write_stream.send(SessionMessage(message=jsonrpc_request, metadata=metadata)) # request read timeout takes precedence over session read timeout - timeout = None - if request_read_timeout_seconds is not None: # pragma: no cover - timeout = request_read_timeout_seconds.total_seconds() - elif self._session_read_timeout_seconds is not None: # pragma: no cover - timeout = self._session_read_timeout_seconds.total_seconds() + timeout = request_read_timeout_seconds or self._session_read_timeout_seconds try: with anyio.fail_after(timeout): @@ -293,11 +272,10 @@ async def send_request( except TimeoutError: raise McpError( ErrorData( - code=httpx.codes.REQUEST_TIMEOUT, + code=REQUEST_TIMEOUT, message=( - f"Timed out while waiting for response to " - f"{request.__class__.__name__}. Waited " - f"{timeout} seconds." + f"Timed out while waiting for response to {request.__class__.__name__}. " + f"Waited {timeout} seconds." ), ) ) @@ -305,7 +283,7 @@ async def send_request( if isinstance(response_or_error, JSONRPCError): raise McpError(response_or_error.error) else: - return result_type.model_validate(response_or_error.result) + return result_type.model_validate(response_or_error.result, by_name=False) finally: self._response_streams.pop(request_id, None) @@ -318,10 +296,7 @@ async def send_notification( notification: SendNotificationT, related_request_id: RequestId | None = None, ) -> None: - """ - Emits a notification, which is a one-way message that does not expect - a response. - """ + """Emits a notification, which is a one-way message that does not expect a response.""" # Some transport implementations may need to set the related_request_id # to attribute to the notifications to the request that triggered them. 
jsonrpc_notification = JSONRPCNotification( @@ -329,7 +304,7 @@ async def send_notification( **notification.model_dump(by_alias=True, mode="json", exclude_none=True), ) session_message = SessionMessage( # pragma: no cover - message=JSONRPCMessage(jsonrpc_notification), + message=jsonrpc_notification, metadata=ServerMessageMetadata(related_request_id=related_request_id) if related_request_id else None, ) await self._write_stream.send(session_message) @@ -337,7 +312,7 @@ async def send_notification( async def _send_response(self, request_id: RequestId, response: SendResultT | ErrorData) -> None: if isinstance(response, ErrorData): jsonrpc_error = JSONRPCError(jsonrpc="2.0", id=request_id, error=response) - session_message = SessionMessage(message=JSONRPCMessage(jsonrpc_error)) + session_message = SessionMessage(message=jsonrpc_error) await self._write_stream.send(session_message) else: jsonrpc_response = JSONRPCResponse( @@ -345,28 +320,33 @@ async def _send_response(self, request_id: RequestId, response: SendResultT | Er id=request_id, result=response.model_dump(by_alias=True, mode="json", exclude_none=True), ) - session_message = SessionMessage(message=JSONRPCMessage(jsonrpc_response)) + session_message = SessionMessage(message=jsonrpc_response) await self._write_stream.send(session_message) + @property + def _receive_request_adapter(self) -> TypeAdapter[ReceiveRequestT]: + """Each subclass must provide its own request adapter.""" + raise NotImplementedError + + @property + def _receive_notification_adapter(self) -> TypeAdapter[ReceiveNotificationT]: + raise NotImplementedError + async def _receive_loop(self) -> None: - async with ( - self._read_stream, - self._write_stream, - ): + async with self._read_stream, self._write_stream: try: async for message in self._read_stream: if isinstance(message, Exception): # pragma: no cover await self._handle_incoming(message) - elif isinstance(message.message.root, JSONRPCRequest): + elif isinstance(message.message, JSONRPCRequest): try: - validated_request = self._receive_request_type.model_validate( - message.message.root.model_dump(by_alias=True, mode="json", exclude_none=True) + validated_request = self._receive_request_adapter.validate_python( + message.message.model_dump(by_alias=True, mode="json", exclude_none=True), + by_name=False, ) responder = RequestResponder( - request_id=message.message.root.id, - request_meta=validated_request.root.params.meta - if validated_request.root.params - else None, + request_id=message.message.id, + request_meta=validated_request.params.meta if validated_request.params else None, request=validated_request, session=self, on_complete=lambda r: self._in_flight.pop(r.request_id, None), @@ -377,58 +357,53 @@ async def _receive_loop(self) -> None: if not responder._completed: # type: ignore[reportPrivateUsage] await self._handle_incoming(responder) - except Exception as e: + except Exception: # For request validation errors, send a proper JSON-RPC error # response instead of crashing the server - logging.warning(f"Failed to validate request: {e}") - logging.debug(f"Message that failed validation: {message.message.root}") + logging.warning("Failed to validate request", exc_info=True) + logging.debug(f"Message that failed validation: {message.message}") error_response = JSONRPCError( jsonrpc="2.0", - id=message.message.root.id, - error=ErrorData( - code=INVALID_PARAMS, - message="Invalid request parameters", - data="", - ), + id=message.message.id, + error=ErrorData(code=INVALID_PARAMS, message="Invalid request 
parameters", data=""), ) - session_message = SessionMessage(message=JSONRPCMessage(error_response)) + session_message = SessionMessage(message=error_response) await self._write_stream.send(session_message) - elif isinstance(message.message.root, JSONRPCNotification): + elif isinstance(message.message, JSONRPCNotification): try: - notification = self._receive_notification_type.model_validate( - message.message.root.model_dump(by_alias=True, mode="json", exclude_none=True) + notification = self._receive_notification_adapter.validate_python( + message.message.model_dump(by_alias=True, mode="json", exclude_none=True), + by_name=False, ) # Handle cancellation notifications - if isinstance(notification.root, CancelledNotification): - cancelled_id = notification.root.params.requestId + if isinstance(notification, CancelledNotification): + cancelled_id = notification.params.request_id if cancelled_id in self._in_flight: # pragma: no branch await self._in_flight[cancelled_id].cancel() else: # Handle progress notifications callback - if isinstance(notification.root, ProgressNotification): # pragma: no cover - progress_token = notification.root.params.progressToken + if isinstance(notification, ProgressNotification): # pragma: no cover + progress_token = notification.params.progress_token # If there is a progress callback for this token, # call it with the progress information if progress_token in self._progress_callbacks: callback = self._progress_callbacks[progress_token] try: await callback( - notification.root.params.progress, - notification.root.params.total, - notification.root.params.message, - ) - except Exception as e: - logging.error( - "Progress callback raised an exception: %s", - e, + notification.params.progress, + notification.params.total, + notification.params.message, ) + except Exception: + logging.exception("Progress callback raised an exception") await self._received_notification(notification) await self._handle_incoming(notification) - except Exception as e: # pragma: no cover + except Exception: # pragma: no cover # For other validation errors, log and continue logging.warning( - f"Failed to validate notification: {e}. Message was: {message.message.root}" + f"Failed to validate notification:. Message was: {message.message}", + exc_info=True, ) else: # Response or error await self._handle_response(message) @@ -456,8 +431,7 @@ async def _receive_loop(self) -> None: self._response_streams.clear() def _normalize_request_id(self, response_id: RequestId) -> RequestId: - """ - Normalize a response ID to match how request IDs are stored. + """Normalize a response ID to match how request IDs are stored. Since the client always sends integer IDs, we normalize string IDs to integers when possible. This matches the TypeScript SDK approach: @@ -477,33 +451,30 @@ def _normalize_request_id(self, response_id: RequestId) -> RequestId: return response_id async def _handle_response(self, message: SessionMessage) -> None: - """ - Handle an incoming response or error message. + """Handle an incoming response or error message. Checks response routers first (e.g., for task-related responses), then falls back to the normal response stream mechanism. """ - root = message.message.root - # This check is always true at runtime: the caller (_receive_loop) only invokes # this method in the else branch after checking for JSONRPCRequest and # JSONRPCNotification. However, the type checker can't infer this from the # method signature, so we need this guard for type narrowing. 
- if not isinstance(root, JSONRPCResponse | JSONRPCError): + if not isinstance(message.message, JSONRPCResponse | JSONRPCError): return # pragma: no cover # Normalize response ID to handle type mismatches (e.g., "0" vs 0) - response_id = self._normalize_request_id(root.id) + response_id = self._normalize_request_id(message.message.id) # First, check response routers (e.g., TaskResultHandler) - if isinstance(root, JSONRPCError): + if isinstance(message.message, JSONRPCError): # Route error to routers for router in self._response_routers: - if router.route_error(response_id, root.error): + if router.route_error(response_id, message.message.error): return # Handled else: # Route success response to routers - response_data: dict[str, Any] = root.result or {} + response_data: dict[str, Any] = message.message.result or {} for router in self._response_routers: if router.route_response(response_id, response_data): return # Handled @@ -511,13 +482,12 @@ async def _handle_response(self, message: SessionMessage) -> None: # Fall back to normal response streams stream = self._response_streams.pop(response_id, None) if stream: # pragma: no cover - await stream.send(root) + await stream.send(message.message) else: # pragma: no cover await self._handle_incoming(RuntimeError(f"Received response with an unknown request ID: {message}")) async def _received_request(self, responder: RequestResponder[ReceiveRequestT, SendResultT]) -> None: - """ - Can be overridden by subclasses to handle a request without needing to + """Can be overridden by subclasses to handle a request without needing to listen on the message stream. If the request is responded to within this method, it will not be @@ -525,8 +495,7 @@ async def _received_request(self, responder: RequestResponder[ReceiveRequestT, S """ async def _received_notification(self, notification: ReceiveNotificationT) -> None: - """ - Can be overridden by subclasses to handle a notification without needing + """Can be overridden by subclasses to handle a notification without needing to listen on the message stream. """ @@ -537,14 +506,9 @@ async def send_progress_notification( total: float | None = None, message: str | None = None, ) -> None: - """ - Sends a progress notification for a request that is currently being - processed. - """ + """Sends a progress notification for a request that is currently being processed.""" async def _handle_incoming( - self, - req: RequestResponder[ReceiveRequestT, SendResultT] | ReceiveNotificationT | Exception, + self, req: RequestResponder[ReceiveRequestT, SendResultT] | ReceiveNotificationT | Exception ) -> None: """A generic handler for incoming messages. 
Overwritten by subclasses.""" - pass # pragma: no cover diff --git a/src/mcp/types/__init__.py b/src/mcp/types/__init__.py new file mode 100644 index 0000000000..c4df66f8d7 --- /dev/null +++ b/src/mcp/types/__init__.py @@ -0,0 +1,416 @@ +"""MCP types package.""" + +# Re-export everything from _types for backward compatibility +from mcp.types._types import ( + DEFAULT_NEGOTIATED_VERSION, + LATEST_PROTOCOL_VERSION, + TASK_FORBIDDEN, + TASK_OPTIONAL, + TASK_REQUIRED, + TASK_STATUS_CANCELLED, + TASK_STATUS_COMPLETED, + TASK_STATUS_FAILED, + TASK_STATUS_INPUT_REQUIRED, + TASK_STATUS_WORKING, + Annotations, + AudioContent, + BaseMetadata, + BlobResourceContents, + CallToolRequest, + CallToolRequestParams, + CallToolResult, + CancelledNotification, + CancelledNotificationParams, + CancelTaskRequest, + CancelTaskRequestParams, + CancelTaskResult, + ClientCapabilities, + ClientNotification, + ClientRequest, + ClientResult, + ClientTasksCapability, + ClientTasksRequestsCapability, + CompleteRequest, + CompleteRequestParams, + CompleteResult, + Completion, + CompletionArgument, + CompletionContext, + CompletionsCapability, + ContentBlock, + CreateMessageRequest, + CreateMessageRequestParams, + CreateMessageResult, + CreateMessageResultWithTools, + CreateTaskResult, + Cursor, + ElicitationCapability, + ElicitationRequiredErrorData, + ElicitCompleteNotification, + ElicitCompleteNotificationParams, + ElicitRequest, + ElicitRequestedSchema, + ElicitRequestFormParams, + ElicitRequestParams, + ElicitRequestURLParams, + ElicitResult, + EmbeddedResource, + EmptyResult, + FormElicitationCapability, + GetPromptRequest, + GetPromptRequestParams, + GetPromptResult, + GetTaskPayloadRequest, + GetTaskPayloadRequestParams, + GetTaskPayloadResult, + GetTaskRequest, + GetTaskRequestParams, + GetTaskResult, + Icon, + ImageContent, + Implementation, + IncludeContext, + InitializedNotification, + InitializeRequest, + InitializeRequestParams, + InitializeResult, + ListPromptsRequest, + ListPromptsResult, + ListResourcesRequest, + ListResourcesResult, + ListResourceTemplatesRequest, + ListResourceTemplatesResult, + ListRootsRequest, + ListRootsResult, + ListTasksRequest, + ListTasksResult, + ListToolsRequest, + ListToolsResult, + LoggingCapability, + LoggingLevel, + LoggingMessageNotification, + LoggingMessageNotificationParams, + MCPModel, + MethodT, + ModelHint, + ModelPreferences, + Notification, + NotificationParams, + NotificationParamsT, + PaginatedRequest, + PaginatedRequestParams, + PaginatedResult, + PingRequest, + ProgressNotification, + ProgressNotificationParams, + ProgressToken, + Prompt, + PromptArgument, + PromptListChangedNotification, + PromptMessage, + PromptReference, + PromptsCapability, + ReadResourceRequest, + ReadResourceRequestParams, + ReadResourceResult, + RelatedTaskMetadata, + Request, + RequestParams, + RequestParamsT, + Resource, + ResourceContents, + ResourceLink, + ResourceListChangedNotification, + ResourcesCapability, + ResourceTemplate, + ResourceTemplateReference, + ResourceUpdatedNotification, + ResourceUpdatedNotificationParams, + Result, + Role, + Root, + RootsCapability, + RootsListChangedNotification, + SamplingCapability, + SamplingContent, + SamplingContextCapability, + SamplingMessage, + SamplingMessageContentBlock, + SamplingToolsCapability, + ServerCapabilities, + ServerNotification, + ServerRequest, + ServerResult, + ServerTasksCapability, + ServerTasksRequestsCapability, + SetLevelRequest, + SetLevelRequestParams, + StopReason, + SubscribeRequest, + SubscribeRequestParams, + 
Task, + TaskExecutionMode, + TaskMetadata, + TasksCallCapability, + TasksCancelCapability, + TasksCreateElicitationCapability, + TasksCreateMessageCapability, + TasksElicitationCapability, + TasksListCapability, + TasksSamplingCapability, + TaskStatus, + TaskStatusNotification, + TaskStatusNotificationParams, + TasksToolsCapability, + TextContent, + TextResourceContents, + Tool, + ToolAnnotations, + ToolChoice, + ToolExecution, + ToolListChangedNotification, + ToolResultContent, + ToolsCapability, + ToolUseContent, + UnsubscribeRequest, + UnsubscribeRequestParams, + UrlElicitationCapability, + client_notification_adapter, + client_request_adapter, + client_result_adapter, + server_notification_adapter, + server_request_adapter, + server_result_adapter, +) + +# Re-export JSONRPC types +from mcp.types.jsonrpc import ( + CONNECTION_CLOSED, + INTERNAL_ERROR, + INVALID_PARAMS, + INVALID_REQUEST, + METHOD_NOT_FOUND, + PARSE_ERROR, + REQUEST_TIMEOUT, + URL_ELICITATION_REQUIRED, + ErrorData, + JSONRPCError, + JSONRPCMessage, + JSONRPCNotification, + JSONRPCRequest, + JSONRPCResponse, + RequestId, + jsonrpc_message_adapter, +) + +__all__ = [ + # Protocol version constants + "LATEST_PROTOCOL_VERSION", + "DEFAULT_NEGOTIATED_VERSION", + # Task execution mode constants + "TASK_FORBIDDEN", + "TASK_OPTIONAL", + "TASK_REQUIRED", + # Task status constants + "TASK_STATUS_CANCELLED", + "TASK_STATUS_COMPLETED", + "TASK_STATUS_FAILED", + "TASK_STATUS_INPUT_REQUIRED", + "TASK_STATUS_WORKING", + # Type aliases and variables + "ContentBlock", + "Cursor", + "ElicitRequestedSchema", + "ElicitRequestParams", + "IncludeContext", + "LoggingLevel", + "MethodT", + "NotificationParamsT", + "ProgressToken", + "RequestParamsT", + "Role", + "SamplingContent", + "SamplingMessageContentBlock", + "StopReason", + "TaskExecutionMode", + "TaskStatus", + # Base classes + "MCPModel", + "BaseMetadata", + "Request", + "Notification", + "Result", + "RequestParams", + "NotificationParams", + "PaginatedRequest", + "PaginatedRequestParams", + "PaginatedResult", + "EmptyResult", + # Capabilities + "ClientCapabilities", + "ClientTasksCapability", + "ClientTasksRequestsCapability", + "CompletionsCapability", + "ElicitationCapability", + "FormElicitationCapability", + "LoggingCapability", + "PromptsCapability", + "ResourcesCapability", + "RootsCapability", + "SamplingCapability", + "SamplingContextCapability", + "SamplingToolsCapability", + "ServerCapabilities", + "ServerTasksCapability", + "ServerTasksRequestsCapability", + "TasksCancelCapability", + "TasksCallCapability", + "TasksCreateElicitationCapability", + "TasksCreateMessageCapability", + "TasksElicitationCapability", + "TasksListCapability", + "TasksSamplingCapability", + "TasksToolsCapability", + "ToolsCapability", + "UrlElicitationCapability", + # Content types + "Annotations", + "AudioContent", + "BlobResourceContents", + "EmbeddedResource", + "Icon", + "ImageContent", + "ResourceContents", + "ResourceLink", + "TextContent", + "TextResourceContents", + "ToolResultContent", + "ToolUseContent", + # Entity types + "Completion", + "CompletionArgument", + "CompletionContext", + "Implementation", + "ModelHint", + "ModelPreferences", + "Prompt", + "PromptArgument", + "PromptMessage", + "PromptReference", + "Resource", + "ResourceTemplate", + "ResourceTemplateReference", + "Root", + "SamplingMessage", + "Task", + "TaskMetadata", + "RelatedTaskMetadata", + "Tool", + "ToolAnnotations", + "ToolChoice", + "ToolExecution", + # Requests + "CallToolRequest", + "CallToolRequestParams", + 
"CancelTaskRequest", + "CancelTaskRequestParams", + "CompleteRequest", + "CompleteRequestParams", + "CreateMessageRequest", + "CreateMessageRequestParams", + "ElicitRequest", + "ElicitRequestFormParams", + "ElicitRequestURLParams", + "GetPromptRequest", + "GetPromptRequestParams", + "GetTaskPayloadRequest", + "GetTaskPayloadRequestParams", + "GetTaskRequest", + "GetTaskRequestParams", + "InitializeRequest", + "InitializeRequestParams", + "ListPromptsRequest", + "ListResourcesRequest", + "ListResourceTemplatesRequest", + "ListRootsRequest", + "ListTasksRequest", + "ListToolsRequest", + "PingRequest", + "ReadResourceRequest", + "ReadResourceRequestParams", + "SetLevelRequest", + "SetLevelRequestParams", + "SubscribeRequest", + "SubscribeRequestParams", + "UnsubscribeRequest", + "UnsubscribeRequestParams", + # Results + "CallToolResult", + "CancelTaskResult", + "CompleteResult", + "CreateMessageResult", + "CreateMessageResultWithTools", + "CreateTaskResult", + "ElicitResult", + "ElicitationRequiredErrorData", + "GetPromptResult", + "GetTaskPayloadResult", + "GetTaskResult", + "InitializeResult", + "ListPromptsResult", + "ListResourcesResult", + "ListResourceTemplatesResult", + "ListRootsResult", + "ListTasksResult", + "ListToolsResult", + "ReadResourceResult", + # Notifications + "CancelledNotification", + "CancelledNotificationParams", + "ElicitCompleteNotification", + "ElicitCompleteNotificationParams", + "InitializedNotification", + "LoggingMessageNotification", + "LoggingMessageNotificationParams", + "ProgressNotification", + "ProgressNotificationParams", + "PromptListChangedNotification", + "ResourceListChangedNotification", + "ResourceUpdatedNotification", + "ResourceUpdatedNotificationParams", + "RootsListChangedNotification", + "TaskStatusNotification", + "TaskStatusNotificationParams", + "ToolListChangedNotification", + # Union types for request/response routing + "ClientNotification", + "ClientRequest", + "ClientResult", + "ServerNotification", + "ServerRequest", + "ServerResult", + # Type adapters + "client_notification_adapter", + "client_request_adapter", + "client_result_adapter", + "server_notification_adapter", + "server_request_adapter", + "server_result_adapter", + # JSON-RPC types + "CONNECTION_CLOSED", + "INTERNAL_ERROR", + "INVALID_PARAMS", + "INVALID_REQUEST", + "METHOD_NOT_FOUND", + "PARSE_ERROR", + "REQUEST_TIMEOUT", + "URL_ELICITATION_REQUIRED", + "ErrorData", + "JSONRPCError", + "JSONRPCMessage", + "JSONRPCNotification", + "JSONRPCRequest", + "JSONRPCResponse", + "RequestId", + "jsonrpc_message_adapter", +] diff --git a/src/mcp/types.py b/src/mcp/types/_types.py similarity index 76% rename from src/mcp/types.py rename to src/mcp/types/_types.py index 654c00660b..f63d3ebac1 100644 --- a/src/mcp/types.py +++ b/src/mcp/types/_types.py @@ -1,28 +1,12 @@ -from collections.abc import Callable +from __future__ import annotations + from datetime import datetime from typing import Annotated, Any, Final, Generic, Literal, TypeAlias, TypeVar -from pydantic import BaseModel, ConfigDict, Field, FileUrl, RootModel -from pydantic.networks import AnyUrl, UrlConstraints -from typing_extensions import deprecated - -""" -Model Context Protocol bindings for Python +from pydantic import BaseModel, ConfigDict, Field, FileUrl, TypeAdapter +from pydantic.alias_generators import to_camel -These bindings were generated from https://github.com/modelcontextprotocol/specification, -using Claude, with a prompt something like the following: - -Generate idiomatic Python bindings for this schema for 
MCP, or the "Model Context -Protocol." The schema is defined in TypeScript, but there's also a JSON Schema version -for reference. - -* For the bindings, let's use Pydantic V2 models. -* Each model should allow extra fields everywhere, by specifying `model_config = - ConfigDict(extra='allow')`. Do this in every case, instead of a custom base class. -* Union types should be represented with a Pydantic `RootModel`. -* Define additional model classes instead of using dictionaries. Do this even if they're - not separate types in the schema. -""" +from mcp.types.jsonrpc import RequestId LATEST_PROTOCOL_VERSION = "2025-11-25" @@ -37,8 +21,6 @@ ProgressToken = str | int Cursor = str Role = Literal["user", "assistant"] -RequestId = Annotated[int, Field(strict=True)] | str -AnyFunction: TypeAlias = Callable[..., Any] TaskExecutionMode = Literal["forbidden", "optional", "required"] TASK_FORBIDDEN: Final[Literal["forbidden"]] = "forbidden" @@ -46,21 +28,25 @@ TASK_REQUIRED: Final[Literal["required"]] = "required" -class TaskMetadata(BaseModel): - """ - Metadata for augmenting a request with task execution. +class MCPModel(BaseModel): + """Base class for all MCP protocol types. Allows extra fields for forward compatibility.""" + + # TODO(Marcelo): The extra="allow" should be only on specific types e.g. `Meta`, not on the base class. + model_config = ConfigDict(extra="allow", alias_generator=to_camel, populate_by_name=True) + + +class TaskMetadata(MCPModel): + """Metadata for augmenting a request with task execution. Include this in the `task` field of the request parameters. """ - model_config = ConfigDict(extra="allow") - ttl: Annotated[int, Field(strict=True)] | None = None """Requested duration in milliseconds to retain task from creation.""" -class RequestParams(BaseModel): - class Meta(BaseModel): - progressToken: ProgressToken | None = None +class RequestParams(MCPModel): + class Meta(MCPModel): + progress_token: ProgressToken | None = None """ If specified, the caller requests out-of-band progress notifications for this request (as represented by notifications/progress). The value of this @@ -68,8 +54,6 @@ class Meta(BaseModel): notifications. The receiver is not obligated to provide these notifications. """ - model_config = ConfigDict(extra="allow") - task: TaskMetadata | None = None """ If specified, the caller is requesting task-augmented execution for this request. 
@@ -91,9 +75,9 @@ class PaginatedRequestParams(RequestParams): """ -class NotificationParams(BaseModel): - class Meta(BaseModel): - model_config = ConfigDict(extra="allow") +class NotificationParams(MCPModel): + class Meta(MCPModel): + pass meta: Meta | None = Field(alias="_meta", default=None) """ @@ -107,12 +91,11 @@ class Meta(BaseModel): MethodT = TypeVar("MethodT", bound=str) -class Request(BaseModel, Generic[RequestParamsT, MethodT]): +class Request(MCPModel, Generic[RequestParamsT, MethodT]): """Base class for JSON-RPC requests.""" method: MethodT params: RequestParamsT - model_config = ConfigDict(extra="allow") class PaginatedRequest(Request[PaginatedRequestParams | None, MethodT], Generic[MethodT]): @@ -122,15 +105,14 @@ class PaginatedRequest(Request[PaginatedRequestParams | None, MethodT], Generic[ params: PaginatedRequestParams | None = None -class Notification(BaseModel, Generic[NotificationParamsT, MethodT]): +class Notification(MCPModel, Generic[NotificationParamsT, MethodT]): """Base class for JSON-RPC notifications.""" method: MethodT params: NotificationParamsT - model_config = ConfigDict(extra="allow") -class Result(BaseModel): +class Result(MCPModel): """Base class for JSON-RPC results.""" meta: dict[str, Any] | None = Field(alias="_meta", default=None) @@ -138,97 +120,21 @@ class Result(BaseModel): See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) for notes on _meta usage. """ - model_config = ConfigDict(extra="allow") class PaginatedResult(Result): - nextCursor: Cursor | None = None + next_cursor: Cursor | None = None """ An opaque token representing the pagination position after the last returned result. If present, there may be more results available. """ -class JSONRPCRequest(Request[dict[str, Any] | None, str]): - """A request that expects a response.""" - - jsonrpc: Literal["2.0"] - id: RequestId - method: str - params: dict[str, Any] | None = None - - -class JSONRPCNotification(Notification[dict[str, Any] | None, str]): - """A notification which does not expect a response.""" - - jsonrpc: Literal["2.0"] - params: dict[str, Any] | None = None - - -class JSONRPCResponse(BaseModel): - """A successful (non-error) response to a request.""" - - jsonrpc: Literal["2.0"] - id: RequestId - result: dict[str, Any] - model_config = ConfigDict(extra="allow") - - -# MCP-specific error codes in the range [-32000, -32099] -URL_ELICITATION_REQUIRED = -32042 -"""Error code indicating that a URL mode elicitation is required before the request can be processed.""" - -# SDK error codes -CONNECTION_CLOSED = -32000 -# REQUEST_TIMEOUT = -32001 # the typescript sdk uses this - -# Standard JSON-RPC error codes -PARSE_ERROR = -32700 -INVALID_REQUEST = -32600 -METHOD_NOT_FOUND = -32601 -INVALID_PARAMS = -32602 -INTERNAL_ERROR = -32603 - - -class ErrorData(BaseModel): - """Error information for JSON-RPC error responses.""" - - code: int - """The error type that occurred.""" - - message: str - """ - A short description of the error. The message SHOULD be limited to a concise single - sentence. - """ - - data: Any | None = None - """ - Additional information about the error. The value of this member is defined by the - sender (e.g. detailed error information, nested errors etc.). 
- """ - - model_config = ConfigDict(extra="allow") - - -class JSONRPCError(BaseModel): - """A response to a request that indicates an error occurred.""" - - jsonrpc: Literal["2.0"] - id: str | int - error: ErrorData - model_config = ConfigDict(extra="allow") - - -class JSONRPCMessage(RootModel[JSONRPCRequest | JSONRPCNotification | JSONRPCResponse | JSONRPCError]): - pass - - class EmptyResult(Result): """A response that indicates success but carries no data.""" -class BaseMetadata(BaseModel): +class BaseMetadata(MCPModel): """Base class for entities with name and optional title fields.""" name: str @@ -245,78 +151,69 @@ class BaseMetadata(BaseModel): """ -class Icon(BaseModel): +class Icon(MCPModel): """An icon for display in user interfaces.""" src: str """URL or data URI for the icon.""" - mimeType: str | None = None + mime_type: str | None = None """Optional MIME type for the icon.""" sizes: list[str] | None = None """Optional list of strings specifying icon dimensions (e.g., ["48x48", "96x96"]).""" - model_config = ConfigDict(extra="allow") - class Implementation(BaseMetadata): """Describes the name and version of an MCP implementation.""" version: str - websiteUrl: str | None = None + title: str | None = None + """An optional human-readable title for this implementation.""" + + description: str | None = None + """An optional human-readable description of what this implementation does.""" + + website_url: str | None = None """An optional URL of the website for this implementation.""" icons: list[Icon] | None = None """An optional list of icons for this implementation.""" - model_config = ConfigDict(extra="allow") - -class RootsCapability(BaseModel): +class RootsCapability(MCPModel): """Capability for root operations.""" - listChanged: bool | None = None + list_changed: bool | None = None """Whether the client supports notifications for changes to the roots list.""" - model_config = ConfigDict(extra="allow") -class SamplingContextCapability(BaseModel): - """ - Capability for context inclusion during sampling. +class SamplingContextCapability(MCPModel): + """Capability for context inclusion during sampling. Indicates support for non-'none' values in the includeContext parameter. SOFT-DEPRECATED: New implementations should use tools parameter instead. """ - model_config = ConfigDict(extra="allow") - -class SamplingToolsCapability(BaseModel): - """ - Capability indicating support for tool calling during sampling. +class SamplingToolsCapability(MCPModel): + """Capability indicating support for tool calling during sampling. When present in ClientCapabilities.sampling, indicates that the client supports the tools and toolChoice parameters in sampling requests. """ - model_config = ConfigDict(extra="allow") - -class FormElicitationCapability(BaseModel): +class FormElicitationCapability(MCPModel): """Capability for form mode elicitation.""" - model_config = ConfigDict(extra="allow") - -class UrlElicitationCapability(BaseModel): +class UrlElicitationCapability(MCPModel): """Capability for URL mode elicitation.""" - model_config = ConfigDict(extra="allow") - -class ElicitationCapability(BaseModel): +class ElicitationCapability(MCPModel): """Capability for elicitation operations. Clients must support at least one mode (form or url). 
@@ -328,13 +225,9 @@ class ElicitationCapability(BaseModel): url: UrlElicitationCapability | None = None """Present if the client supports URL mode elicitation.""" - model_config = ConfigDict(extra="allow") - -class SamplingCapability(BaseModel): - """ - Sampling capability structure, allowing fine-grained capability advertisement. - """ +class SamplingCapability(MCPModel): + """Sampling capability structure, allowing fine-grained capability advertisement.""" context: SamplingContextCapability | None = None """ @@ -346,64 +239,47 @@ class SamplingCapability(BaseModel): Present if the client supports tools and toolChoice parameters in sampling requests. Presence indicates full tool calling support during sampling. """ - model_config = ConfigDict(extra="allow") -class TasksListCapability(BaseModel): +class TasksListCapability(MCPModel): """Capability for tasks listing operations.""" - model_config = ConfigDict(extra="allow") - -class TasksCancelCapability(BaseModel): +class TasksCancelCapability(MCPModel): """Capability for tasks cancel operations.""" - model_config = ConfigDict(extra="allow") - -class TasksCreateMessageCapability(BaseModel): +class TasksCreateMessageCapability(MCPModel): """Capability for tasks create messages.""" - model_config = ConfigDict(extra="allow") - -class TasksSamplingCapability(BaseModel): +class TasksSamplingCapability(MCPModel): """Capability for tasks sampling operations.""" - model_config = ConfigDict(extra="allow") - - createMessage: TasksCreateMessageCapability | None = None + create_message: TasksCreateMessageCapability | None = None -class TasksCreateElicitationCapability(BaseModel): +class TasksCreateElicitationCapability(MCPModel): """Capability for tasks create elicitation operations.""" - model_config = ConfigDict(extra="allow") - -class TasksElicitationCapability(BaseModel): +class TasksElicitationCapability(MCPModel): """Capability for tasks elicitation operations.""" - model_config = ConfigDict(extra="allow") - create: TasksCreateElicitationCapability | None = None -class ClientTasksRequestsCapability(BaseModel): +class ClientTasksRequestsCapability(MCPModel): """Capability for tasks requests operations.""" - model_config = ConfigDict(extra="allow") - sampling: TasksSamplingCapability | None = None elicitation: TasksElicitationCapability | None = None -class ClientTasksCapability(BaseModel): +class ClientTasksCapability(MCPModel): """Capability for client tasks operations.""" - model_config = ConfigDict(extra="allow") - list: TasksListCapability | None = None """Whether this client supports tasks/list.""" @@ -414,7 +290,7 @@ class ClientTasksCapability(BaseModel): """Specifies which request types can be augmented with tasks.""" -class ClientCapabilities(BaseModel): +class ClientCapabilities(MCPModel): """Capabilities a client may support.""" experimental: dict[str, dict[str, Any]] | None = None @@ -431,79 +307,63 @@ class ClientCapabilities(BaseModel): tasks: ClientTasksCapability | None = None """Present if the client supports task-augmented requests.""" - model_config = ConfigDict(extra="allow") - -class PromptsCapability(BaseModel): +class PromptsCapability(MCPModel): """Capability for prompts operations.""" - listChanged: bool | None = None + list_changed: bool | None = None """Whether this server supports notifications for changes to the prompt list.""" - model_config = ConfigDict(extra="allow") -class ResourcesCapability(BaseModel): +class ResourcesCapability(MCPModel): """Capability for resources operations.""" subscribe: bool | None = None 
"""Whether this server supports subscribing to resource updates.""" - listChanged: bool | None = None + list_changed: bool | None = None """Whether this server supports notifications for changes to the resource list.""" - model_config = ConfigDict(extra="allow") -class ToolsCapability(BaseModel): +class ToolsCapability(MCPModel): """Capability for tools operations.""" - listChanged: bool | None = None + list_changed: bool | None = None """Whether this server supports notifications for changes to the tool list.""" - model_config = ConfigDict(extra="allow") -class LoggingCapability(BaseModel): +class LoggingCapability(MCPModel): """Capability for logging operations.""" - model_config = ConfigDict(extra="allow") - -class CompletionsCapability(BaseModel): +class CompletionsCapability(MCPModel): """Capability for completions operations.""" - model_config = ConfigDict(extra="allow") - -class TasksCallCapability(BaseModel): +class TasksCallCapability(MCPModel): """Capability for tasks call operations.""" - model_config = ConfigDict(extra="allow") - -class TasksToolsCapability(BaseModel): +class TasksToolsCapability(MCPModel): """Capability for tasks tools operations.""" - model_config = ConfigDict(extra="allow") call: TasksCallCapability | None = None -class ServerTasksRequestsCapability(BaseModel): +class ServerTasksRequestsCapability(MCPModel): """Capability for tasks requests operations.""" - model_config = ConfigDict(extra="allow") - tools: TasksToolsCapability | None = None -class ServerTasksCapability(BaseModel): +class ServerTasksCapability(MCPModel): """Capability for server tasks operations.""" - model_config = ConfigDict(extra="allow") - list: TasksListCapability | None = None cancel: TasksCancelCapability | None = None requests: ServerTasksRequestsCapability | None = None -class ServerCapabilities(BaseModel): +class ServerCapabilities(MCPModel): """Capabilities that a server may support.""" experimental: dict[str, dict[str, Any]] | None = None @@ -520,7 +380,6 @@ class ServerCapabilities(BaseModel): """Present if the server offers autocompletion suggestions for prompts and resources.""" tasks: ServerTasksCapability | None = None """Present if the server supports task-augmented requests.""" - model_config = ConfigDict(extra="allow") TaskStatus = Literal["working", "input_required", "completed", "failed", "cancelled"] @@ -533,31 +392,27 @@ class ServerCapabilities(BaseModel): TASK_STATUS_CANCELLED: Final[Literal["cancelled"]] = "cancelled" -class RelatedTaskMetadata(BaseModel): - """ - Metadata for associating messages with a task. +class RelatedTaskMetadata(MCPModel): + """Metadata for associating messages with a task. Include this in the `_meta` field under the key `io.modelcontextprotocol/related-task`. """ - model_config = ConfigDict(extra="allow") - taskId: str + task_id: str """The task identifier this message is associated with.""" -class Task(BaseModel): +class Task(MCPModel): """Data associated with a task.""" - model_config = ConfigDict(extra="allow") - - taskId: str + task_id: str """The task identifier.""" status: TaskStatus """Current task state.""" - statusMessage: str | None = None - """ + status_message: str | None = None + """ Optional human-readable message describing the current task state. 
This can provide context for any status, including: - Reasons for "cancelled" status @@ -565,16 +420,16 @@ class Task(BaseModel): - Diagnostic information for "failed" status (e.g., error details, what went wrong) """ - createdAt: datetime # Pydantic will enforce ISO 8601 and re-serialize as a string later + created_at: datetime # Pydantic will enforce ISO 8601 and re-serialize as a string later """ISO 8601 timestamp when the task was created.""" - lastUpdatedAt: datetime + last_updated_at: datetime """ISO 8601 timestamp when the task was last updated.""" ttl: Annotated[int, Field(strict=True)] | None """Actual retention duration from creation in milliseconds, null for unlimited.""" - pollInterval: Annotated[int, Field(strict=True)] | None = None + poll_interval: Annotated[int, Field(strict=True)] | None = None """Suggested polling interval in milliseconds.""" @@ -585,8 +440,7 @@ class CreateTaskResult(Result): class GetTaskRequestParams(RequestParams): - model_config = ConfigDict(extra="allow") - taskId: str + task_id: str """The task identifier to query.""" @@ -603,9 +457,7 @@ class GetTaskResult(Result, Task): class GetTaskPayloadRequestParams(RequestParams): - model_config = ConfigDict(extra="allow") - - taskId: str + task_id: str """The task identifier to retrieve results for.""" @@ -617,17 +469,14 @@ class GetTaskPayloadRequest(Request[GetTaskPayloadRequestParams, Literal["tasks/ class GetTaskPayloadResult(Result): - """ - The response to a tasks/result request. + """The response to a tasks/result request. The structure matches the result type of the original request. For example, a tools/call task would return the CallToolResult structure. """ class CancelTaskRequestParams(RequestParams): - model_config = ConfigDict(extra="allow") - - taskId: str + task_id: str """The task identifier to cancel.""" @@ -659,8 +508,7 @@ class TaskStatusNotificationParams(NotificationParams, Task): class TaskStatusNotification(Notification[TaskStatusNotificationParams, Literal["notifications/tasks/status"]]): - """ - An optional notification from the receiver to the requestor, informing them that a task's status has changed. + """An optional notification from the receiver to the requestor, informing them that a task's status has changed. Receivers are not required to send these notifications """ @@ -671,16 +519,14 @@ class TaskStatusNotification(Notification[TaskStatusNotificationParams, Literal[ class InitializeRequestParams(RequestParams): """Parameters for the initialize request.""" - protocolVersion: str | int + protocol_version: str | int """The latest version of the Model Context Protocol that the client supports.""" capabilities: ClientCapabilities - clientInfo: Implementation - model_config = ConfigDict(extra="allow") + client_info: Implementation class InitializeRequest(Request[InitializeRequestParams, Literal["initialize"]]): - """ - This request is sent from the client to the server when it first connects, asking it + """This request is sent from the client to the server when it first connects, asking it to begin initialization. 
""" @@ -691,17 +537,16 @@ class InitializeRequest(Request[InitializeRequestParams, Literal["initialize"]]) class InitializeResult(Result): """After receiving an initialize request from the client, the server sends this.""" - protocolVersion: str | int + protocol_version: str | int """The version of the Model Context Protocol that the server wants to use.""" capabilities: ServerCapabilities - serverInfo: Implementation + server_info: Implementation instructions: str | None = None """Instructions describing how to use the server and its features.""" class InitializedNotification(Notification[NotificationParams | None, Literal["notifications/initialized"]]): - """ - This notification is sent from the client to the server after initialization has + """This notification is sent from the client to the server after initialization has finished. """ @@ -710,8 +555,7 @@ class InitializedNotification(Notification[NotificationParams | None, Literal["n class PingRequest(Request[RequestParams | None, Literal["ping"]]): - """ - A ping, issued by either the server or the client, to check that the other party is + """A ping, issued by either the server or the client, to check that the other party is still alive. """ @@ -722,7 +566,7 @@ class PingRequest(Request[RequestParams | None, Literal["ping"]]): class ProgressNotificationParams(NotificationParams): """Parameters for progress notifications.""" - progressToken: ProgressToken + progress_token: ProgressToken """ The progress token which was given in the initial request, used to associate this notification with the request that is proceeding. @@ -739,12 +583,10 @@ class ProgressNotificationParams(NotificationParams): Message related to progress. This should provide relevant human readable progress information. """ - model_config = ConfigDict(extra="allow") class ProgressNotification(Notification[ProgressNotificationParams, Literal["notifications/progress"]]): - """ - An out-of-band notification used to inform the receiver of a progress update for a + """An out-of-band notification used to inform the receiver of a progress update for a long-running request. """ @@ -758,20 +600,19 @@ class ListResourcesRequest(PaginatedRequest[Literal["resources/list"]]): method: Literal["resources/list"] = "resources/list" -class Annotations(BaseModel): +class Annotations(MCPModel): audience: list[Role] | None = None priority: Annotated[float, Field(ge=0.0, le=1.0)] | None = None - model_config = ConfigDict(extra="allow") class Resource(BaseMetadata): """A known resource that the server is capable of reading.""" - uri: Annotated[AnyUrl, UrlConstraints(host_required=False)] + uri: str """The URI of this resource.""" description: str | None = None """A description of what this resource represents.""" - mimeType: str | None = None + mime_type: str | None = None """The MIME type of this resource, if known.""" size: int | None = None """ @@ -788,20 +629,19 @@ class Resource(BaseMetadata): See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) for notes on _meta usage. """ - model_config = ConfigDict(extra="allow") class ResourceTemplate(BaseMetadata): """A template description for resources available on the server.""" - uriTemplate: str + uri_template: str """ A URI template (according to RFC 6570) that can be used to construct resource URIs. 
""" description: str | None = None """A human-readable description of what this template is for.""" - mimeType: str | None = None + mime_type: str | None = None """ The MIME type for all resources that match this template. This should only be included if all resources matching this template have the same type. @@ -814,7 +654,6 @@ class ResourceTemplate(BaseMetadata): See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) for notes on _meta usage. """ - model_config = ConfigDict(extra="allow") class ListResourcesResult(PaginatedResult): @@ -832,18 +671,17 @@ class ListResourceTemplatesRequest(PaginatedRequest[Literal["resources/templates class ListResourceTemplatesResult(PaginatedResult): """The server's response to a resources/templates/list request from the client.""" - resourceTemplates: list[ResourceTemplate] + resource_templates: list[ResourceTemplate] class ReadResourceRequestParams(RequestParams): """Parameters for reading a resource.""" - uri: Annotated[AnyUrl, UrlConstraints(host_required=False)] + uri: str """ The URI of the resource to read. The URI can use any protocol; it is up to the server how to interpret it. """ - model_config = ConfigDict(extra="allow") class ReadResourceRequest(Request[ReadResourceRequestParams, Literal["resources/read"]]): @@ -853,19 +691,18 @@ class ReadResourceRequest(Request[ReadResourceRequestParams, Literal["resources/ params: ReadResourceRequestParams -class ResourceContents(BaseModel): +class ResourceContents(MCPModel): """The contents of a specific resource or sub-resource.""" - uri: Annotated[AnyUrl, UrlConstraints(host_required=False)] + uri: str """The URI of this resource.""" - mimeType: str | None = None + mime_type: str | None = None """The MIME type of this resource, if known.""" meta: dict[str, Any] | None = Field(alias="_meta", default=None) """ See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) for notes on _meta usage. """ - model_config = ConfigDict(extra="allow") class TextResourceContents(ResourceContents): @@ -894,8 +731,7 @@ class ReadResourceResult(Result): class ResourceListChangedNotification( Notification[NotificationParams | None, Literal["notifications/resources/list_changed"]] ): - """ - An optional notification from the server to the client, informing it that the list + """An optional notification from the server to the client, informing it that the list of resources it can read from has changed. """ @@ -906,17 +742,15 @@ class ResourceListChangedNotification( class SubscribeRequestParams(RequestParams): """Parameters for subscribing to a resource.""" - uri: Annotated[AnyUrl, UrlConstraints(host_required=False)] + uri: str """ The URI of the resource to subscribe to. The URI can use any protocol; it is up to the server how to interpret it. """ - model_config = ConfigDict(extra="allow") class SubscribeRequest(Request[SubscribeRequestParams, Literal["resources/subscribe"]]): - """ - Sent from the client to request resources/updated notifications from the server + """Sent from the client to request resources/updated notifications from the server whenever a particular resource changes. 
""" @@ -927,14 +761,12 @@ class SubscribeRequest(Request[SubscribeRequestParams, Literal["resources/subscr class UnsubscribeRequestParams(RequestParams): """Parameters for unsubscribing from a resource.""" - uri: Annotated[AnyUrl, UrlConstraints(host_required=False)] + uri: str """The URI of the resource to unsubscribe from.""" - model_config = ConfigDict(extra="allow") class UnsubscribeRequest(Request[UnsubscribeRequestParams, Literal["resources/unsubscribe"]]): - """ - Sent from the client to request cancellation of resources/updated notifications from + """Sent from the client to request cancellation of resources/updated notifications from the server. """ @@ -945,19 +777,17 @@ class UnsubscribeRequest(Request[UnsubscribeRequestParams, Literal["resources/un class ResourceUpdatedNotificationParams(NotificationParams): """Parameters for resource update notifications.""" - uri: Annotated[AnyUrl, UrlConstraints(host_required=False)] + uri: str """ The URI of the resource that has been updated. This might be a sub-resource of the one that the client actually subscribed to. """ - model_config = ConfigDict(extra="allow") class ResourceUpdatedNotification( Notification[ResourceUpdatedNotificationParams, Literal["notifications/resources/updated"]] ): - """ - A notification from the server to the client, informing it that a resource has + """A notification from the server to the client, informing it that a resource has changed and may need to be read again. """ @@ -971,7 +801,7 @@ class ListPromptsRequest(PaginatedRequest[Literal["prompts/list"]]): method: Literal["prompts/list"] = "prompts/list" -class PromptArgument(BaseModel): +class PromptArgument(MCPModel): """An argument for a prompt template.""" name: str @@ -980,7 +810,6 @@ class PromptArgument(BaseModel): """A human-readable description of the argument.""" required: bool | None = None """Whether this argument must be provided.""" - model_config = ConfigDict(extra="allow") class Prompt(BaseMetadata): @@ -997,7 +826,6 @@ class Prompt(BaseMetadata): See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) for notes on _meta usage. """ - model_config = ConfigDict(extra="allow") class ListPromptsResult(PaginatedResult): @@ -1013,7 +841,6 @@ class GetPromptRequestParams(RequestParams): """The name of the prompt or prompt template.""" arguments: dict[str, str] | None = None """Arguments to use for templating the prompt.""" - model_config = ConfigDict(extra="allow") class GetPromptRequest(Request[GetPromptRequestParams, Literal["prompts/get"]]): @@ -1023,10 +850,10 @@ class GetPromptRequest(Request[GetPromptRequestParams, Literal["prompts/get"]]): params: GetPromptRequestParams -class TextContent(BaseModel): +class TextContent(MCPModel): """Text content for a message.""" - type: Literal["text"] + type: Literal["text"] = "text" text: str """The text content of the message.""" annotations: Annotations | None = None @@ -1035,16 +862,15 @@ class TextContent(BaseModel): See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) for notes on _meta usage. 
""" - model_config = ConfigDict(extra="allow") -class ImageContent(BaseModel): +class ImageContent(MCPModel): """Image content for a message.""" - type: Literal["image"] + type: Literal["image"] = "image" data: str """The base64-encoded image data.""" - mimeType: str + mime_type: str """ The MIME type of the image. Different providers may support different image types. @@ -1055,16 +881,15 @@ class ImageContent(BaseModel): See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) for notes on _meta usage. """ - model_config = ConfigDict(extra="allow") -class AudioContent(BaseModel): +class AudioContent(MCPModel): """Audio content for a message.""" - type: Literal["audio"] + type: Literal["audio"] = "audio" data: str """The base64-encoded audio data.""" - mimeType: str + mime_type: str """ The MIME type of the audio. Different providers may support different audio types. @@ -1075,19 +900,17 @@ class AudioContent(BaseModel): See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) for notes on _meta usage. """ - model_config = ConfigDict(extra="allow") -class ToolUseContent(BaseModel): - """ - Content representing an assistant's request to invoke a tool. +class ToolUseContent(MCPModel): + """Content representing an assistant's request to invoke a tool. This content type appears in assistant messages when the LLM wants to call a tool during sampling. The server should execute the tool and return a ToolResultContent in the next user message. """ - type: Literal["tool_use"] + type: Literal["tool_use"] = "tool_use" """Discriminator for tool use content.""" name: str @@ -1104,35 +927,33 @@ class ToolUseContent(BaseModel): See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) for notes on _meta usage. """ - model_config = ConfigDict(extra="allow") -class ToolResultContent(BaseModel): - """ - Content representing the result of a tool execution. +class ToolResultContent(MCPModel): + """Content representing the result of a tool execution. This content type appears in user messages as a response to a ToolUseContent from the assistant. It contains the output of executing the requested tool. """ - type: Literal["tool_result"] + type: Literal["tool_result"] = "tool_result" """Discriminator for tool result content.""" - toolUseId: str + tool_use_id: str """The unique identifier that corresponds to the tool call's id field.""" - content: list["ContentBlock"] = [] + content: list[ContentBlock] = [] """ A list of content objects representing the tool result. Defaults to empty list if not provided. """ - structuredContent: dict[str, Any] | None = None + structured_content: dict[str, Any] | None = None """ Optional structured tool output that matches the tool's outputSchema (if defined). 
""" - isError: bool | None = None + is_error: bool | None = None """Whether the tool execution resulted in an error.""" meta: dict[str, Any] | None = Field(alias="_meta", default=None) @@ -1140,7 +961,6 @@ class ToolResultContent(BaseModel): See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) for notes on _meta usage. """ - model_config = ConfigDict(extra="allow") SamplingMessageContentBlock: TypeAlias = TextContent | ImageContent | AudioContent | ToolUseContent | ToolResultContent @@ -1151,7 +971,7 @@ class ToolResultContent(BaseModel): Used for backwards-compatible CreateMessageResult when tools are not used.""" -class SamplingMessage(BaseModel): +class SamplingMessage(MCPModel): """Describes a message issued to or received from an LLM API.""" role: Role @@ -1165,7 +985,6 @@ class SamplingMessage(BaseModel): See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) for notes on _meta usage. """ - model_config = ConfigDict(extra="allow") @property def content_as_list(self) -> list[SamplingMessageContentBlock]: @@ -1174,15 +993,14 @@ def content_as_list(self) -> list[SamplingMessageContentBlock]: return self.content if isinstance(self.content, list) else [self.content] -class EmbeddedResource(BaseModel): - """ - The contents of a resource, embedded into a prompt or tool call result. +class EmbeddedResource(MCPModel): + """The contents of a resource, embedded into a prompt or tool call result. It is up to the client how best to render embedded resources for the benefit of the LLM and/or the user. """ - type: Literal["resource"] + type: Literal["resource"] = "resource" resource: TextResourceContents | BlobResourceContents annotations: Annotations | None = None meta: dict[str, Any] | None = Field(alias="_meta", default=None) @@ -1190,32 +1008,26 @@ class EmbeddedResource(BaseModel): See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) for notes on _meta usage. """ - model_config = ConfigDict(extra="allow") class ResourceLink(Resource): - """ - A resource that the server is capable of reading, included in a prompt or tool call result. + """A resource that the server is capable of reading, included in a prompt or tool call result. Note: resource links returned by tools are not guaranteed to appear in the results of `resources/list` requests. 
""" - type: Literal["resource_link"] + type: Literal["resource_link"] = "resource_link" ContentBlock = TextContent | ImageContent | AudioContent | ResourceLink | EmbeddedResource """A content block that can be used in prompts and tool results.""" -Content: TypeAlias = ContentBlock -# """DEPRECATED: Content is deprecated, you should use ContentBlock directly.""" - -class PromptMessage(BaseModel): +class PromptMessage(MCPModel): """Describes a message returned as part of a prompt.""" role: Role content: ContentBlock - model_config = ConfigDict(extra="allow") class GetPromptResult(Result): @@ -1229,8 +1041,7 @@ class GetPromptResult(Result): class PromptListChangedNotification( Notification[NotificationParams | None, Literal["notifications/prompts/list_changed"]] ): - """ - An optional notification from the server to the client, informing it that the list + """An optional notification from the server to the client, informing it that the list of prompts it offers has changed. """ @@ -1244,9 +1055,8 @@ class ListToolsRequest(PaginatedRequest[Literal["tools/list"]]): method: Literal["tools/list"] = "tools/list" -class ToolAnnotations(BaseModel): - """ - Additional properties describing a Tool to clients. +class ToolAnnotations(MCPModel): + """Additional properties describing a Tool to clients. NOTE: all properties in ToolAnnotations are **hints**. They are not guaranteed to provide a faithful description of @@ -1259,29 +1069,29 @@ class ToolAnnotations(BaseModel): title: str | None = None """A human-readable title for the tool.""" - readOnlyHint: bool | None = None + read_only_hint: bool | None = None """ If true, the tool does not modify its environment. Default: false """ - destructiveHint: bool | None = None + destructive_hint: bool | None = None """ If true, the tool may perform destructive updates to its environment. If false, the tool performs only additive updates. - (This property is meaningful only when `readOnlyHint == false`) + (This property is meaningful only when `read_only_hint == false`) Default: true """ - idempotentHint: bool | None = None + idempotent_hint: bool | None = None """ If true, calling the tool repeatedly with the same arguments will have no additional effect on the its environment. - (This property is meaningful only when `readOnlyHint == false`) + (This property is meaningful only when `read_only_hint == false`) Default: false """ - openWorldHint: bool | None = None + open_world_hint: bool | None = None """ If true, this tool may interact with an "open world" of external entities. If false, the tool's domain of interaction is closed. @@ -1290,15 +1100,11 @@ class ToolAnnotations(BaseModel): Default: true """ - model_config = ConfigDict(extra="allow") - -class ToolExecution(BaseModel): +class ToolExecution(MCPModel): """Execution-related properties for a tool.""" - model_config = ConfigDict(extra="allow") - - taskSupport: TaskExecutionMode | None = None + task_support: TaskExecutionMode | None = None """ Indicates whether this tool supports task-augmented execution. 
This allows clients to handle long-running operations through polling @@ -1317,12 +1123,12 @@ class Tool(BaseMetadata): description: str | None = None """A human-readable description of the tool.""" - inputSchema: dict[str, Any] + input_schema: dict[str, Any] """A JSON Schema object defining the expected parameters for the tool.""" - outputSchema: dict[str, Any] | None = None + output_schema: dict[str, Any] | None = None """ An optional JSON Schema object defining the structure of the tool's output - returned in the structuredContent field of a CallToolResult. + returned in the structured_content field of a CallToolResult. """ icons: list[Icon] | None = None """An optional list of icons for this tool.""" @@ -1336,8 +1142,6 @@ class Tool(BaseMetadata): execution: ToolExecution | None = None - model_config = ConfigDict(extra="allow") - class ListToolsResult(PaginatedResult): """The server's response to a tools/list request from the client.""" @@ -1350,7 +1154,6 @@ class CallToolRequestParams(RequestParams): name: str arguments: dict[str, Any] | None = None - model_config = ConfigDict(extra="allow") class CallToolRequest(Request[CallToolRequestParams, Literal["tools/call"]]): @@ -1364,14 +1167,13 @@ class CallToolResult(Result): """The server's response to a tool call.""" content: list[ContentBlock] - structuredContent: dict[str, Any] | None = None + structured_content: dict[str, Any] | None = None """An optional JSON object that represents the structured result of the tool call.""" - isError: bool = False + is_error: bool = False class ToolListChangedNotification(Notification[NotificationParams | None, Literal["notifications/tools/list_changed"]]): - """ - An optional notification from the server to the client, informing it that the list + """An optional notification from the server to the client, informing it that the list of tools it offers has changed. """ @@ -1387,7 +1189,6 @@ class SetLevelRequestParams(RequestParams): level: LoggingLevel """The level of logging that the client wants to receive from the server.""" - model_config = ConfigDict(extra="allow") class SetLevelRequest(Request[SetLevelRequestParams, Literal["logging/setLevel"]]): @@ -1409,7 +1210,6 @@ class LoggingMessageNotificationParams(NotificationParams): The data to be logged, such as a string message or an object. Any JSON serializable type is allowed here. """ - model_config = ConfigDict(extra="allow") class LoggingMessageNotification(Notification[LoggingMessageNotificationParams, Literal["notifications/message"]]): @@ -1422,18 +1222,15 @@ class LoggingMessageNotification(Notification[LoggingMessageNotificationParams, IncludeContext = Literal["none", "thisServer", "allServers"] -class ModelHint(BaseModel): +class ModelHint(MCPModel): """Hints to use for model selection.""" name: str | None = None """A hint for a model name.""" - model_config = ConfigDict(extra="allow") - -class ModelPreferences(BaseModel): - """ - The server's preferences for model selection, requested by the client during +class ModelPreferences(MCPModel): + """The server's preferences for model selection, requested by the client during sampling. Because LLMs can vary along multiple dimensions, choosing the "best" model is @@ -1458,33 +1255,30 @@ class ModelPreferences(BaseModel): MAY still use the priorities to select from ambiguous matches. """ - costPriority: float | None = None + cost_priority: float | None = None """ How much to prioritize cost when selecting a model. 
A value of 0 means cost is not important, while a value of 1 means cost is the most important factor. """ - speedPriority: float | None = None + speed_priority: float | None = None """ How much to prioritize sampling speed (latency) when selecting a model. A value of 0 means speed is not important, while a value of 1 means speed is the most important factor. """ - intelligencePriority: float | None = None + intelligence_priority: float | None = None """ How much to prioritize intelligence and capabilities when selecting a model. A value of 0 means intelligence is not important, while a value of 1 means intelligence is the most important factor. """ - model_config = ConfigDict(extra="allow") - -class ToolChoice(BaseModel): - """ - Controls tool usage behavior during sampling. +class ToolChoice(MCPModel): + """Controls tool usage behavior during sampling. Allows the server to specify whether and how the LLM should use tools in its response. @@ -1498,42 +1292,39 @@ class ToolChoice(BaseModel): - "none": Model should not use tools """ - model_config = ConfigDict(extra="allow") - class CreateMessageRequestParams(RequestParams): """Parameters for creating a message.""" messages: list[SamplingMessage] - modelPreferences: ModelPreferences | None = None + model_preferences: ModelPreferences | None = None """ The server's preferences for which model to select. The client MAY ignore these preferences. """ - systemPrompt: str | None = None + system_prompt: str | None = None """An optional system prompt the server wants to use for sampling.""" - includeContext: IncludeContext | None = None + include_context: IncludeContext | None = None """ A request to include context from one or more MCP servers (including the caller), to be attached to the prompt. """ temperature: float | None = None - maxTokens: int + max_tokens: int """The maximum number of tokens to sample, as requested by the server.""" - stopSequences: list[str] | None = None + stop_sequences: list[str] | None = None metadata: dict[str, Any] | None = None """Optional metadata to pass through to the LLM provider.""" - tools: list["Tool"] | None = None + tools: list[Tool] | None = None """ Tool definitions for the LLM to use during sampling. Requires clientCapabilities.sampling.tools to be present. """ - toolChoice: ToolChoice | None = None + tool_choice: ToolChoice | None = None """ Controls tool usage behavior. Requires clientCapabilities.sampling.tools and the tools parameter to be present. """ - model_config = ConfigDict(extra="allow") class CreateMessageRequest(Request[CreateMessageRequestParams, Literal["sampling/createMessage"]]): @@ -1559,7 +1350,7 @@ class CreateMessageResult(Result): """Response content. Single content block (text, image, or audio).""" model: str """The name of the model that generated the message.""" - stopReason: StopReason | None = None + stop_reason: StopReason | None = None """The reason why sampling stopped, if known.""" @@ -1574,11 +1365,11 @@ class CreateMessageResultWithTools(Result): content: SamplingMessageContentBlock | list[SamplingMessageContentBlock] """ Response content. May be a single content block or an array. - May include ToolUseContent if stopReason is 'toolUse'. + May include ToolUseContent if stop_reason is 'toolUse'. """ model: str """The name of the model that generated the message.""" - stopReason: StopReason | None = None + stop_reason: StopReason | None = None """ The reason why sampling stopped, if known. 'toolUse' indicates the model wants to use a tool. 
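
[Editor's note — illustrative sketch, not part of the diff.] The renames in the hunks above (e.g. `stopReason` → `stop_reason`, `maxTokens` → `max_tokens`, `modelPreferences` → `model_preferences`) and the removal of the per-class `model_config = ConfigDict(extra="allow")` lines only work if the new `MCPModel` base supplies camelCase wire aliases and the shared config. `MCPModel` itself is not shown in this section, so the following is an assumed reconstruction of the pattern using pydantic's stock `to_camel` generator and hypothetical class names.

```python
# Hypothetical sketch only: MCPModel is not defined in this diff section, so this
# reconstructs the pattern the snake_case renames imply.
from pydantic import BaseModel, ConfigDict
from pydantic.alias_generators import to_camel


class CamelWireModel(BaseModel):
    """Snake_case attributes in Python, camelCase names on the wire."""

    model_config = ConfigDict(
        extra="allow",             # replaces the per-class ConfigDict(extra="allow") removed above
        alias_generator=to_camel,  # stop_reason <-> stopReason, max_tokens <-> maxTokens, ...
        populate_by_name=True,     # still accept the snake_case attribute names directly
    )


class StopInfo(CamelWireModel):
    stop_reason: str | None = None


info = StopInfo.model_validate({"stopReason": "toolUse"})  # camelCase JSON in
assert info.stop_reason == "toolUse"                       # snake_case attribute out
assert info.model_dump(by_alias=True, exclude_none=True) == {"stopReason": "toolUse"}
```
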
@@ -1591,45 +1382,36 @@ def content_as_list(self) -> list[SamplingMessageContentBlock]: return self.content if isinstance(self.content, list) else [self.content] -class ResourceTemplateReference(BaseModel): +class ResourceTemplateReference(MCPModel): """A reference to a resource or resource template definition.""" - type: Literal["ref/resource"] + type: Literal["ref/resource"] = "ref/resource" uri: str """The URI or URI template of the resource.""" - model_config = ConfigDict(extra="allow") -@deprecated("`ResourceReference` is deprecated, you should use `ResourceTemplateReference`.") -class ResourceReference(ResourceTemplateReference): - pass - - -class PromptReference(BaseModel): +class PromptReference(MCPModel): """Identifies a prompt.""" - type: Literal["ref/prompt"] + type: Literal["ref/prompt"] = "ref/prompt" name: str """The name of the prompt or prompt template""" - model_config = ConfigDict(extra="allow") -class CompletionArgument(BaseModel): +class CompletionArgument(MCPModel): """The argument's information for completion requests.""" name: str """The name of the argument""" value: str """The value of the argument to use for completion matching.""" - model_config = ConfigDict(extra="allow") -class CompletionContext(BaseModel): +class CompletionContext(MCPModel): """Additional, optional context for completions.""" arguments: dict[str, str] | None = None """Previously-resolved variables in a URI template or prompt.""" - model_config = ConfigDict(extra="allow") class CompleteRequestParams(RequestParams): @@ -1639,7 +1421,6 @@ class CompleteRequestParams(RequestParams): argument: CompletionArgument context: CompletionContext | None = None """Additional, optional context for completions""" - model_config = ConfigDict(extra="allow") class CompleteRequest(Request[CompleteRequestParams, Literal["completion/complete"]]): @@ -1649,7 +1430,7 @@ class CompleteRequest(Request[CompleteRequestParams, Literal["completion/complet params: CompleteRequestParams -class Completion(BaseModel): +class Completion(MCPModel): """Completion information.""" values: list[str] @@ -1659,12 +1440,11 @@ class Completion(BaseModel): The total number of completion options available. This can exceed the number of values actually sent in the response. """ - hasMore: bool | None = None + has_more: bool | None = None """ Indicates whether there are additional completion options beyond those provided in the current response, even if the exact total is unknown. """ - model_config = ConfigDict(extra="allow") class CompleteResult(Result): @@ -1674,8 +1454,7 @@ class CompleteResult(Result): class ListRootsRequest(Request[RequestParams | None, Literal["roots/list"]]): - """ - Sent from the server to request a list of root URIs from the client. Roots allow + """Sent from the server to request a list of root URIs from the client. Roots allow servers to ask for specific directories or files to operate on. A common example for roots is providing a set of repositories or directories a server should operate on. 
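
[Editor's note — illustrative sketch, not part of the diff.] Several request and resource models in the earlier hunks also switch `uri` from a constrained `AnyUrl` to a plain `str`, so the SDK no longer parses or normalizes resource URIs at the model boundary; interpretation is left to the server, per the docstrings. A small illustration of the difference, using hypothetical stand-in classes rather than the SDK models:

```python
# Hypothetical stand-ins for the uri typing change (AnyUrl -> str); not the SDK models.
from typing import Annotated

from pydantic import AnyUrl, BaseModel, UrlConstraints


class OldStyleParams(BaseModel):
    uri: Annotated[AnyUrl, UrlConstraints(host_required=False)]  # parsed/normalized URL object


class NewStyleParams(BaseModel):
    uri: str  # opaque string, interpretation left to the server


# With plain str the value round-trips untouched, which matters for opaque
# identifiers such as "memory://test" or templated URIs like "resource://test/{name}":
assert NewStyleParams(uri="resource://test/{name}").uri == "resource://test/{name}"
```
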
@@ -1688,7 +1467,7 @@ class ListRootsRequest(Request[RequestParams | None, Literal["roots/list"]]): params: RequestParams | None = None -class Root(BaseModel): +class Root(MCPModel): """Represents a root directory or file that the server can operate on.""" uri: FileUrl @@ -1708,12 +1487,10 @@ class Root(BaseModel): See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) for notes on _meta usage. """ - model_config = ConfigDict(extra="allow") class ListRootsResult(Result): - """ - The client's response to a roots/list request from the server. + """The client's response to a roots/list request from the server. This result contains an array of Root objects, each representing a root directory or file that the server can operate on. """ @@ -1724,8 +1501,7 @@ class ListRootsResult(Result): class RootsListChangedNotification( Notification[NotificationParams | None, Literal["notifications/roots/list_changed"]] ): - """ - A notification from the client to the server, informing it that the list of + """A notification from the client to the server, informing it that the list of roots has changed. This notification should be sent whenever the client adds, removes, or @@ -1740,7 +1516,7 @@ class RootsListChangedNotification( class CancelledNotificationParams(NotificationParams): """Parameters for cancellation notifications.""" - requestId: RequestId | None = None + request_id: RequestId | None = None """ The ID of the request to cancel. @@ -1751,12 +1527,9 @@ class CancelledNotificationParams(NotificationParams): reason: str | None = None """An optional string describing the reason for the cancellation.""" - model_config = ConfigDict(extra="allow") - class CancelledNotification(Notification[CancelledNotificationParams, Literal["notifications/cancelled"]]): - """ - This notification can be sent by either side to indicate that it is canceling a + """This notification can be sent by either side to indicate that it is canceling a previously-issued request. """ @@ -1767,17 +1540,14 @@ class CancelledNotification(Notification[CancelledNotificationParams, Literal["n class ElicitCompleteNotificationParams(NotificationParams): """Parameters for elicitation completion notifications.""" - elicitationId: str + elicitation_id: str """The unique identifier of the elicitation that was completed.""" - model_config = ConfigDict(extra="allow") - class ElicitCompleteNotification( Notification[ElicitCompleteNotificationParams, Literal["notifications/elicitation/complete"]] ): - """ - A notification from the server to the client, informing it that a URL mode + """A notification from the server to the client, informing it that a URL mode elicitation has been completed. 
Clients MAY use the notification to automatically retry requests that received a @@ -1790,7 +1560,7 @@ class ElicitCompleteNotification( params: ElicitCompleteNotificationParams -ClientRequestType: TypeAlias = ( +ClientRequest = ( PingRequest | InitializeRequest | CompleteRequest @@ -1809,23 +1579,17 @@ class ElicitCompleteNotification( | ListTasksRequest | CancelTaskRequest ) +client_request_adapter = TypeAdapter[ClientRequest](ClientRequest) -class ClientRequest(RootModel[ClientRequestType]): - pass - - -ClientNotificationType: TypeAlias = ( +ClientNotification = ( CancelledNotification | ProgressNotification | InitializedNotification | RootsListChangedNotification | TaskStatusNotification ) - - -class ClientNotification(RootModel[ClientNotificationType]): - pass +client_notification_adapter = TypeAdapter[ClientNotification](ClientNotification) # Type for elicitation schema - a JSON Schema dict @@ -1846,14 +1610,12 @@ class ElicitRequestFormParams(RequestParams): message: str """The message to present to the user describing what information is being requested.""" - requestedSchema: ElicitRequestedSchema + requested_schema: ElicitRequestedSchema """ A restricted subset of JSON Schema defining the structure of expected response. Only top-level properties are allowed, without nesting. """ - model_config = ConfigDict(extra="allow") - class ElicitRequestURLParams(RequestParams): """Parameters for URL mode elicitation requests. @@ -1871,14 +1633,12 @@ class ElicitRequestURLParams(RequestParams): url: str """The URL that the user should navigate to.""" - elicitationId: str + elicitation_id: str """ The ID of the elicitation, which must be unique within the context of the server. The client MUST treat this ID as an opaque value. """ - model_config = ConfigDict(extra="allow") - # Union type for elicitation request parameters ElicitRequestParams: TypeAlias = ElicitRequestURLParams | ElicitRequestFormParams @@ -1912,7 +1672,7 @@ class ElicitResult(Result): """ -class ElicitationRequiredErrorData(BaseModel): +class ElicitationRequiredErrorData(MCPModel): """Error data for URLElicitationRequiredError. 
Servers return this when a request cannot be processed until one or more @@ -1922,10 +1682,8 @@ class ElicitationRequiredErrorData(BaseModel): elicitations: list[ElicitRequestURLParams] """List of URL mode elicitations that must be completed.""" - model_config = ConfigDict(extra="allow") - -ClientResultType: TypeAlias = ( +ClientResult = ( EmptyResult | CreateMessageResult | CreateMessageResultWithTools @@ -1937,13 +1695,10 @@ class ElicitationRequiredErrorData(BaseModel): | CancelTaskResult | CreateTaskResult ) +client_result_adapter = TypeAdapter[ClientResult](ClientResult) -class ClientResult(RootModel[ClientResultType]): - pass - - -ServerRequestType: TypeAlias = ( +ServerRequest = ( PingRequest | CreateMessageRequest | ListRootsRequest @@ -1953,13 +1708,10 @@ class ClientResult(RootModel[ClientResultType]): | ListTasksRequest | CancelTaskRequest ) +server_request_adapter = TypeAdapter[ServerRequest](ServerRequest) -class ServerRequest(RootModel[ServerRequestType]): - pass - - -ServerNotificationType: TypeAlias = ( +ServerNotification = ( CancelledNotification | ProgressNotification | LoggingMessageNotification @@ -1970,13 +1722,10 @@ class ServerRequest(RootModel[ServerRequestType]): | ElicitCompleteNotification | TaskStatusNotification ) +server_notification_adapter = TypeAdapter[ServerNotification](ServerNotification) -class ServerNotification(RootModel[ServerNotificationType]): - pass - - -ServerResultType: TypeAlias = ( +ServerResult = ( EmptyResult | InitializeResult | CompleteResult @@ -1993,7 +1742,4 @@ class ServerNotification(RootModel[ServerNotificationType]): | CancelTaskResult | CreateTaskResult ) - - -class ServerResult(RootModel[ServerResultType]): - pass +server_result_adapter = TypeAdapter[ServerResult](ServerResult) diff --git a/src/mcp/types/jsonrpc.py b/src/mcp/types/jsonrpc.py new file mode 100644 index 0000000000..86066d80dc --- /dev/null +++ b/src/mcp/types/jsonrpc.py @@ -0,0 +1,83 @@ +"""This module follows the JSON-RPC 2.0 specification: https://www.jsonrpc.org/specification.""" + +from __future__ import annotations + +from typing import Annotated, Any, Literal + +from pydantic import BaseModel, Field, TypeAdapter + +RequestId = Annotated[int, Field(strict=True)] | str +"""The ID of a JSON-RPC request.""" + + +class JSONRPCRequest(BaseModel): + """A JSON-RPC request that expects a response.""" + + jsonrpc: Literal["2.0"] + id: RequestId + method: str + params: dict[str, Any] | None = None + + +class JSONRPCNotification(BaseModel): + """A JSON-RPC notification which does not expect a response.""" + + jsonrpc: Literal["2.0"] + method: str + params: dict[str, Any] | None = None + + +# TODO(Marcelo): This is actually not correct. A JSONRPCResponse is the union of a successful response and an error. 
+class JSONRPCResponse(BaseModel): + """A successful (non-error) response to a request.""" + + jsonrpc: Literal["2.0"] + id: RequestId + result: dict[str, Any] + + +# MCP-specific error codes in the range [-32000, -32099] +URL_ELICITATION_REQUIRED = -32042 +"""Error code indicating that a URL mode elicitation is required before the request can be processed.""" + +# SDK error codes +CONNECTION_CLOSED = -32000 +REQUEST_TIMEOUT = -32001 + +# Standard JSON-RPC error codes +PARSE_ERROR = -32700 +INVALID_REQUEST = -32600 +METHOD_NOT_FOUND = -32601 +INVALID_PARAMS = -32602 +INTERNAL_ERROR = -32603 + + +class ErrorData(BaseModel): + """Error information for JSON-RPC error responses.""" + + code: int + """The error type that occurred.""" + + message: str + """ + A short description of the error. The message SHOULD be limited to a concise single + sentence. + """ + + data: Any = None + """ + Additional information about the error. The value of this member is defined by the + sender (e.g. detailed error information, nested errors etc.). + """ + + +class JSONRPCError(BaseModel): + """A response to a request that indicates an error occurred.""" + + jsonrpc: Literal["2.0"] + id: str | int + error: ErrorData + + +JSONRPCMessage = JSONRPCRequest | JSONRPCNotification | JSONRPCResponse | JSONRPCError +jsonrpc_message_adapter: TypeAdapter[JSONRPCMessage] = TypeAdapter(JSONRPCMessage) diff --git a/tests/client/auth/extensions/test_client_credentials.py b/tests/client/auth/extensions/test_client_credentials.py index 6d134af742..a4faada4a8 100644 --- a/tests/client/auth/extensions/test_client_credentials.py +++ b/tests/client/auth/extensions/test_client_credentials.py @@ -1,4 +1,5 @@ import urllib.parse +import warnings import jwt import pytest @@ -60,8 +61,6 @@ async def callback_handler() -> tuple[str, str | None]: # pragma: no cover """Mock callback handler.""" return "test_auth_code", "test_state" - import warnings - with warnings.catch_warnings(): warnings.simplefilter("ignore", DeprecationWarning) return RFC7523OAuthClientProvider( diff --git a/tests/client/conftest.py b/tests/client/conftest.py index 1e5c4d524c..7314a37351 100644 --- a/tests/client/conftest.py +++ b/tests/client/conftest.py @@ -43,35 +43,33 @@ def clear(self) -> None: def get_client_requests(self, method: str | None = None) -> list[JSONRPCRequest]: # pragma: no cover """Get client-sent requests, optionally filtered by method.""" return [ - req.message.root + req.message for req in self.client.sent_messages - if isinstance(req.message.root, JSONRPCRequest) and (method is None or req.message.root.method == method) + if isinstance(req.message, JSONRPCRequest) and (method is None or req.message.method == method) ] def get_server_requests(self, method: str | None = None) -> list[JSONRPCRequest]: # pragma: no cover """Get server-sent requests, optionally filtered by method.""" return [ # pragma: no cover - req.message.root + req.message for req in self.server.sent_messages - if isinstance(req.message.root, JSONRPCRequest) and (method is None or req.message.root.method == method) + if isinstance(req.message, JSONRPCRequest) and (method is None or req.message.method == method) ] def get_client_notifications(self, method: str | None = None) -> list[JSONRPCNotification]: # pragma: no cover """Get client-sent notifications, optionally filtered by method.""" return [ - notif.message.root + notif.message for notif in self.client.sent_messages - if isinstance(notif.message.root, JSONRPCNotification) - and (method is None or notif.message.root.method 
== method) + if isinstance(notif.message, JSONRPCNotification) and (method is None or notif.message.method == method) ] def get_server_notifications(self, method: str | None = None) -> list[JSONRPCNotification]: # pragma: no cover """Get server-sent notifications, optionally filtered by method.""" return [ - notif.message.root + notif.message for notif in self.server.sent_messages - if isinstance(notif.message.root, JSONRPCNotification) - and (method is None or notif.message.root.method == method) + if isinstance(notif.message, JSONRPCNotification) and (method is None or notif.message.method == method) ] @@ -123,11 +121,13 @@ async def patched_create_streams(): yield (client_read, spy_client_write), (server_read, spy_server_write) # Apply the patch for the duration of the test + # Patch both locations since InMemoryTransport imports it directly with patch("mcp.shared.memory.create_client_server_memory_streams", patched_create_streams): - # Return a collection with helper methods - def get_spy_collection() -> StreamSpyCollection: - assert client_spy is not None, "client_spy was not initialized" - assert server_spy is not None, "server_spy was not initialized" - return StreamSpyCollection(client_spy, server_spy) - - yield get_spy_collection + with patch("mcp.client._memory.create_client_server_memory_streams", patched_create_streams): + # Return a collection with helper methods + def get_spy_collection() -> StreamSpyCollection: + assert client_spy is not None, "client_spy was not initialized" + assert server_spy is not None, "server_spy was not initialized" + return StreamSpyCollection(client_spy, server_spy) + + yield get_spy_collection diff --git a/tests/client/test_auth.py b/tests/client/test_auth.py index 593d5cfe06..2f531cc653 100644 --- a/tests/client/test_auth.py +++ b/tests/client/test_auth.py @@ -1,11 +1,9 @@ -""" -Tests for refactored OAuth client authentication implementation. 
-""" +"""Tests for refactored OAuth client authentication implementation.""" import base64 import time from unittest import mock -from urllib.parse import unquote +from urllib.parse import parse_qs, quote, unquote, urlparse import httpx import pytest @@ -27,6 +25,8 @@ is_valid_client_metadata_url, should_use_client_metadata_url, ) +from mcp.server.auth.routes import build_metadata +from mcp.server.auth.settings import ClientRegistrationOptions, RevocationOptions from mcp.shared.auth import ( OAuthClientInformationFull, OAuthClientMetadata, @@ -758,8 +758,6 @@ async def test_resource_param_included_with_recent_protocol_version(self, oauth_ content = request.content.decode() assert "resource=" in content # Check URL-encoded resource parameter - from urllib.parse import quote - expected_resource = quote(oauth_provider.context.get_resource_url(), safe="") assert f"resource={expected_resource}" in content @@ -1081,6 +1079,116 @@ async def test_auth_flow_no_unnecessary_retry_after_oauth( # Verify exactly one request was yielded (no double-sending) assert request_yields == 1, f"Expected 1 request yield, got {request_yields}" + @pytest.mark.anyio + async def test_token_exchange_accepts_201_status( + self, oauth_provider: OAuthClientProvider, mock_storage: MockTokenStorage + ): + """Test that token exchange accepts both 200 and 201 status codes.""" + # Ensure no tokens are stored + oauth_provider.context.current_tokens = None + oauth_provider.context.token_expiry_time = None + oauth_provider._initialized = True + + # Create a test request + test_request = httpx.Request("GET", "https://api.example.com/mcp") + + # Mock the auth flow + auth_flow = oauth_provider.async_auth_flow(test_request) + + # First request should be the original request without auth header + request = await auth_flow.__anext__() + assert "Authorization" not in request.headers + + # Send a 401 response to trigger the OAuth flow + response = httpx.Response( + 401, + headers={ + "WWW-Authenticate": 'Bearer resource_metadata="https://api.example.com/.well-known/oauth-protected-resource"' + }, + request=test_request, + ) + + # Next request should be to discover protected resource metadata + discovery_request = await auth_flow.asend(response) + assert discovery_request.method == "GET" + assert str(discovery_request.url) == "https://api.example.com/.well-known/oauth-protected-resource" + + # Send a successful discovery response with minimal protected resource metadata + discovery_response = httpx.Response( + 200, + content=b'{"resource": "https://api.example.com/mcp", "authorization_servers": ["https://auth.example.com"]}', + request=discovery_request, + ) + + # Next request should be to discover OAuth metadata + oauth_metadata_request = await auth_flow.asend(discovery_response) + assert oauth_metadata_request.method == "GET" + assert str(oauth_metadata_request.url).startswith("https://auth.example.com/") + assert "mcp-protocol-version" in oauth_metadata_request.headers + + # Send a successful OAuth metadata response + oauth_metadata_response = httpx.Response( + 200, + content=( + b'{"issuer": "https://auth.example.com", ' + b'"authorization_endpoint": "https://auth.example.com/authorize", ' + b'"token_endpoint": "https://auth.example.com/token", ' + b'"registration_endpoint": "https://auth.example.com/register"}' + ), + request=oauth_metadata_request, + ) + + # Next request should be to register client + registration_request = await auth_flow.asend(oauth_metadata_response) + assert registration_request.method == "POST" + assert 
str(registration_request.url) == "https://auth.example.com/register" + + # Send a successful registration response with 201 status + registration_response = httpx.Response( + 201, + content=b'{"client_id": "test_client_id", "client_secret": "test_client_secret", "redirect_uris": ["http://localhost:3030/callback"]}', + request=registration_request, + ) + + # Mock the authorization process + oauth_provider._perform_authorization_code_grant = mock.AsyncMock( + return_value=("test_auth_code", "test_code_verifier") + ) + + # Next request should be to exchange token + token_request = await auth_flow.asend(registration_response) + assert token_request.method == "POST" + assert str(token_request.url) == "https://auth.example.com/token" + assert "code=test_auth_code" in token_request.content.decode() + + # Send a successful token response with 201 status code (test both 200 and 201 are accepted) + token_response = httpx.Response( + 201, + content=( + b'{"access_token": "new_access_token", "token_type": "Bearer", "expires_in": 3600, ' + b'"refresh_token": "new_refresh_token"}' + ), + request=token_request, + ) + + # Final request should be the original request with auth header + final_request = await auth_flow.asend(token_response) + assert final_request.headers["Authorization"] == "Bearer new_access_token" + assert final_request.method == "GET" + assert str(final_request.url) == "https://api.example.com/mcp" + + # Send final success response to properly close the generator + final_response = httpx.Response(200, request=final_request) + try: + await auth_flow.asend(final_response) + except StopAsyncIteration: + pass # Expected - generator should complete + + # Verify tokens were stored + assert oauth_provider.context.current_tokens is not None + assert oauth_provider.context.current_tokens.access_token == "new_access_token" + assert oauth_provider.context.token_expiry_time is not None + @pytest.mark.anyio async def test_403_insufficient_scope_updates_scope_from_header( self, @@ -1116,8 +1224,6 @@ async def capture_redirect(url: str) -> None: "%3A", ":" ).replace("+", " ") # Extract state from redirect URL - from urllib.parse import parse_qs, urlparse - parsed = urlparse(url) params = parse_qs(parsed.query) captured_state = params.get("state", [None])[0] @@ -1226,9 +1332,6 @@ def test_build_metadata( registration_endpoint: str, revocation_endpoint: str, ): - from mcp.server.auth.routes import build_metadata - from mcp.server.auth.settings import ClientRegistrationOptions, RevocationOptions - metadata = build_metadata( issuer_url=AnyHttpUrl(issuer_url), service_documentation_url=AnyHttpUrl(service_documentation_url), diff --git a/tests/client/test_client.py b/tests/client/test_client.py new file mode 100644 index 0000000000..97319861bd --- /dev/null +++ b/tests/client/test_client.py @@ -0,0 +1,287 @@ +"""Tests for the unified Client class.""" + +from __future__ import annotations + +import anyio +import pytest +from inline_snapshot import snapshot + +import mcp.types as types +from mcp.client.client import Client +from mcp.server import Server +from mcp.server.fastmcp import FastMCP +from mcp.types import ( + CallToolResult, + EmptyResult, + GetPromptResult, + ListPromptsResult, + ListResourcesResult, + ListResourceTemplatesResult, + ListToolsResult, + Prompt, + PromptArgument, + PromptMessage, + PromptsCapability, + ReadResourceResult, + Resource, + ResourcesCapability, + ServerCapabilities, + TextContent, + TextResourceContents, + Tool, + ToolsCapability, +) + +pytestmark = pytest.mark.anyio + + 
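
[Editor's note — illustrative sketch, not part of the diff.] The union hunks above drop the `RootModel` wrapper classes (`ClientRequest`, `ClientNotification`, `ServerResult`, ...) in favour of plain unions plus module-level `TypeAdapter` instances, which is why the conftest spies now match on `req.message` directly instead of `req.message.root`. A toy sketch of that pattern, with simplified stand-in message types rather than the real MCP unions:

```python
# Toy illustration of the RootModel -> TypeAdapter change; the message classes
# here are simplified stand-ins, not the real MCP request types.
from typing import Literal

from pydantic import BaseModel, TypeAdapter


class PingRequest(BaseModel):
    method: Literal["ping"] = "ping"


class ListToolsRequest(BaseModel):
    method: Literal["tools/list"] = "tools/list"


ClientRequest = PingRequest | ListToolsRequest
client_request_adapter = TypeAdapter[ClientRequest](ClientRequest)

# Old style: ClientRequest was a RootModel, so callers unwrapped `.root`.
# New style: the adapter returns the matching union member directly.
req = client_request_adapter.validate_python({"method": "tools/list"})
assert isinstance(req, ListToolsRequest)  # no `.root` indirection needed
```
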
+@pytest.fixture +def simple_server() -> Server: + """Create a simple MCP server for testing.""" + server = Server(name="test_server") + + @server.list_resources() + async def handle_list_resources(): + return [Resource(uri="memory://test", name="Test Resource", description="A test resource")] + + @server.subscribe_resource() + async def handle_subscribe_resource(uri: str): + pass + + @server.unsubscribe_resource() + async def handle_unsubscribe_resource(uri: str): + pass + + @server.set_logging_level() + async def handle_set_logging_level(level: str): + pass + + @server.completion() + async def handle_completion( + ref: types.PromptReference | types.ResourceTemplateReference, + argument: types.CompletionArgument, + context: types.CompletionContext | None, + ) -> types.Completion | None: + return types.Completion(values=[]) + + return server + + +@pytest.fixture +def app() -> FastMCP: + """Create a FastMCP server for testing.""" + server = FastMCP("test") + + @server.tool() + def greet(name: str) -> str: + """Greet someone by name.""" + return f"Hello, {name}!" + + @server.resource("test://resource") + def test_resource() -> str: + """A test resource.""" + return "Test content" + + @server.prompt() + def greeting_prompt(name: str) -> str: + """A greeting prompt.""" + return f"Please greet {name} warmly." + + return server + + +async def test_client_is_initialized(app: FastMCP): + """Test that the client is initialized after entering context.""" + async with Client(app) as client: + assert client.server_capabilities == snapshot( + ServerCapabilities( + experimental={}, + prompts=PromptsCapability(list_changed=False), + resources=ResourcesCapability(subscribe=False, list_changed=False), + tools=ToolsCapability(list_changed=False), + ) + ) + + +async def test_client_with_simple_server(simple_server: Server): + """Test that from_server works with a basic Server instance.""" + async with Client(simple_server) as client: + resources = await client.list_resources() + assert resources == snapshot( + ListResourcesResult( + resources=[Resource(name="Test Resource", uri="memory://test", description="A test resource")] + ) + ) + + +async def test_client_send_ping(app: FastMCP): + async with Client(app) as client: + result = await client.send_ping() + assert result == snapshot(EmptyResult()) + + +async def test_client_list_tools(app: FastMCP): + async with Client(app) as client: + result = await client.list_tools() + assert result == snapshot( + ListToolsResult( + tools=[ + Tool( + name="greet", + description="Greet someone by name.", + input_schema={ + "properties": {"name": {"title": "Name", "type": "string"}}, + "required": ["name"], + "title": "greetArguments", + "type": "object", + }, + output_schema={ + "properties": {"result": {"title": "Result", "type": "string"}}, + "required": ["result"], + "title": "greetOutput", + "type": "object", + }, + ) + ] + ) + ) + + +async def test_client_call_tool(app: FastMCP): + async with Client(app) as client: + result = await client.call_tool("greet", {"name": "World"}) + assert result == snapshot( + CallToolResult( + content=[TextContent(text="Hello, World!")], + structured_content={"result": "Hello, World!"}, + ) + ) + + +async def test_read_resource(app: FastMCP): + """Test reading a resource.""" + async with Client(app) as client: + result = await client.read_resource("test://resource") + assert result == snapshot( + ReadResourceResult( + contents=[TextResourceContents(uri="test://resource", mime_type="text/plain", text="Test content")] + ) + ) + + +async 
def test_get_prompt(app: FastMCP): + """Test getting a prompt.""" + async with Client(app) as client: + result = await client.get_prompt("greeting_prompt", {"name": "Alice"}) + assert result == snapshot( + GetPromptResult( + description="A greeting prompt.", + messages=[PromptMessage(role="user", content=TextContent(text="Please greet Alice warmly."))], + ) + ) + + +def test_client_session_property_before_enter(app: FastMCP): + """Test that accessing session before context manager raises RuntimeError.""" + client = Client(app) + with pytest.raises(RuntimeError, match="Client must be used within an async context manager"): + client.session + + +async def test_client_reentry_raises_runtime_error(app: FastMCP): + """Test that reentering a client raises RuntimeError.""" + async with Client(app) as client: + with pytest.raises(RuntimeError, match="Client is already entered"): + await client.__aenter__() + + +async def test_client_send_progress_notification(): + """Test sending progress notification.""" + received_from_client = None + event = anyio.Event() + server = Server(name="test_server") + + @server.progress_notification() + async def handle_progress_notification( + progress_token: str | int, + progress: float = 0.0, + total: float | None = None, + message: str | None = None, + ) -> None: + nonlocal received_from_client + received_from_client = {"progress_token": progress_token, "progress": progress} + event.set() + + async with Client(server) as client: + await client.send_progress_notification(progress_token="token123", progress=50.0) + await event.wait() + assert received_from_client == snapshot({"progress_token": "token123", "progress": 50.0}) + + +async def test_client_subscribe_resource(simple_server: Server): + async with Client(simple_server) as client: + result = await client.subscribe_resource("memory://test") + assert result == snapshot(EmptyResult()) + + +async def test_client_unsubscribe_resource(simple_server: Server): + async with Client(simple_server) as client: + result = await client.unsubscribe_resource("memory://test") + assert result == snapshot(EmptyResult()) + + +async def test_client_set_logging_level(simple_server: Server): + """Test setting logging level.""" + async with Client(simple_server) as client: + result = await client.set_logging_level("debug") + assert result == snapshot(EmptyResult()) + + +async def test_client_list_resources_with_params(app: FastMCP): + """Test listing resources with params parameter.""" + async with Client(app) as client: + result = await client.list_resources() + assert result == snapshot( + ListResourcesResult( + resources=[ + Resource( + name="test_resource", + uri="test://resource", + description="A test resource.", + mime_type="text/plain", + ) + ] + ) + ) + + +async def test_client_list_resource_templates(app: FastMCP): + """Test listing resource templates with params parameter.""" + async with Client(app) as client: + result = await client.list_resource_templates() + assert result == snapshot(ListResourceTemplatesResult(resource_templates=[])) + + +async def test_list_prompts(app: FastMCP): + """Test listing prompts with params parameter.""" + async with Client(app) as client: + result = await client.list_prompts() + assert result == snapshot( + ListPromptsResult( + prompts=[ + Prompt( + name="greeting_prompt", + description="A greeting prompt.", + arguments=[PromptArgument(name="name", required=True)], + ) + ] + ) + ) + + +async def test_complete_with_prompt_reference(simple_server: Server): + """Test getting completions for a 
prompt argument.""" + async with Client(simple_server) as client: + ref = types.PromptReference(type="ref/prompt", name="test_prompt") + result = await client.complete(ref=ref, argument={"name": "arg", "value": "test"}) + assert result == snapshot(types.CompleteResult(completion=types.Completion(values=[]))) diff --git a/tests/client/test_http_unicode.py b/tests/client/test_http_unicode.py index ec38f35838..f368c30182 100644 --- a/tests/client/test_http_unicode.py +++ b/tests/client/test_http_unicode.py @@ -1,5 +1,4 @@ -""" -Tests for Unicode handling in streamable HTTP transport. +"""Tests for Unicode handling in streamable HTTP transport. Verifies that Unicode text is correctly transmitted and received in both directions (server→client and client→server) using the streamable HTTP transport. @@ -7,12 +6,20 @@ import multiprocessing import socket -from collections.abc import Generator +from collections.abc import AsyncGenerator, Generator +from contextlib import asynccontextmanager +from typing import Any import pytest +from starlette.applications import Starlette +from starlette.routing import Mount +import mcp.types as types from mcp.client.session import ClientSession from mcp.client.streamable_http import streamable_http_client +from mcp.server import Server +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from mcp.types import TextContent, Tool from tests.test_helpers import wait_for_server # Test constants with various Unicode characters @@ -37,19 +44,7 @@ def run_unicode_server(port: int) -> None: # pragma: no cover """Run the Unicode test server in a separate process.""" - # Import inside the function since this runs in a separate process - from collections.abc import AsyncGenerator - from contextlib import asynccontextmanager - from typing import Any - import uvicorn - from starlette.applications import Starlette - from starlette.routing import Mount - - import mcp.types as types - from mcp.server import Server - from mcp.server.streamable_http_manager import StreamableHTTPSessionManager - from mcp.types import TextContent, Tool # Need to recreate the server setup in this process server = Server(name="unicode_test_server") @@ -61,7 +56,7 @@ async def list_tools() -> list[Tool]: Tool( name="echo_unicode", description="🔤 Echo Unicode text - Hello 👋 World 🌍 - Testing 🧪 Unicode ✨", - inputSchema={ + input_schema={ "type": "object", "properties": { "text": {"type": "string", "description": "Text to echo back"}, diff --git a/tests/client/test_list_methods_cursor.py b/tests/client/test_list_methods_cursor.py index 94a72c34e2..7d4124bbd5 100644 --- a/tests/client/test_list_methods_cursor.py +++ b/tests/client/test_list_methods_cursor.py @@ -3,9 +3,9 @@ import pytest import mcp.types as types +from mcp import Client from mcp.server import Server from mcp.server.fastmcp import FastMCP -from mcp.shared.memory import create_connected_server_and_client_session as create_session from mcp.types import ListToolsRequest, ListToolsResult from .conftest import StreamSpyCollection @@ -18,30 +18,28 @@ async def full_featured_server(): """Create a server with tools, resources, prompts, and templates.""" server = FastMCP("test") - @server.tool(name="test_tool_1") - async def test_tool_1() -> str: # pragma: no cover - """First test tool""" - return "Result 1" + # pragma: no cover on handlers below - these exist only to register items with the + # server so list_* methods return results. 
The handlers themselves are never called + # because these tests only verify pagination/cursor behavior, not tool/resource invocation. + @server.tool() + def greet(name: str) -> str: # pragma: no cover + """Greet someone by name.""" + return f"Hello, {name}!" - @server.tool(name="test_tool_2") - async def test_tool_2() -> str: # pragma: no cover - """Second test tool""" - return "Result 2" + @server.resource("test://resource") + def test_resource() -> str: # pragma: no cover + """A test resource.""" + return "Test content" - @server.resource("resource://test/data") - async def test_resource() -> str: # pragma: no cover - """Test resource""" - return "Test data" + @server.resource("test://template/{id}") + def test_template(id: str) -> str: # pragma: no cover + """A test resource template.""" + return f"Template content for {id}" @server.prompt() - async def test_prompt(name: str) -> str: # pragma: no cover - """Test prompt""" - return f"Hello, {name}!" - - @server.resource("resource://test/{name}") - async def test_template(name: str) -> str: # pragma: no cover - """Test resource template""" - return f"Data for {name}" + def greeting_prompt(name: str) -> str: # pragma: no cover + """A greeting prompt.""" + return f"Please greet {name}." return server @@ -55,174 +53,70 @@ async def test_template(name: str) -> str: # pragma: no cover ("list_resource_templates", "resources/templates/list"), ], ) -@pytest.mark.filterwarnings("ignore::DeprecationWarning") -async def test_list_methods_cursor_parameter( +async def test_list_methods_params_parameter( stream_spy: Callable[[], StreamSpyCollection], full_featured_server: FastMCP, method_name: str, request_method: str, ): - """Test that the cursor parameter is accepted and correctly passed to the server. + """Test that the params parameter is accepted and correctly passed to the server. 
Covers: list_tools, list_resources, list_prompts, list_resource_templates See: https://modelcontextprotocol.io/specification/2025-03-26/server/utilities/pagination#request-format """ - async with create_session(full_featured_server._mcp_server) as client_session: + async with Client(full_featured_server) as client: spies = stream_spy() - # Test without cursor parameter (omitted) - method = getattr(client_session, method_name) + # Test without params (omitted) + method = getattr(client, method_name) _ = await method() requests = spies.get_client_requests(method=request_method) assert len(requests) == 1 - assert requests[0].params is None - - spies.clear() - - # Test with cursor=None - _ = await method(cursor=None) - requests = spies.get_client_requests(method=request_method) - assert len(requests) == 1 - assert requests[0].params is None + assert requests[0].params is None or "cursor" not in requests[0].params spies.clear() - # Test with cursor as string - _ = await method(cursor="some_cursor_value") + # Test with params containing cursor + _ = await method(cursor="from_params") requests = spies.get_client_requests(method=request_method) assert len(requests) == 1 assert requests[0].params is not None - assert requests[0].params["cursor"] == "some_cursor_value" + assert requests[0].params["cursor"] == "from_params" spies.clear() - # Test with empty string cursor - _ = await method(cursor="") - requests = spies.get_client_requests(method=request_method) - assert len(requests) == 1 - assert requests[0].params is not None - assert requests[0].params["cursor"] == "" - - -@pytest.mark.parametrize( - "method_name,request_method", - [ - ("list_tools", "tools/list"), - ("list_resources", "resources/list"), - ("list_prompts", "prompts/list"), - ("list_resource_templates", "resources/templates/list"), - ], -) -async def test_list_methods_params_parameter( - stream_spy: Callable[[], StreamSpyCollection], - full_featured_server: FastMCP, - method_name: str, - request_method: str, -): - """Test that the params parameter works correctly for list methods. - - Covers: list_tools, list_resources, list_prompts, list_resource_templates - - This tests the new params parameter API (non-deprecated) to ensure - it correctly handles all parameter combinations. 
- """ - async with create_session(full_featured_server._mcp_server) as client_session: - spies = stream_spy() - method = getattr(client_session, method_name) - - # Test without params parameter (omitted) + # Test with empty params _ = await method() requests = spies.get_client_requests(method=request_method) assert len(requests) == 1 - assert requests[0].params is None - - spies.clear() - - # Test with params=None - _ = await method(params=None) - requests = spies.get_client_requests(method=request_method) - assert len(requests) == 1 - assert requests[0].params is None - - spies.clear() - - # Test with empty params (for strict servers) - _ = await method(params=types.PaginatedRequestParams()) - requests = spies.get_client_requests(method=request_method) - assert len(requests) == 1 - assert requests[0].params is not None - assert requests[0].params.get("cursor") is None - - spies.clear() - - # Test with params containing cursor - _ = await method(params=types.PaginatedRequestParams(cursor="some_cursor_value")) - requests = spies.get_client_requests(method=request_method) - assert len(requests) == 1 - assert requests[0].params is not None - assert requests[0].params["cursor"] == "some_cursor_value" + # Empty params means no cursor + assert requests[0].params is None or "cursor" not in requests[0].params -@pytest.mark.parametrize( - "method_name", - [ - "list_tools", - "list_resources", - "list_prompts", - "list_resource_templates", - ], -) -async def test_list_methods_raises_error_when_both_cursor_and_params_provided( +async def test_list_tools_with_strict_server_validation( full_featured_server: FastMCP, - method_name: str, ): - """Test that providing both cursor and params raises ValueError. + """Test pagination with a server that validates request format strictly.""" + async with Client(full_featured_server) as client: + result = await client.list_tools() + assert isinstance(result, ListToolsResult) + assert len(result.tools) > 0 - Covers: list_tools, list_resources, list_prompts, list_resource_templates - When both cursor and params are provided, a ValueError should be raised - to prevent ambiguity. - """ - async with create_session(full_featured_server._mcp_server) as client_session: - method = getattr(client_session, method_name) - - # Call with both cursor and params - should raise ValueError - with pytest.raises(ValueError, match="Cannot specify both cursor and params"): - await method( - cursor="old_cursor", - params=types.PaginatedRequestParams(cursor="new_cursor"), - ) - - -async def test_list_tools_with_strict_server_validation(): - """Test that list_tools works with strict servers require a params field, - even if it is empty. +async def test_list_tools_with_lowlevel_server(): + """Test that list_tools works with a lowlevel Server using params.""" + server = Server("test-lowlevel") - Some MCP servers may implement strict JSON-RPC validation that requires - the params field to always be present in requests, even if empty {}. - - This test ensures such servers are supported by the client SDK for list_resources - requests without a cursor. 
- """ + @server.list_tools() + async def handle_list_tools(request: ListToolsRequest) -> ListToolsResult: + # Echo back what cursor we received in the tool description + cursor = request.params.cursor if request.params else None + return ListToolsResult(tools=[types.Tool(name="test_tool", description=f"cursor={cursor}", input_schema={})]) - server = Server("strict_server") + async with Client(server) as client: + result = await client.list_tools() + assert result.tools[0].description == "cursor=None" - @server.list_tools() - async def handle_list_tools(request: ListToolsRequest) -> ListToolsResult: # pragma: no cover - """Strict handler that validates params field exists""" - - # Simulate strict server validation - if request.params is None: - raise ValueError( - "Strict server validation failed: params field must be present. " - "Expected params: {} for requests without cursor." - ) - - # Return empty tools list - return ListToolsResult(tools=[]) - - async with create_session(server) as client_session: - # Use params to explicitly send params: {} for strict server compatibility - result = await client_session.list_tools(params=types.PaginatedRequestParams()) - assert result is not None + result = await client.list_tools(cursor="page2") + assert result.tools[0].description == "cursor=page2" diff --git a/tests/client/test_list_roots_callback.py b/tests/client/test_list_roots_callback.py index 0da0fff07a..a8f8823fe5 100644 --- a/tests/client/test_list_roots_callback.py +++ b/tests/client/test_list_roots_callback.py @@ -1,20 +1,17 @@ import pytest from pydantic import FileUrl +from mcp import Client from mcp.client.session import ClientSession +from mcp.server.fastmcp import FastMCP from mcp.server.fastmcp.server import Context from mcp.server.session import ServerSession from mcp.shared.context import RequestContext -from mcp.shared.memory import ( - create_connected_server_and_client_session as create_session, -) from mcp.types import ListRootsResult, Root, TextContent @pytest.mark.anyio async def test_list_roots_callback(): - from mcp.server.fastmcp import FastMCP - server = FastMCP("test") callback_return = ListRootsResult( @@ -42,17 +39,17 @@ async def test_list_roots(context: Context[ServerSession, None], message: str): return True # Test with list_roots callback - async with create_session(server._mcp_server, list_roots_callback=list_roots_callback) as client_session: + async with Client(server, list_roots_callback=list_roots_callback) as client: # Make a request to trigger sampling callback - result = await client_session.call_tool("test_list_roots", {"message": "test message"}) - assert result.isError is False + result = await client.call_tool("test_list_roots", {"message": "test message"}) + assert result.is_error is False assert isinstance(result.content[0], TextContent) assert result.content[0].text == "true" # Test without list_roots callback - async with create_session(server._mcp_server) as client_session: + async with Client(server) as client: # Make a request to trigger sampling callback - result = await client_session.call_tool("test_list_roots", {"message": "test message"}) - assert result.isError is True + result = await client.call_tool("test_list_roots", {"message": "test message"}) + assert result.is_error is True assert isinstance(result.content[0], TextContent) assert result.content[0].text == "Error executing tool test_list_roots: List roots not supported" diff --git a/tests/client/test_logging_callback.py b/tests/client/test_logging_callback.py index 
5f5d534123..687efca71e 100644 --- a/tests/client/test_logging_callback.py +++ b/tests/client/test_logging_callback.py @@ -1,11 +1,10 @@ -from typing import Literal +from typing import Any, Literal import pytest import mcp.types as types -from mcp.shared.memory import ( - create_connected_server_and_client_session as create_session, -) +from mcp import Client +from mcp.server.fastmcp import FastMCP from mcp.shared.session import RequestResponder from mcp.types import ( LoggingMessageNotificationParams, @@ -23,8 +22,6 @@ async def __call__(self, params: LoggingMessageNotificationParams) -> None: @pytest.mark.anyio async def test_logging_callback(): - from mcp.server.fastmcp import FastMCP - server = FastMCP("test") logging_collector = LoggingCollector() @@ -47,6 +44,23 @@ async def test_tool_with_log( ) return True + @server.tool("test_tool_with_log_extra") + async def test_tool_with_log_extra( + message: str, + level: Literal["debug", "info", "warning", "error"], + logger: str, + extra_string: str, + extra_dict: dict[str, Any], + ) -> bool: + """Send a log notification to the client with extra fields.""" + await server.get_context().log( + level=level, + message=message, + logger_name=logger, + extra={"extra_string": extra_string, "extra_dict": extra_dict}, + ) + return True + # Create a message handler to catch exceptions async def message_handler( message: RequestResponder[types.ServerRequest, types.ClientResult] | types.ServerNotification | Exception, @@ -54,19 +68,19 @@ async def message_handler( if isinstance(message, Exception): # pragma: no cover raise message - async with create_session( - server._mcp_server, + async with Client( + server, logging_callback=logging_collector, message_handler=message_handler, - ) as client_session: + ) as client: # First verify our test tool works - result = await client_session.call_tool("test_tool", {}) - assert result.isError is False + result = await client.call_tool("test_tool", {}) + assert result.is_error is False assert isinstance(result.content[0], TextContent) assert result.content[0].text == "true" # Now send a log message via our tool - log_result = await client_session.call_tool( + log_result = await client.call_tool( "test_tool_with_log", { "message": "Test log message", @@ -74,10 +88,30 @@ async def message_handler( "logger": "test_logger", }, ) - assert log_result.isError is False - assert len(logging_collector.log_messages) == 1 + log_result_with_extra = await client.call_tool( + "test_tool_with_log_extra", + { + "message": "Test log message", + "level": "info", + "logger": "test_logger", + "extra_string": "example", + "extra_dict": {"a": 1, "b": 2, "c": 3}, + }, + ) + assert log_result.is_error is False + assert log_result_with_extra.is_error is False + assert len(logging_collector.log_messages) == 2 # Create meta object with related_request_id added dynamically log = logging_collector.log_messages[0] assert log.level == "info" assert log.logger == "test_logger" assert log.data == "Test log message" + + log_with_extra = logging_collector.log_messages[1] + assert log_with_extra.level == "info" + assert log_with_extra.logger == "test_logger" + assert log_with_extra.data == { + "message": "Test log message", + "extra_string": "example", + "extra_dict": {"a": 1, "b": 2, "c": 3}, + } diff --git a/tests/client/test_notification_response.py b/tests/client/test_notification_response.py index 7500abee73..06d893ac68 100644 --- a/tests/client/test_notification_response.py +++ b/tests/client/test_notification_response.py @@ -1,5 +1,4 @@ -""" 
-Tests for StreamableHTTP client transport with non-SDK servers.
+"""Tests for StreamableHTTP client transport with non-SDK servers.

 These tests verify client behavior when interacting with servers that
 don't follow SDK conventions.
@@ -20,7 +19,7 @@
 from mcp import ClientSession, types
 from mcp.client.streamable_http import streamable_http_client
 from mcp.shared.session import RequestResponder
-from mcp.types import ClientNotification, RootsListChangedNotification
+from mcp.types import RootsListChangedNotification
 from tests.test_helpers import wait_for_server


@@ -110,8 +109,7 @@ def non_sdk_server(non_sdk_server_port: int) -> Generator[None, None, None]:

 @pytest.mark.anyio
 async def test_non_compliant_notification_response(non_sdk_server: None, non_sdk_server_port: int) -> None:
-    """
-    This test verifies that the client ignores unexpected responses to notifications: the spec states they should
+    """This test verifies that the client ignores unexpected responses to notifications: the spec states they should
     either be 202 + no response body, or 4xx + optional error body
     (https://modelcontextprotocol.io/specification/2025-06-18/basic/transports#sending-messages-to-the-server),
     but some servers wrongly return other 2xx codes (e.g. 204). For now we simply ignore unexpected responses
@@ -137,9 +135,7 @@ async def message_handler(  # pragma: no cover
         await session.initialize()

         # The test server returns a 204 instead of the expected 202
-        await session.send_notification(
-            ClientNotification(RootsListChangedNotification(method="notifications/roots/list_changed"))
-        )
+        await session.send_notification(RootsListChangedNotification(method="notifications/roots/list_changed"))

         if returned_exception:  # pragma: no cover
             pytest.fail(f"Server encountered an exception: {returned_exception}")
diff --git a/tests/client/test_output_schema_validation.py b/tests/client/test_output_schema_validation.py
index e4a06b7f82..714352ad55 100644
--- a/tests/client/test_output_schema_validation.py
+++ b/tests/client/test_output_schema_validation.py
@@ -1,33 +1,28 @@
+import inspect
 import logging
 from contextlib import contextmanager
 from typing import Any
 from unittest.mock import patch

+import jsonschema
 import pytest

+from mcp import Client
 from mcp.server.lowlevel import Server
-from mcp.shared.memory import (
-    create_connected_server_and_client_session as client_session,
-)
 from mcp.types import Tool


 @contextmanager
 def bypass_server_output_validation():
-    """
-    Context manager that bypasses server-side output validation.
+    """Context manager that bypasses server-side output validation.

     This simulates a malicious or non-compliant server that doesn't
     validate its outputs, allowing us to test client-side validation.
     """
-    import jsonschema
-
     # Save the original validate function
     original_validate = jsonschema.validate

     # Create a mock that tracks which module is calling it
     def selective_mock(instance: Any = None, schema: Any = None, *args: Any, **kwargs: Any) -> None:
-        import inspect
-
         # Check the call stack to see where this is being called from
         for frame_info in inspect.stack():
             # If called from the server module, skip validation
@@ -66,8 +61,8 @@ async def list_tools():
                 Tool(
                     name="get_user",
                     description="Get user data",
-                    inputSchema={"type": "object"},
-                    outputSchema=output_schema,
+                    input_schema={"type": "object"},
+                    output_schema=output_schema,
                 )
             ]

@@ -79,7 +74,7 @@ async def call_tool(name: str, arguments: dict[str, Any]):

         # Test that client validates the structured content
         with bypass_server_output_validation():
-            async with client_session(server) as client:
+            async with Client(server) as client:
                 # The client validates structured content and should raise an error
                 with pytest.raises(RuntimeError) as exc_info:
                     await client.call_tool("get_user", {})
@@ -105,8 +100,8 @@ async def list_tools():
                 Tool(
                     name="calculate",
                     description="Calculate something",
-                    inputSchema={"type": "object"},
-                    outputSchema=output_schema,
+                    input_schema={"type": "object"},
+                    output_schema=output_schema,
                 )
             ]

@@ -116,7 +111,7 @@ async def call_tool(name: str, arguments: dict[str, Any]):
             return {"result": "not_a_number"}  # Invalid: should be int

         with bypass_server_output_validation():
-            async with client_session(server) as client:
+            async with Client(server) as client:
                 # The client validates structured content and should raise an error
                 with pytest.raises(RuntimeError) as exc_info:
                     await client.call_tool("calculate", {})
@@ -136,8 +131,8 @@ async def list_tools():
                 Tool(
                     name="get_scores",
                     description="Get scores",
-                    inputSchema={"type": "object"},
-                    outputSchema=output_schema,
+                    input_schema={"type": "object"},
+                    output_schema=output_schema,
                 )
             ]

@@ -147,7 +142,7 @@ async def call_tool(name: str, arguments: dict[str, Any]):
             return {"alice": "100", "bob": "85"}  # Invalid: values should be int

         with bypass_server_output_validation():
-            async with client_session(server) as client:
+            async with Client(server) as client:
                 # The client validates structured content and should raise an error
                 with pytest.raises(RuntimeError) as exc_info:
                     await client.call_tool("get_scores", {})
@@ -171,8 +166,8 @@ async def list_tools():
                 Tool(
                     name="get_person",
                     description="Get person data",
-                    inputSchema={"type": "object"},
-                    outputSchema=output_schema,
+                    input_schema={"type": "object"},
+                    output_schema=output_schema,
                 )
             ]

@@ -182,7 +177,7 @@ async def call_tool(name: str, arguments: dict[str, Any]):
             return {"name": "John", "age": 30}  # Missing required 'email'

         with bypass_server_output_validation():
-            async with client_session(server) as client:
+            async with Client(server) as client:
                 # The client validates structured content and should raise an error
                 with pytest.raises(RuntimeError) as exc_info:
                     await client.call_tool("get_person", {})
@@ -190,7 +185,7 @@ async def call_tool(name: str, arguments: dict[str, Any]):

     @pytest.mark.anyio
     async def test_tool_not_listed_warning(self, caplog: pytest.LogCaptureFixture):
-        """Test that client logs warning when tool is not in list_tools but has outputSchema"""
+        """Test that client logs warning when tool is not in list_tools but has output_schema"""
         server = Server("test-server")

         @server.list_tools()
@@ -207,11 +202,11 @@ async def call_tool(name: str, arguments: dict[str, Any]) -> dict[str, Any]:
caplog.set_level(logging.WARNING) with bypass_server_output_validation(): - async with client_session(server) as client: + async with Client(server) as client: # Call a tool that wasn't listed result = await client.call_tool("mystery_tool", {}) - assert result.structuredContent == {"result": 42} - assert result.isError is False + assert result.structured_content == {"result": 42} + assert result.is_error is False # Check that warning was logged assert "Tool mystery_tool not listed" in caplog.text diff --git a/tests/client/test_resource_cleanup.py b/tests/client/test_resource_cleanup.py index cc6c5059fd..c7bf8fafa4 100644 --- a/tests/client/test_resource_cleanup.py +++ b/tests/client/test_resource_cleanup.py @@ -3,6 +3,7 @@ import anyio import pytest +from pydantic import TypeAdapter from mcp.shared.message import SessionMessage from mcp.shared.session import BaseSession, RequestId, SendResultT @@ -11,8 +12,7 @@ @pytest.mark.anyio async def test_send_request_stream_cleanup(): - """ - Test that send_request properly cleans up streams when an exception occurs. + """Test that send_request properly cleans up streams when an exception occurs. This test mocks out most of the session functionality to focus on stream cleanup. """ @@ -24,20 +24,23 @@ async def _send_response( ) -> None: # pragma: no cover pass + @property + def _receive_request_adapter(self) -> TypeAdapter[Any]: + return TypeAdapter(object) # pragma: no cover + + @property + def _receive_notification_adapter(self) -> TypeAdapter[Any]: + return TypeAdapter(object) # pragma: no cover + # Create streams write_stream_send, write_stream_receive = anyio.create_memory_object_stream[SessionMessage](1) read_stream_send, read_stream_receive = anyio.create_memory_object_stream[SessionMessage](1) # Create the session - session = TestSession( - read_stream_receive, - write_stream_send, - object, # Request type doesn't matter for this test - object, # Notification type doesn't matter for this test - ) + session = TestSession(read_stream_receive, write_stream_send) # Create a test request - request = ClientRequest(PingRequest()) + request = PingRequest() # Patch the _write_stream.send method to raise an exception async def mock_send(*args: Any, **kwargs: Any): diff --git a/tests/client/test_sampling_callback.py b/tests/client/test_sampling_callback.py index 733364a767..1394e665ca 100644 --- a/tests/client/test_sampling_callback.py +++ b/tests/client/test_sampling_callback.py @@ -1,10 +1,9 @@ import pytest +from mcp import Client from mcp.client.session import ClientSession +from mcp.server.fastmcp import FastMCP from mcp.shared.context import RequestContext -from mcp.shared.memory import ( - create_connected_server_and_client_session as create_session, -) from mcp.types import ( CreateMessageRequestParams, CreateMessageResult, @@ -17,15 +16,13 @@ @pytest.mark.anyio async def test_sampling_callback(): - from mcp.server.fastmcp import FastMCP - server = FastMCP("test") callback_return = CreateMessageResult( role="assistant", content=TextContent(type="text", text="This is a response from the sampling callback"), model="test-model", - stopReason="endTurn", + stop_reason="endTurn", ) async def sampling_callback( @@ -44,18 +41,18 @@ async def test_sampling_tool(message: str): return True # Test with sampling callback - async with create_session(server._mcp_server, sampling_callback=sampling_callback) as client_session: + async with Client(server, sampling_callback=sampling_callback) as client: # Make a request to trigger sampling callback - result = 
await client_session.call_tool("test_sampling", {"message": "Test message for sampling"}) - assert result.isError is False + result = await client.call_tool("test_sampling", {"message": "Test message for sampling"}) + assert result.is_error is False assert isinstance(result.content[0], TextContent) assert result.content[0].text == "true" # Test without sampling callback - async with create_session(server._mcp_server) as client_session: + async with Client(server) as client: # Make a request to trigger sampling callback - result = await client_session.call_tool("test_sampling", {"message": "Test message for sampling"}) - assert result.isError is True + result = await client.call_tool("test_sampling", {"message": "Test message for sampling"}) + assert result.is_error is True assert isinstance(result.content[0], TextContent) assert result.content[0].text == "Error executing tool test_sampling: Sampling not supported" @@ -63,8 +60,6 @@ async def test_sampling_tool(message: str): @pytest.mark.anyio async def test_create_message_backwards_compat_single_content(): """Test backwards compatibility: create_message without tools returns single content.""" - from mcp.server.fastmcp import FastMCP - server = FastMCP("test") # Callback returns single content (text) @@ -72,7 +67,7 @@ async def test_create_message_backwards_compat_single_content(): role="assistant", content=TextContent(type="text", text="Hello from LLM"), model="test-model", - stopReason="endTurn", + stop_reason="endTurn", ) async def sampling_callback( @@ -97,9 +92,9 @@ async def test_tool(message: str): assert not hasattr(result, "content_as_list") or not callable(getattr(result, "content_as_list", None)) return True - async with create_session(server._mcp_server, sampling_callback=sampling_callback) as client_session: - result = await client_session.call_tool("test_backwards_compat", {"message": "Test"}) - assert result.isError is False + async with Client(server, sampling_callback=sampling_callback) as client: + result = await client.call_tool("test_backwards_compat", {"message": "Test"}) + assert result.is_error is False assert isinstance(result.content[0], TextContent) assert result.content[0].text == "true" @@ -112,7 +107,7 @@ async def test_create_message_result_with_tools_type(): role="assistant", content=ToolUseContent(type="tool_use", id="call_123", name="get_weather", input={"city": "SF"}), model="test-model", - stopReason="toolUse", + stop_reason="toolUse", ) # CreateMessageResultWithTools should have content_as_list @@ -128,7 +123,7 @@ async def test_create_message_result_with_tools_type(): ToolUseContent(type="tool_use", id="call_456", name="get_weather", input={"city": "NYC"}), ], model="test-model", - stopReason="toolUse", + stop_reason="toolUse", ) content_list_array = result_array.content_as_list assert len(content_list_array) == 2 diff --git a/tests/client/test_scope_bug_1630.py b/tests/client/test_scope_bug_1630.py index 7884718c1e..fafa510075 100644 --- a/tests/client/test_scope_bug_1630.py +++ b/tests/client/test_scope_bug_1630.py @@ -1,5 +1,4 @@ -""" -Regression test for issue #1630: OAuth2 scope incorrectly set to resource_metadata URL. +"""Regression test for issue #1630: OAuth2 scope incorrectly set to resource_metadata URL. This test verifies that when a 401 response contains both resource_metadata and scope in the WWW-Authenticate header, the actual scope is used (not the resource_metadata URL). 
@@ -37,8 +36,7 @@ async def set_client_info(self, client_info: OAuthClientInformationFull) -> None @pytest.mark.anyio async def test_401_uses_www_auth_scope_not_resource_metadata_url(): - """ - Regression test for #1630: Ensure scope is extracted from WWW-Authenticate header, + """Regression test for #1630: Ensure scope is extracted from WWW-Authenticate header, not the resource_metadata URL. When a 401 response contains: diff --git a/tests/client/test_session.py b/tests/client/test_session.py index eb2683fbdb..5c1f55d238 100644 --- a/tests/client/test_session.py +++ b/tests/client/test_session.py @@ -12,19 +12,17 @@ from mcp.types import ( LATEST_PROTOCOL_VERSION, CallToolResult, - ClientNotification, - ClientRequest, Implementation, InitializedNotification, InitializeRequest, InitializeResult, - JSONRPCMessage, JSONRPCNotification, JSONRPCRequest, JSONRPCResponse, ServerCapabilities, - ServerResult, TextContent, + client_notification_adapter, + client_request_adapter, ) @@ -41,43 +39,39 @@ async def mock_server(): session_message = await client_to_server_receive.receive() jsonrpc_request = session_message.message - assert isinstance(jsonrpc_request.root, JSONRPCRequest) - request = ClientRequest.model_validate( + assert isinstance(jsonrpc_request, JSONRPCRequest) + request = client_request_adapter.validate_python( jsonrpc_request.model_dump(by_alias=True, mode="json", exclude_none=True) ) - assert isinstance(request.root, InitializeRequest) - - result = ServerResult( - InitializeResult( - protocolVersion=LATEST_PROTOCOL_VERSION, - capabilities=ServerCapabilities( - logging=None, - resources=None, - tools=None, - experimental=None, - prompts=None, - ), - serverInfo=Implementation(name="mock-server", version="0.1.0"), - instructions="The server instructions.", - ) + assert isinstance(request, InitializeRequest) + + result = InitializeResult( + protocol_version=LATEST_PROTOCOL_VERSION, + capabilities=ServerCapabilities( + logging=None, + resources=None, + tools=None, + experimental=None, + prompts=None, + ), + server_info=Implementation(name="mock-server", version="0.1.0"), + instructions="The server instructions.", ) async with server_to_client_send: await server_to_client_send.send( SessionMessage( - JSONRPCMessage( - JSONRPCResponse( - jsonrpc="2.0", - id=jsonrpc_request.root.id, - result=result.model_dump(by_alias=True, mode="json", exclude_none=True), - ) + JSONRPCResponse( + jsonrpc="2.0", + id=jsonrpc_request.id, + result=result.model_dump(by_alias=True, mode="json", exclude_none=True), ) ) ) session_notification = await client_to_server_receive.receive() jsonrpc_notification = session_notification.message - assert isinstance(jsonrpc_notification.root, JSONRPCNotification) - initialized_notification = ClientNotification.model_validate( + assert isinstance(jsonrpc_notification, JSONRPCNotification) + initialized_notification = client_notification_adapter.validate_python( jsonrpc_notification.model_dump(by_alias=True, mode="json", exclude_none=True) ) @@ -105,14 +99,14 @@ async def message_handler( # pragma: no cover # Assert the result assert isinstance(result, InitializeResult) - assert result.protocolVersion == LATEST_PROTOCOL_VERSION + assert result.protocol_version == LATEST_PROTOCOL_VERSION assert isinstance(result.capabilities, ServerCapabilities) - assert result.serverInfo == Implementation(name="mock-server", version="0.1.0") + assert result.server_info == Implementation(name="mock-server", version="0.1.0") assert result.instructions == "The server instructions." 
# Check that the client sent the initialized notification assert initialized_notification - assert isinstance(initialized_notification.root, InitializedNotification) + assert isinstance(initialized_notification, InitializedNotification) @pytest.mark.anyio @@ -128,30 +122,26 @@ async def mock_server(): session_message = await client_to_server_receive.receive() jsonrpc_request = session_message.message - assert isinstance(jsonrpc_request.root, JSONRPCRequest) - request = ClientRequest.model_validate( + assert isinstance(jsonrpc_request, JSONRPCRequest) + request = client_request_adapter.validate_python( jsonrpc_request.model_dump(by_alias=True, mode="json", exclude_none=True) ) - assert isinstance(request.root, InitializeRequest) - received_client_info = request.root.params.clientInfo - - result = ServerResult( - InitializeResult( - protocolVersion=LATEST_PROTOCOL_VERSION, - capabilities=ServerCapabilities(), - serverInfo=Implementation(name="mock-server", version="0.1.0"), - ) + assert isinstance(request, InitializeRequest) + received_client_info = request.params.client_info + + result = InitializeResult( + protocol_version=LATEST_PROTOCOL_VERSION, + capabilities=ServerCapabilities(), + server_info=Implementation(name="mock-server", version="0.1.0"), ) async with server_to_client_send: await server_to_client_send.send( SessionMessage( - JSONRPCMessage( - JSONRPCResponse( - jsonrpc="2.0", - id=jsonrpc_request.root.id, - result=result.model_dump(by_alias=True, mode="json", exclude_none=True), - ) + JSONRPCResponse( + jsonrpc="2.0", + id=jsonrpc_request.id, + result=result.model_dump(by_alias=True, mode="json", exclude_none=True), ) ) ) @@ -189,30 +179,26 @@ async def mock_server(): session_message = await client_to_server_receive.receive() jsonrpc_request = session_message.message - assert isinstance(jsonrpc_request.root, JSONRPCRequest) - request = ClientRequest.model_validate( + assert isinstance(jsonrpc_request, JSONRPCRequest) + request = client_request_adapter.validate_python( jsonrpc_request.model_dump(by_alias=True, mode="json", exclude_none=True) ) - assert isinstance(request.root, InitializeRequest) - received_client_info = request.root.params.clientInfo - - result = ServerResult( - InitializeResult( - protocolVersion=LATEST_PROTOCOL_VERSION, - capabilities=ServerCapabilities(), - serverInfo=Implementation(name="mock-server", version="0.1.0"), - ) + assert isinstance(request, InitializeRequest) + received_client_info = request.params.client_info + + result = InitializeResult( + protocol_version=LATEST_PROTOCOL_VERSION, + capabilities=ServerCapabilities(), + server_info=Implementation(name="mock-server", version="0.1.0"), ) async with server_to_client_send: await server_to_client_send.send( SessionMessage( - JSONRPCMessage( - JSONRPCResponse( - jsonrpc="2.0", - id=jsonrpc_request.root.id, - result=result.model_dump(by_alias=True, mode="json", exclude_none=True), - ) + JSONRPCResponse( + jsonrpc="2.0", + id=jsonrpc_request.id, + result=result.model_dump(by_alias=True, mode="json", exclude_none=True), ) ) ) @@ -220,10 +206,7 @@ async def mock_server(): await client_to_server_receive.receive() async with ( - ClientSession( - server_to_client_receive, - client_to_server_send, - ) as session, + ClientSession(server_to_client_receive, client_to_server_send) as session, anyio.create_task_group() as tg, client_to_server_send, client_to_server_receive, @@ -247,33 +230,29 @@ async def test_client_session_version_negotiation_success(): async def mock_server(): session_message = await 
client_to_server_receive.receive() jsonrpc_request = session_message.message - assert isinstance(jsonrpc_request.root, JSONRPCRequest) - request = ClientRequest.model_validate( + assert isinstance(jsonrpc_request, JSONRPCRequest) + request = client_request_adapter.validate_python( jsonrpc_request.model_dump(by_alias=True, mode="json", exclude_none=True) ) - assert isinstance(request.root, InitializeRequest) + assert isinstance(request, InitializeRequest) # Verify client sent the latest protocol version - assert request.root.params.protocolVersion == LATEST_PROTOCOL_VERSION + assert request.params.protocol_version == LATEST_PROTOCOL_VERSION # Server responds with a supported older version - result = ServerResult( - InitializeResult( - protocolVersion="2024-11-05", - capabilities=ServerCapabilities(), - serverInfo=Implementation(name="mock-server", version="0.1.0"), - ) + result = InitializeResult( + protocol_version="2024-11-05", + capabilities=ServerCapabilities(), + server_info=Implementation(name="mock-server", version="0.1.0"), ) async with server_to_client_send: await server_to_client_send.send( SessionMessage( - JSONRPCMessage( - JSONRPCResponse( - jsonrpc="2.0", - id=jsonrpc_request.root.id, - result=result.model_dump(by_alias=True, mode="json", exclude_none=True), - ) + JSONRPCResponse( + jsonrpc="2.0", + id=jsonrpc_request.id, + result=result.model_dump(by_alias=True, mode="json", exclude_none=True), ) ) ) @@ -281,10 +260,7 @@ async def mock_server(): await client_to_server_receive.receive() async with ( - ClientSession( - server_to_client_receive, - client_to_server_send, - ) as session, + ClientSession(server_to_client_receive, client_to_server_send) as session, anyio.create_task_group() as tg, client_to_server_send, client_to_server_receive, @@ -296,8 +272,8 @@ async def mock_server(): # Assert the result with negotiated version assert isinstance(result, InitializeResult) - assert result.protocolVersion == "2024-11-05" - assert result.protocolVersion in SUPPORTED_PROTOCOL_VERSIONS + assert result.protocol_version == "2024-11-05" + assert result.protocol_version in SUPPORTED_PROTOCOL_VERSIONS @pytest.mark.anyio @@ -309,39 +285,32 @@ async def test_client_session_version_negotiation_failure(): async def mock_server(): session_message = await client_to_server_receive.receive() jsonrpc_request = session_message.message - assert isinstance(jsonrpc_request.root, JSONRPCRequest) - request = ClientRequest.model_validate( + assert isinstance(jsonrpc_request, JSONRPCRequest) + request = client_request_adapter.validate_python( jsonrpc_request.model_dump(by_alias=True, mode="json", exclude_none=True) ) - assert isinstance(request.root, InitializeRequest) + assert isinstance(request, InitializeRequest) # Server responds with an unsupported version - result = ServerResult( - InitializeResult( - protocolVersion="2020-01-01", # Unsupported old version - capabilities=ServerCapabilities(), - serverInfo=Implementation(name="mock-server", version="0.1.0"), - ) + result = InitializeResult( + protocol_version="2020-01-01", # Unsupported old version + capabilities=ServerCapabilities(), + server_info=Implementation(name="mock-server", version="0.1.0"), ) async with server_to_client_send: await server_to_client_send.send( SessionMessage( - JSONRPCMessage( - JSONRPCResponse( - jsonrpc="2.0", - id=jsonrpc_request.root.id, - result=result.model_dump(by_alias=True, mode="json", exclude_none=True), - ) + JSONRPCResponse( + jsonrpc="2.0", + id=jsonrpc_request.id, + result=result.model_dump(by_alias=True, 
mode="json", exclude_none=True), ) ) ) async with ( - ClientSession( - server_to_client_receive, - client_to_server_send, - ) as session, + ClientSession(server_to_client_receive, client_to_server_send) as session, anyio.create_task_group() as tg, client_to_server_send, client_to_server_receive, @@ -368,30 +337,26 @@ async def mock_server(): session_message = await client_to_server_receive.receive() jsonrpc_request = session_message.message - assert isinstance(jsonrpc_request.root, JSONRPCRequest) - request = ClientRequest.model_validate( + assert isinstance(jsonrpc_request, JSONRPCRequest) + request = client_request_adapter.validate_python( jsonrpc_request.model_dump(by_alias=True, mode="json", exclude_none=True) ) - assert isinstance(request.root, InitializeRequest) - received_capabilities = request.root.params.capabilities - - result = ServerResult( - InitializeResult( - protocolVersion=LATEST_PROTOCOL_VERSION, - capabilities=ServerCapabilities(), - serverInfo=Implementation(name="mock-server", version="0.1.0"), - ) + assert isinstance(request, InitializeRequest) + received_capabilities = request.params.capabilities + + result = InitializeResult( + protocol_version=LATEST_PROTOCOL_VERSION, + capabilities=ServerCapabilities(), + server_info=Implementation(name="mock-server", version="0.1.0"), ) async with server_to_client_send: await server_to_client_send.send( SessionMessage( - JSONRPCMessage( - JSONRPCResponse( - jsonrpc="2.0", - id=jsonrpc_request.root.id, - result=result.model_dump(by_alias=True, mode="json", exclude_none=True), - ) + JSONRPCResponse( + jsonrpc="2.0", + id=jsonrpc_request.id, + result=result.model_dump(by_alias=True, mode="json", exclude_none=True), ) ) ) @@ -399,10 +364,7 @@ async def mock_server(): await client_to_server_receive.receive() async with ( - ClientSession( - server_to_client_receive, - client_to_server_send, - ) as session, + ClientSession(server_to_client_receive, client_to_server_send) as session, anyio.create_task_group() as tg, client_to_server_send, client_to_server_receive, @@ -446,30 +408,26 @@ async def mock_server(): session_message = await client_to_server_receive.receive() jsonrpc_request = session_message.message - assert isinstance(jsonrpc_request.root, JSONRPCRequest) - request = ClientRequest.model_validate( + assert isinstance(jsonrpc_request, JSONRPCRequest) + request = client_request_adapter.validate_python( jsonrpc_request.model_dump(by_alias=True, mode="json", exclude_none=True) ) - assert isinstance(request.root, InitializeRequest) - received_capabilities = request.root.params.capabilities - - result = ServerResult( - InitializeResult( - protocolVersion=LATEST_PROTOCOL_VERSION, - capabilities=ServerCapabilities(), - serverInfo=Implementation(name="mock-server", version="0.1.0"), - ) + assert isinstance(request, InitializeRequest) + received_capabilities = request.params.capabilities + + result = InitializeResult( + protocol_version=LATEST_PROTOCOL_VERSION, + capabilities=ServerCapabilities(), + server_info=Implementation(name="mock-server", version="0.1.0"), ) async with server_to_client_send: await server_to_client_send.send( SessionMessage( - JSONRPCMessage( - JSONRPCResponse( - jsonrpc="2.0", - id=jsonrpc_request.root.id, - result=result.model_dump(by_alias=True, mode="json", exclude_none=True), - ) + JSONRPCResponse( + jsonrpc="2.0", + id=jsonrpc_request.id, + result=result.model_dump(by_alias=True, mode="json", exclude_none=True), ) ) ) @@ -503,7 +461,7 @@ async def mock_server(): assert received_capabilities.roots is not None 
assert isinstance(received_capabilities.roots, types.RootsCapability) # Should be True for custom callback - assert received_capabilities.roots.listChanged is True + assert received_capabilities.roots.list_changed is True @pytest.mark.anyio @@ -529,30 +487,26 @@ async def mock_server(): session_message = await client_to_server_receive.receive() jsonrpc_request = session_message.message - assert isinstance(jsonrpc_request.root, JSONRPCRequest) - request = ClientRequest.model_validate( + assert isinstance(jsonrpc_request, JSONRPCRequest) + request = client_request_adapter.validate_python( jsonrpc_request.model_dump(by_alias=True, mode="json", exclude_none=True) ) - assert isinstance(request.root, InitializeRequest) - received_capabilities = request.root.params.capabilities - - result = ServerResult( - InitializeResult( - protocolVersion=LATEST_PROTOCOL_VERSION, - capabilities=ServerCapabilities(), - serverInfo=Implementation(name="mock-server", version="0.1.0"), - ) + assert isinstance(request, InitializeRequest) + received_capabilities = request.params.capabilities + + result = InitializeResult( + protocol_version=LATEST_PROTOCOL_VERSION, + capabilities=ServerCapabilities(), + server_info=Implementation(name="mock-server", version="0.1.0"), ) async with server_to_client_send: await server_to_client_send.send( SessionMessage( - JSONRPCMessage( - JSONRPCResponse( - jsonrpc="2.0", - id=jsonrpc_request.root.id, - result=result.model_dump(by_alias=True, mode="json", exclude_none=True), - ) + JSONRPCResponse( + jsonrpc="2.0", + id=jsonrpc_request.id, + result=result.model_dump(by_alias=True, mode="json", exclude_none=True), ) ) ) @@ -592,37 +546,33 @@ async def test_get_server_capabilities(): expected_capabilities = ServerCapabilities( logging=types.LoggingCapability(), - prompts=types.PromptsCapability(listChanged=True), - resources=types.ResourcesCapability(subscribe=True, listChanged=True), - tools=types.ToolsCapability(listChanged=False), + prompts=types.PromptsCapability(list_changed=True), + resources=types.ResourcesCapability(subscribe=True, list_changed=True), + tools=types.ToolsCapability(list_changed=False), ) async def mock_server(): session_message = await client_to_server_receive.receive() jsonrpc_request = session_message.message - assert isinstance(jsonrpc_request.root, JSONRPCRequest) - request = ClientRequest.model_validate( + assert isinstance(jsonrpc_request, JSONRPCRequest) + request = client_request_adapter.validate_python( jsonrpc_request.model_dump(by_alias=True, mode="json", exclude_none=True) ) - assert isinstance(request.root, InitializeRequest) + assert isinstance(request, InitializeRequest) - result = ServerResult( - InitializeResult( - protocolVersion=LATEST_PROTOCOL_VERSION, - capabilities=expected_capabilities, - serverInfo=Implementation(name="mock-server", version="0.1.0"), - ) + result = InitializeResult( + protocol_version=LATEST_PROTOCOL_VERSION, + capabilities=expected_capabilities, + server_info=Implementation(name="mock-server", version="0.1.0"), ) async with server_to_client_send: await server_to_client_send.send( SessionMessage( - JSONRPCMessage( - JSONRPCResponse( - jsonrpc="2.0", - id=jsonrpc_request.root.id, - result=result.model_dump(by_alias=True, mode="json", exclude_none=True), - ) + JSONRPCResponse( + jsonrpc="2.0", + id=jsonrpc_request.id, + result=result.model_dump(by_alias=True, mode="json", exclude_none=True), ) ) ) @@ -649,11 +599,11 @@ async def mock_server(): assert capabilities == expected_capabilities assert capabilities.logging is not 
None assert capabilities.prompts is not None - assert capabilities.prompts.listChanged is True + assert capabilities.prompts.list_changed is True assert capabilities.resources is not None assert capabilities.resources.subscribe is True assert capabilities.tools is not None - assert capabilities.tools.listChanged is False + assert capabilities.tools.list_changed is False @pytest.mark.anyio @@ -663,35 +613,31 @@ async def test_client_tool_call_with_meta(meta: dict[str, Any] | None): client_to_server_send, client_to_server_receive = anyio.create_memory_object_stream[SessionMessage](1) server_to_client_send, server_to_client_receive = anyio.create_memory_object_stream[SessionMessage](1) - mocked_tool = types.Tool(name="sample_tool", inputSchema={}) + mocked_tool = types.Tool(name="sample_tool", input_schema={}) async def mock_server(): # Receive initialization request from client session_message = await client_to_server_receive.receive() jsonrpc_request = session_message.message - assert isinstance(jsonrpc_request.root, JSONRPCRequest) - request = ClientRequest.model_validate( + assert isinstance(jsonrpc_request, JSONRPCRequest) + request = client_request_adapter.validate_python( jsonrpc_request.model_dump(by_alias=True, mode="json", exclude_none=True) ) - assert isinstance(request.root, InitializeRequest) + assert isinstance(request, InitializeRequest) - result = ServerResult( - InitializeResult( - protocolVersion=LATEST_PROTOCOL_VERSION, - capabilities=ServerCapabilities(), - serverInfo=Implementation(name="mock-server", version="0.1.0"), - ) + result = InitializeResult( + protocol_version=LATEST_PROTOCOL_VERSION, + capabilities=ServerCapabilities(), + server_info=Implementation(name="mock-server", version="0.1.0"), ) # Answer initialization request await server_to_client_send.send( SessionMessage( - JSONRPCMessage( - JSONRPCResponse( - jsonrpc="2.0", - id=jsonrpc_request.root.id, - result=result.model_dump(by_alias=True, mode="json", exclude_none=True), - ) + JSONRPCResponse( + jsonrpc="2.0", + id=jsonrpc_request.id, + result=result.model_dump(by_alias=True, mode="json", exclude_none=True), ) ) ) @@ -702,28 +648,24 @@ async def mock_server(): # Wait for the client to send a 'tools/call' request session_message = await client_to_server_receive.receive() jsonrpc_request = session_message.message - assert isinstance(jsonrpc_request.root, JSONRPCRequest) + assert isinstance(jsonrpc_request, JSONRPCRequest) - assert jsonrpc_request.root.method == "tools/call" + assert jsonrpc_request.method == "tools/call" if meta is not None: - assert jsonrpc_request.root.params - assert "_meta" in jsonrpc_request.root.params - assert jsonrpc_request.root.params["_meta"] == meta + assert jsonrpc_request.params + assert "_meta" in jsonrpc_request.params + assert jsonrpc_request.params["_meta"] == meta - result = ServerResult( - CallToolResult(content=[TextContent(type="text", text="Called successfully")], isError=False) - ) + result = CallToolResult(content=[TextContent(type="text", text="Called successfully")], is_error=False) # Send the tools/call result await server_to_client_send.send( SessionMessage( - JSONRPCMessage( - JSONRPCResponse( - jsonrpc="2.0", - id=jsonrpc_request.root.id, - result=result.model_dump(by_alias=True, mode="json", exclude_none=True), - ) + JSONRPCResponse( + jsonrpc="2.0", + id=jsonrpc_request.id, + result=result.model_dump(by_alias=True, mode="json", exclude_none=True), ) ) ) @@ -732,20 +674,18 @@ async def mock_server(): # The client requires this step to validate the tool output 
schema session_message = await client_to_server_receive.receive() jsonrpc_request = session_message.message - assert isinstance(jsonrpc_request.root, JSONRPCRequest) + assert isinstance(jsonrpc_request, JSONRPCRequest) - assert jsonrpc_request.root.method == "tools/list" + assert jsonrpc_request.method == "tools/list" result = types.ListToolsResult(tools=[mocked_tool]) await server_to_client_send.send( SessionMessage( - JSONRPCMessage( - JSONRPCResponse( - jsonrpc="2.0", - id=jsonrpc_request.root.id, - result=result.model_dump(by_alias=True, mode="json", exclude_none=True), - ) + JSONRPCResponse( + jsonrpc="2.0", + id=jsonrpc_request.id, + result=result.model_dump(by_alias=True, mode="json", exclude_none=True), ) ) ) @@ -753,10 +693,7 @@ async def mock_server(): server_to_client_send.close() async with ( - ClientSession( - server_to_client_receive, - client_to_server_send, - ) as session, + ClientSession(server_to_client_receive, client_to_server_send) as session, anyio.create_task_group() as tg, client_to_server_send, client_to_server_receive, diff --git a/tests/client/test_session_group.py b/tests/client/test_session_group.py index 755c613d9b..5efc1d7d22 100644 --- a/tests/client/test_session_group.py +++ b/tests/client/test_session_group.py @@ -1,6 +1,7 @@ import contextlib from unittest import mock +import httpx import pytest import mcp @@ -59,7 +60,7 @@ def hook(name: str, server_info: types.Implementation) -> str: # pragma: no cov return f"{(server_info.name)}-{name}" mcp_session_group = ClientSessionGroup(component_name_hook=hook) - mcp_session_group._tools = {"server1-my_tool": types.Tool(name="my_tool", inputSchema={})} + mcp_session_group._tools = {"server1-my_tool": types.Tool(name="my_tool", input_schema={})} mcp_session_group._tool_to_session = {"server1-my_tool": mock_session} text_content = types.TextContent(type="text", text="OK") mock_session.call_tool.return_value = types.CallToolResult(content=[text_content]) @@ -77,7 +78,7 @@ def hook(name: str, server_info: types.Implementation) -> str: # pragma: no cov assert result.content == [text_content] mock_session.call_tool.assert_called_once_with( "my_tool", - {"name": "value1", "args": {}}, + arguments={"name": "value1", "args": {}}, read_timeout_seconds=None, progress_callback=None, meta=None, @@ -273,7 +274,7 @@ async def test_disconnect_non_existent_server(self): "mcp.client.session_group.mcp.stdio_client", ), ( - SseServerParameters(url="http://test.com/sse", timeout=10), + SseServerParameters(url="http://test.com/sse", timeout=10.0), "sse", "mcp.client.session_group.sse_client", ), # url, headers, timeout, sse_read_timeout @@ -324,7 +325,7 @@ async def test_establish_session_parameterized( # Mock session.initialize() mock_initialize_result = mock.AsyncMock(name="InitializeResult") - mock_initialize_result.serverInfo = types.Implementation(name="foo", version="1") + mock_initialize_result.server_info = types.Implementation(name="foo", version="1") mock_entered_session.initialize.return_value = mock_initialize_result # --- Test Execution --- @@ -356,8 +357,6 @@ async def test_establish_session_parameterized( assert isinstance(server_params_instance, StreamableHttpParameters) # Verify streamable_http_client was called with url, httpx_client, and terminate_on_close # The http_client is created by the real create_mcp_http_client - import httpx - call_args = mock_specific_client_func.call_args assert call_args.kwargs["url"] == server_params_instance.url assert call_args.kwargs["terminate_on_close"] == 
server_params_instance.terminate_on_close @@ -381,5 +380,5 @@ async def test_establish_session_parameterized( mock_entered_session.initialize.assert_awaited_once() # 3. Assert returned values - assert returned_server_info is mock_initialize_result.serverInfo + assert returned_server_info is mock_initialize_result.server_info assert returned_session is mock_entered_session diff --git a/tests/client/test_stdio.py b/tests/client/test_stdio.py index ba58da7321..4059a92682 100644 --- a/tests/client/test_stdio.py +++ b/tests/client/test_stdio.py @@ -10,7 +10,12 @@ import pytest from mcp.client.session import ClientSession -from mcp.client.stdio import StdioServerParameters, _create_platform_compatible_process, stdio_client +from mcp.client.stdio import ( + StdioServerParameters, + _create_platform_compatible_process, + _terminate_process_tree, + stdio_client, +) from mcp.shared.exceptions import McpError from mcp.shared.message import SessionMessage from mcp.types import CONNECTION_CLOSED, JSONRPCMessage, JSONRPCRequest, JSONRPCResponse @@ -42,8 +47,8 @@ async def test_stdio_client(): async with stdio_client(server_parameters) as (read_stream, write_stream): # Test sending and receiving messages messages = [ - JSONRPCMessage(root=JSONRPCRequest(jsonrpc="2.0", id=1, method="ping")), - JSONRPCMessage(root=JSONRPCResponse(jsonrpc="2.0", id=2, result={})), + JSONRPCRequest(jsonrpc="2.0", id=1, method="ping"), + JSONRPCResponse(jsonrpc="2.0", id=2, result={}), ] async with write_stream: @@ -62,8 +67,8 @@ async def test_stdio_client(): break assert len(read_messages) == 2 - assert read_messages[0] == JSONRPCMessage(root=JSONRPCRequest(jsonrpc="2.0", id=1, method="ping")) - assert read_messages[1] == JSONRPCMessage(root=JSONRPCResponse(jsonrpc="2.0", id=2, result={})) + assert read_messages[0] == JSONRPCRequest(jsonrpc="2.0", id=1, method="ping") + assert read_messages[1] == JSONRPCResponse(jsonrpc="2.0", id=2, result={}) @pytest.mark.anyio @@ -101,8 +106,7 @@ async def test_stdio_client_nonexistent_command(): @pytest.mark.anyio async def test_stdio_client_universal_cleanup(): - """ - Test that stdio_client completes cleanup within reasonable time + """Test that stdio_client completes cleanup within reasonable time even when connected to processes that exit slowly. """ @@ -154,9 +158,7 @@ async def test_stdio_client_universal_cleanup(): @pytest.mark.anyio @pytest.mark.skipif(sys.platform == "win32", reason="Windows signal handling is different") async def test_stdio_client_sigint_only_process(): # pragma: no cover - """ - Test cleanup with a process that ignores SIGTERM but responds to SIGINT. - """ + """Test cleanup with a process that ignores SIGTERM but responds to SIGINT.""" # Create a Python script that ignores SIGTERM but handles SIGINT script_content = textwrap.dedent( """ @@ -220,8 +222,7 @@ def sigint_handler(signum, frame): class TestChildProcessCleanup: - """ - Tests for child process cleanup functionality using _terminate_process_tree. + """Tests for child process cleanup functionality using _terminate_process_tree. These tests verify that child processes are properly terminated when the parent is killed, addressing the issue where processes like npx spawn child processes @@ -247,8 +248,7 @@ class TestChildProcessCleanup: @pytest.mark.anyio @pytest.mark.filterwarnings("ignore::ResourceWarning" if sys.platform == "win32" else "default") async def test_basic_child_process_cleanup(self): - """ - Test basic parent-child process cleanup. + """Test basic parent-child process cleanup. 
Parent spawns a single child process that writes continuously to a file. """ # Create a marker file for the child process to write to @@ -312,8 +312,6 @@ async def test_basic_child_process_cleanup(self): # Terminate using our function print("Terminating process and children...") - from mcp.client.stdio import _terminate_process_tree - await _terminate_process_tree(proc) # Verify processes stopped @@ -341,8 +339,7 @@ async def test_basic_child_process_cleanup(self): @pytest.mark.anyio @pytest.mark.filterwarnings("ignore::ResourceWarning" if sys.platform == "win32" else "default") async def test_nested_process_tree(self): - """ - Test nested process tree cleanup (parent → child → grandchild). + """Test nested process tree cleanup (parent → child → grandchild). Each level writes to a different file to verify all processes are terminated. """ # Create temporary files for each process level @@ -413,8 +410,6 @@ async def test_nested_process_tree(self): assert new_size > initial_size, f"{name} process should be writing" # Terminate the whole tree - from mcp.client.stdio import _terminate_process_tree - await _terminate_process_tree(proc) # Verify all stopped @@ -439,8 +434,7 @@ async def test_nested_process_tree(self): @pytest.mark.anyio @pytest.mark.filterwarnings("ignore::ResourceWarning" if sys.platform == "win32" else "default") async def test_early_parent_exit(self): - """ - Test cleanup when parent exits during termination sequence. + """Test cleanup when parent exits during termination sequence. Tests the race condition where parent might die during our termination sequence but we can still clean up the children via the process group. """ @@ -494,8 +488,6 @@ def handle_term(sig, frame): assert size2 > size1, "Child should be writing" # Terminate - this will kill the process group even if parent exits first - from mcp.client.stdio import _terminate_process_tree - await _terminate_process_tree(proc) # Verify child stopped @@ -518,8 +510,7 @@ def handle_term(sig, frame): @pytest.mark.anyio async def test_stdio_client_graceful_stdin_exit(): - """ - Test that a process exits gracefully when stdin is closed, + """Test that a process exits gracefully when stdin is closed, without needing SIGTERM or SIGKILL. """ # Create a Python script that exits when stdin is closed @@ -574,8 +565,7 @@ async def test_stdio_client_graceful_stdin_exit(): @pytest.mark.anyio async def test_stdio_client_stdin_close_ignored(): - """ - Test that when a process ignores stdin closure, the shutdown sequence + """Test that when a process ignores stdin closure, the shutdown sequence properly escalates to SIGTERM. """ # Create a Python script that ignores stdin closure but responds to SIGTERM diff --git a/tests/client/transports/__init__.py b/tests/client/transports/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/client/transports/test_memory.py b/tests/client/transports/test_memory.py new file mode 100644 index 0000000000..b97ebcea2f --- /dev/null +++ b/tests/client/transports/test_memory.py @@ -0,0 +1,99 @@ +"""Tests for InMemoryTransport.""" + +import pytest + +from mcp import Client +from mcp.client._memory import InMemoryTransport +from mcp.server import Server +from mcp.server.fastmcp import FastMCP +from mcp.types import Resource + + +@pytest.fixture +def simple_server() -> Server: + """Create a simple MCP server for testing.""" + server = Server(name="test_server") + + # pragma: no cover - handler exists only to register a resource capability. 
+ # Transport tests verify stream creation, not handler invocation. + @server.list_resources() + async def handle_list_resources(): # pragma: no cover + return [ + Resource( + uri="memory://test", + name="Test Resource", + description="A test resource", + ) + ] + + return server + + +@pytest.fixture +def fastmcp_server() -> FastMCP: + """Create a FastMCP server for testing.""" + server = FastMCP("test") + + # pragma: no cover on handlers below - they exist only to register capabilities. + # Transport tests verify stream creation and basic protocol, not handler invocation. + @server.tool() + def greet(name: str) -> str: # pragma: no cover + """Greet someone by name.""" + return f"Hello, {name}!" + + @server.resource("test://resource") + def test_resource() -> str: # pragma: no cover + """A test resource.""" + return "Test content" + + return server + + +pytestmark = pytest.mark.anyio + + +async def test_with_server(simple_server: Server): + """Test creating transport with a Server instance.""" + transport = InMemoryTransport(simple_server) + async with transport.connect() as (read_stream, write_stream): + assert read_stream is not None + assert write_stream is not None + + +async def test_with_fastmcp(fastmcp_server: FastMCP): + """Test creating transport with a FastMCP instance.""" + transport = InMemoryTransport(fastmcp_server) + async with transport.connect() as (read_stream, write_stream): + assert read_stream is not None + assert write_stream is not None + + +async def test_server_is_running(fastmcp_server: FastMCP): + """Test that the server is running and responding to requests.""" + async with Client(fastmcp_server) as client: + assert client.server_capabilities is not None + + +async def test_list_tools(fastmcp_server: FastMCP): + """Test listing tools through the transport.""" + async with Client(fastmcp_server) as client: + tools_result = await client.list_tools() + assert len(tools_result.tools) > 0 + tool_names = [t.name for t in tools_result.tools] + assert "greet" in tool_names + + +async def test_call_tool(fastmcp_server: FastMCP): + """Test calling a tool through the transport.""" + async with Client(fastmcp_server) as client: + result = await client.call_tool("greet", {"name": "World"}) + assert result is not None + assert len(result.content) > 0 + assert "Hello, World!" 
in str(result.content[0]) + + +async def test_raise_exceptions(fastmcp_server: FastMCP): + """Test that raise_exceptions parameter is passed through.""" + transport = InMemoryTransport(fastmcp_server, raise_exceptions=True) + async with transport.connect() as (read_stream, _write_stream): + assert read_stream is not None diff --git a/tests/experimental/tasks/client/test_capabilities.py b/tests/experimental/tasks/client/test_capabilities.py index f2def4e3a6..7bb8066966 100644 --- a/tests/experimental/tasks/client/test_capabilities.py +++ b/tests/experimental/tasks/client/test_capabilities.py @@ -11,15 +11,13 @@ from mcp.shared.message import SessionMessage from mcp.types import ( LATEST_PROTOCOL_VERSION, - ClientRequest, Implementation, InitializeRequest, InitializeResult, - JSONRPCMessage, JSONRPCRequest, JSONRPCResponse, ServerCapabilities, - ServerResult, + client_request_adapter, ) @@ -36,30 +34,26 @@ async def mock_server(): session_message = await client_to_server_receive.receive() jsonrpc_request = session_message.message - assert isinstance(jsonrpc_request.root, JSONRPCRequest) - request = ClientRequest.model_validate( + assert isinstance(jsonrpc_request, JSONRPCRequest) + request = client_request_adapter.validate_python( jsonrpc_request.model_dump(by_alias=True, mode="json", exclude_none=True) ) - assert isinstance(request.root, InitializeRequest) - received_capabilities = request.root.params.capabilities - - result = ServerResult( - InitializeResult( - protocolVersion=LATEST_PROTOCOL_VERSION, - capabilities=ServerCapabilities(), - serverInfo=Implementation(name="mock-server", version="0.1.0"), - ) + assert isinstance(request, InitializeRequest) + received_capabilities = request.params.capabilities + + result = InitializeResult( + protocol_version=LATEST_PROTOCOL_VERSION, + capabilities=ServerCapabilities(), + server_info=Implementation(name="mock-server", version="0.1.0"), ) async with server_to_client_send: await server_to_client_send.send( SessionMessage( - JSONRPCMessage( - JSONRPCResponse( - jsonrpc="2.0", - id=jsonrpc_request.root.id, - result=result.model_dump(by_alias=True, mode="json", exclude_none=True), - ) + JSONRPCResponse( + jsonrpc="2.0", + id=jsonrpc_request.id, + result=result.model_dump(by_alias=True, mode="json", exclude_none=True), ) ) ) @@ -110,30 +104,26 @@ async def mock_server(): session_message = await client_to_server_receive.receive() jsonrpc_request = session_message.message - assert isinstance(jsonrpc_request.root, JSONRPCRequest) - request = ClientRequest.model_validate( + assert isinstance(jsonrpc_request, JSONRPCRequest) + request = client_request_adapter.validate_python( jsonrpc_request.model_dump(by_alias=True, mode="json", exclude_none=True) ) - assert isinstance(request.root, InitializeRequest) - received_capabilities = request.root.params.capabilities - - result = ServerResult( - InitializeResult( - protocolVersion=LATEST_PROTOCOL_VERSION, - capabilities=ServerCapabilities(), - serverInfo=Implementation(name="mock-server", version="0.1.0"), - ) + assert isinstance(request, InitializeRequest) + received_capabilities = request.params.capabilities + + result = InitializeResult( + protocol_version=LATEST_PROTOCOL_VERSION, + capabilities=ServerCapabilities(), + server_info=Implementation(name="mock-server", version="0.1.0"), ) async with server_to_client_send: await server_to_client_send.send( SessionMessage( - JSONRPCMessage( - JSONRPCResponse( - jsonrpc="2.0", - id=jsonrpc_request.root.id, - result=result.model_dump(by_alias=True, mode="json", 
exclude_none=True), - ) + JSONRPCResponse( + jsonrpc="2.0", + id=jsonrpc_request.id, + result=result.model_dump(by_alias=True, mode="json", exclude_none=True), ) ) ) @@ -194,30 +184,26 @@ async def mock_server(): session_message = await client_to_server_receive.receive() jsonrpc_request = session_message.message - assert isinstance(jsonrpc_request.root, JSONRPCRequest) - request = ClientRequest.model_validate( + assert isinstance(jsonrpc_request, JSONRPCRequest) + request = client_request_adapter.validate_python( jsonrpc_request.model_dump(by_alias=True, mode="json", exclude_none=True) ) - assert isinstance(request.root, InitializeRequest) - received_capabilities = request.root.params.capabilities - - result = ServerResult( - InitializeResult( - protocolVersion=LATEST_PROTOCOL_VERSION, - capabilities=ServerCapabilities(), - serverInfo=Implementation(name="mock-server", version="0.1.0"), - ) + assert isinstance(request, InitializeRequest) + received_capabilities = request.params.capabilities + + result = InitializeResult( + protocol_version=LATEST_PROTOCOL_VERSION, + capabilities=ServerCapabilities(), + server_info=Implementation(name="mock-server", version="0.1.0"), ) async with server_to_client_send: await server_to_client_send.send( SessionMessage( - JSONRPCMessage( - JSONRPCResponse( - jsonrpc="2.0", - id=jsonrpc_request.root.id, - result=result.model_dump(by_alias=True, mode="json", exclude_none=True), - ) + JSONRPCResponse( + jsonrpc="2.0", + id=jsonrpc_request.id, + result=result.model_dump(by_alias=True, mode="json", exclude_none=True), ) ) ) @@ -274,30 +260,26 @@ async def mock_server(): session_message = await client_to_server_receive.receive() jsonrpc_request = session_message.message - assert isinstance(jsonrpc_request.root, JSONRPCRequest) - request = ClientRequest.model_validate( + assert isinstance(jsonrpc_request, JSONRPCRequest) + request = client_request_adapter.validate_python( jsonrpc_request.model_dump(by_alias=True, mode="json", exclude_none=True) ) - assert isinstance(request.root, InitializeRequest) - received_capabilities = request.root.params.capabilities - - result = ServerResult( - InitializeResult( - protocolVersion=LATEST_PROTOCOL_VERSION, - capabilities=ServerCapabilities(), - serverInfo=Implementation(name="mock-server", version="0.1.0"), - ) + assert isinstance(request, InitializeRequest) + received_capabilities = request.params.capabilities + + result = InitializeResult( + protocol_version=LATEST_PROTOCOL_VERSION, + capabilities=ServerCapabilities(), + server_info=Implementation(name="mock-server", version="0.1.0"), ) async with server_to_client_send: await server_to_client_send.send( SessionMessage( - JSONRPCMessage( - JSONRPCResponse( - jsonrpc="2.0", - id=jsonrpc_request.root.id, - result=result.model_dump(by_alias=True, mode="json", exclude_none=True), - ) + JSONRPCResponse( + jsonrpc="2.0", + id=jsonrpc_request.id, + result=result.model_dump(by_alias=True, mode="json", exclude_none=True), ) ) ) diff --git a/tests/experimental/tasks/client/test_handlers.py b/tests/experimental/tasks/client/test_handlers.py index 86cea42ae1..0cac3c7362 100644 --- a/tests/experimental/tasks/client/test_handlers.py +++ b/tests/experimental/tasks/client/test_handlers.py @@ -117,17 +117,17 @@ async def get_task_handler( params: GetTaskRequestParams, ) -> GetTaskResult | ErrorData: nonlocal received_task_id - received_task_id = params.taskId - task = await store.get_task(params.taskId) - assert task is not None, f"Test setup error: task {params.taskId} should exist" + 
received_task_id = params.task_id + task = await store.get_task(params.task_id) + assert task is not None, f"Test setup error: task {params.task_id} should exist" return GetTaskResult( - taskId=task.taskId, + task_id=task.task_id, status=task.status, - statusMessage=task.statusMessage, - createdAt=task.createdAt, - lastUpdatedAt=task.lastUpdatedAt, + status_message=task.status_message, + created_at=task.created_at, + last_updated_at=task.last_updated_at, ttl=task.ttl, - pollInterval=task.pollInterval, + poll_interval=task.poll_interval, ) await store.create_task(TaskMetadata(ttl=60000), task_id="test-task-123") @@ -150,21 +150,17 @@ async def run_client() -> None: tg.start_soon(run_client) await client_ready.wait() - typed_request = GetTaskRequest(params=GetTaskRequestParams(taskId="test-task-123")) - request = types.JSONRPCRequest( - jsonrpc="2.0", - id="req-1", - **typed_request.model_dump(by_alias=True), - ) - await client_streams.server_send.send(SessionMessage(types.JSONRPCMessage(request))) + typed_request = GetTaskRequest(params=GetTaskRequestParams(task_id="test-task-123")) + request = types.JSONRPCRequest(jsonrpc="2.0", id="req-1", **typed_request.model_dump(by_alias=True)) + await client_streams.server_send.send(SessionMessage(request)) response_msg = await client_streams.server_receive.receive() - response = response_msg.message.root + response = response_msg.message assert isinstance(response, types.JSONRPCResponse) assert response.id == "req-1" result = GetTaskResult.model_validate(response.result) - assert result.taskId == "test-task-123" + assert result.task_id == "test-task-123" assert result.status == "working" assert received_task_id == "test-task-123" @@ -183,8 +179,8 @@ async def get_task_result_handler( context: RequestContext[ClientSession, None], params: GetTaskPayloadRequestParams, ) -> GetTaskPayloadResult | ErrorData: - result = await store.get_result(params.taskId) - assert result is not None, f"Test setup error: result for {params.taskId} should exist" + result = await store.get_result(params.task_id) + assert result is not None, f"Test setup error: result for {params.task_id} should exist" assert isinstance(result, types.CallToolResult) return GetTaskPayloadResult(**result.model_dump()) @@ -213,16 +209,16 @@ async def run_client() -> None: tg.start_soon(run_client) await client_ready.wait() - typed_request = GetTaskPayloadRequest(params=GetTaskPayloadRequestParams(taskId="test-task-456")) + typed_request = GetTaskPayloadRequest(params=GetTaskPayloadRequestParams(task_id="test-task-456")) request = types.JSONRPCRequest( jsonrpc="2.0", id="req-2", **typed_request.model_dump(by_alias=True), ) - await client_streams.server_send.send(SessionMessage(types.JSONRPCMessage(request))) + await client_streams.server_send.send(SessionMessage(request)) response_msg = await client_streams.server_receive.receive() - response = response_msg.message.root + response = response_msg.message assert isinstance(response, types.JSONRPCResponse) assert isinstance(response.result, dict) @@ -248,7 +244,7 @@ async def list_tasks_handler( ) -> ListTasksResult | ErrorData: cursor = params.cursor if params else None tasks_list, next_cursor = await store.list_tasks(cursor=cursor) - return ListTasksResult(tasks=tasks_list, nextCursor=next_cursor) + return ListTasksResult(tasks=tasks_list, next_cursor=next_cursor) await store.create_task(TaskMetadata(ttl=60000), task_id="task-1") await store.create_task(TaskMetadata(ttl=60000), task_id="task-2") @@ -277,10 +273,10 @@ async def run_client() -> 
None: id="req-3", **typed_request.model_dump(by_alias=True), ) - await client_streams.server_send.send(SessionMessage(types.JSONRPCMessage(request))) + await client_streams.server_send.send(SessionMessage(request)) response_msg = await client_streams.server_receive.receive() - response = response_msg.message.root + response = response_msg.message assert isinstance(response, types.JSONRPCResponse) result = ListTasksResult.model_validate(response.result) @@ -301,16 +297,16 @@ async def cancel_task_handler( context: RequestContext[ClientSession, None], params: CancelTaskRequestParams, ) -> CancelTaskResult | ErrorData: - task = await store.get_task(params.taskId) - assert task is not None, f"Test setup error: task {params.taskId} should exist" - await store.update_task(params.taskId, status="cancelled") - updated = await store.get_task(params.taskId) + task = await store.get_task(params.task_id) + assert task is not None, f"Test setup error: task {params.task_id} should exist" + await store.update_task(params.task_id, status="cancelled") + updated = await store.get_task(params.task_id) assert updated is not None return CancelTaskResult( - taskId=updated.taskId, + task_id=updated.task_id, status=updated.status, - createdAt=updated.createdAt, - lastUpdatedAt=updated.lastUpdatedAt, + created_at=updated.created_at, + last_updated_at=updated.last_updated_at, ttl=updated.ttl, ) @@ -334,20 +330,20 @@ async def run_client() -> None: tg.start_soon(run_client) await client_ready.wait() - typed_request = CancelTaskRequest(params=CancelTaskRequestParams(taskId="task-to-cancel")) + typed_request = CancelTaskRequest(params=CancelTaskRequestParams(task_id="task-to-cancel")) request = types.JSONRPCRequest( jsonrpc="2.0", id="req-4", **typed_request.model_dump(by_alias=True), ) - await client_streams.server_send.send(SessionMessage(types.JSONRPCMessage(request))) + await client_streams.server_send.send(SessionMessage(request)) response_msg = await client_streams.server_receive.receive() - response = response_msg.message.root + response = response_msg.message assert isinstance(response, types.JSONRPCResponse) result = CancelTaskResult.model_validate(response.result) - assert result.taskId == "task-to-cancel" + assert result.task_id == "task-to-cancel" assert result.status == "cancelled" tg.cancel_scope.cancel() @@ -370,17 +366,17 @@ async def task_augmented_sampling_callback( task_metadata: TaskMetadata, ) -> CreateTaskResult: task = await store.create_task(task_metadata) - created_task_id[0] = task.taskId + created_task_id[0] = task.task_id async def do_sampling() -> None: result = CreateMessageResult( role="assistant", content=TextContent(type="text", text="Sampled response"), model="test-model", - stopReason="endTurn", + stop_reason="endTurn", ) - await store.store_result(task.taskId, result) - await store.update_task(task.taskId, status="completed") + await store.store_result(task.task_id, result) + await store.update_task(task.task_id, status="completed") sampling_completed.set() assert background_tg[0] is not None @@ -391,24 +387,24 @@ async def get_task_handler( context: RequestContext[ClientSession, None], params: GetTaskRequestParams, ) -> GetTaskResult | ErrorData: - task = await store.get_task(params.taskId) - assert task is not None, f"Test setup error: task {params.taskId} should exist" + task = await store.get_task(params.task_id) + assert task is not None, f"Test setup error: task {params.task_id} should exist" return GetTaskResult( - taskId=task.taskId, + task_id=task.task_id, 
status=task.status, - statusMessage=task.statusMessage, - createdAt=task.createdAt, - lastUpdatedAt=task.lastUpdatedAt, + status_message=task.status_message, + created_at=task.created_at, + last_updated_at=task.last_updated_at, ttl=task.ttl, - pollInterval=task.pollInterval, + poll_interval=task.poll_interval, ) async def get_task_result_handler( context: RequestContext[ClientSession, None], params: GetTaskPayloadRequestParams, ) -> GetTaskPayloadResult | ErrorData: - result = await store.get_result(params.taskId) - assert result is not None, f"Test setup error: result for {params.taskId} should exist" + result = await store.get_result(params.task_id) + assert result is not None, f"Test setup error: result for {params.task_id} should exist" assert isinstance(result, CreateMessageResult) return GetTaskPayloadResult(**result.model_dump()) @@ -439,7 +435,7 @@ async def run_client() -> None: typed_request = CreateMessageRequest( params=CreateMessageRequestParams( messages=[SamplingMessage(role="user", content=TextContent(type="text", text="Hello"))], - maxTokens=100, + max_tokens=100, task=TaskMetadata(ttl=60000), ) ) @@ -448,47 +444,47 @@ async def run_client() -> None: id="req-sampling", **typed_request.model_dump(by_alias=True), ) - await client_streams.server_send.send(SessionMessage(types.JSONRPCMessage(request))) + await client_streams.server_send.send(SessionMessage(request)) # Step 2: Client responds with CreateTaskResult response_msg = await client_streams.server_receive.receive() - response = response_msg.message.root + response = response_msg.message assert isinstance(response, types.JSONRPCResponse) task_result = CreateTaskResult.model_validate(response.result) - task_id = task_result.task.taskId + task_id = task_result.task.task_id assert task_id == created_task_id[0] # Step 3: Wait for background sampling await sampling_completed.wait() # Step 4: Server polls task status - typed_poll = GetTaskRequest(params=GetTaskRequestParams(taskId=task_id)) + typed_poll = GetTaskRequest(params=GetTaskRequestParams(task_id=task_id)) poll_request = types.JSONRPCRequest( jsonrpc="2.0", id="req-poll", **typed_poll.model_dump(by_alias=True), ) - await client_streams.server_send.send(SessionMessage(types.JSONRPCMessage(poll_request))) + await client_streams.server_send.send(SessionMessage(poll_request)) poll_response_msg = await client_streams.server_receive.receive() - poll_response = poll_response_msg.message.root + poll_response = poll_response_msg.message assert isinstance(poll_response, types.JSONRPCResponse) status = GetTaskResult.model_validate(poll_response.result) assert status.status == "completed" # Step 5: Server gets result - typed_result_req = GetTaskPayloadRequest(params=GetTaskPayloadRequestParams(taskId=task_id)) + typed_result_req = GetTaskPayloadRequest(params=GetTaskPayloadRequestParams(task_id=task_id)) result_request = types.JSONRPCRequest( jsonrpc="2.0", id="req-result", **typed_result_req.model_dump(by_alias=True), ) - await client_streams.server_send.send(SessionMessage(types.JSONRPCMessage(result_request))) + await client_streams.server_send.send(SessionMessage(result_request)) result_response_msg = await client_streams.server_receive.receive() - result_response = result_response_msg.message.root + result_response = result_response_msg.message assert isinstance(result_response, types.JSONRPCResponse) assert isinstance(result_response.result, dict) @@ -514,13 +510,13 @@ async def task_augmented_elicitation_callback( task_metadata: TaskMetadata, ) -> CreateTaskResult | 
ErrorData: task = await store.create_task(task_metadata) - created_task_id[0] = task.taskId + created_task_id[0] = task.task_id async def do_elicitation() -> None: # Simulate user providing elicitation response result = ElicitResult(action="accept", content={"name": "Test User"}) - await store.store_result(task.taskId, result) - await store.update_task(task.taskId, status="completed") + await store.store_result(task.task_id, result) + await store.update_task(task.task_id, status="completed") elicitation_completed.set() assert background_tg[0] is not None @@ -531,24 +527,24 @@ async def get_task_handler( context: RequestContext[ClientSession, None], params: GetTaskRequestParams, ) -> GetTaskResult | ErrorData: - task = await store.get_task(params.taskId) - assert task is not None, f"Test setup error: task {params.taskId} should exist" + task = await store.get_task(params.task_id) + assert task is not None, f"Test setup error: task {params.task_id} should exist" return GetTaskResult( - taskId=task.taskId, + task_id=task.task_id, status=task.status, - statusMessage=task.statusMessage, - createdAt=task.createdAt, - lastUpdatedAt=task.lastUpdatedAt, + status_message=task.status_message, + created_at=task.created_at, + last_updated_at=task.last_updated_at, ttl=task.ttl, - pollInterval=task.pollInterval, + poll_interval=task.poll_interval, ) async def get_task_result_handler( context: RequestContext[ClientSession, None], params: GetTaskPayloadRequestParams, ) -> GetTaskPayloadResult | ErrorData: - result = await store.get_result(params.taskId) - assert result is not None, f"Test setup error: result for {params.taskId} should exist" + result = await store.get_result(params.task_id) + assert result is not None, f"Test setup error: result for {params.task_id} should exist" assert isinstance(result, ElicitResult) return GetTaskPayloadResult(**result.model_dump()) @@ -579,7 +575,7 @@ async def run_client() -> None: typed_request = ElicitRequest( params=ElicitRequestFormParams( message="What is your name?", - requestedSchema={"type": "object", "properties": {"name": {"type": "string"}}}, + requested_schema={"type": "object", "properties": {"name": {"type": "string"}}}, task=TaskMetadata(ttl=60000), ) ) @@ -588,47 +584,47 @@ async def run_client() -> None: id="req-elicit", **typed_request.model_dump(by_alias=True), ) - await client_streams.server_send.send(SessionMessage(types.JSONRPCMessage(request))) + await client_streams.server_send.send(SessionMessage(request)) # Step 2: Client responds with CreateTaskResult response_msg = await client_streams.server_receive.receive() - response = response_msg.message.root + response = response_msg.message assert isinstance(response, types.JSONRPCResponse) task_result = CreateTaskResult.model_validate(response.result) - task_id = task_result.task.taskId + task_id = task_result.task.task_id assert task_id == created_task_id[0] # Step 3: Wait for background elicitation await elicitation_completed.wait() # Step 4: Server polls task status - typed_poll = GetTaskRequest(params=GetTaskRequestParams(taskId=task_id)) + typed_poll = GetTaskRequest(params=GetTaskRequestParams(task_id=task_id)) poll_request = types.JSONRPCRequest( jsonrpc="2.0", id="req-poll", **typed_poll.model_dump(by_alias=True), ) - await client_streams.server_send.send(SessionMessage(types.JSONRPCMessage(poll_request))) + await client_streams.server_send.send(SessionMessage(poll_request)) poll_response_msg = await client_streams.server_receive.receive() - poll_response = poll_response_msg.message.root + 
poll_response = poll_response_msg.message assert isinstance(poll_response, types.JSONRPCResponse) status = GetTaskResult.model_validate(poll_response.result) assert status.status == "completed" # Step 5: Server gets result - typed_result_req = GetTaskPayloadRequest(params=GetTaskPayloadRequestParams(taskId=task_id)) + typed_result_req = GetTaskPayloadRequest(params=GetTaskPayloadRequestParams(task_id=task_id)) result_request = types.JSONRPCRequest( jsonrpc="2.0", id="req-result", **typed_result_req.model_dump(by_alias=True), ) - await client_streams.server_send.send(SessionMessage(types.JSONRPCMessage(result_request))) + await client_streams.server_send.send(SessionMessage(result_request)) result_response_msg = await client_streams.server_receive.receive() - result_response = result_response_msg.message.root + result_response = result_response_msg.message assert isinstance(result_response, types.JSONRPCResponse) # Verify the elicitation result @@ -661,16 +657,16 @@ async def run_client() -> None: tg.start_soon(run_client) await client_ready.wait() - typed_request = GetTaskRequest(params=GetTaskRequestParams(taskId="nonexistent")) + typed_request = GetTaskRequest(params=GetTaskRequestParams(task_id="nonexistent")) request = types.JSONRPCRequest( jsonrpc="2.0", id="req-unhandled", **typed_request.model_dump(by_alias=True), ) - await client_streams.server_send.send(SessionMessage(types.JSONRPCMessage(request))) + await client_streams.server_send.send(SessionMessage(request)) response_msg = await client_streams.server_receive.receive() - response = response_msg.message.root + response = response_msg.message assert isinstance(response, types.JSONRPCError) assert ( "not supported" in response.error.message.lower() @@ -700,16 +696,16 @@ async def run_client() -> None: tg.start_soon(run_client) await client_ready.wait() - typed_request = GetTaskPayloadRequest(params=GetTaskPayloadRequestParams(taskId="nonexistent")) + typed_request = GetTaskPayloadRequest(params=GetTaskPayloadRequestParams(task_id="nonexistent")) request = types.JSONRPCRequest( jsonrpc="2.0", id="req-result", **typed_request.model_dump(by_alias=True), ) - await client_streams.server_send.send(SessionMessage(types.JSONRPCMessage(request))) + await client_streams.server_send.send(SessionMessage(request)) response_msg = await client_streams.server_receive.receive() - response = response_msg.message.root + response = response_msg.message assert isinstance(response, types.JSONRPCError) assert "not supported" in response.error.message.lower() @@ -742,10 +738,10 @@ async def run_client() -> None: id="req-list", **typed_request.model_dump(by_alias=True), ) - await client_streams.server_send.send(SessionMessage(types.JSONRPCMessage(request))) + await client_streams.server_send.send(SessionMessage(request)) response_msg = await client_streams.server_receive.receive() - response = response_msg.message.root + response = response_msg.message assert isinstance(response, types.JSONRPCError) assert "not supported" in response.error.message.lower() @@ -772,16 +768,16 @@ async def run_client() -> None: tg.start_soon(run_client) await client_ready.wait() - typed_request = CancelTaskRequest(params=CancelTaskRequestParams(taskId="nonexistent")) + typed_request = CancelTaskRequest(params=CancelTaskRequestParams(task_id="nonexistent")) request = types.JSONRPCRequest( jsonrpc="2.0", id="req-cancel", **typed_request.model_dump(by_alias=True), ) - await client_streams.server_send.send(SessionMessage(types.JSONRPCMessage(request))) + await 
client_streams.server_send.send(SessionMessage(request)) response_msg = await client_streams.server_receive.receive() - response = response_msg.message.root + response = response_msg.message assert isinstance(response, types.JSONRPCError) assert "not supported" in response.error.message.lower() @@ -813,7 +809,7 @@ async def run_client() -> None: typed_request = CreateMessageRequest( params=CreateMessageRequestParams( messages=[SamplingMessage(role="user", content=TextContent(type="text", text="Hello"))], - maxTokens=100, + max_tokens=100, task=TaskMetadata(ttl=60000), ) ) @@ -822,10 +818,10 @@ async def run_client() -> None: id="req-sampling", **typed_request.model_dump(by_alias=True), ) - await client_streams.server_send.send(SessionMessage(types.JSONRPCMessage(request))) + await client_streams.server_send.send(SessionMessage(request)) response_msg = await client_streams.server_receive.receive() - response = response_msg.message.root + response = response_msg.message assert isinstance(response, types.JSONRPCError) assert "not supported" in response.error.message.lower() @@ -859,7 +855,7 @@ async def run_client() -> None: typed_request = ElicitRequest( params=ElicitRequestFormParams( message="What is your name?", - requestedSchema={"type": "object", "properties": {"name": {"type": "string"}}}, + requested_schema={"type": "object", "properties": {"name": {"type": "string"}}}, task=TaskMetadata(ttl=60000), ) ) @@ -868,10 +864,10 @@ async def run_client() -> None: id="req-elicit", **typed_request.model_dump(by_alias=True), ) - await client_streams.server_send.send(SessionMessage(types.JSONRPCMessage(request))) + await client_streams.server_send.send(SessionMessage(request)) response_msg = await client_streams.server_receive.receive() - response = response_msg.message.root + response = response_msg.message assert isinstance(response, types.JSONRPCError) assert "not supported" in response.error.message.lower() diff --git a/tests/experimental/tasks/client/test_poll_task.py b/tests/experimental/tasks/client/test_poll_task.py index 8275dc668e..5e3158d955 100644 --- a/tests/experimental/tasks/client/test_poll_task.py +++ b/tests/experimental/tasks/client/test_poll_task.py @@ -20,13 +20,13 @@ def make_task_result( """Create GetTaskResult with sensible defaults.""" now = datetime.now(timezone.utc) return GetTaskResult( - taskId=task_id, + task_id=task_id, status=status, - statusMessage=status_message, - createdAt=now, - lastUpdatedAt=now, + status_message=status_message, + created_at=now, + last_updated_at=now, ttl=60000, - pollInterval=poll_interval, + poll_interval=poll_interval, ) @@ -117,5 +117,5 @@ async def mock_get_task(task_id: str) -> GetTaskResult: assert len(results) == 1 assert results[0].status == "completed" - assert results[0].statusMessage == "All done!" - assert results[0].taskId == "test-task" + assert results[0].status_message == "All done!" 
+ assert results[0].task_id == "test-task" diff --git a/tests/experimental/tasks/client/test_tasks.py b/tests/experimental/tasks/client/test_tasks.py index 24c8891def..f21abf4d0f 100644 --- a/tests/experimental/tasks/client/test_tasks.py +++ b/tests/experimental/tasks/client/test_tasks.py @@ -23,7 +23,6 @@ CallToolResult, CancelTaskRequest, CancelTaskResult, - ClientRequest, ClientResult, CreateTaskResult, GetTaskPayloadRequest, @@ -58,7 +57,7 @@ async def test_session_experimental_get_task() -> None: @server.list_tools() async def list_tools(): - return [Tool(name="test_tool", description="Test", inputSchema={"type": "object"})] + return [Tool(name="test_tool", description="Test", input_schema={"type": "object"})] @server.call_tool() async def handle_call_tool(name: str, arguments: dict[str, Any]) -> list[TextContent] | CreateTaskResult: @@ -70,10 +69,10 @@ async def handle_call_tool(name: str, arguments: dict[str, Any]) -> list[TextCon task = await app.store.create_task(task_metadata) done_event = Event() - app.task_done_events[task.taskId] = done_event + app.task_done_events[task.task_id] = done_event async def do_work(): - async with task_execution(task.taskId, app.store) as task_ctx: + async with task_execution(task.task_id, app.store) as task_ctx: await task_ctx.complete(CallToolResult(content=[TextContent(type="text", text="Done")])) done_event.set() @@ -85,16 +84,16 @@ async def do_work(): @server.experimental.get_task() async def handle_get_task(request: GetTaskRequest) -> GetTaskResult: app = server.request_context.lifespan_context - task = await app.store.get_task(request.params.taskId) - assert task is not None, f"Test setup error: task {request.params.taskId} should exist" + task = await app.store.get_task(request.params.task_id) + assert task is not None, f"Test setup error: task {request.params.task_id} should exist" return GetTaskResult( - taskId=task.taskId, + task_id=task.task_id, status=task.status, - statusMessage=task.statusMessage, - createdAt=task.createdAt, - lastUpdatedAt=task.lastUpdatedAt, + status_message=task.status_message, + created_at=task.created_at, + last_updated_at=task.last_updated_at, ttl=task.ttl, - pollInterval=task.pollInterval, + poll_interval=task.poll_interval, ) # Set up streams @@ -134,18 +133,16 @@ async def run_server(app_context: AppContext): # Create a task create_result = await client_session.send_request( - ClientRequest( - CallToolRequest( - params=CallToolRequestParams( - name="test_tool", - arguments={}, - task=TaskMetadata(ttl=60000), - ) + CallToolRequest( + params=CallToolRequestParams( + name="test_tool", + arguments={}, + task=TaskMetadata(ttl=60000), ) ), CreateTaskResult, ) - task_id = create_result.task.taskId + task_id = create_result.task.task_id # Wait for task to complete await app_context.task_done_events[task_id].wait() @@ -153,7 +150,7 @@ async def run_server(app_context: AppContext): # Use session.experimental to get task status task_status = await client_session.experimental.get_task(task_id) - assert task_status.taskId == task_id + assert task_status.task_id == task_id assert task_status.status == "completed" tg.cancel_scope.cancel() @@ -167,7 +164,7 @@ async def test_session_experimental_get_task_result() -> None: @server.list_tools() async def list_tools(): - return [Tool(name="test_tool", description="Test", inputSchema={"type": "object"})] + return [Tool(name="test_tool", description="Test", input_schema={"type": "object"})] @server.call_tool() async def handle_call_tool(name: str, arguments: dict[str, Any]) -> 
list[TextContent] | CreateTaskResult: @@ -179,10 +176,10 @@ async def handle_call_tool(name: str, arguments: dict[str, Any]) -> list[TextCon task = await app.store.create_task(task_metadata) done_event = Event() - app.task_done_events[task.taskId] = done_event + app.task_done_events[task.task_id] = done_event async def do_work(): - async with task_execution(task.taskId, app.store) as task_ctx: + async with task_execution(task.task_id, app.store) as task_ctx: await task_ctx.complete( CallToolResult(content=[TextContent(type="text", text="Task result content")]) ) @@ -198,8 +195,8 @@ async def handle_get_task_result( request: GetTaskPayloadRequest, ) -> GetTaskPayloadResult: app = server.request_context.lifespan_context - result = await app.store.get_result(request.params.taskId) - assert result is not None, f"Test setup error: result for {request.params.taskId} should exist" + result = await app.store.get_result(request.params.task_id) + assert result is not None, f"Test setup error: result for {request.params.task_id} should exist" assert isinstance(result, CallToolResult) return GetTaskPayloadResult(**result.model_dump()) @@ -240,18 +237,16 @@ async def run_server(app_context: AppContext): # Create a task create_result = await client_session.send_request( - ClientRequest( - CallToolRequest( - params=CallToolRequestParams( - name="test_tool", - arguments={}, - task=TaskMetadata(ttl=60000), - ) + CallToolRequest( + params=CallToolRequestParams( + name="test_tool", + arguments={}, + task=TaskMetadata(ttl=60000), ) ), CreateTaskResult, ) - task_id = create_result.task.taskId + task_id = create_result.task.task_id # Wait for task to complete await app_context.task_done_events[task_id].wait() @@ -275,7 +270,7 @@ async def test_session_experimental_list_tasks() -> None: @server.list_tools() async def list_tools(): - return [Tool(name="test_tool", description="Test", inputSchema={"type": "object"})] + return [Tool(name="test_tool", description="Test", input_schema={"type": "object"})] @server.call_tool() async def handle_call_tool(name: str, arguments: dict[str, Any]) -> list[TextContent] | CreateTaskResult: @@ -287,10 +282,10 @@ async def handle_call_tool(name: str, arguments: dict[str, Any]) -> list[TextCon task = await app.store.create_task(task_metadata) done_event = Event() - app.task_done_events[task.taskId] = done_event + app.task_done_events[task.task_id] = done_event async def do_work(): - async with task_execution(task.taskId, app.store) as task_ctx: + async with task_execution(task.task_id, app.store) as task_ctx: await task_ctx.complete(CallToolResult(content=[TextContent(type="text", text="Done")])) done_event.set() @@ -303,7 +298,7 @@ async def do_work(): async def handle_list_tasks(request: ListTasksRequest) -> ListTasksResult: app = server.request_context.lifespan_context tasks_list, next_cursor = await app.store.list_tasks(cursor=request.params.cursor if request.params else None) - return ListTasksResult(tasks=tasks_list, nextCursor=next_cursor) + return ListTasksResult(tasks=tasks_list, next_cursor=next_cursor) # Set up streams server_to_client_send, server_to_client_receive = anyio.create_memory_object_stream[SessionMessage](10) @@ -343,18 +338,16 @@ async def run_server(app_context: AppContext): # Create two tasks for _ in range(2): create_result = await client_session.send_request( - ClientRequest( - CallToolRequest( - params=CallToolRequestParams( - name="test_tool", - arguments={}, - task=TaskMetadata(ttl=60000), - ) + CallToolRequest( + params=CallToolRequestParams( + 
name="test_tool", + arguments={}, + task=TaskMetadata(ttl=60000), ) ), CreateTaskResult, ) - await app_context.task_done_events[create_result.task.taskId].wait() + await app_context.task_done_events[create_result.task.task_id].wait() # Use TaskClient to list tasks list_result = await client_session.experimental.list_tasks() @@ -372,7 +365,7 @@ async def test_session_experimental_cancel_task() -> None: @server.list_tools() async def list_tools(): - return [Tool(name="test_tool", description="Test", inputSchema={"type": "object"})] + return [Tool(name="test_tool", description="Test", input_schema={"type": "object"})] @server.call_tool() async def handle_call_tool(name: str, arguments: dict[str, Any]) -> list[TextContent] | CreateTaskResult: @@ -390,32 +383,32 @@ async def handle_call_tool(name: str, arguments: dict[str, Any]) -> list[TextCon @server.experimental.get_task() async def handle_get_task(request: GetTaskRequest) -> GetTaskResult: app = server.request_context.lifespan_context - task = await app.store.get_task(request.params.taskId) - assert task is not None, f"Test setup error: task {request.params.taskId} should exist" + task = await app.store.get_task(request.params.task_id) + assert task is not None, f"Test setup error: task {request.params.task_id} should exist" return GetTaskResult( - taskId=task.taskId, + task_id=task.task_id, status=task.status, - statusMessage=task.statusMessage, - createdAt=task.createdAt, - lastUpdatedAt=task.lastUpdatedAt, + status_message=task.status_message, + created_at=task.created_at, + last_updated_at=task.last_updated_at, ttl=task.ttl, - pollInterval=task.pollInterval, + poll_interval=task.poll_interval, ) @server.experimental.cancel_task() async def handle_cancel_task(request: CancelTaskRequest) -> CancelTaskResult: app = server.request_context.lifespan_context - task = await app.store.get_task(request.params.taskId) - assert task is not None, f"Test setup error: task {request.params.taskId} should exist" - await app.store.update_task(request.params.taskId, status="cancelled") + task = await app.store.get_task(request.params.task_id) + assert task is not None, f"Test setup error: task {request.params.task_id} should exist" + await app.store.update_task(request.params.task_id, status="cancelled") # CancelTaskResult extends Task, so we need to return the updated task info - updated_task = await app.store.get_task(request.params.taskId) + updated_task = await app.store.get_task(request.params.task_id) assert updated_task is not None return CancelTaskResult( - taskId=updated_task.taskId, + task_id=updated_task.task_id, status=updated_task.status, - createdAt=updated_task.createdAt, - lastUpdatedAt=updated_task.lastUpdatedAt, + created_at=updated_task.created_at, + last_updated_at=updated_task.last_updated_at, ttl=updated_task.ttl, ) @@ -456,18 +449,16 @@ async def run_server(app_context: AppContext): # Create a task (but don't complete it) create_result = await client_session.send_request( - ClientRequest( - CallToolRequest( - params=CallToolRequestParams( - name="test_tool", - arguments={}, - task=TaskMetadata(ttl=60000), - ) + CallToolRequest( + params=CallToolRequestParams( + name="test_tool", + arguments={}, + task=TaskMetadata(ttl=60000), ) ), CreateTaskResult, ) - task_id = create_result.task.taskId + task_id = create_result.task.task_id # Verify task is working status_before = await client_session.experimental.get_task(task_id) diff --git a/tests/experimental/tasks/server/test_context.py b/tests/experimental/tasks/server/test_context.py index 
2f09ff1540..a0f1a190d2 100644 --- a/tests/experimental/tasks/server/test_context.py +++ b/tests/experimental/tasks/server/test_context.py @@ -15,8 +15,8 @@ async def test_task_context_properties() -> None: task = await store.create_task(metadata=TaskMetadata(ttl=60000)) ctx = TaskContext(task, store) - assert ctx.task_id == task.taskId - assert ctx.task.taskId == task.taskId + assert ctx.task_id == task.task_id + assert ctx.task.task_id == task.task_id assert ctx.task.status == "working" assert ctx.is_cancelled is False @@ -33,9 +33,9 @@ async def test_task_context_update_status() -> None: await ctx.update_status("Processing step 1...") # Check status message was updated - updated = await store.get_task(task.taskId) + updated = await store.get_task(task.task_id) assert updated is not None - assert updated.statusMessage == "Processing step 1..." + assert updated.status_message == "Processing step 1..." store.cleanup() @@ -51,12 +51,12 @@ async def test_task_context_complete() -> None: await ctx.complete(result) # Check task status - updated = await store.get_task(task.taskId) + updated = await store.get_task(task.task_id) assert updated is not None assert updated.status == "completed" # Check result is stored - stored_result = await store.get_result(task.taskId) + stored_result = await store.get_result(task.task_id) assert stored_result is not None store.cleanup() @@ -72,10 +72,10 @@ async def test_task_context_fail() -> None: await ctx.fail("Something went wrong!") # Check task status - updated = await store.get_task(task.taskId) + updated = await store.get_task(task.task_id) assert updated is not None assert updated.status == "failed" - assert updated.statusMessage == "Something went wrong!" + assert updated.status_message == "Something went wrong!" store.cleanup() @@ -101,13 +101,13 @@ def test_create_task_state_generates_id() -> None: task1 = create_task_state(TaskMetadata(ttl=60000)) task2 = create_task_state(TaskMetadata(ttl=60000)) - assert task1.taskId != task2.taskId + assert task1.task_id != task2.task_id def test_create_task_state_uses_provided_id() -> None: """create_task_state uses the provided task ID.""" task = create_task_state(TaskMetadata(ttl=60000), task_id="my-task-123") - assert task.taskId == "my-task-123" + assert task.task_id == "my-task-123" def test_create_task_state_null_ttl() -> None: @@ -119,7 +119,7 @@ def test_create_task_state_null_ttl() -> None: def test_create_task_state_has_created_at() -> None: """create_task_state sets createdAt timestamp.""" task = create_task_state(TaskMetadata(ttl=60000)) - assert task.createdAt is not None + assert task.created_at is not None @pytest.mark.anyio @@ -148,7 +148,7 @@ async def test_task_execution_auto_fails_on_exception() -> None: failed_task = await store.get_task("exec-fail-1") assert failed_task is not None assert failed_task.status == "failed" - assert "Oops!" in (failed_task.statusMessage or "") + assert "Oops!" 
in (failed_task.status_message or "") store.cleanup() diff --git a/tests/experimental/tasks/server/test_integration.py b/tests/experimental/tasks/server/test_integration.py index ba61dfcead..41cecc1295 100644 --- a/tests/experimental/tasks/server/test_integration.py +++ b/tests/experimental/tasks/server/test_integration.py @@ -30,7 +30,6 @@ CallToolRequest, CallToolRequestParams, CallToolResult, - ClientRequest, ClientResult, CreateTaskResult, GetTaskPayloadRequest, @@ -62,8 +61,7 @@ class AppContext: @pytest.mark.anyio async def test_task_lifecycle_with_task_execution() -> None: - """ - Test the complete task lifecycle using the task_execution pattern. + """Test the complete task lifecycle using the task_execution pattern. This demonstrates the recommended way to implement task-augmented tools: 1. Create task in store @@ -81,11 +79,11 @@ async def list_tools(): Tool( name="process_data", description="Process data asynchronously", - inputSchema={ + input_schema={ "type": "object", "properties": {"input": {"type": "string"}}, }, - execution=ToolExecution(taskSupport=TASK_REQUIRED), + execution=ToolExecution(task_support=TASK_REQUIRED), ) ] @@ -101,11 +99,11 @@ async def handle_call_tool(name: str, arguments: dict[str, Any]) -> list[TextCon # 2. Create event to signal completion (for testing) done_event = Event() - app.task_done_events[task.taskId] = done_event + app.task_done_events[task.task_id] = done_event # 3. Define work function using task_execution for safety async def do_work(): - async with task_execution(task.taskId, app.store) as task_ctx: + async with task_execution(task.task_id, app.store) as task_ctx: await task_ctx.update_status("Processing input...") # Simulate work input_value = arguments.get("input", "") @@ -126,16 +124,16 @@ async def do_work(): @server.experimental.get_task() async def handle_get_task(request: GetTaskRequest) -> GetTaskResult: app = server.request_context.lifespan_context - task = await app.store.get_task(request.params.taskId) - assert task is not None, f"Test setup error: task {request.params.taskId} should exist" + task = await app.store.get_task(request.params.task_id) + assert task is not None, f"Test setup error: task {request.params.task_id} should exist" return GetTaskResult( - taskId=task.taskId, + task_id=task.task_id, status=task.status, - statusMessage=task.statusMessage, - createdAt=task.createdAt, - lastUpdatedAt=task.lastUpdatedAt, + status_message=task.status_message, + created_at=task.created_at, + last_updated_at=task.last_updated_at, ttl=task.ttl, - pollInterval=task.pollInterval, + poll_interval=task.poll_interval, ) @server.experimental.get_task_result() @@ -143,8 +141,8 @@ async def handle_get_task_result( request: GetTaskPayloadRequest, ) -> GetTaskPayloadResult: app = server.request_context.lifespan_context - result = await app.store.get_result(request.params.taskId) - assert result is not None, f"Test setup error: result for {request.params.taskId} should exist" + result = await app.store.get_result(request.params.task_id) + assert result is not None, f"Test setup error: result for {request.params.task_id} should exist" assert isinstance(result, CallToolResult) # Return as GetTaskPayloadResult (which accepts extra fields) return GetTaskPayloadResult(**result.model_dump()) @@ -191,36 +189,34 @@ async def run_server(app_context: AppContext): # === Step 1: Send task-augmented tool call === create_result = await client_session.send_request( - ClientRequest( - CallToolRequest( - params=CallToolRequestParams( - name="process_data", - 
arguments={"input": "hello world"}, - task=TaskMetadata(ttl=60000), - ), - ) + CallToolRequest( + params=CallToolRequestParams( + name="process_data", + arguments={"input": "hello world"}, + task=TaskMetadata(ttl=60000), + ), ), CreateTaskResult, ) assert isinstance(create_result, CreateTaskResult) assert create_result.task.status == "working" - task_id = create_result.task.taskId + task_id = create_result.task.task_id # === Step 2: Wait for task to complete === await app_context.task_done_events[task_id].wait() task_status = await client_session.send_request( - ClientRequest(GetTaskRequest(params=GetTaskRequestParams(taskId=task_id))), + GetTaskRequest(params=GetTaskRequestParams(task_id=task_id)), GetTaskResult, ) - assert task_status.taskId == task_id + assert task_status.task_id == task_id assert task_status.status == "completed" # === Step 3: Retrieve the actual result === task_result = await client_session.send_request( - ClientRequest(GetTaskPayloadRequest(params=GetTaskPayloadRequestParams(taskId=task_id))), + GetTaskPayloadRequest(params=GetTaskPayloadRequestParams(task_id=task_id)), CallToolResult, ) @@ -245,7 +241,7 @@ async def list_tools(): Tool( name="failing_task", description="A task that fails", - inputSchema={"type": "object", "properties": {}}, + input_schema={"type": "object", "properties": {}}, ) ] @@ -260,10 +256,10 @@ async def handle_call_tool(name: str, arguments: dict[str, Any]) -> list[TextCon # Create event to signal completion (for testing) done_event = Event() - app.task_done_events[task.taskId] = done_event + app.task_done_events[task.task_id] = done_event async def do_failing_work(): - async with task_execution(task.taskId, app.store) as task_ctx: + async with task_execution(task.task_id, app.store) as task_ctx: await task_ctx.update_status("About to fail...") raise RuntimeError("Something went wrong!") # Note: complete() is never called, but task_execution @@ -279,16 +275,16 @@ async def do_failing_work(): @server.experimental.get_task() async def handle_get_task(request: GetTaskRequest) -> GetTaskResult: app = server.request_context.lifespan_context - task = await app.store.get_task(request.params.taskId) - assert task is not None, f"Test setup error: task {request.params.taskId} should exist" + task = await app.store.get_task(request.params.task_id) + assert task is not None, f"Test setup error: task {request.params.task_id} should exist" return GetTaskResult( - taskId=task.taskId, + task_id=task.task_id, status=task.status, - statusMessage=task.statusMessage, - createdAt=task.createdAt, - lastUpdatedAt=task.lastUpdatedAt, + status_message=task.status_message, + created_at=task.created_at, + last_updated_at=task.last_updated_at, ttl=task.ttl, - pollInterval=task.pollInterval, + poll_interval=task.poll_interval, ) # Set up streams @@ -328,30 +324,27 @@ async def run_server(app_context: AppContext): # Send task request create_result = await client_session.send_request( - ClientRequest( - CallToolRequest( - params=CallToolRequestParams( - name="failing_task", - arguments={}, - task=TaskMetadata(ttl=60000), - ), - ) + CallToolRequest( + params=CallToolRequestParams( + name="failing_task", + arguments={}, + task=TaskMetadata(ttl=60000), + ), ), CreateTaskResult, ) - task_id = create_result.task.taskId + task_id = create_result.task.task_id # Wait for task to complete (even though it fails) await app_context.task_done_events[task_id].wait() # Check that task was auto-failed task_status = await client_session.send_request( - 
ClientRequest(GetTaskRequest(params=GetTaskRequestParams(taskId=task_id))), - GetTaskResult, + GetTaskRequest(params=GetTaskRequestParams(task_id=task_id)), GetTaskResult ) assert task_status.status == "failed" - assert task_status.statusMessage == "Something went wrong!" + assert task_status.status_message == "Something went wrong!" tg.cancel_scope.cancel() diff --git a/tests/experimental/tasks/server/test_run_task_flow.py b/tests/experimental/tasks/server/test_run_task_flow.py index 7f680beb66..13c702a1cf 100644 --- a/tests/experimental/tasks/server/test_run_task_flow.py +++ b/tests/experimental/tasks/server/test_run_task_flow.py @@ -1,5 +1,4 @@ -""" -Tests for the simplified task API: enable_tasks() + run_task() +"""Tests for the simplified task API: enable_tasks() + run_task() This tests the recommended user flow: 1. server.experimental.enable_tasks() - one-line setup @@ -45,8 +44,7 @@ @pytest.mark.anyio async def test_run_task_basic_flow() -> None: - """ - Test the basic run_task flow without elicitation. + """Test the basic run_task flow without elicitation. 1. enable_tasks() sets up handlers 2. Client calls tool with task field @@ -69,8 +67,8 @@ async def list_tools() -> list[Tool]: Tool( name="simple_task", description="A simple task", - inputSchema={"type": "object", "properties": {"input": {"type": "string"}}}, - execution=ToolExecution(taskSupport=TASK_REQUIRED), + input_schema={"type": "object", "properties": {"input": {"type": "string"}}}, + execution=ToolExecution(task_support=TASK_REQUIRED), ) ] @@ -119,7 +117,7 @@ async def run_client() -> None: ) # Should get CreateTaskResult - task_id = result.task.taskId + task_id = result.task.task_id assert result.task.status == "working" # Wait for work to complete @@ -143,9 +141,7 @@ async def run_client() -> None: @pytest.mark.anyio async def test_run_task_auto_fails_on_exception() -> None: - """ - Test that run_task automatically fails the task when work raises. - """ + """Test that run_task automatically fails the task when work raises.""" server = Server("test-run-task-fail") server.experimental.enable_tasks() @@ -157,8 +153,8 @@ async def list_tools() -> list[Tool]: Tool( name="failing_task", description="A task that fails", - inputSchema={"type": "object"}, - execution=ToolExecution(taskSupport=TASK_REQUIRED), + input_schema={"type": "object"}, + execution=ToolExecution(task_support=TASK_REQUIRED), ) ] @@ -188,7 +184,7 @@ async def run_client() -> None: await client_session.initialize() result = await client_session.experimental.call_tool_as_task("failing_task", {}) - task_id = result.task.taskId + task_id = result.task.task_id # Wait for work to fail with anyio.fail_after(5): @@ -201,7 +197,7 @@ async def run_client() -> None: if task_status.status == "failed": # pragma: no branch break - assert "Something went wrong" in (task_status.statusMessage or "") + assert "Something went wrong" in (task_status.status_message or "") async with anyio.create_task_group() as tg: tg.start_soon(run_server) @@ -210,9 +206,7 @@ async def run_client() -> None: @pytest.mark.anyio async def test_enable_tasks_auto_registers_handlers() -> None: - """ - Test that enable_tasks() auto-registers get_task, list_tasks, cancel_task handlers. 
- """ + """Test that enable_tasks() auto-registers get_task, list_tasks, cancel_task handlers.""" server = Server("test-enable-tasks") # Before enable_tasks, no task capabilities @@ -227,6 +221,10 @@ async def test_enable_tasks_auto_registers_handlers() -> None: assert caps_after.tasks is not None assert caps_after.tasks.list is not None assert caps_after.tasks.cancel is not None + # Verify nested call capability is present + assert caps_after.tasks.requests is not None + assert caps_after.tasks.requests.tools is not None + assert caps_after.tasks.requests.tools.call is not None @pytest.mark.anyio @@ -359,8 +357,8 @@ async def list_tools() -> list[Tool]: Tool( name="task_with_immediate", description="A task with immediate response", - inputSchema={"type": "object"}, - execution=ToolExecution(taskSupport=TASK_REQUIRED), + input_schema={"type": "object"}, + execution=ToolExecution(task_support=TASK_REQUIRED), ) ] @@ -418,8 +416,8 @@ async def list_tools() -> list[Tool]: Tool( name="manual_complete_task", description="A task that manually completes", - inputSchema={"type": "object"}, - execution=ToolExecution(taskSupport=TASK_REQUIRED), + input_schema={"type": "object"}, + execution=ToolExecution(task_support=TASK_REQUIRED), ) ] @@ -453,7 +451,7 @@ async def run_client() -> None: await client_session.initialize() result = await client_session.experimental.call_tool_as_task("manual_complete_task", {}) - task_id = result.task.taskId + task_id = result.task.task_id with anyio.fail_after(5): await work_completed.wait() @@ -484,8 +482,8 @@ async def list_tools() -> list[Tool]: Tool( name="manual_cancel_task", description="A task that manually cancels then raises", - inputSchema={"type": "object"}, - execution=ToolExecution(taskSupport=TASK_REQUIRED), + input_schema={"type": "object"}, + execution=ToolExecution(task_support=TASK_REQUIRED), ) ] @@ -518,7 +516,7 @@ async def run_client() -> None: await client_session.initialize() result = await client_session.experimental.call_tool_as_task("manual_cancel_task", {}) - task_id = result.task.taskId + task_id = result.task.task_id with anyio.fail_after(5): await work_completed.wait() @@ -531,7 +529,7 @@ async def run_client() -> None: break # Task should still be failed (from manual fail, not auto-fail from exception) - assert status.statusMessage == "Manually failed" # Not "This error should not change status" + assert status.status_message == "Manually failed" # Not "This error should not change status" async with anyio.create_task_group() as tg: tg.start_soon(run_server) diff --git a/tests/experimental/tasks/server/test_server.py b/tests/experimental/tasks/server/test_server.py index 7209ed412a..cb8b737f28 100644 --- a/tests/experimental/tasks/server/test_server.py +++ b/tests/experimental/tasks/server/test_server.py @@ -26,7 +26,6 @@ CancelTaskRequest, CancelTaskRequestParams, CancelTaskResult, - ClientRequest, ClientResult, ErrorData, GetTaskPayloadRequest, @@ -36,7 +35,6 @@ GetTaskRequestParams, GetTaskResult, JSONRPCError, - JSONRPCMessage, JSONRPCNotification, JSONRPCResponse, ListTasksRequest, @@ -64,20 +62,20 @@ async def test_list_tasks_handler() -> None: now = datetime.now(timezone.utc) test_tasks = [ Task( - taskId="task-1", + task_id="task-1", status="working", - createdAt=now, - lastUpdatedAt=now, + created_at=now, + last_updated_at=now, ttl=60000, - pollInterval=1000, + poll_interval=1000, ), Task( - taskId="task-2", + task_id="task-2", status="completed", - createdAt=now, - lastUpdatedAt=now, + created_at=now, + last_updated_at=now, 
ttl=60000, - pollInterval=1000, + poll_interval=1000, ), ] @@ -90,10 +88,10 @@ async def handle_list_tasks(request: ListTasksRequest) -> ListTasksResult: result = await handler(request) assert isinstance(result, ServerResult) - assert isinstance(result.root, ListTasksResult) - assert len(result.root.tasks) == 2 - assert result.root.tasks[0].taskId == "task-1" - assert result.root.tasks[1].taskId == "task-2" + assert isinstance(result, ListTasksResult) + assert len(result.tasks) == 2 + assert result.tasks[0].task_id == "task-1" + assert result.tasks[1].task_id == "task-2" @pytest.mark.anyio @@ -105,25 +103,25 @@ async def test_get_task_handler() -> None: async def handle_get_task(request: GetTaskRequest) -> GetTaskResult: now = datetime.now(timezone.utc) return GetTaskResult( - taskId=request.params.taskId, + task_id=request.params.task_id, status="working", - createdAt=now, - lastUpdatedAt=now, + created_at=now, + last_updated_at=now, ttl=60000, - pollInterval=1000, + poll_interval=1000, ) handler = server.request_handlers[GetTaskRequest] request = GetTaskRequest( method="tasks/get", - params=GetTaskRequestParams(taskId="test-task-123"), + params=GetTaskRequestParams(task_id="test-task-123"), ) result = await handler(request) assert isinstance(result, ServerResult) - assert isinstance(result.root, GetTaskResult) - assert result.root.taskId == "test-task-123" - assert result.root.status == "working" + assert isinstance(result, GetTaskResult) + assert result.task_id == "test-task-123" + assert result.status == "working" @pytest.mark.anyio @@ -138,12 +136,12 @@ async def handle_get_task_result(request: GetTaskPayloadRequest) -> GetTaskPaylo handler = server.request_handlers[GetTaskPayloadRequest] request = GetTaskPayloadRequest( method="tasks/result", - params=GetTaskPayloadRequestParams(taskId="test-task-123"), + params=GetTaskPayloadRequestParams(task_id="test-task-123"), ) result = await handler(request) assert isinstance(result, ServerResult) - assert isinstance(result.root, GetTaskPayloadResult) + assert isinstance(result, GetTaskPayloadResult) @pytest.mark.anyio @@ -155,24 +153,24 @@ async def test_cancel_task_handler() -> None: async def handle_cancel_task(request: CancelTaskRequest) -> CancelTaskResult: now = datetime.now(timezone.utc) return CancelTaskResult( - taskId=request.params.taskId, + task_id=request.params.task_id, status="cancelled", - createdAt=now, - lastUpdatedAt=now, + created_at=now, + last_updated_at=now, ttl=60000, ) handler = server.request_handlers[CancelTaskRequest] request = CancelTaskRequest( method="tasks/cancel", - params=CancelTaskRequestParams(taskId="test-task-123"), + params=CancelTaskRequestParams(task_id="test-task-123"), ) result = await handler(request) assert isinstance(result, ServerResult) - assert isinstance(result.root, CancelTaskResult) - assert result.root.taskId == "test-task-123" - assert result.root.status == "cancelled" + assert isinstance(result, CancelTaskResult) + assert result.task_id == "test-task-123" + assert result.status == "cancelled" @pytest.mark.anyio @@ -232,20 +230,20 @@ async def list_tools(): Tool( name="quick_tool", description="Fast tool", - inputSchema={"type": "object", "properties": {}}, - execution=ToolExecution(taskSupport=TASK_FORBIDDEN), + input_schema={"type": "object", "properties": {}}, + execution=ToolExecution(task_support=TASK_FORBIDDEN), ), Tool( name="long_tool", description="Long running tool", - inputSchema={"type": "object", "properties": {}}, - execution=ToolExecution(taskSupport=TASK_REQUIRED), + 
input_schema={"type": "object", "properties": {}}, + execution=ToolExecution(task_support=TASK_REQUIRED), ), Tool( name="flexible_tool", description="Can be either", - inputSchema={"type": "object", "properties": {}}, - execution=ToolExecution(taskSupport=TASK_OPTIONAL), + input_schema={"type": "object", "properties": {}}, + execution=ToolExecution(task_support=TASK_OPTIONAL), ), ] @@ -254,15 +252,15 @@ async def list_tools(): result = await tools_handler(request) assert isinstance(result, ServerResult) - assert isinstance(result.root, ListToolsResult) - tools = result.root.tools + assert isinstance(result, ListToolsResult) + tools = result.tools assert tools[0].execution is not None - assert tools[0].execution.taskSupport == TASK_FORBIDDEN + assert tools[0].execution.task_support == TASK_FORBIDDEN assert tools[1].execution is not None - assert tools[1].execution.taskSupport == TASK_REQUIRED + assert tools[1].execution.task_support == TASK_REQUIRED assert tools[2].execution is not None - assert tools[2].execution.taskSupport == TASK_OPTIONAL + assert tools[2].execution.task_support == TASK_OPTIONAL @pytest.mark.anyio @@ -277,8 +275,8 @@ async def list_tools(): Tool( name="long_task", description="A long running task", - inputSchema={"type": "object", "properties": {}}, - execution=ToolExecution(taskSupport="optional"), + input_schema={"type": "object", "properties": {}}, + execution=ToolExecution(task_support="optional"), ) ] @@ -312,7 +310,8 @@ async def run_server(): async with anyio.create_task_group() as tg: async def handle_messages(): - async for message in server_session.incoming_messages: + # TODO(Marcelo): Drop the pragma once https://github.com/coveragepy/coveragepy/issues/1987 is fixed. + async for message in server_session.incoming_messages: # pragma: no cover await server._handle_message(message, server_session, {}, False) tg.start_soon(handle_messages) @@ -330,14 +329,12 @@ async def handle_messages(): # Call tool with task metadata await client_session.send_request( - ClientRequest( - CallToolRequest( - params=CallToolRequestParams( - name="long_task", - arguments={}, - task=TaskMetadata(ttl=60000), - ), - ) + CallToolRequest( + params=CallToolRequestParams( + name="long_task", + arguments={}, + task=TaskMetadata(ttl=60000), + ), ), CallToolResult, ) @@ -360,7 +357,7 @@ async def list_tools(): Tool( name="test_tool", description="Test tool", - inputSchema={"type": "object", "properties": {}}, + input_schema={"type": "object", "properties": {}}, ) ] @@ -391,8 +388,8 @@ async def run_server(): ), ) as server_session: async with anyio.create_task_group() as tg: - - async def handle_messages(): + # TODO(Marcelo): Drop the pragma once https://github.com/coveragepy/coveragepy/issues/1987 is fixed. 
+ async def handle_messages(): # pragma: no cover async for message in server_session.incoming_messages: await server._handle_message(message, server_session, {}, False) @@ -411,24 +408,14 @@ async def handle_messages(): # Call without task metadata await client_session.send_request( - ClientRequest( - CallToolRequest( - params=CallToolRequestParams(name="test_tool", arguments={}), - ) - ), + CallToolRequest(params=CallToolRequestParams(name="test_tool", arguments={})), CallToolResult, ) # Call with task metadata await client_session.send_request( - ClientRequest( - CallToolRequest( - params=CallToolRequestParams( - name="test_tool", - arguments={}, - task=TaskMetadata(ttl=60000), - ), - ) + CallToolRequest( + params=CallToolRequestParams(name="test_tool", arguments={}, task=TaskMetadata(ttl=60000)), ), CallToolResult, ) @@ -507,38 +494,35 @@ async def run_server() -> None: task = await store.create_task(TaskMetadata(ttl=60000)) # Test list_tasks (default handler) - list_result = await client_session.send_request( - ClientRequest(ListTasksRequest()), - ListTasksResult, - ) + list_result = await client_session.send_request(ListTasksRequest(), ListTasksResult) assert len(list_result.tasks) == 1 - assert list_result.tasks[0].taskId == task.taskId + assert list_result.tasks[0].task_id == task.task_id # Test get_task (default handler - found) get_result = await client_session.send_request( - ClientRequest(GetTaskRequest(params=GetTaskRequestParams(taskId=task.taskId))), + GetTaskRequest(params=GetTaskRequestParams(task_id=task.task_id)), GetTaskResult, ) - assert get_result.taskId == task.taskId + assert get_result.task_id == task.task_id assert get_result.status == "working" # Test get_task (default handler - not found path) with pytest.raises(McpError, match="not found"): await client_session.send_request( - ClientRequest(GetTaskRequest(params=GetTaskRequestParams(taskId="nonexistent-task"))), + GetTaskRequest(params=GetTaskRequestParams(task_id="nonexistent-task")), GetTaskResult, ) # Create a completed task to test get_task_result completed_task = await store.create_task(TaskMetadata(ttl=60000)) await store.store_result( - completed_task.taskId, CallToolResult(content=[TextContent(type="text", text="Test result")]) + completed_task.task_id, CallToolResult(content=[TextContent(type="text", text="Test result")]) ) - await store.update_task(completed_task.taskId, status="completed") + await store.update_task(completed_task.task_id, status="completed") # Test get_task_result (default handler) payload_result = await client_session.send_request( - ClientRequest(GetTaskPayloadRequest(params=GetTaskPayloadRequestParams(taskId=completed_task.taskId))), + GetTaskPayloadRequest(params=GetTaskPayloadRequestParams(task_id=completed_task.task_id)), GetTaskPayloadResult, ) # The result should have the related-task metadata @@ -547,10 +531,9 @@ async def run_server() -> None: # Test cancel_task (default handler) cancel_result = await client_session.send_request( - ClientRequest(CancelTaskRequest(params=CancelTaskRequestParams(taskId=task.taskId))), - CancelTaskResult, + CancelTaskRequest(params=CancelTaskRequestParams(task_id=task.task_id)), CancelTaskResult ) - assert cancel_result.taskId == task.taskId + assert cancel_result.task_id == task.task_id assert cancel_result.status == "cancelled" tg.cancel_scope.cancel() @@ -566,16 +549,12 @@ async def test_build_elicit_form_request() -> None: async with ServerSession( client_to_server_receive, server_to_client_send, - InitializationOptions( - 
server_name="test-server", - server_version="1.0.0", - capabilities=ServerCapabilities(), - ), + InitializationOptions(server_name="test-server", server_version="1.0.0", capabilities=ServerCapabilities()), ) as server_session: # Test without task_id request = server_session._build_elicit_form_request( message="Test message", - requestedSchema={"type": "object", "properties": {"answer": {"type": "string"}}}, + requested_schema={"type": "object", "properties": {"answer": {"type": "string"}}}, ) assert request.method == "elicitation/create" assert request.params is not None @@ -584,7 +563,7 @@ async def test_build_elicit_form_request() -> None: # Test with related_task_id (adds related-task metadata) request_with_task = server_session._build_elicit_form_request( message="Task message", - requestedSchema={"type": "object"}, + requested_schema={"type": "object"}, related_task_id="test-task-123", ) assert request_with_task.method == "elicitation/create" @@ -611,11 +590,7 @@ async def test_build_elicit_url_request() -> None: async with ServerSession( client_to_server_receive, server_to_client_send, - InitializationOptions( - server_name="test-server", - server_version="1.0.0", - capabilities=ServerCapabilities(), - ), + InitializationOptions(server_name="test-server", server_version="1.0.0", capabilities=ServerCapabilities()), ) as server_session: # Test without related_task_id request = server_session._build_elicit_url_request( @@ -721,7 +696,7 @@ async def test_send_message() -> None: # Create a test message notification = JSONRPCNotification(jsonrpc="2.0", method="test/notification") message = SessionMessage( - message=JSONRPCMessage(notification), + message=notification, metadata=ServerMessageMetadata(related_request_id="test-req-1"), ) @@ -730,8 +705,8 @@ async def test_send_message() -> None: # Verify it was sent to the stream received = await server_to_client_receive.receive() - assert isinstance(received.message.root, JSONRPCNotification) - assert received.message.root.method == "test/notification" + assert isinstance(received.message, JSONRPCNotification) + assert received.message.method == "test/notification" finally: # pragma: no cover await server_to_client_send.aclose() await server_to_client_receive.aclose() @@ -773,7 +748,7 @@ def route_error(self, request_id: str | int, error: ErrorData) -> bool: # Simulate receiving a response from client response = JSONRPCResponse(jsonrpc="2.0", id="test-req-1", result={"status": "ok"}) - message = SessionMessage(message=JSONRPCMessage(response)) + message = SessionMessage(message=response) # Send from "client" side await client_to_server_send.send(message) @@ -828,7 +803,7 @@ def route_error(self, request_id: str | int, error: ErrorData) -> bool: # Simulate receiving an error response from client error_data = ErrorData(code=INVALID_REQUEST, message="Test error") error_response = JSONRPCError(jsonrpc="2.0", id="test-req-2", error=error_data) - message = SessionMessage(message=JSONRPCMessage(error_response)) + message = SessionMessage(message=error_response) # Send from "client" side await client_to_server_send.send(message) @@ -891,7 +866,7 @@ def route_error(self, request_id: str | int, error: ErrorData) -> bool: # Send a response - should skip first router and be handled by second response = JSONRPCResponse(jsonrpc="2.0", id="test-req-1", result={"status": "ok"}) - message = SessionMessage(message=JSONRPCMessage(response)) + message = SessionMessage(message=response) await client_to_server_send.send(message) with anyio.fail_after(5): @@ -950,7 
+925,7 @@ def route_error(self, request_id: str | int, error: ErrorData) -> bool: # Send an error - should skip first router and be handled by second error_data = ErrorData(code=INVALID_REQUEST, message="Test error") error_response = JSONRPCError(jsonrpc="2.0", id="test-req-2", error=error_data) - message = SessionMessage(message=JSONRPCMessage(error_response)) + message = SessionMessage(message=error_response) await client_to_server_send.send(message) with anyio.fail_after(5): diff --git a/tests/experimental/tasks/server/test_server_task_context.py b/tests/experimental/tasks/server/test_server_task_context.py index 3d6b16f482..0fe563a75c 100644 --- a/tests/experimental/tasks/server/test_server_task_context.py +++ b/tests/experimental/tasks/server/test_server_task_context.py @@ -45,7 +45,7 @@ async def test_server_task_context_properties() -> None: ) assert ctx.task_id == "test-123" - assert ctx.task.taskId == "test-123" + assert ctx.task.task_id == "test-123" assert ctx.is_cancelled is False store.cleanup() @@ -181,7 +181,7 @@ async def test_elicit_raises_when_client_lacks_capability() -> None: ) with pytest.raises(McpError) as exc_info: - await ctx.elicit(message="Test?", requestedSchema={"type": "object"}) + await ctx.elicit(message="Test?", requested_schema={"type": "object"}) assert "elicitation capability" in exc_info.value.error.message mock_session.check_client_capability.assert_called_once() @@ -232,7 +232,7 @@ async def test_elicit_raises_without_handler() -> None: ) with pytest.raises(RuntimeError, match="handler is required"): - await ctx.elicit(message="Test?", requestedSchema={"type": "object"}) + await ctx.elicit(message="Test?", requested_schema={"type": "object"}) store.cleanup() @@ -320,22 +320,22 @@ async def run_elicit() -> None: nonlocal elicit_result elicit_result = await ctx.elicit( message="Test?", - requestedSchema={"type": "object"}, + requested_schema={"type": "object"}, ) async with anyio.create_task_group() as tg: tg.start_soon(run_elicit) # Wait for request to be queued - await queue.wait_for_message(task.taskId) + await queue.wait_for_message(task.task_id) # Verify task is in input_required status - updated_task = await store.get_task(task.taskId) + updated_task = await store.get_task(task.task_id) assert updated_task is not None assert updated_task.status == "input_required" # Dequeue and simulate response - msg = await queue.dequeue(task.taskId) + msg = await queue.dequeue(task.task_id) assert msg is not None assert msg.resolver is not None @@ -348,7 +348,7 @@ async def run_elicit() -> None: assert elicit_result.content == {"name": "Alice"} # Verify task is back to working - final_task = await store.get_task(task.taskId) + final_task = await store.get_task(task.task_id) assert final_task is not None assert final_task.status == "working" @@ -396,15 +396,15 @@ async def run_elicit_url() -> None: tg.start_soon(run_elicit_url) # Wait for request to be queued - await queue.wait_for_message(task.taskId) + await queue.wait_for_message(task.task_id) # Verify task is in input_required status - updated_task = await store.get_task(task.taskId) + updated_task = await store.get_task(task.task_id) assert updated_task is not None assert updated_task.status == "input_required" # Dequeue and simulate response - msg = await queue.dequeue(task.taskId) + msg = await queue.dequeue(task.task_id) assert msg is not None assert msg.resolver is not None @@ -416,7 +416,7 @@ async def run_elicit_url() -> None: assert elicit_result.action == "accept" # Verify task is back to working - 
final_task = await store.get_task(task.taskId) + final_task = await store.get_task(task.task_id) assert final_task is not None assert final_task.status == "working" @@ -463,15 +463,15 @@ async def run_sampling() -> None: tg.start_soon(run_sampling) # Wait for request to be queued - await queue.wait_for_message(task.taskId) + await queue.wait_for_message(task.task_id) # Verify task is in input_required status - updated_task = await store.get_task(task.taskId) + updated_task = await store.get_task(task.task_id) assert updated_task is not None assert updated_task.status == "input_required" # Dequeue and simulate response - msg = await queue.dequeue(task.taskId) + msg = await queue.dequeue(task.task_id) assert msg is not None assert msg.resolver is not None @@ -491,7 +491,7 @@ async def run_sampling() -> None: assert sampling_result.model == "test-model" # Verify task is back to working - final_task = await store.get_task(task.taskId) + final_task = await store.get_task(task.task_id) assert final_task is not None assert final_task.status == "working" @@ -534,7 +534,7 @@ async def do_elicit() -> None: try: await ctx.elicit( message="Test?", - requestedSchema={"type": "object"}, + requested_schema={"type": "object"}, ) except anyio.get_cancelled_exc_class(): cancelled_error_raised = True @@ -543,15 +543,15 @@ async def do_elicit() -> None: tg.start_soon(do_elicit) # Wait for request to be queued - await queue.wait_for_message(task.taskId) + await queue.wait_for_message(task.task_id) # Verify task is in input_required status - updated_task = await store.get_task(task.taskId) + updated_task = await store.get_task(task.task_id) assert updated_task is not None assert updated_task.status == "input_required" # Get the queued message and set cancellation exception on its resolver - msg = await queue.dequeue(task.taskId) + msg = await queue.dequeue(task.task_id) assert msg is not None assert msg.resolver is not None @@ -559,7 +559,7 @@ async def do_elicit() -> None: msg.resolver.set_exception(asyncio.CancelledError()) # Verify task is back to working after cancellation - final_task = await store.get_task(task.taskId) + final_task = await store.get_task(task.task_id) assert final_task is not None assert final_task.status == "working" assert cancelled_error_raised @@ -612,15 +612,15 @@ async def do_sampling() -> None: tg.start_soon(do_sampling) # Wait for request to be queued - await queue.wait_for_message(task.taskId) + await queue.wait_for_message(task.task_id) # Verify task is in input_required status - updated_task = await store.get_task(task.taskId) + updated_task = await store.get_task(task.task_id) assert updated_task is not None assert updated_task.status == "input_required" # Get the queued message and set cancellation exception on its resolver - msg = await queue.dequeue(task.taskId) + msg = await queue.dequeue(task.task_id) assert msg is not None assert msg.resolver is not None @@ -628,7 +628,7 @@ async def do_sampling() -> None: msg.resolver.set_exception(asyncio.CancelledError()) # Verify task is back to working after cancellation - final_task = await store.get_task(task.taskId) + final_task = await store.get_task(task.task_id) assert final_task is not None assert final_task.status == "working" assert cancelled_error_raised @@ -646,7 +646,7 @@ async def test_elicit_as_task_raises_without_handler() -> None: # Create mock session with proper client capabilities mock_session = Mock() mock_session.client_params = InitializeRequestParams( - protocolVersion="2025-01-01", + 
protocol_version="2025-01-01", capabilities=ClientCapabilities( tasks=ClientTasksCapability( requests=ClientTasksRequestsCapability( @@ -654,7 +654,7 @@ async def test_elicit_as_task_raises_without_handler() -> None: ) ) ), - clientInfo=Implementation(name="test", version="1.0"), + client_info=Implementation(name="test", version="1.0"), ) ctx = ServerTaskContext( @@ -666,7 +666,7 @@ async def test_elicit_as_task_raises_without_handler() -> None: ) with pytest.raises(RuntimeError, match="handler is required for elicit_as_task"): - await ctx.elicit_as_task(message="Test?", requestedSchema={"type": "object"}) + await ctx.elicit_as_task(message="Test?", requested_schema={"type": "object"}) store.cleanup() @@ -681,15 +681,15 @@ async def test_create_message_as_task_raises_without_handler() -> None: # Create mock session with proper client capabilities mock_session = Mock() mock_session.client_params = InitializeRequestParams( - protocolVersion="2025-01-01", + protocol_version="2025-01-01", capabilities=ClientCapabilities( tasks=ClientTasksCapability( requests=ClientTasksRequestsCapability( - sampling=TasksSamplingCapability(createMessage=TasksCreateMessageCapability()) + sampling=TasksSamplingCapability(create_message=TasksCreateMessageCapability()) ) ) ), - clientInfo=Implementation(name="test", version="1.0"), + client_info=Implementation(name="test", version="1.0"), ) ctx = ServerTaskContext( diff --git a/tests/experimental/tasks/server/test_store.py b/tests/experimental/tasks/server/test_store.py index 2eac31dfe6..d6f297e6c1 100644 --- a/tests/experimental/tasks/server/test_store.py +++ b/tests/experimental/tasks/server/test_store.py @@ -24,13 +24,13 @@ async def test_create_and_get(store: InMemoryTaskStore) -> None: """Test InMemoryTaskStore create and get operations.""" task = await store.create_task(metadata=TaskMetadata(ttl=60000)) - assert task.taskId is not None + assert task.task_id is not None assert task.status == "working" assert task.ttl == 60000 - retrieved = await store.get_task(task.taskId) + retrieved = await store.get_task(task.task_id) assert retrieved is not None - assert retrieved.taskId == task.taskId + assert retrieved.task_id == task.task_id assert retrieved.status == "working" @@ -42,12 +42,12 @@ async def test_create_with_custom_id(store: InMemoryTaskStore) -> None: task_id="my-custom-id", ) - assert task.taskId == "my-custom-id" + assert task.task_id == "my-custom-id" assert task.status == "working" retrieved = await store.get_task("my-custom-id") assert retrieved is not None - assert retrieved.taskId == "my-custom-id" + assert retrieved.task_id == "my-custom-id" @pytest.mark.anyio @@ -71,15 +71,15 @@ async def test_update_status(store: InMemoryTaskStore) -> None: """Test InMemoryTaskStore status updates.""" task = await store.create_task(metadata=TaskMetadata(ttl=60000)) - updated = await store.update_task(task.taskId, status="completed", status_message="All done!") + updated = await store.update_task(task.task_id, status="completed", status_message="All done!") assert updated.status == "completed" - assert updated.statusMessage == "All done!" + assert updated.status_message == "All done!" - retrieved = await store.get_task(task.taskId) + retrieved = await store.get_task(task.task_id) assert retrieved is not None assert retrieved.status == "completed" - assert retrieved.statusMessage == "All done!" + assert retrieved.status_message == "All done!" 
@pytest.mark.anyio @@ -96,10 +96,10 @@ async def test_store_and_get_result(store: InMemoryTaskStore) -> None: # Store result result = CallToolResult(content=[TextContent(type="text", text="Result data")]) - await store.store_result(task.taskId, result) + await store.store_result(task.task_id, result) # Retrieve result - retrieved_result = await store.get_result(task.taskId) + retrieved_result = await store.get_result(task.task_id) assert retrieved_result == result @@ -114,7 +114,7 @@ async def test_get_result_nonexistent_returns_none(store: InMemoryTaskStore) -> async def test_get_result_no_result_returns_none(store: InMemoryTaskStore) -> None: """Test that getting result when none stored returns None.""" task = await store.create_task(metadata=TaskMetadata(ttl=60000)) - result = await store.get_result(task.taskId) + result = await store.get_result(task.task_id) assert result is None @@ -172,14 +172,14 @@ async def test_delete_task(store: InMemoryTaskStore) -> None: """Test InMemoryTaskStore delete operation.""" task = await store.create_task(metadata=TaskMetadata(ttl=60000)) - deleted = await store.delete_task(task.taskId) + deleted = await store.delete_task(task.task_id) assert deleted is True - retrieved = await store.get_task(task.taskId) + retrieved = await store.get_task(task.task_id) assert retrieved is None # Delete non-existent - deleted = await store.delete_task(task.taskId) + deleted = await store.delete_task(task.task_id) assert deleted is False @@ -210,7 +210,7 @@ async def test_create_task_with_null_ttl(store: InMemoryTaskStore) -> None: assert task.ttl is None # Task should persist (not expire) - retrieved = await store.get_task(task.taskId) + retrieved = await store.get_task(task.task_id) assert retrieved is not None @@ -221,19 +221,19 @@ async def test_task_expiration_cleanup(store: InMemoryTaskStore) -> None: task = await store.create_task(metadata=TaskMetadata(ttl=1)) # 1ms TTL # Manually force the expiry to be in the past - stored = store._tasks.get(task.taskId) + stored = store._tasks.get(task.task_id) assert stored is not None stored.expires_at = datetime.now(timezone.utc) - timedelta(seconds=10) # Task should still exist in internal dict but be expired - assert task.taskId in store._tasks + assert task.task_id in store._tasks # Any access operation should clean up expired tasks # list_tasks triggers cleanup tasks, _ = await store.list_tasks() # Expired task should be cleaned up - assert task.taskId not in store._tasks + assert task.task_id not in store._tasks assert len(tasks) == 0 @@ -244,17 +244,17 @@ async def test_task_with_null_ttl_never_expires(store: InMemoryTaskStore) -> Non task = await store.create_task(metadata=TaskMetadata(ttl=None)) # Verify internal storage has no expiry - stored = store._tasks.get(task.taskId) + stored = store._tasks.get(task.task_id) assert stored is not None assert stored.expires_at is None # Access operations should NOT remove this task await store.list_tasks() - await store.get_task(task.taskId) + await store.get_task(task.task_id) # Task should still exist - assert task.taskId in store._tasks - retrieved = await store.get_task(task.taskId) + assert task.task_id in store._tasks + retrieved = await store.get_task(task.task_id) assert retrieved is not None @@ -265,13 +265,13 @@ async def test_terminal_task_ttl_reset(store: InMemoryTaskStore) -> None: task = await store.create_task(metadata=TaskMetadata(ttl=60000)) # 60s # Get the initial expiry - stored = store._tasks.get(task.taskId) + stored = store._tasks.get(task.task_id) assert 
stored is not None initial_expiry = stored.expires_at assert initial_expiry is not None # Update to terminal state (completed) - await store.update_task(task.taskId, status="completed") + await store.update_task(task.task_id, status="completed") # Expiry should be reset to a new time (from now + TTL) new_expiry = stored.expires_at @@ -291,16 +291,16 @@ async def test_terminal_status_transition_rejected(store: InMemoryTaskStore) -> task = await store.create_task(metadata=TaskMetadata(ttl=60000)) # Move to terminal state - await store.update_task(task.taskId, status=terminal_status) + await store.update_task(task.task_id, status=terminal_status) # Attempting to transition to any other status should raise with pytest.raises(ValueError, match="Cannot transition from terminal status"): - await store.update_task(task.taskId, status="working") + await store.update_task(task.task_id, status="working") # Also test transitioning to another terminal state other_terminal = "failed" if terminal_status != "failed" else "completed" with pytest.raises(ValueError, match="Cannot transition from terminal status"): - await store.update_task(task.taskId, status=other_terminal) + await store.update_task(task.task_id, status=other_terminal) @pytest.mark.anyio @@ -310,15 +310,15 @@ async def test_terminal_status_allows_same_status(store: InMemoryTaskStore) -> N This is not a transition, so it should be allowed (no-op). """ task = await store.create_task(metadata=TaskMetadata(ttl=60000)) - await store.update_task(task.taskId, status="completed") + await store.update_task(task.task_id, status="completed") # Setting the same status should not raise - updated = await store.update_task(task.taskId, status="completed") + updated = await store.update_task(task.task_id, status="completed") assert updated.status == "completed" # Updating just the message should also work - updated = await store.update_task(task.taskId, status_message="Updated message") - assert updated.statusMessage == "Updated message" + updated = await store.update_task(task.task_id, status_message="Updated message") + assert updated.status_message == "Updated message" @pytest.mark.anyio @@ -334,13 +334,13 @@ async def test_cancel_task_succeeds_for_working_task(store: InMemoryTaskStore) - task = await store.create_task(metadata=TaskMetadata(ttl=60000)) assert task.status == "working" - result = await cancel_task(store, task.taskId) + result = await cancel_task(store, task.task_id) - assert result.taskId == task.taskId + assert result.task_id == task.task_id assert result.status == "cancelled" # Verify store is updated - retrieved = await store.get_task(task.taskId) + retrieved = await store.get_task(task.task_id) assert retrieved is not None assert retrieved.status == "cancelled" @@ -359,10 +359,10 @@ async def test_cancel_task_rejects_nonexistent_task(store: InMemoryTaskStore) -> async def test_cancel_task_rejects_completed_task(store: InMemoryTaskStore) -> None: """Test cancel_task raises McpError with INVALID_PARAMS for completed task.""" task = await store.create_task(metadata=TaskMetadata(ttl=60000)) - await store.update_task(task.taskId, status="completed") + await store.update_task(task.task_id, status="completed") with pytest.raises(McpError) as exc_info: - await cancel_task(store, task.taskId) + await cancel_task(store, task.task_id) assert exc_info.value.error.code == INVALID_PARAMS assert "terminal state 'completed'" in exc_info.value.error.message @@ -372,10 +372,10 @@ async def test_cancel_task_rejects_completed_task(store: 
InMemoryTaskStore) -> N async def test_cancel_task_rejects_failed_task(store: InMemoryTaskStore) -> None: """Test cancel_task raises McpError with INVALID_PARAMS for failed task.""" task = await store.create_task(metadata=TaskMetadata(ttl=60000)) - await store.update_task(task.taskId, status="failed") + await store.update_task(task.task_id, status="failed") with pytest.raises(McpError) as exc_info: - await cancel_task(store, task.taskId) + await cancel_task(store, task.task_id) assert exc_info.value.error.code == INVALID_PARAMS assert "terminal state 'failed'" in exc_info.value.error.message @@ -385,10 +385,10 @@ async def test_cancel_task_rejects_failed_task(store: InMemoryTaskStore) -> None async def test_cancel_task_rejects_already_cancelled_task(store: InMemoryTaskStore) -> None: """Test cancel_task raises McpError with INVALID_PARAMS for already cancelled task.""" task = await store.create_task(metadata=TaskMetadata(ttl=60000)) - await store.update_task(task.taskId, status="cancelled") + await store.update_task(task.task_id, status="cancelled") with pytest.raises(McpError) as exc_info: - await cancel_task(store, task.taskId) + await cancel_task(store, task.task_id) assert exc_info.value.error.code == INVALID_PARAMS assert "terminal state 'cancelled'" in exc_info.value.error.message @@ -398,9 +398,9 @@ async def test_cancel_task_rejects_already_cancelled_task(store: InMemoryTaskSto async def test_cancel_task_succeeds_for_input_required_task(store: InMemoryTaskStore) -> None: """Test cancel_task helper succeeds for a task in input_required status.""" task = await store.create_task(metadata=TaskMetadata(ttl=60000)) - await store.update_task(task.taskId, status="input_required") + await store.update_task(task.task_id, status="input_required") - result = await cancel_task(store, task.taskId) + result = await cancel_task(store, task.task_id) - assert result.taskId == task.taskId + assert result.task_id == task.task_id assert result.status == "cancelled" diff --git a/tests/experimental/tasks/server/test_task_result_handler.py b/tests/experimental/tasks/server/test_task_result_handler.py index db5b9edc70..ed6c296b73 100644 --- a/tests/experimental/tasks/server/test_task_result_handler.py +++ b/tests/experimental/tasks/server/test_task_result_handler.py @@ -53,13 +53,13 @@ async def test_handle_returns_result_for_completed_task( """Test that handle() returns the stored result for a completed task.""" task = await store.create_task(TaskMetadata(ttl=60000), task_id="test-task") result = CallToolResult(content=[TextContent(type="text", text="Done!")]) - await store.store_result(task.taskId, result) - await store.update_task(task.taskId, status="completed") + await store.store_result(task.task_id, result) + await store.update_task(task.task_id, status="completed") mock_session = Mock() mock_session.send_message = AsyncMock() - request = GetTaskPayloadRequest(params=GetTaskPayloadRequestParams(taskId=task.taskId)) + request = GetTaskPayloadRequest(params=GetTaskPayloadRequestParams(task_id=task.task_id)) response = await handler.handle(request, mock_session, "req-1") assert response is not None @@ -73,7 +73,7 @@ async def test_handle_raises_for_nonexistent_task( ) -> None: """Test that handle() raises McpError for nonexistent task.""" mock_session = Mock() - request = GetTaskPayloadRequest(params=GetTaskPayloadRequestParams(taskId="nonexistent")) + request = GetTaskPayloadRequest(params=GetTaskPayloadRequestParams(task_id="nonexistent")) with pytest.raises(McpError) as exc_info: await 
handler.handle(request, mock_session, "req-1") @@ -87,12 +87,12 @@ async def test_handle_returns_empty_result_when_no_result_stored( ) -> None: """Test that handle() returns minimal result when task completed without stored result.""" task = await store.create_task(TaskMetadata(ttl=60000), task_id="test-task") - await store.update_task(task.taskId, status="completed") + await store.update_task(task.task_id, status="completed") mock_session = Mock() mock_session.send_message = AsyncMock() - request = GetTaskPayloadRequest(params=GetTaskPayloadRequestParams(taskId=task.taskId)) + request = GetTaskPayloadRequest(params=GetTaskPayloadRequestParams(task_id=task.task_id)) response = await handler.handle(request, mock_session, "req-1") assert response is not None @@ -116,8 +116,8 @@ async def test_handle_delivers_queued_messages( params={}, ), ) - await queue.enqueue(task.taskId, queued_msg) - await store.update_task(task.taskId, status="completed") + await queue.enqueue(task.task_id, queued_msg) + await store.update_task(task.task_id, status="completed") sent_messages: list[SessionMessage] = [] @@ -127,7 +127,7 @@ async def track_send(msg: SessionMessage) -> None: mock_session = Mock() mock_session.send_message = track_send - request = GetTaskPayloadRequest(params=GetTaskPayloadRequestParams(taskId=task.taskId)) + request = GetTaskPayloadRequest(params=GetTaskPayloadRequestParams(task_id=task.task_id)) await handler.handle(request, mock_session, "req-1") assert len(sent_messages) == 1 @@ -143,7 +143,7 @@ async def test_handle_waits_for_task_completion( mock_session = Mock() mock_session.send_message = AsyncMock() - request = GetTaskPayloadRequest(params=GetTaskPayloadRequestParams(taskId=task.taskId)) + request = GetTaskPayloadRequest(params=GetTaskPayloadRequestParams(task_id=task.task_id)) result_holder: list[GetTaskPayloadResult | None] = [None] async def run_handle() -> None: @@ -153,11 +153,11 @@ async def run_handle() -> None: tg.start_soon(run_handle) # Wait for handler to start waiting (event gets created when wait starts) - while task.taskId not in store._update_events: + while task.task_id not in store._update_events: await anyio.sleep(0) - await store.store_result(task.taskId, CallToolResult(content=[TextContent(type="text", text="Done")])) - await store.update_task(task.taskId, status="completed") + await store.store_result(task.task_id, CallToolResult(content=[TextContent(type="text", text="Done")])) + await store.update_task(task.task_id, status="completed") assert result_holder[0] is not None @@ -248,12 +248,12 @@ async def test_deliver_registers_resolver_for_request_messages( resolver=resolver, original_request_id="inner-req-1", ) - await queue.enqueue(task.taskId, queued_msg) + await queue.enqueue(task.task_id, queued_msg) mock_session = Mock() mock_session.send_message = AsyncMock() - await handler._deliver_queued_messages(task.taskId, mock_session, "outer-req-1") + await handler._deliver_queued_messages(task.task_id, mock_session, "outer-req-1") assert "inner-req-1" in handler._pending_requests assert handler._pending_requests["inner-req-1"] is resolver @@ -278,12 +278,12 @@ async def test_deliver_skips_resolver_registration_when_no_original_id( resolver=resolver, original_request_id=None, # No original request ID ) - await queue.enqueue(task.taskId, queued_msg) + await queue.enqueue(task.task_id, queued_msg) mock_session = Mock() mock_session.send_message = AsyncMock() - await handler._deliver_queued_messages(task.taskId, mock_session, "outer-req-1") + await 
handler._deliver_queued_messages(task.task_id, mock_session, "outer-req-1") # Resolver should NOT be registered since original_request_id is None assert len(handler._pending_requests) == 0 @@ -307,10 +307,10 @@ async def failing_wait(task_id: str) -> None: # Queue a message to unblock the race via the queue path async def enqueue_later() -> None: # Wait for queue to start waiting (event gets created when wait starts) - while task.taskId not in queue._events: + while task.task_id not in queue._events: await anyio.sleep(0) await queue.enqueue( - task.taskId, + task.task_id, QueuedMessage( type="notification", message=JSONRPCRequest( @@ -325,7 +325,7 @@ async def enqueue_later() -> None: async with anyio.create_task_group() as tg: tg.start_soon(enqueue_later) # This should complete via the queue path even though store raises - await handler._wait_for_task_update(task.taskId) + await handler._wait_for_task_update(task.task_id) @pytest.mark.anyio @@ -344,11 +344,11 @@ async def failing_wait(task_id: str) -> None: # Update the store to unblock the race via the store path async def update_later() -> None: # Wait for store to start waiting (event gets created when wait starts) - while task.taskId not in store._update_events: + while task.task_id not in store._update_events: await anyio.sleep(0) - await store.update_task(task.taskId, status="completed") + await store.update_task(task.task_id, status="completed") async with anyio.create_task_group() as tg: tg.start_soon(update_later) # This should complete via the store path even though queue raises - await handler._wait_for_task_update(task.taskId) + await handler._wait_for_task_update(task.task_id) diff --git a/tests/experimental/tasks/test_capabilities.py b/tests/experimental/tasks/test_capabilities.py index e78f16fe3f..4298ebdebb 100644 --- a/tests/experimental/tasks/test_capabilities.py +++ b/tests/experimental/tasks/test_capabilities.py @@ -82,7 +82,7 @@ def test_sampling_create_message_required_but_client_missing(self) -> None: """When sampling.createMessage is required but client doesn't have it.""" required = ClientTasksCapability( requests=ClientTasksRequestsCapability( - sampling=TasksSamplingCapability(createMessage=TasksCreateMessageCapability()) + sampling=TasksSamplingCapability(create_message=TasksCreateMessageCapability()) ) ) client = ClientTasksCapability( @@ -96,12 +96,12 @@ def test_sampling_create_message_present(self) -> None: """When sampling.createMessage is required and client has it.""" required = ClientTasksCapability( requests=ClientTasksRequestsCapability( - sampling=TasksSamplingCapability(createMessage=TasksCreateMessageCapability()) + sampling=TasksSamplingCapability(create_message=TasksCreateMessageCapability()) ) ) client = ClientTasksCapability( requests=ClientTasksRequestsCapability( - sampling=TasksSamplingCapability(createMessage=TasksCreateMessageCapability()) + sampling=TasksSamplingCapability(create_message=TasksCreateMessageCapability()) ) ) assert check_tasks_capability(required, client) is True @@ -111,13 +111,13 @@ def test_both_elicitation_and_sampling_present(self) -> None: required = ClientTasksCapability( requests=ClientTasksRequestsCapability( elicitation=TasksElicitationCapability(create=TasksCreateElicitationCapability()), - sampling=TasksSamplingCapability(createMessage=TasksCreateMessageCapability()), + sampling=TasksSamplingCapability(create_message=TasksCreateMessageCapability()), ) ) client = ClientTasksCapability( requests=ClientTasksRequestsCapability( 
elicitation=TasksElicitationCapability(create=TasksCreateElicitationCapability()), - sampling=TasksSamplingCapability(createMessage=TasksCreateMessageCapability()), + sampling=TasksSamplingCapability(create_message=TasksCreateMessageCapability()), ) ) assert check_tasks_capability(required, client) is True @@ -145,7 +145,7 @@ def test_sampling_without_create_message_required(self) -> None: ) client = ClientTasksCapability( requests=ClientTasksRequestsCapability( - sampling=TasksSamplingCapability(createMessage=TasksCreateMessageCapability()) + sampling=TasksSamplingCapability(create_message=TasksCreateMessageCapability()) ) ) assert check_tasks_capability(required, client) is True @@ -220,7 +220,7 @@ def test_create_message_present(self) -> None: caps = ClientCapabilities( tasks=ClientTasksCapability( requests=ClientTasksRequestsCapability( - sampling=TasksSamplingCapability(createMessage=TasksCreateMessageCapability()) + sampling=TasksSamplingCapability(create_message=TasksCreateMessageCapability()) ) ) ) @@ -276,7 +276,7 @@ def test_passes_when_present(self) -> None: caps = ClientCapabilities( tasks=ClientTasksCapability( requests=ClientTasksRequestsCapability( - sampling=TasksSamplingCapability(createMessage=TasksCreateMessageCapability()) + sampling=TasksSamplingCapability(create_message=TasksCreateMessageCapability()) ) ) ) diff --git a/tests/experimental/tasks/test_elicitation_scenarios.py b/tests/experimental/tasks/test_elicitation_scenarios.py index be2b616018..1cefe847da 100644 --- a/tests/experimental/tasks/test_elicitation_scenarios.py +++ b/tests/experimental/tasks/test_elicitation_scenarios.py @@ -1,5 +1,4 @@ -""" -Tests for the four elicitation scenarios with tasks. +"""Tests for the four elicitation scenarios with tasks. This tests all combinations of tool call types and elicitation types: 1. 
Normal tool call + Normal elicitation (session.elicit) @@ -61,13 +60,13 @@ async def handle_augmented_elicitation( """Handle task-augmented elicitation by creating a client-side task.""" elicit_received.set() task = await client_task_store.create_task(task_metadata) - task_complete_events[task.taskId] = Event() + task_complete_events[task.task_id] = Event() async def complete_task() -> None: # Store result before updating status to avoid race condition - await client_task_store.store_result(task.taskId, elicit_response) - await client_task_store.update_task(task.taskId, status="completed") - task_complete_events[task.taskId].set() + await client_task_store.store_result(task.task_id, elicit_response) + await client_task_store.update_task(task.task_id, status="completed") + task_complete_events[task.task_id].set() context.session._task_group.start_soon(complete_task) # pyright: ignore[reportPrivateUsage] return CreateTaskResult(task=task) @@ -77,16 +76,16 @@ async def handle_get_task( params: Any, ) -> GetTaskResult: """Handle tasks/get from server.""" - task = await client_task_store.get_task(params.taskId) - assert task is not None, f"Task not found: {params.taskId}" + task = await client_task_store.get_task(params.task_id) + assert task is not None, f"Task not found: {params.task_id}" return GetTaskResult( - taskId=task.taskId, + task_id=task.task_id, status=task.status, - statusMessage=task.statusMessage, - createdAt=task.createdAt, - lastUpdatedAt=task.lastUpdatedAt, + status_message=task.status_message, + created_at=task.created_at, + last_updated_at=task.last_updated_at, ttl=task.ttl, - pollInterval=100, + poll_interval=100, ) async def handle_get_task_result( @@ -94,11 +93,11 @@ async def handle_get_task_result( params: Any, ) -> GetTaskPayloadResult | ErrorData: """Handle tasks/result from server.""" - event = task_complete_events.get(params.taskId) - assert event is not None, f"No completion event for task: {params.taskId}" + event = task_complete_events.get(params.task_id) + assert event is not None, f"No completion event for task: {params.task_id}" await event.wait() - result = await client_task_store.get_result(params.taskId) - assert result is not None, f"Result not found for task: {params.taskId}" + result = await client_task_store.get_result(params.task_id) + assert result is not None, f"Result not found for task: {params.task_id}" return GetTaskPayloadResult.model_validate(result.model_dump(by_alias=True)) return ExperimentalTaskHandlers( @@ -129,13 +128,13 @@ async def handle_augmented_sampling( """Handle task-augmented sampling by creating a client-side task.""" sampling_received.set() task = await client_task_store.create_task(task_metadata) - task_complete_events[task.taskId] = Event() + task_complete_events[task.task_id] = Event() async def complete_task() -> None: # Store result before updating status to avoid race condition - await client_task_store.store_result(task.taskId, sampling_response) - await client_task_store.update_task(task.taskId, status="completed") - task_complete_events[task.taskId].set() + await client_task_store.store_result(task.task_id, sampling_response) + await client_task_store.update_task(task.task_id, status="completed") + task_complete_events[task.task_id].set() context.session._task_group.start_soon(complete_task) # pyright: ignore[reportPrivateUsage] return CreateTaskResult(task=task) @@ -145,16 +144,16 @@ async def handle_get_task( params: Any, ) -> GetTaskResult: """Handle tasks/get from server.""" - task = await 
client_task_store.get_task(params.taskId) - assert task is not None, f"Task not found: {params.taskId}" + task = await client_task_store.get_task(params.task_id) + assert task is not None, f"Task not found: {params.task_id}" return GetTaskResult( - taskId=task.taskId, + task_id=task.task_id, status=task.status, - statusMessage=task.statusMessage, - createdAt=task.createdAt, - lastUpdatedAt=task.lastUpdatedAt, + status_message=task.status_message, + created_at=task.created_at, + last_updated_at=task.last_updated_at, ttl=task.ttl, - pollInterval=100, + poll_interval=100, ) async def handle_get_task_result( @@ -162,11 +161,11 @@ async def handle_get_task_result( params: Any, ) -> GetTaskPayloadResult | ErrorData: """Handle tasks/result from server.""" - event = task_complete_events.get(params.taskId) - assert event is not None, f"No completion event for task: {params.taskId}" + event = task_complete_events.get(params.task_id) + assert event is not None, f"No completion event for task: {params.task_id}" await event.wait() - result = await client_task_store.get_result(params.taskId) - assert result is not None, f"Result not found for task: {params.taskId}" + result = await client_task_store.get_result(params.task_id) + assert result is not None, f"Result not found for task: {params.task_id}" return GetTaskPayloadResult.model_validate(result.model_dump(by_alias=True)) return ExperimentalTaskHandlers( @@ -178,8 +177,7 @@ async def handle_get_task_result( @pytest.mark.anyio async def test_scenario1_normal_tool_normal_elicitation() -> None: - """ - Scenario 1: Normal tool call with normal elicitation. + """Scenario 1: Normal tool call with normal elicitation. Server calls session.elicit() directly, client responds immediately. """ @@ -193,7 +191,7 @@ async def list_tools() -> list[Tool]: Tool( name="confirm_action", description="Confirm an action", - inputSchema={"type": "object"}, + input_schema={"type": "object"}, ) ] @@ -204,7 +202,7 @@ async def handle_call_tool(name: str, arguments: dict[str, Any]) -> CallToolResu # Normal elicitation - expects immediate response result = await ctx.session.elicit( message="Please confirm the action", - requestedSchema={"type": "object", "properties": {"confirm": {"type": "boolean"}}}, + requested_schema={"type": "object", "properties": {"confirm": {"type": "boolean"}}}, ) confirmed = result.content.get("confirm", False) if result.content else False @@ -259,8 +257,7 @@ async def run_client() -> None: @pytest.mark.anyio async def test_scenario2_normal_tool_task_augmented_elicitation() -> None: - """ - Scenario 2: Normal tool call with task-augmented elicitation. + """Scenario 2: Normal tool call with task-augmented elicitation. Server calls session.experimental.elicit_as_task(), client creates a task for the elicitation and returns CreateTaskResult. Server polls client. 
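Several of these scenarios have one side polling the other's task via tasks/get until it needs input or finishes. A rough sketch of such a requestor-side polling loop, built from the request/result types and the send_request call shape used elsewhere in this diff; the import location, the one-second fallback, and treating poll_interval as milliseconds are assumptions, not SDK guidance:

import anyio

from mcp import ClientSession
from mcp.types import GetTaskRequest, GetTaskRequestParams, GetTaskResult  # assumed import location

TERMINAL_STATUSES = {"completed", "failed", "cancelled"}


async def poll_task(session: ClientSession, task_id: str) -> GetTaskResult:
    """Poll tasks/get until the task needs input or reaches a terminal status."""
    while True:
        status = await session.send_request(
            GetTaskRequest(params=GetTaskRequestParams(task_id=task_id)),
            GetTaskResult,
        )
        if status.status == "input_required" or status.status in TERMINAL_STATUSES:
            return status
        # poll_interval is treated as milliseconds here; fall back to 1s when absent.
        await anyio.sleep((status.poll_interval or 1000) / 1000)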
@@ -278,7 +275,7 @@ async def list_tools() -> list[Tool]: Tool( name="confirm_action", description="Confirm an action", - inputSchema={"type": "object"}, + input_schema={"type": "object"}, ) ] @@ -289,7 +286,7 @@ async def handle_call_tool(name: str, arguments: dict[str, Any]) -> CallToolResu # Task-augmented elicitation - server polls client result = await ctx.session.experimental.elicit_as_task( message="Please confirm the action", - requestedSchema={"type": "object", "properties": {"confirm": {"type": "boolean"}}}, + requested_schema={"type": "object", "properties": {"confirm": {"type": "boolean"}}}, ttl=60000, ) @@ -340,8 +337,7 @@ async def run_client() -> None: @pytest.mark.anyio async def test_scenario3_task_augmented_tool_normal_elicitation() -> None: - """ - Scenario 3: Task-augmented tool call with normal elicitation. + """Scenario 3: Task-augmented tool call with normal elicitation. Client calls tool as task. Inside the task, server uses task.elicit() which queues the request and delivers via tasks/result. @@ -358,8 +354,8 @@ async def list_tools() -> list[Tool]: Tool( name="confirm_action", description="Confirm an action", - inputSchema={"type": "object"}, - execution=ToolExecution(taskSupport=TASK_REQUIRED), + input_schema={"type": "object"}, + execution=ToolExecution(task_support=TASK_REQUIRED), ) ] @@ -372,7 +368,7 @@ async def work(task: ServerTaskContext) -> CallToolResult: # Normal elicitation within task - queued and delivered via tasks/result result = await task.elicit( message="Please confirm the action", - requestedSchema={"type": "object", "properties": {"confirm": {"type": "boolean"}}}, + requested_schema={"type": "object", "properties": {"confirm": {"type": "boolean"}}}, ) confirmed = result.content.get("confirm", False) if result.content else False @@ -413,7 +409,7 @@ async def run_client() -> None: # Call tool as task create_result = await client_session.experimental.call_tool_as_task("confirm_action", {}) - task_id = create_result.task.taskId + task_id = create_result.task.task_id assert create_result.task.status == "working" # Poll until input_required, then call tasks/result @@ -442,8 +438,7 @@ async def run_client() -> None: @pytest.mark.anyio async def test_scenario4_task_augmented_tool_task_augmented_elicitation() -> None: - """ - Scenario 4: Task-augmented tool call with task-augmented elicitation. + """Scenario 4: Task-augmented tool call with task-augmented elicitation. Client calls tool as task. Inside the task, server uses task.elicit_as_task() which sends task-augmented elicitation. 
Client creates its own task for the @@ -472,8 +467,8 @@ async def list_tools() -> list[Tool]: Tool( name="confirm_action", description="Confirm an action", - inputSchema={"type": "object"}, - execution=ToolExecution(taskSupport=TASK_REQUIRED), + input_schema={"type": "object"}, + execution=ToolExecution(task_support=TASK_REQUIRED), ) ] @@ -486,7 +481,7 @@ async def work(task: ServerTaskContext) -> CallToolResult: # Task-augmented elicitation within task - server polls client result = await task.elicit_as_task( message="Please confirm the action", - requestedSchema={"type": "object", "properties": {"confirm": {"type": "boolean"}}}, + requested_schema={"type": "object", "properties": {"confirm": {"type": "boolean"}}}, ttl=60000, ) @@ -522,7 +517,7 @@ async def run_client() -> None: # Call tool as task create_result = await client_session.experimental.call_tool_as_task("confirm_action", {}) - task_id = create_result.task.taskId + task_id = create_result.task.task_id assert create_result.task.status == "working" # Poll until input_required or terminal, then call tasks/result @@ -553,8 +548,7 @@ async def run_client() -> None: @pytest.mark.anyio async def test_scenario2_sampling_normal_tool_task_augmented_sampling() -> None: - """ - Scenario 2 for sampling: Normal tool call with task-augmented sampling. + """Scenario 2 for sampling: Normal tool call with task-augmented sampling. Server calls session.experimental.create_message_as_task(), client creates a task for the sampling and returns CreateTaskResult. Server polls client. @@ -572,7 +566,7 @@ async def list_tools() -> list[Tool]: Tool( name="generate_text", description="Generate text using sampling", - inputSchema={"type": "object"}, + input_schema={"type": "object"}, ) ] @@ -636,8 +630,7 @@ async def run_client() -> None: @pytest.mark.anyio async def test_scenario4_sampling_task_augmented_tool_task_augmented_sampling() -> None: - """ - Scenario 4 for sampling: Task-augmented tool call with task-augmented sampling. + """Scenario 4 for sampling: Task-augmented tool call with task-augmented sampling. Client calls tool as task. Inside the task, server uses task.create_message_as_task() which sends task-augmented sampling. Client creates its own task for the sampling, @@ -658,8 +651,8 @@ async def list_tools() -> list[Tool]: Tool( name="generate_text", description="Generate text using sampling", - inputSchema={"type": "object"}, - execution=ToolExecution(taskSupport=TASK_REQUIRED), + input_schema={"type": "object"}, + execution=ToolExecution(task_support=TASK_REQUIRED), ) ] @@ -710,7 +703,7 @@ async def run_client() -> None: # Call tool as task create_result = await client_session.experimental.call_tool_as_task("generate_text", {}) - task_id = create_result.task.taskId + task_id = create_result.task.task_id assert create_result.task.status == "working" # Poll until input_required or terminal diff --git a/tests/experimental/tasks/test_message_queue.py b/tests/experimental/tasks/test_message_queue.py index 86d6875cc4..a8517e535c 100644 --- a/tests/experimental/tasks/test_message_queue.py +++ b/tests/experimental/tasks/test_message_queue.py @@ -1,6 +1,4 @@ -""" -Tests for TaskMessageQueue and InMemoryTaskMessageQueue. 
-""" +"""Tests for TaskMessageQueue and InMemoryTaskMessageQueue.""" from datetime import datetime, timezone diff --git a/tests/experimental/tasks/test_request_context.py b/tests/experimental/tasks/test_request_context.py index 5fa5da81af..0c342d8340 100644 --- a/tests/experimental/tasks/test_request_context.py +++ b/tests/experimental/tasks/test_request_context.py @@ -108,8 +108,8 @@ def test_validate_for_tool_with_execution_required() -> None: tool = Tool( name="test", description="test", - inputSchema={"type": "object"}, - execution=ToolExecution(taskSupport=TASK_REQUIRED), + input_schema={"type": "object"}, + execution=ToolExecution(task_support=TASK_REQUIRED), ) error = exp.validate_for_tool(tool, raise_error=False) assert error is not None @@ -121,7 +121,7 @@ def test_validate_for_tool_without_execution() -> None: tool = Tool( name="test", description="test", - inputSchema={"type": "object"}, + input_schema={"type": "object"}, execution=None, ) error = exp.validate_for_tool(tool, raise_error=False) @@ -134,8 +134,8 @@ def test_validate_for_tool_optional_with_task() -> None: tool = Tool( name="test", description="test", - inputSchema={"type": "object"}, - execution=ToolExecution(taskSupport=TASK_OPTIONAL), + input_schema={"type": "object"}, + execution=ToolExecution(task_support=TASK_OPTIONAL), ) error = exp.validate_for_tool(tool, raise_error=False) assert error is None diff --git a/tests/experimental/tasks/test_spec_compliance.py b/tests/experimental/tasks/test_spec_compliance.py index 842bfa7e1f..d00ce40a45 100644 --- a/tests/experimental/tasks/test_spec_compliance.py +++ b/tests/experimental/tasks/test_spec_compliance.py @@ -1,5 +1,4 @@ -""" -Tasks Spec Compliance Tests +"""Tasks Spec Compliance Tests =========================== Test structure mirrors: https://modelcontextprotocol.io/specification/draft/basic/utilities/tasks.md @@ -72,8 +71,7 @@ async def handle_cancel(req: CancelTaskRequest) -> CancelTaskResult: def test_server_with_get_task_handler_declares_requests_tools_call_capability() -> None: - """ - Server with get_task handler declares tasks.requests.tools.call capability. + """Server with get_task handler declares tasks.requests.tools.call capability. (get_task is required for task-augmented tools/call support) """ server: Server = Server("test") @@ -141,8 +139,7 @@ async def handle_get(req: GetTaskRequest) -> GetTaskResult: class TestClientCapabilities: - """ - Clients declare: + """Clients declare: - tasks.list — supports listing operations - tasks.cancel — supports cancellation - tasks.requests.sampling.createMessage — task-augmented sampling @@ -155,8 +152,7 @@ def test_client_declares_tasks_capability(self) -> None: class TestToolLevelNegotiation: - """ - Tools in tools/list responses include execution.taskSupport with values: + """Tools in tools/list responses include execution.taskSupport with values: - Not present or "forbidden": No task augmentation allowed - "optional": Task augmentation allowed at requestor discretion - "required": Task augmentation is mandatory @@ -188,8 +184,7 @@ def test_tool_execution_task_required_accepts_task_augmented_call(self) -> None: class TestCapabilityNegotiation: - """ - Requestors SHOULD only augment requests with a task if the corresponding + """Requestors SHOULD only augment requests with a task if the corresponding capability has been declared by the receiver. 
Receivers that do not declare the task capability for a request type @@ -198,23 +193,20 @@ class TestCapabilityNegotiation: """ def test_receiver_without_capability_ignores_task_metadata(self) -> None: - """ - Receiver without task capability MUST process request normally, + """Receiver without task capability MUST process request normally, ignoring task-augmentation metadata. """ pytest.skip("TODO") def test_receiver_with_capability_may_require_task_augmentation(self) -> None: - """ - Receivers that declare task capability MAY return error (-32600) + """Receivers that declare task capability MAY return error (-32600) for non-task-augmented requests, requiring task augmentation. """ pytest.skip("TODO") class TestTaskStatusLifecycle: - """ - Tasks begin in working status and follow valid transitions: + """Tasks begin in working status and follow valid transitions: working → input_required → working → terminal working → terminal (directly) input_required → terminal (directly) @@ -271,8 +263,7 @@ def test_cancelled_is_terminal(self) -> None: class TestInputRequiredStatus: - """ - When a receiver needs information to proceed, it moves the task to input_required. + """When a receiver needs information to proceed, it moves the task to input_required. The requestor should call tasks/result to retrieve input requests. The task must include io.modelcontextprotocol/related-task metadata in associated requests. """ @@ -282,16 +273,14 @@ def test_input_required_status_retrievable_via_tasks_get(self) -> None: pytest.skip("TODO") def test_input_required_related_task_metadata_in_requests(self) -> None: - """ - Task MUST include io.modelcontextprotocol/related-task metadata + """Task MUST include io.modelcontextprotocol/related-task metadata in associated requests. """ pytest.skip("TODO") class TestCreatingTask: - """ - Request structure: + """Request structure: {"method": "tools/call", "params": {"name": "...", "arguments": {...}, "task": {"ttl": 60000}}} Response (CreateTaskResult): @@ -337,8 +326,7 @@ def test_receiver_may_override_requested_ttl(self) -> None: pytest.skip("TODO") def test_model_immediate_response_in_meta(self) -> None: - """ - Receiver MAY include io.modelcontextprotocol/model-immediate-response + """Receiver MAY include io.modelcontextprotocol/model-immediate-response in _meta to provide immediate response while task executes. """ # Verify the constant has the correct value per spec @@ -346,10 +334,10 @@ def test_model_immediate_response_in_meta(self) -> None: # CreateTaskResult can include model-immediate-response in _meta task = Task( - taskId="test-123", + task_id="test-123", status="working", - createdAt=TEST_DATETIME, - lastUpdatedAt=TEST_DATETIME, + created_at=TEST_DATETIME, + last_updated_at=TEST_DATETIME, ttl=60000, ) immediate_msg = "Task started, processing your request..." @@ -372,8 +360,7 @@ def test_model_immediate_response_in_meta(self) -> None: class TestGettingTaskStatus: - """ - Request: {"method": "tasks/get", "params": {"taskId": "..."}} + """Request: {"method": "tasks/get", "params": {"taskId": "..."}} Response: Returns full Task object with current status and pollInterval. """ @@ -399,8 +386,7 @@ def test_tasks_get_nonexistent_task_id_returns_error(self) -> None: class TestRetrievingResults: - """ - Request: {"method": "tasks/result", "params": {"taskId": "..."}} + """Request: {"method": "tasks/result", "params": {"taskId": "..."}} Response: The actual operation result structure (e.g., CallToolResult). This call blocks until terminal status. 
@@ -423,8 +409,7 @@ def test_tasks_result_includes_related_task_metadata(self) -> None: pytest.skip("TODO") def test_tasks_result_returns_error_for_failed_task(self) -> None: - """ - tasks/result returns the same error the underlying request + """tasks/result returns the same error the underlying request would have produced for failed tasks. """ pytest.skip("TODO") @@ -435,8 +420,7 @@ def test_tasks_result_invalid_task_id_returns_error(self) -> None: class TestListingTasks: - """ - Request: {"method": "tasks/list", "params": {"cursor": "optional"}} + """Request: {"method": "tasks/list", "params": {"cursor": "optional"}} Response: Array of tasks with pagination support via nextCursor. """ @@ -462,8 +446,7 @@ def test_tasks_list_invalid_cursor_returns_error(self) -> None: class TestCancellingTasks: - """ - Request: {"method": "tasks/cancel", "params": {"taskId": "..."}} + """Request: {"method": "tasks/cancel", "params": {"taskId": "..."}} Response: Returns the task object with status: "cancelled". """ @@ -493,8 +476,7 @@ def test_tasks_cancel_invalid_task_id_returns_error(self) -> None: class TestStatusNotifications: - """ - Receivers MAY send: {"method": "notifications/tasks/status", "params": {...}} + """Receivers MAY send: {"method": "notifications/tasks/status", "params": {...}} These are optional; requestors MUST NOT rely on them and SHOULD continue polling. """ @@ -512,8 +494,7 @@ def test_status_notification_contains_status(self) -> None: class TestTaskManagement: - """ - - Receivers generate unique task IDs as strings + """- Receivers generate unique task IDs as strings - Tasks must begin in working status - createdAt timestamps must be ISO 8601 formatted - Receivers may override requested ttl but must return actual value @@ -535,8 +516,7 @@ def test_receiver_may_delete_tasks_after_ttl(self) -> None: pytest.skip("TODO") def test_related_task_metadata_in_task_messages(self) -> None: - """ - All task-related messages MUST include io.modelcontextprotocol/related-task + """All task-related messages MUST include io.modelcontextprotocol/related-task in _meta. """ pytest.skip("TODO") @@ -555,8 +535,7 @@ def test_tasks_cancel_does_not_require_related_task_metadata(self) -> None: class TestResultHandling: - """ - - Receivers must return CreateTaskResult immediately upon accepting task-augmented requests + """- Receivers must return CreateTaskResult immediately upon accepting task-augmented requests - tasks/result must return exactly what the underlying request would return - tasks/result blocks for non-terminal tasks; must unblock upon reaching terminal status """ @@ -575,8 +554,7 @@ def test_tasks_result_for_tool_call_returns_call_tool_result(self) -> None: class TestProgressTracking: - """ - Task-augmented requests support progress notifications using the progressToken + """Task-augmented requests support progress notifications using the progressToken mechanism, which remains valid throughout the task lifetime. 
""" @@ -590,8 +568,7 @@ def test_progress_notifications_sent_during_task_execution(self) -> None: class TestProtocolErrors: - """ - Protocol Errors (JSON-RPC standard codes): + """Protocol Errors (JSON-RPC standard codes): - -32600 (Invalid request): Non-task requests to endpoint requiring task augmentation - -32602 (Invalid params): Invalid/nonexistent taskId, invalid cursor, cancel terminal task - -32603 (Internal error): Server-side execution failures @@ -623,8 +600,7 @@ def test_internal_error_for_server_failure(self) -> None: class TestTaskExecutionErrors: - """ - When underlying requests fail, the task moves to failed status. + """When underlying requests fail, the task moves to failed status. - tasks/get response should include statusMessage explaining failure - tasks/result returns same error the underlying request would have produced - For tool calls, isError: true moves task to failed status @@ -648,8 +624,7 @@ def test_tool_call_is_error_true_moves_to_failed(self) -> None: class TestTaskObject: - """ - Task Object fields: + """Task Object fields: - taskId: String identifier - status: Current execution state - statusMessage: Optional human-readable description @@ -684,8 +659,7 @@ def test_task_poll_interval_is_optional(self) -> None: class TestRelatedTaskMetadata: - """ - Related Task Metadata structure: + """Related Task Metadata structure: {"_meta": {"io.modelcontextprotocol/related-task": {"taskId": "..."}}} """ @@ -699,42 +673,34 @@ def test_related_task_metadata_contains_task_id(self) -> None: class TestAccessAndIsolation: - """ - - Task IDs enable access to sensitive results + """- Task IDs enable access to sensitive results - Authorization context binding is essential where available - For non-authorized environments: strong entropy IDs, strict TTL limits """ def test_task_bound_to_authorization_context(self) -> None: - """ - Receivers receiving authorization context MUST bind tasks to that context. - """ + """Receivers receiving authorization context MUST bind tasks to that context.""" pytest.skip("TODO") def test_reject_task_operations_outside_authorization_context(self) -> None: - """ - Receivers MUST reject task operations for tasks outside + """Receivers MUST reject task operations for tasks outside requestor's authorization context. """ pytest.skip("TODO") def test_non_authorized_environments_use_secure_ids(self) -> None: - """ - For non-authorized environments, receivers SHOULD use + """For non-authorized environments, receivers SHOULD use cryptographically secure IDs. """ pytest.skip("TODO") def test_non_authorized_environments_use_shorter_ttls(self) -> None: - """ - For non-authorized environments, receivers SHOULD use shorter TTLs. 
- """ + """For non-authorized environments, receivers SHOULD use shorter TTLs.""" pytest.skip("TODO") class TestResourceLimits: - """ - Receivers should: + """Receivers should: - Enforce concurrent task limits per requestor - Implement maximum TTL constraints - Clean up expired tasks promptly diff --git a/tests/issues/test_1027_win_unreachable_cleanup.py b/tests/issues/test_1027_win_unreachable_cleanup.py index 63d6dd8dcf..0c569edb2a 100644 --- a/tests/issues/test_1027_win_unreachable_cleanup.py +++ b/tests/issues/test_1027_win_unreachable_cleanup.py @@ -1,5 +1,4 @@ -""" -Regression test for issue #1027: Ensure cleanup procedures run properly during shutdown +"""Regression test for issue #1027: Ensure cleanup procedures run properly during shutdown Issue #1027 reported that cleanup code after "yield" in lifespan was unreachable when processes were terminated. This has been fixed by implementing the MCP spec-compliant @@ -12,25 +11,18 @@ import tempfile import textwrap from pathlib import Path -from typing import TYPE_CHECKING import anyio import pytest from mcp import ClientSession, StdioServerParameters from mcp.client.stdio import _create_platform_compatible_process, stdio_client - -# TODO(Marcelo): This doesn't seem to be the right path. We should fix this. -if TYPE_CHECKING: - from ..shared.test_win32_utils import escape_path_for_python -else: - from tests.shared.test_win32_utils import escape_path_for_python +from tests.shared.test_win32_utils import escape_path_for_python @pytest.mark.anyio async def test_lifespan_cleanup_executed(): - """ - Regression test ensuring MCP server cleanup code runs during shutdown. + """Regression test ensuring MCP server cleanup code runs during shutdown. This test verifies that the fix for issue #1027 works correctly by: 1. Starting an MCP server that writes a marker file on startup @@ -95,7 +87,7 @@ def echo(text: str) -> str: async with ClientSession(read, write) as session: # Initialize the session result = await session.initialize() - assert result.protocolVersion in ["2024-11-05", "2025-06-18", "2025-11-25"] + assert result.protocol_version in ["2024-11-05", "2025-06-18", "2025-11-25"] # Verify startup marker was created assert Path(startup_marker).exists(), "Server startup marker not created" @@ -130,8 +122,7 @@ def echo(text: str) -> str: @pytest.mark.anyio @pytest.mark.filterwarnings("ignore::ResourceWarning" if sys.platform == "win32" else "default") async def test_stdin_close_triggers_cleanup(): - """ - Regression test verifying the stdin-based graceful shutdown mechanism. + """Regression test verifying the stdin-based graceful shutdown mechanism. This test ensures the core fix for issue #1027 continues to work by: 1. 
Manually managing a server process diff --git a/tests/issues/test_129_resource_templates.py b/tests/issues/test_129_resource_templates.py index 958773d127..26b58343c3 100644 --- a/tests/issues/test_129_resource_templates.py +++ b/tests/issues/test_129_resource_templates.py @@ -26,17 +26,17 @@ def get_user_profile(user_id: str) -> str: # pragma: no cover result = await mcp._mcp_server.request_handlers[types.ListResourceTemplatesRequest]( types.ListResourceTemplatesRequest(params=None) ) - assert isinstance(result.root, types.ListResourceTemplatesResult) - templates = result.root.resourceTemplates + assert isinstance(result, types.ListResourceTemplatesResult) + templates = result.resource_templates # Verify we get both templates back assert len(templates) == 2 # Verify template details greeting_template = next(t for t in templates if t.name == "get_greeting") # pragma: no cover - assert greeting_template.uriTemplate == "greeting://{name}" + assert greeting_template.uri_template == "greeting://{name}" assert greeting_template.description == "Get a personalized greeting" profile_template = next(t for t in templates if t.name == "get_user_profile") # pragma: no cover - assert profile_template.uriTemplate == "users://{user_id}/profile" + assert profile_template.uri_template == "users://{user_id}/profile" assert profile_template.description == "Dynamic user data" diff --git a/tests/issues/test_1338_icons_and_metadata.py b/tests/issues/test_1338_icons_and_metadata.py index adc37f1c6e..41df47ee4f 100644 --- a/tests/issues/test_1338_icons_and_metadata.py +++ b/tests/issues/test_1338_icons_and_metadata.py @@ -14,7 +14,7 @@ async def test_icons_and_website_url(): # Create test icon test_icon = Icon( src="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNkYPhfDwAChwGA60e6kgAAAABJRU5ErkJggg==", - mimeType="image/png", + mime_type="image/png", sizes=["1x1"], ) @@ -51,7 +51,7 @@ def test_resource_template(city: str) -> str: # pragma: no cover assert mcp.icons is not None assert len(mcp.icons) == 1 assert mcp.icons[0].src == test_icon.src - assert mcp.icons[0].mimeType == test_icon.mimeType + assert mcp.icons[0].mime_type == test_icon.mime_type assert mcp.icons[0].sizes == test_icon.sizes # Test tool includes icon @@ -86,7 +86,7 @@ def test_resource_template(city: str) -> str: # pragma: no cover assert len(templates) == 1 template = templates[0] assert template.name == "test_resource_template" - assert template.uriTemplate == "test://weather/{city}" + assert template.uri_template == "test://weather/{city}" assert template.icons is not None assert len(template.icons) == 1 assert template.icons[0].src == test_icon.src @@ -96,9 +96,9 @@ async def test_multiple_icons(): """Test that multiple icons can be added to tools, resources, and prompts.""" # Create multiple test icons - icon1 = Icon(src="data:image/png;base64,icon1", mimeType="image/png", sizes=["16x16"]) - icon2 = Icon(src="data:image/png;base64,icon2", mimeType="image/png", sizes=["32x32"]) - icon3 = Icon(src="data:image/png;base64,icon3", mimeType="image/png", sizes=["64x64"]) + icon1 = Icon(src="data:image/png;base64,icon1", mime_type="image/png", sizes=["16x16"]) + icon2 = Icon(src="data:image/png;base64,icon2", mime_type="image/png", sizes=["32x32"]) + icon3 = Icon(src="data:image/png;base64,icon3", mime_type="image/png", sizes=["64x64"]) mcp = FastMCP("MultiIconServer") diff --git a/tests/issues/test_1363_race_condition_streamable_http.py b/tests/issues/test_1363_race_condition_streamable_http.py index 
49242d6d8b..caa6db46ec 100644 --- a/tests/issues/test_1363_race_condition_streamable_http.py +++ b/tests/issues/test_1363_race_condition_streamable_http.py @@ -90,8 +90,7 @@ def stop(self) -> None: def check_logs_for_race_condition_errors(caplog: pytest.LogCaptureFixture, test_name: str) -> None: - """ - Check logs for ClosedResourceError and other race condition errors. + """Check logs for ClosedResourceError and other race condition errors. Args: caplog: pytest log capture fixture @@ -121,8 +120,7 @@ def check_logs_for_race_condition_errors(caplog: pytest.LogCaptureFixture, test_ @pytest.mark.anyio async def test_race_condition_invalid_accept_headers(caplog: pytest.LogCaptureFixture): - """ - Test the race condition with invalid Accept headers. + """Test the race condition with invalid Accept headers. This test reproduces the exact scenario described in issue #1363: - Send POST request with incorrect Accept headers (missing either application/json or text/event-stream) @@ -196,8 +194,7 @@ async def test_race_condition_invalid_accept_headers(caplog: pytest.LogCaptureFi @pytest.mark.anyio async def test_race_condition_invalid_content_type(caplog: pytest.LogCaptureFixture): - """ - Test the race condition with invalid Content-Type headers. + """Test the race condition with invalid Content-Type headers. This test reproduces the race condition scenario with Content-Type validation failure. """ @@ -237,8 +234,7 @@ async def test_race_condition_invalid_content_type(caplog: pytest.LogCaptureFixt @pytest.mark.anyio async def test_race_condition_message_router_async_for(caplog: pytest.LogCaptureFixture): - """ - Uses json_response=True to trigger the `if self.is_json_response_enabled` branch, + """Uses json_response=True to trigger the `if self.is_json_response_enabled` branch, which reproduces the ClosedResourceError when message_router is suspended in async for loop while transport cleanup closes streams concurrently. 
""" diff --git a/tests/issues/test_141_resource_templates.py b/tests/issues/test_141_resource_templates.py index 0a0484d894..b024d8e923 100644 --- a/tests/issues/test_141_resource_templates.py +++ b/tests/issues/test_141_resource_templates.py @@ -1,10 +1,8 @@ import pytest from pydantic import AnyUrl +from mcp import Client from mcp.server.fastmcp import FastMCP -from mcp.shared.memory import ( - create_connected_server_and_client_session as client_session, -) from mcp.types import ( ListResourceTemplatesResult, TextResourceContents, @@ -78,17 +76,14 @@ def get_user_post(user_id: str, post_id: str) -> str: def get_user_profile(user_id: str) -> str: return f"Profile for user {user_id}" - async with client_session(mcp._mcp_server) as session: - # Initialize the session - await session.initialize() - + async with Client(mcp) as session: # List available resources resources = await session.list_resource_templates() assert isinstance(resources, ListResourceTemplatesResult) - assert len(resources.resourceTemplates) == 2 + assert len(resources.resource_templates) == 2 # Verify resource templates are listed correctly - templates = [r.uriTemplate for r in resources.resourceTemplates] + templates = [r.uri_template for r in resources.resource_templates] assert "resource://users/{user_id}/posts/{post_id}" in templates assert "resource://users/{user_id}/profile" in templates @@ -97,14 +92,14 @@ def get_user_profile(user_id: str) -> str: contents = result.contents[0] assert isinstance(contents, TextResourceContents) assert contents.text == "Post 456 by user 123" - assert contents.mimeType == "text/plain" + assert contents.mime_type == "text/plain" # Read another resource with valid parameters result = await session.read_resource(AnyUrl("resource://users/789/profile")) contents = result.contents[0] assert isinstance(contents, TextResourceContents) assert contents.text == "Profile for user 789" - assert contents.mimeType == "text/plain" + assert contents.mime_type == "text/plain" # Verify invalid resource URIs raise appropriate errors with pytest.raises(Exception): # Specific exception type may vary diff --git a/tests/issues/test_152_resource_mime_type.py b/tests/issues/test_152_resource_mime_type.py index 2a8cd6202e..9618d8414a 100644 --- a/tests/issues/test_152_resource_mime_type.py +++ b/tests/issues/test_152_resource_mime_type.py @@ -3,13 +3,10 @@ import pytest from pydantic import AnyUrl -from mcp import types +from mcp import Client, types from mcp.server.fastmcp import FastMCP from mcp.server.lowlevel import Server from mcp.server.lowlevel.helper_types import ReadResourceContents -from mcp.shared.memory import ( - create_connected_server_and_client_session as client_session, -) pytestmark = pytest.mark.anyio @@ -33,7 +30,7 @@ def get_image_as_bytes() -> bytes: return image_bytes # Test that resources are listed with correct mime type - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: # List resources and verify mime types resources = await client.list_resources() assert resources.resources is not None @@ -45,19 +42,19 @@ def get_image_as_bytes() -> bytes: bytes_resource = mapping["test://image_bytes"] # Verify mime types - assert string_resource.mimeType == "image/png", "String resource mime type not respected" - assert bytes_resource.mimeType == "image/png", "Bytes resource mime type not respected" + assert string_resource.mime_type == "image/png", "String resource mime type not respected" + assert bytes_resource.mime_type == "image/png", "Bytes resource 
mime type not respected" # Also verify the content can be read correctly string_result = await client.read_resource(AnyUrl("test://image")) assert len(string_result.contents) == 1 assert getattr(string_result.contents[0], "text") == base64_string, "Base64 string mismatch" - assert string_result.contents[0].mimeType == "image/png", "String content mime type not preserved" + assert string_result.contents[0].mime_type == "image/png", "String content mime type not preserved" bytes_result = await client.read_resource(AnyUrl("test://image_bytes")) assert len(bytes_result.contents) == 1 assert base64.b64decode(getattr(bytes_result.contents[0], "blob")) == image_bytes, "Bytes mismatch" - assert bytes_result.contents[0].mimeType == "image/png", "Bytes content mime type not preserved" + assert bytes_result.contents[0].mime_type == "image/png", "Bytes content mime type not preserved" async def test_lowlevel_resource_mime_type(): @@ -70,11 +67,11 @@ async def test_lowlevel_resource_mime_type(): # Create test resources with specific mime types test_resources = [ - types.Resource(uri=AnyUrl("test://image"), name="test image", mimeType="image/png"), + types.Resource(uri="test://image", name="test image", mime_type="image/png"), types.Resource( - uri=AnyUrl("test://image_bytes"), + uri="test://image_bytes", name="test image bytes", - mimeType="image/png", + mime_type="image/png", ), ] @@ -83,7 +80,7 @@ async def handle_list_resources(): return test_resources @server.read_resource() - async def handle_read_resource(uri: AnyUrl): + async def handle_read_resource(uri: str): if str(uri) == "test://image": return [ReadResourceContents(content=base64_string, mime_type="image/png")] elif str(uri) == "test://image_bytes": @@ -91,7 +88,7 @@ async def handle_read_resource(uri: AnyUrl): raise Exception(f"Resource not found: {uri}") # pragma: no cover # Test that resources are listed with correct mime type - async with client_session(server) as client: + async with Client(server) as client: # List resources and verify mime types resources = await client.list_resources() assert resources.resources is not None @@ -103,16 +100,16 @@ async def handle_read_resource(uri: AnyUrl): bytes_resource = mapping["test://image_bytes"] # Verify mime types - assert string_resource.mimeType == "image/png", "String resource mime type not respected" - assert bytes_resource.mimeType == "image/png", "Bytes resource mime type not respected" + assert string_resource.mime_type == "image/png", "String resource mime type not respected" + assert bytes_resource.mime_type == "image/png", "Bytes resource mime type not respected" # Also verify the content can be read correctly string_result = await client.read_resource(AnyUrl("test://image")) assert len(string_result.contents) == 1 assert getattr(string_result.contents[0], "text") == base64_string, "Base64 string mismatch" - assert string_result.contents[0].mimeType == "image/png", "String content mime type not preserved" + assert string_result.contents[0].mime_type == "image/png", "String content mime type not preserved" bytes_result = await client.read_resource(AnyUrl("test://image_bytes")) assert len(bytes_result.contents) == 1 assert base64.b64decode(getattr(bytes_result.contents[0], "blob")) == image_bytes, "Bytes mismatch" - assert bytes_result.contents[0].mimeType == "image/png", "Bytes content mime type not preserved" + assert bytes_result.contents[0].mime_type == "image/png", "Bytes content mime type not preserved" diff --git a/tests/issues/test_1574_resource_uri_validation.py 
b/tests/issues/test_1574_resource_uri_validation.py new file mode 100644 index 0000000000..e6ff568774 --- /dev/null +++ b/tests/issues/test_1574_resource_uri_validation.py @@ -0,0 +1,129 @@ +"""Tests for issue #1574: Python SDK incorrectly validates Resource URIs. + +The Python SDK previously used Pydantic's AnyUrl for URI fields, which rejected +relative paths like 'users/me' that are valid according to the MCP spec and +accepted by the TypeScript SDK. + +The fix changed URI fields to plain strings to match the spec, which defines +uri fields as strings with no JSON Schema format validation. + +These tests verify the fix works end-to-end through the JSON-RPC protocol. +""" + +import pytest + +from mcp import Client, types +from mcp.server.lowlevel import Server +from mcp.server.lowlevel.helper_types import ReadResourceContents + +pytestmark = pytest.mark.anyio + + +async def test_relative_uri_roundtrip(): + """Relative URIs survive the full server-client JSON-RPC roundtrip. + + This is the critical regression test - if someone reintroduces AnyUrl, + the server would fail to serialize resources with relative URIs, + or the URI would be transformed during the roundtrip. + """ + server = Server("test") + + @server.list_resources() + async def list_resources(): + return [ + types.Resource(name="user", uri="users/me"), + types.Resource(name="config", uri="./config"), + types.Resource(name="parent", uri="../parent/resource"), + ] + + @server.read_resource() + async def read_resource(uri: str): + return [ + ReadResourceContents( + content=f"data for {uri}", + mime_type="text/plain", + ) + ] + + async with Client(server) as client: + # List should return the exact URIs we specified + resources = await client.list_resources() + uri_map = {r.uri: r for r in resources.resources} + + assert "users/me" in uri_map, f"Expected 'users/me' in {list(uri_map.keys())}" + assert "./config" in uri_map, f"Expected './config' in {list(uri_map.keys())}" + assert "../parent/resource" in uri_map, f"Expected '../parent/resource' in {list(uri_map.keys())}" + + # Read should work with each relative URI and preserve it in the response + for uri_str in ["users/me", "./config", "../parent/resource"]: + result = await client.read_resource(uri_str) + assert len(result.contents) == 1 + assert result.contents[0].uri == uri_str + + +async def test_custom_scheme_uri_roundtrip(): + """Custom scheme URIs work through the protocol. + + Some MCP servers use custom schemes like "custom://resource". + These should work end-to-end. + """ + server = Server("test") + + @server.list_resources() + async def list_resources(): + return [ + types.Resource(name="custom", uri="custom://my-resource"), + types.Resource(name="file", uri="file:///path/to/file"), + ] + + @server.read_resource() + async def read_resource(uri: str): + return [ReadResourceContents(content="data", mime_type="text/plain")] + + async with Client(server) as client: + resources = await client.list_resources() + uri_map = {r.uri: r for r in resources.resources} + + assert "custom://my-resource" in uri_map + assert "file:///path/to/file" in uri_map + + # Read with custom scheme + result = await client.read_resource("custom://my-resource") + assert len(result.contents) == 1 + + +def test_uri_json_roundtrip_preserves_value(): + """URI is preserved exactly through JSON serialization. + + This catches any Pydantic validation or normalization that would + alter the URI during the JSON-RPC message flow. 
+ """ + test_uris = [ + "users/me", + "custom://resource", + "./relative", + "../parent", + "file:///absolute/path", + "https://example.com/path", + ] + + for uri_str in test_uris: + resource = types.Resource(name="test", uri=uri_str) + json_data = resource.model_dump(mode="json") + restored = types.Resource.model_validate(json_data) + assert restored.uri == uri_str, f"URI mutated: {uri_str} -> {restored.uri}" + + +def test_resource_contents_uri_json_roundtrip(): + """TextResourceContents URI is preserved through JSON serialization.""" + test_uris = ["users/me", "./relative", "custom://resource"] + + for uri_str in test_uris: + contents = types.TextResourceContents( + uri=uri_str, + text="data", + mime_type="text/plain", + ) + json_data = contents.model_dump(mode="json") + restored = types.TextResourceContents.model_validate(json_data) + assert restored.uri == uri_str, f"URI mutated: {uri_str} -> {restored.uri}" diff --git a/tests/issues/test_1754_mime_type_parameters.py b/tests/issues/test_1754_mime_type_parameters.py index cd8239ad2a..c48d56b810 100644 --- a/tests/issues/test_1754_mime_type_parameters.py +++ b/tests/issues/test_1754_mime_type_parameters.py @@ -7,10 +7,8 @@ import pytest from pydantic import AnyUrl +from mcp import Client from mcp.server.fastmcp import FastMCP -from mcp.shared.memory import ( - create_connected_server_and_client_session as client_session, -) pytestmark = pytest.mark.anyio @@ -26,7 +24,7 @@ def widget() -> str: resources = await mcp.list_resources() assert len(resources) == 1 - assert resources[0].mimeType == "text/html;profile=mcp-app" + assert resources[0].mime_type == "text/html;profile=mcp-app" async def test_mime_type_with_parameters_and_space(): @@ -39,7 +37,7 @@ def data() -> str: resources = await mcp.list_resources() assert len(resources) == 1 - assert resources[0].mimeType == "application/json; charset=utf-8" + assert resources[0].mime_type == "application/json; charset=utf-8" async def test_mime_type_with_multiple_parameters(): @@ -52,7 +50,7 @@ def data() -> str: resources = await mcp.list_resources() assert len(resources) == 1 - assert resources[0].mimeType == "text/plain; charset=utf-8; format=fixed" + assert resources[0].mime_type == "text/plain; charset=utf-8; format=fixed" async def test_mime_type_preserved_in_read_resource(): @@ -63,8 +61,8 @@ async def test_mime_type_preserved_in_read_resource(): def my_widget() -> str: return "Hello MCP-UI" - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: # Read the resource result = await client.read_resource(AnyUrl("ui://my-widget")) assert len(result.contents) == 1 - assert result.contents[0].mimeType == "text/html;profile=mcp-app" + assert result.contents[0].mime_type == "text/html;profile=mcp-app" diff --git a/tests/issues/test_176_progress_token.py b/tests/issues/test_176_progress_token.py index eb5f19d64c..07c3ce3976 100644 --- a/tests/issues/test_176_progress_token.py +++ b/tests/issues/test_176_progress_token.py @@ -17,7 +17,7 @@ async def test_progress_token_zero_first_call(): # Create request context with progress token 0 mock_meta = MagicMock() - mock_meta.progressToken = 0 # This is the key test case - token is 0 + mock_meta.progress_token = 0 # This is the key test case - token is 0 request_context = RequestContext( request_id="test-request", diff --git a/tests/issues/test_188_concurrency.py b/tests/issues/test_188_concurrency.py index 831736510b..615df3d8e8 100644 --- a/tests/issues/test_188_concurrency.py +++ 
b/tests/issues/test_188_concurrency.py @@ -2,8 +2,8 @@ import pytest from pydantic import AnyUrl +from mcp import Client from mcp.server.fastmcp import FastMCP -from mcp.shared.memory import create_connected_server_and_client_session as create_session @pytest.mark.anyio @@ -30,7 +30,7 @@ async def trigger(): call_order.append("trigger_end") return "slow" - async with create_session(server._mcp_server) as client_session: + async with Client(server) as client_session: # First tool will wait on event, second will set it async with anyio.create_task_group() as tg: # Start the tool first (it will wait on event) @@ -70,7 +70,7 @@ async def slow_resource(): call_order.append("resource_end") return "slow" - async with create_session(server._mcp_server) as client_session: + async with Client(server) as client_session: # First tool will wait on event, second will set it async with anyio.create_task_group() as tg: # Start the tool first (it will wait on event) diff --git a/tests/issues/test_192_request_id.py b/tests/issues/test_192_request_id.py index 3762b092bd..de96dbe23a 100644 --- a/tests/issues/test_192_request_id.py +++ b/tests/issues/test_192_request_id.py @@ -59,14 +59,14 @@ async def run_server(): id="init-1", method="initialize", params=InitializeRequestParams( - protocolVersion=LATEST_PROTOCOL_VERSION, + protocol_version=LATEST_PROTOCOL_VERSION, capabilities=ClientCapabilities(), - clientInfo=Implementation(name="test-client", version="1.0.0"), + client_info=Implementation(name="test-client", version="1.0.0"), ).model_dump(by_alias=True, exclude_none=True), jsonrpc="2.0", ) - await client_writer.send(SessionMessage(JSONRPCMessage(root=init_req))) + await client_writer.send(SessionMessage(init_req)) response = await server_reader.receive() # Get init response but don't need to check it # Send initialized notification @@ -75,12 +75,12 @@ async def run_server(): params=NotificationParams().model_dump(by_alias=True, exclude_none=True), jsonrpc="2.0", ) - await client_writer.send(SessionMessage(JSONRPCMessage(root=initialized_notification))) + await client_writer.send(SessionMessage(initialized_notification)) # Send ping request with custom ID ping_request = JSONRPCRequest(id=custom_request_id, method="ping", params={}, jsonrpc="2.0") - await client_writer.send(SessionMessage(JSONRPCMessage(root=ping_request))) + await client_writer.send(SessionMessage(ping_request)) # Read response response = await server_reader.receive() @@ -88,8 +88,8 @@ async def run_server(): # Verify response ID matches request ID assert isinstance(response, SessionMessage) assert isinstance(response.message, JSONRPCMessage) - assert isinstance(response.message.root, JSONRPCResponse) - assert response.message.root.id == custom_request_id, "Response ID should match request ID" + assert isinstance(response.message, JSONRPCResponse) + assert response.message.id == custom_request_id, "Response ID should match request ID" # Cancel server task tg.cancel_scope.cancel() diff --git a/tests/issues/test_342_base64_encoding.py b/tests/issues/test_342_base64_encoding.py index da56959975..44b17d3372 100644 --- a/tests/issues/test_342_base64_encoding.py +++ b/tests/issues/test_342_base64_encoding.py @@ -13,7 +13,6 @@ from typing import cast import pytest -from pydantic import AnyUrl from mcp.server.lowlevel.helper_types import ReadResourceContents from mcp.server.lowlevel.server import Server @@ -46,7 +45,7 @@ async def test_server_base64_encoding_issue(): # Register a resource handler that returns our test data @server.read_resource() 
- async def read_resource(uri: AnyUrl) -> list[ReadResourceContents]: + async def read_resource(uri: str) -> list[ReadResourceContents]: return [ReadResourceContents(content=binary_data, mime_type="application/octet-stream")] # Get the handler directly from the server @@ -54,14 +53,14 @@ async def read_resource(uri: AnyUrl) -> list[ReadResourceContents]: # Create a request request = ReadResourceRequest( - params=ReadResourceRequestParams(uri=AnyUrl("test://resource")), + params=ReadResourceRequestParams(uri="test://resource"), ) # Call the handler to get the response result: ServerResult = await handler(request) # After (fixed code): - read_result: ReadResourceResult = cast(ReadResourceResult, result.root) + read_result: ReadResourceResult = cast(ReadResourceResult, result) blob_content = read_result.contents[0] # First verify our test data actually produces different encodings diff --git a/tests/issues/test_552_windows_hang.py b/tests/issues/test_552_windows_hang.py index 972659c2b7..1adb5d80cb 100644 --- a/tests/issues/test_552_windows_hang.py +++ b/tests/issues/test_552_windows_hang.py @@ -13,8 +13,7 @@ @pytest.mark.skipif(sys.platform != "win32", reason="Windows-specific test") # pragma: no cover @pytest.mark.anyio async def test_windows_stdio_client_with_session(): - """ - Test the exact scenario from issue #552: Using ClientSession with stdio_client. + """Test the exact scenario from issue #552: Using ClientSession with stdio_client. This reproduces the original bug report where stdio_client hangs on Windows 11 when used with ClientSession. diff --git a/tests/issues/test_88_random_error.py b/tests/issues/test_88_random_error.py index 42f5ce407f..a29231d77d 100644 --- a/tests/issues/test_88_random_error.py +++ b/tests/issues/test_88_random_error.py @@ -1,7 +1,6 @@ """Test to reproduce issue #88: Random error thrown on response.""" from collections.abc import Sequence -from datetime import timedelta from pathlib import Path from typing import Any @@ -43,12 +42,12 @@ async def list_tools() -> list[types.Tool]: types.Tool( name="slow", description="A slow tool", - inputSchema={"type": "object"}, + input_schema={"type": "object"}, ), types.Tool( name="fast", description="A fast tool", - inputSchema={"type": "object"}, + input_schema={"type": "object"}, ), ] @@ -93,11 +92,9 @@ async def client( assert not slow_request_lock.is_set() # Second call should timeout (slow operation with minimal timeout) - # Use 10ms timeout to trigger quickly without waiting + # Use very small timeout to trigger quickly without waiting with pytest.raises(McpError) as exc_info: - await session.call_tool( - "slow", read_timeout_seconds=timedelta(microseconds=1) - ) # artificial timeout that always fails + await session.call_tool("slow", read_timeout_seconds=0.000001) # artificial timeout that always fails assert "Timed out while waiting" in str(exc_info.value) # release the slow request not to have hanging process diff --git a/tests/issues/test_973_url_decoding.py b/tests/issues/test_973_url_decoding.py new file mode 100644 index 0000000000..32d5a16cc5 --- /dev/null +++ b/tests/issues/test_973_url_decoding.py @@ -0,0 +1,78 @@ +"""Test that URL-encoded parameters are decoded in resource templates. 
+ +Regression test for https://github.com/modelcontextprotocol/python-sdk/issues/973 +""" + +from mcp.server.fastmcp.resources import ResourceTemplate + + +def test_template_matches_decodes_space(): + """Test that %20 is decoded to space.""" + + def search(query: str) -> str: # pragma: no cover + return f"Results for: {query}" + + template = ResourceTemplate.from_function( + fn=search, + uri_template="search://{query}", + name="search", + ) + + params = template.matches("search://hello%20world") + assert params is not None + assert params["query"] == "hello world" + + +def test_template_matches_decodes_accented_characters(): + """Test that %C3%A9 is decoded to e with accent.""" + + def search(query: str) -> str: # pragma: no cover + return f"Results for: {query}" + + template = ResourceTemplate.from_function( + fn=search, + uri_template="search://{query}", + name="search", + ) + + params = template.matches("search://caf%C3%A9") + assert params is not None + assert params["query"] == "café" + + +def test_template_matches_decodes_complex_phrase(): + """Test complex French phrase from the original issue.""" + + def search(query: str) -> str: # pragma: no cover + return f"Results for: {query}" + + template = ResourceTemplate.from_function( + fn=search, + uri_template="search://{query}", + name="search", + ) + + params = template.matches("search://stick%20correcteur%20teint%C3%A9%20anti-imperfections") + assert params is not None + assert params["query"] == "stick correcteur teinté anti-imperfections" + + +def test_template_matches_preserves_plus_sign(): + """Test that plus sign remains as plus (not converted to space). + + In URI encoding, %20 is space. Plus-as-space is only for + application/x-www-form-urlencoded (HTML forms). + """ + + def search(query: str) -> str: # pragma: no cover + return f"Results for: {query}" + + template = ResourceTemplate.from_function( + fn=search, + uri_template="search://{query}", + name="search", + ) + + params = template.matches("search://hello+world") + assert params is not None + assert params["query"] == "hello+world" diff --git a/tests/issues/test_malformed_input.py b/tests/issues/test_malformed_input.py index 078beb7a58..cb60ca42a6 100644 --- a/tests/issues/test_malformed_input.py +++ b/tests/issues/test_malformed_input.py @@ -1,27 +1,18 @@ # Claude Debug """Test for HackerOne vulnerability report #3156202 - malformed input DOS.""" -from typing import Any - import anyio import pytest from mcp.server.models import InitializationOptions from mcp.server.session import ServerSession from mcp.shared.message import SessionMessage -from mcp.types import ( - INVALID_PARAMS, - JSONRPCError, - JSONRPCMessage, - JSONRPCRequest, - ServerCapabilities, -) +from mcp.types import INVALID_PARAMS, JSONRPCError, JSONRPCMessage, JSONRPCRequest, ServerCapabilities @pytest.mark.anyio async def test_malformed_initialize_request_does_not_crash_server(): - """ - Test that malformed initialize requests return proper error responses + """Test that malformed initialize requests return proper error responses instead of crashing the server (HackerOne #3156202). 
""" # Create in-memory streams for testing @@ -38,7 +29,7 @@ async def test_malformed_initialize_request_does_not_crash_server(): ) # Wrap in session message - request_message = SessionMessage(message=JSONRPCMessage(malformed_request)) + request_message = SessionMessage(message=malformed_request) # Start a server session async with ServerSession( @@ -59,7 +50,7 @@ async def test_malformed_initialize_request_does_not_crash_server(): # Check that we received an error response instead of a crash try: response_message = write_receive_stream.receive_nowait() - response = response_message.message.root + response = response_message.message # Verify it's a proper JSON-RPC error response assert isinstance(response, JSONRPCError) @@ -76,14 +67,14 @@ async def test_malformed_initialize_request_does_not_crash_server(): method="tools/call", # params=None # Missing required params ) - another_request_message = SessionMessage(message=JSONRPCMessage(another_malformed_request)) + another_request_message = SessionMessage(message=another_malformed_request) await read_send_stream.send(another_request_message) await anyio.sleep(0.1) # Should get another error response, not a crash second_response_message = write_receive_stream.receive_nowait() - second_response = second_response_message.message.root + second_response = second_response_message.message assert isinstance(second_response, JSONRPCError) assert second_response.id == "test_id_2" @@ -101,9 +92,7 @@ async def test_malformed_initialize_request_does_not_crash_server(): @pytest.mark.anyio async def test_multiple_concurrent_malformed_requests(): - """ - Test that multiple concurrent malformed requests don't crash the server. - """ + """Test that multiple concurrent malformed requests don't crash the server.""" # Create in-memory streams for testing read_send_stream, read_receive_stream = anyio.create_memory_object_stream[SessionMessage | Exception](100) write_send_stream, write_receive_stream = anyio.create_memory_object_stream[SessionMessage](100) @@ -128,7 +117,7 @@ async def test_multiple_concurrent_malformed_requests(): method="initialize", # params=None # Missing required params ) - request_message = SessionMessage(message=JSONRPCMessage(malformed_request)) + request_message = SessionMessage(message=malformed_request) malformed_requests.append(request_message) # Send all requests @@ -139,11 +128,11 @@ async def test_multiple_concurrent_malformed_requests(): await anyio.sleep(0.2) # Verify we get error responses for all requests - error_responses: list[Any] = [] + error_responses: list[JSONRPCMessage] = [] try: while True: response_message = write_receive_stream.receive_nowait() - error_responses.append(response_message.message.root) + error_responses.append(response_message.message) except anyio.WouldBlock: pass # No more messages diff --git a/tests/server/auth/__init__.py b/tests/server/auth/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/server/auth/middleware/__init__.py b/tests/server/auth/middleware/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/server/auth/middleware/test_auth_context.py b/tests/server/auth/middleware/test_auth_context.py index 1cca4df5ab..2364909221 100644 --- a/tests/server/auth/middleware/test_auth_context.py +++ b/tests/server/auth/middleware/test_auth_context.py @@ -1,6 +1,4 @@ -""" -Tests for the AuthContext middleware components. 
-""" +"""Tests for the AuthContext middleware components.""" import time diff --git a/tests/server/auth/middleware/test_bearer_auth.py b/tests/server/auth/middleware/test_bearer_auth.py index e13ab96390..bd14e294c2 100644 --- a/tests/server/auth/middleware/test_bearer_auth.py +++ b/tests/server/auth/middleware/test_bearer_auth.py @@ -1,6 +1,4 @@ -""" -Tests for the BearerAuth middleware components. -""" +"""Tests for the BearerAuth middleware components.""" import time from typing import Any, cast diff --git a/tests/server/auth/test_error_handling.py b/tests/server/auth/test_error_handling.py index f331b2cb2d..f8c7991476 100644 --- a/tests/server/auth/test_error_handling.py +++ b/tests/server/auth/test_error_handling.py @@ -1,9 +1,10 @@ -""" -Tests for OAuth error handling in the auth handlers. -""" +"""Tests for OAuth error handling in the auth handlers.""" +import base64 +import hashlib +import secrets import unittest.mock -from typing import TYPE_CHECKING, Any +from typing import Any from urllib.parse import parse_qs, urlparse import httpx @@ -14,12 +15,8 @@ from mcp.server.auth.provider import AuthorizeError, RegistrationError, TokenError from mcp.server.auth.routes import create_auth_routes - -# TODO(Marcelo): This TYPE_CHECKING shouldn't be here, but pytest doesn't seem to get the module correctly. -if TYPE_CHECKING: - from ...server.fastmcp.auth.test_auth_integration import MockOAuthProvider -else: - from tests.server.fastmcp.auth.test_auth_integration import MockOAuthProvider +from mcp.server.auth.settings import ClientRegistrationOptions, RevocationOptions +from tests.server.fastmcp.auth.test_auth_integration import MockOAuthProvider @pytest.fixture @@ -30,8 +27,6 @@ def oauth_provider(): @pytest.fixture def app(oauth_provider: MockOAuthProvider): - from mcp.server.auth.settings import ClientRegistrationOptions, RevocationOptions - # Enable client registration client_registration_options = ClientRegistrationOptions(enabled=True) revocation_options = RevocationOptions(enabled=True) @@ -58,10 +53,6 @@ def client(app: Starlette): @pytest.fixture def pkce_challenge(): """Create a PKCE challenge with code_verifier and code_challenge.""" - import base64 - import hashlib - import secrets - # Generate a code verifier code_verifier = secrets.token_urlsafe(64)[:128] diff --git a/tests/server/auth/test_protected_resource.py b/tests/server/auth/test_protected_resource.py index 82af16c5b1..594541420e 100644 --- a/tests/server/auth/test_protected_resource.py +++ b/tests/server/auth/test_protected_resource.py @@ -1,6 +1,6 @@ -""" -Integration tests for MCP Oauth Protected Resource. -""" +"""Integration tests for MCP Oauth Protected Resource.""" + +from urllib.parse import urlparse import httpx import pytest @@ -159,8 +159,6 @@ def test_route_path_matches_metadata_url(self): ) # Extract path from metadata URL - from urllib.parse import urlparse - metadata_path = urlparse(str(metadata_url)).path # Verify consistency @@ -181,8 +179,6 @@ def test_consistent_paths_for_various_resources(self, resource_url: str, expecte # Test URL generation metadata_url = build_resource_metadata_url(resource_url_obj) - from urllib.parse import urlparse - url_path = urlparse(str(metadata_url)).path # Test route creation diff --git a/tests/server/auth/test_provider.py b/tests/server/auth/test_provider.py index 7fe6213497..89a7cbedeb 100644 --- a/tests/server/auth/test_provider.py +++ b/tests/server/auth/test_provider.py @@ -1,6 +1,4 @@ -""" -Tests for mcp.server.auth.provider module. 
-""" +"""Tests for mcp.server.auth.provider module.""" from mcp.server.auth.provider import construct_redirect_uri diff --git a/tests/server/fastmcp/auth/__init__.py b/tests/server/fastmcp/auth/__init__.py index 64d318ec46..c932e236e3 100644 --- a/tests/server/fastmcp/auth/__init__.py +++ b/tests/server/fastmcp/auth/__init__.py @@ -1,3 +1 @@ -""" -Tests for the MCP server auth components. -""" +"""Tests for the MCP server auth components.""" diff --git a/tests/server/fastmcp/auth/test_auth_integration.py b/tests/server/fastmcp/auth/test_auth_integration.py index 08fcabf276..5000c7b386 100644 --- a/tests/server/fastmcp/auth/test_auth_integration.py +++ b/tests/server/fastmcp/auth/test_auth_integration.py @@ -1,6 +1,4 @@ -""" -Integration tests for MCP authorization components. -""" +"""Integration tests for MCP authorization components.""" import base64 import hashlib @@ -339,9 +337,59 @@ async def test_token_validation_error(self, test_client: httpx.AsyncClient): }, ) error_response = response.json() - assert error_response["error"] == "unauthorized_client" + # Per RFC 6749 Section 5.2, authentication failures (missing client_id) + # must return "invalid_client", not "unauthorized_client" + assert error_response["error"] == "invalid_client" assert "error_description" in error_response # Contains error message + @pytest.mark.anyio + async def test_token_invalid_client_secret_returns_invalid_client( + self, + test_client: httpx.AsyncClient, + registered_client: dict[str, Any], + pkce_challenge: dict[str, str], + mock_oauth_provider: MockOAuthProvider, + ): + """Test token endpoint returns 'invalid_client' for wrong client_secret per RFC 6749. + + RFC 6749 Section 5.2 defines: + - invalid_client: Client authentication failed (wrong credentials, unknown client) + - unauthorized_client: Authenticated client not authorized for grant type + + When client_secret is wrong, this is an authentication failure, so the + error code MUST be 'invalid_client'. 
+ """ + # Create an auth code for the registered client + auth_code = f"code_{int(time.time())}" + mock_oauth_provider.auth_codes[auth_code] = AuthorizationCode( + code=auth_code, + client_id=registered_client["client_id"], + code_challenge=pkce_challenge["code_challenge"], + redirect_uri=AnyUrl("https://client.example.com/callback"), + redirect_uri_provided_explicitly=True, + scopes=["read", "write"], + expires_at=time.time() + 600, + ) + + # Try to exchange the auth code with a WRONG client_secret + response = await test_client.post( + "/token", + data={ + "grant_type": "authorization_code", + "client_id": registered_client["client_id"], + "client_secret": "wrong_secret_that_does_not_match", + "code": auth_code, + "code_verifier": pkce_challenge["code_verifier"], + "redirect_uri": "https://client.example.com/callback", + }, + ) + + assert response.status_code == 401 + error_response = response.json() + # RFC 6749 Section 5.2: authentication failures MUST return "invalid_client" + assert error_response["error"] == "invalid_client" + assert "Invalid client_secret" in error_response["error_description"] + @pytest.mark.anyio async def test_token_invalid_auth_code( self, @@ -889,19 +937,35 @@ async def test_client_registration_default_scopes( assert registered_client.scope == "read write" @pytest.mark.anyio - async def test_client_registration_invalid_grant_type(self, test_client: httpx.AsyncClient): + async def test_client_registration_with_authorization_code_only(self, test_client: httpx.AsyncClient): + """Test that registration succeeds with only authorization_code (refresh_token is optional per RFC 7591).""" client_metadata = { "redirect_uris": ["https://client.example.com/callback"], "client_name": "Test Client", "grant_types": ["authorization_code"], } + response = await test_client.post("/register", json=client_metadata) + assert response.status_code == 201 + client_info = response.json() + assert "client_id" in client_info + assert client_info["grant_types"] == ["authorization_code"] + + @pytest.mark.anyio + async def test_client_registration_missing_authorization_code(self, test_client: httpx.AsyncClient): + """Test that registration fails when authorization_code grant type is missing.""" + client_metadata = { + "redirect_uris": ["https://client.example.com/callback"], + "client_name": "Test Client", + "grant_types": ["refresh_token"], + } + response = await test_client.post("/register", json=client_metadata) assert response.status_code == 400 error_data = response.json() assert "error" in error_data assert error_data["error"] == "invalid_client_metadata" - assert error_data["error_description"] == "grant_types must be authorization_code and refresh_token" + assert error_data["error_description"] == "grant_types must include 'authorization_code'" @pytest.mark.anyio async def test_client_registration_with_additional_grant_type(self, test_client: httpx.AsyncClient): @@ -1070,7 +1134,8 @@ async def test_wrong_auth_method_without_valid_credentials_fails( ) assert response.status_code == 401 error_response = response.json() - assert error_response["error"] == "unauthorized_client" + # RFC 6749: authentication failures return "invalid_client" + assert error_response["error"] == "invalid_client" assert "Client secret is required" in error_response["error_description"] @pytest.mark.anyio @@ -1114,7 +1179,8 @@ async def test_basic_auth_without_header_fails( ) assert response.status_code == 401 error_response = response.json() - assert error_response["error"] == "unauthorized_client" + # RFC 
6749: authentication failures return "invalid_client" + assert error_response["error"] == "invalid_client" assert "Missing or invalid Basic authentication" in error_response["error_description"] @pytest.mark.anyio @@ -1158,7 +1224,8 @@ async def test_basic_auth_invalid_base64_fails( ) assert response.status_code == 401 error_response = response.json() - assert error_response["error"] == "unauthorized_client" + # RFC 6749: authentication failures return "invalid_client" + assert error_response["error"] == "invalid_client" assert "Invalid Basic authentication header" in error_response["error_description"] @pytest.mark.anyio @@ -1189,8 +1256,6 @@ async def test_basic_auth_no_colon_fails( ) # Send base64 without colon (invalid format) - import base64 - invalid_creds = base64.b64encode(b"no-colon-here").decode() response = await test_client.post( "/token", @@ -1205,7 +1270,8 @@ async def test_basic_auth_no_colon_fails( ) assert response.status_code == 401 error_response = response.json() - assert error_response["error"] == "unauthorized_client" + # RFC 6749: authentication failures return "invalid_client" + assert error_response["error"] == "invalid_client" assert "Invalid Basic authentication header" in error_response["error_description"] @pytest.mark.anyio @@ -1236,8 +1302,6 @@ async def test_basic_auth_client_id_mismatch_fails( ) # Send different client_id in Basic auth header - import base64 - wrong_creds = base64.b64encode(f"wrong-client-id:{client_info['client_secret']}".encode()).decode() response = await test_client.post( "/token", @@ -1252,7 +1316,8 @@ async def test_basic_auth_client_id_mismatch_fails( ) assert response.status_code == 401 error_response = response.json() - assert error_response["error"] == "unauthorized_client" + # RFC 6749: authentication failures return "invalid_client" + assert error_response["error"] == "invalid_client" assert "Client ID mismatch" in error_response["error_description"] @pytest.mark.anyio diff --git a/tests/server/fastmcp/prompts/test_base.py b/tests/server/fastmcp/prompts/test_base.py index 488bd5002c..afc1ec6ea8 100644 --- a/tests/server/fastmcp/prompts/test_base.py +++ b/tests/server/fastmcp/prompts/test_base.py @@ -1,7 +1,6 @@ from typing import Any import pytest -from pydantic import FileUrl from mcp.server.fastmcp.prompts.base import AssistantMessage, Message, Prompt, TextContent, UserMessage from mcp.types import EmbeddedResource, TextResourceContents @@ -95,9 +94,9 @@ async def fn() -> UserMessage: content=EmbeddedResource( type="resource", resource=TextResourceContents( - uri=FileUrl("file://file.txt"), + uri="file://file.txt", text="File contents", - mimeType="text/plain", + mime_type="text/plain", ), ) ) @@ -108,9 +107,9 @@ async def fn() -> UserMessage: content=EmbeddedResource( type="resource", resource=TextResourceContents( - uri=FileUrl("file://file.txt"), + uri="file://file.txt", text="File contents", - mimeType="text/plain", + mime_type="text/plain", ), ) ) @@ -127,9 +126,9 @@ async def fn() -> list[Message]: content=EmbeddedResource( type="resource", resource=TextResourceContents( - uri=FileUrl("file://file.txt"), + uri="file://file.txt", text="File contents", - mimeType="text/plain", + mime_type="text/plain", ), ) ), @@ -143,9 +142,9 @@ async def fn() -> list[Message]: content=EmbeddedResource( type="resource", resource=TextResourceContents( - uri=FileUrl("file://file.txt"), + uri="file://file.txt", text="File contents", - mimeType="text/plain", + mime_type="text/plain", ), ) ), @@ -162,7 +161,7 @@ async def fn() -> dict[str, 
Any]: "content": { "type": "resource", "resource": { - "uri": FileUrl("file://file.txt"), + "uri": "file://file.txt", "text": "File contents", "mimeType": "text/plain", }, @@ -175,9 +174,9 @@ async def fn() -> dict[str, Any]: content=EmbeddedResource( type="resource", resource=TextResourceContents( - uri=FileUrl("file://file.txt"), + uri="file://file.txt", text="File contents", - mimeType="text/plain", + mime_type="text/plain", ), ) ) diff --git a/tests/server/fastmcp/resources/test_file_resources.py b/tests/server/fastmcp/resources/test_file_resources.py index c82cf85c5a..0eb24f0632 100644 --- a/tests/server/fastmcp/resources/test_file_resources.py +++ b/tests/server/fastmcp/resources/test_file_resources.py @@ -3,7 +3,6 @@ from tempfile import NamedTemporaryFile import pytest -from pydantic import FileUrl from mcp.server.fastmcp.resources import FileResource @@ -31,7 +30,7 @@ class TestFileResource: def test_file_resource_creation(self, temp_file: Path): """Test creating a FileResource.""" resource = FileResource( - uri=FileUrl(temp_file.as_uri()), + uri=temp_file.as_uri(), name="test", description="test file", path=temp_file, @@ -46,7 +45,7 @@ def test_file_resource_creation(self, temp_file: Path): def test_file_resource_str_path_conversion(self, temp_file: Path): """Test FileResource handles string paths.""" resource = FileResource( - uri=FileUrl(f"file://{temp_file}"), + uri=f"file://{temp_file}", name="test", path=Path(str(temp_file)), ) @@ -57,7 +56,7 @@ def test_file_resource_str_path_conversion(self, temp_file: Path): async def test_read_text_file(self, temp_file: Path): """Test reading a text file.""" resource = FileResource( - uri=FileUrl(f"file://{temp_file}"), + uri=f"file://{temp_file}", name="test", path=temp_file, ) @@ -69,7 +68,7 @@ async def test_read_text_file(self, temp_file: Path): async def test_read_binary_file(self, temp_file: Path): """Test reading a file as binary.""" resource = FileResource( - uri=FileUrl(f"file://{temp_file}"), + uri=f"file://{temp_file}", name="test", path=temp_file, is_binary=True, @@ -82,7 +81,7 @@ def test_relative_path_error(self): """Test error on relative path.""" with pytest.raises(ValueError, match="Path must be absolute"): FileResource( - uri=FileUrl("file:///test.txt"), + uri="file:///test.txt", name="test", path=Path("test.txt"), ) @@ -93,7 +92,7 @@ async def test_missing_file_error(self, temp_file: Path): # Create path to non-existent file missing = temp_file.parent / "missing.txt" resource = FileResource( - uri=FileUrl("file:///missing.txt"), + uri="file:///missing.txt", name="test", path=missing, ) @@ -107,7 +106,7 @@ async def test_permission_error(self, temp_file: Path): # pragma: no cover temp_file.chmod(0o000) # Remove all permissions try: resource = FileResource( - uri=FileUrl(temp_file.as_uri()), + uri=temp_file.as_uri(), name="test", path=temp_file, ) diff --git a/tests/server/fastmcp/resources/test_function_resources.py b/tests/server/fastmcp/resources/test_function_resources.py index fccada4750..61ed44f6c6 100644 --- a/tests/server/fastmcp/resources/test_function_resources.py +++ b/tests/server/fastmcp/resources/test_function_resources.py @@ -1,5 +1,5 @@ import pytest -from pydantic import AnyUrl, BaseModel +from pydantic import BaseModel from mcp.server.fastmcp.resources import FunctionResource @@ -14,7 +14,7 @@ def my_func() -> str: # pragma: no cover return "test content" resource = FunctionResource( - uri=AnyUrl("fn://test"), + uri="fn://test", name="test", description="test function", fn=my_func, @@ -33,7 +33,7 @@ def 
get_data() -> str: return "Hello, world!" resource = FunctionResource( - uri=AnyUrl("function://test"), + uri="function://test", name="test", fn=get_data, ) @@ -49,7 +49,7 @@ def get_data() -> bytes: return b"Hello, world!" resource = FunctionResource( - uri=AnyUrl("function://test"), + uri="function://test", name="test", fn=get_data, ) @@ -64,7 +64,7 @@ def get_data() -> dict[str, str]: return {"key": "value"} resource = FunctionResource( - uri=AnyUrl("function://test"), + uri="function://test", name="test", fn=get_data, ) @@ -80,7 +80,7 @@ def failing_func() -> str: raise ValueError("Test error") resource = FunctionResource( - uri=AnyUrl("function://test"), + uri="function://test", name="test", fn=failing_func, ) @@ -95,7 +95,7 @@ class MyModel(BaseModel): name: str resource = FunctionResource( - uri=AnyUrl("function://test"), + uri="function://test", name="test", fn=lambda: MyModel(name="test"), ) @@ -114,7 +114,7 @@ def get_data() -> CustomData: return CustomData() resource = FunctionResource( - uri=AnyUrl("function://test"), + uri="function://test", name="test", fn=get_data, ) @@ -129,7 +129,7 @@ async def get_data() -> str: return "Hello, world!" resource = FunctionResource( - uri=AnyUrl("function://test"), + uri="function://test", name="test", fn=get_data, ) @@ -154,4 +154,39 @@ async def get_data() -> str: # pragma: no cover assert resource.description == "get_data returns a string" assert resource.mime_type == "text/plain" assert resource.name == "test" - assert resource.uri == AnyUrl("function://test") + assert resource.uri == "function://test" + + +class TestFunctionResourceMetadata: + def test_from_function_with_metadata(self): + # from_function() accepts meta dict and stores it on the resource for static resources + + def get_data() -> str: # pragma: no cover + return "test data" + + metadata = {"cache_ttl": 300, "tags": ["data", "readonly"]} + + resource = FunctionResource.from_function( + fn=get_data, + uri="resource://data", + meta=metadata, + ) + + assert resource.meta is not None + assert resource.meta == metadata + assert resource.meta["cache_ttl"] == 300 + assert "data" in resource.meta["tags"] + assert "readonly" in resource.meta["tags"] + + def test_from_function_without_metadata(self): + # meta parameter is optional and defaults to None for backward compatibility + + def get_data() -> str: # pragma: no cover + return "test data" + + resource = FunctionResource.from_function( + fn=get_data, + uri="resource://data", + ) + + assert resource.meta is None diff --git a/tests/server/fastmcp/resources/test_resource_manager.py b/tests/server/fastmcp/resources/test_resource_manager.py index a0c06be86c..5fd4bc8529 100644 --- a/tests/server/fastmcp/resources/test_resource_manager.py +++ b/tests/server/fastmcp/resources/test_resource_manager.py @@ -2,7 +2,7 @@ from tempfile import NamedTemporaryFile import pytest -from pydantic import AnyUrl, FileUrl +from pydantic import AnyUrl from mcp.server.fastmcp.resources import FileResource, FunctionResource, ResourceManager, ResourceTemplate @@ -31,7 +31,7 @@ def test_add_resource(self, temp_file: Path): """Test adding a resource.""" manager = ResourceManager() resource = FileResource( - uri=FileUrl(f"file://{temp_file}"), + uri=f"file://{temp_file}", name="test", path=temp_file, ) @@ -43,7 +43,7 @@ def test_add_duplicate_resource(self, temp_file: Path): """Test adding the same resource twice.""" manager = ResourceManager() resource = FileResource( - uri=FileUrl(f"file://{temp_file}"), + uri=f"file://{temp_file}", name="test", 
path=temp_file, ) @@ -56,7 +56,7 @@ def test_warn_on_duplicate_resources(self, temp_file: Path, caplog: pytest.LogCa """Test warning on duplicate resources.""" manager = ResourceManager() resource = FileResource( - uri=FileUrl(f"file://{temp_file}"), + uri=f"file://{temp_file}", name="test", path=temp_file, ) @@ -68,7 +68,7 @@ def test_disable_warn_on_duplicate_resources(self, temp_file: Path, caplog: pyte """Test disabling warning on duplicate resources.""" manager = ResourceManager(warn_on_duplicate_resources=False) resource = FileResource( - uri=FileUrl(f"file://{temp_file}"), + uri=f"file://{temp_file}", name="test", path=temp_file, ) @@ -81,7 +81,7 @@ async def test_get_resource(self, temp_file: Path): """Test getting a resource by URI.""" manager = ResourceManager() resource = FileResource( - uri=FileUrl(f"file://{temp_file}"), + uri=f"file://{temp_file}", name="test", path=temp_file, ) @@ -120,12 +120,12 @@ def test_list_resources(self, temp_file: Path): """Test listing all resources.""" manager = ResourceManager() resource1 = FileResource( - uri=FileUrl(f"file://{temp_file}"), + uri=f"file://{temp_file}", name="test1", path=temp_file, ) resource2 = FileResource( - uri=FileUrl(f"file://{temp_file}2"), + uri=f"file://{temp_file}2", name="test2", path=temp_file, ) @@ -134,3 +134,43 @@ def test_list_resources(self, temp_file: Path): resources = manager.list_resources() assert len(resources) == 2 assert resources == [resource1, resource2] + + +class TestResourceManagerMetadata: + """Test ResourceManager Metadata""" + + def test_add_template_with_metadata(self): + """Test that ResourceManager.add_template() accepts and passes meta parameter.""" + + manager = ResourceManager() + + def get_item(id: str) -> str: # pragma: no cover + return f"Item {id}" + + metadata = {"source": "database", "cached": True} + + template = manager.add_template( + fn=get_item, + uri_template="resource://items/{id}", + meta=metadata, + ) + + assert template.meta is not None + assert template.meta == metadata + assert template.meta["source"] == "database" + assert template.meta["cached"] is True + + def test_add_template_without_metadata(self): + """Test that ResourceManager.add_template() works without meta parameter.""" + + manager = ResourceManager() + + def get_item(id: str) -> str: # pragma: no cover + return f"Item {id}" + + template = manager.add_template( + fn=get_item, + uri_template="resource://items/{id}", + ) + + assert template.meta is None diff --git a/tests/server/fastmcp/resources/test_resource_template.py b/tests/server/fastmcp/resources/test_resource_template.py index c910f8fa85..f3d3ba5e45 100644 --- a/tests/server/fastmcp/resources/test_resource_template.py +++ b/tests/server/fastmcp/resources/test_resource_template.py @@ -258,3 +258,50 @@ def get_item(item_id: str) -> str: # pragma: no cover # Verify the resource works correctly content = await resource.read() assert content == "Item 123" + + +class TestResourceTemplateMetadata: + """Test ResourceTemplate meta handling.""" + + def test_template_from_function_with_metadata(self): + """Test that ResourceTemplate.from_function() accepts and stores meta parameter.""" + + def get_user(user_id: str) -> str: # pragma: no cover + return f"User {user_id}" + + metadata = {"requires_auth": True, "rate_limit": 100} + + template = ResourceTemplate.from_function( + fn=get_user, + uri_template="resource://users/{user_id}", + meta=metadata, + ) + + assert template.meta is not None + assert template.meta == metadata + assert template.meta["requires_auth"] is 
True + assert template.meta["rate_limit"] == 100 + + @pytest.mark.anyio + async def test_template_created_resources_inherit_metadata(self): + """Test that resources created from templates inherit meta from template.""" + + def get_item(item_id: str) -> str: + return f"Item {item_id}" + + metadata = {"category": "inventory", "cacheable": True} + + template = ResourceTemplate.from_function( + fn=get_item, + uri_template="resource://items/{item_id}", + meta=metadata, + ) + + # Create a resource from the template + resource = await template.create_resource("resource://items/123", {"item_id": "123"}) + + # The resource should inherit the template's metadata + assert resource.meta is not None + assert resource.meta == metadata + assert resource.meta["category"] == "inventory" + assert resource.meta["cacheable"] is True diff --git a/tests/server/fastmcp/resources/test_resources.py b/tests/server/fastmcp/resources/test_resources.py index 32fc23b174..6d346786dc 100644 --- a/tests/server/fastmcp/resources/test_resources.py +++ b/tests/server/fastmcp/resources/test_resources.py @@ -1,5 +1,4 @@ import pytest -from pydantic import AnyUrl from mcp.server.fastmcp import FastMCP from mcp.server.fastmcp.resources import FunctionResource, Resource @@ -9,35 +8,35 @@ class TestResourceValidation: """Test base Resource validation.""" - def test_resource_uri_validation(self): - """Test URI validation.""" + def test_resource_uri_accepts_any_string(self): + """Test that URI field accepts any string per MCP spec.""" def dummy_func() -> str: # pragma: no cover return "data" # Valid URI resource = FunctionResource( - uri=AnyUrl("http://example.com/data"), + uri="http://example.com/data", name="test", fn=dummy_func, ) - assert str(resource.uri) == "http://example.com/data" + assert resource.uri == "http://example.com/data" - # Missing protocol - with pytest.raises(ValueError, match="Input should be a valid URL"): - FunctionResource( - uri=AnyUrl("invalid"), - name="test", - fn=dummy_func, - ) + # Relative path - now accepted per MCP spec + resource = FunctionResource( + uri="users/me", + name="test", + fn=dummy_func, + ) + assert resource.uri == "users/me" - # Missing host - with pytest.raises(ValueError, match="Input should be a valid URL"): - FunctionResource( - uri=AnyUrl("http://"), - name="test", - fn=dummy_func, - ) + # Custom scheme + resource = FunctionResource( + uri="custom://resource", + name="test", + fn=dummy_func, + ) + assert resource.uri == "custom://resource" def test_resource_name_from_uri(self): """Test name is extracted from URI if not provided.""" @@ -46,7 +45,7 @@ def dummy_func() -> str: # pragma: no cover return "data" resource = FunctionResource( - uri=AnyUrl("resource://my-resource"), + uri="resource://my-resource", fn=dummy_func, ) assert resource.name == "resource://my-resource" @@ -65,7 +64,7 @@ def dummy_func() -> str: # pragma: no cover # Explicit name takes precedence over URI resource = FunctionResource( - uri=AnyUrl("resource://uri-name"), + uri="resource://uri-name", name="explicit-name", fn=dummy_func, ) @@ -79,14 +78,14 @@ def dummy_func() -> str: # pragma: no cover # Default mime type resource = FunctionResource( - uri=AnyUrl("resource://test"), + uri="resource://test", fn=dummy_func, ) assert resource.mime_type == "text/plain" # Custom mime type resource = FunctionResource( - uri=AnyUrl("resource://test"), + uri="resource://test", fn=dummy_func, mime_type="application/json", ) @@ -100,7 +99,7 @@ class ConcreteResource(Resource): pass with pytest.raises(TypeError, match="abstract 
method"): - ConcreteResource(uri=AnyUrl("test://test"), name="test") # type: ignore + ConcreteResource(uri="test://test", name="test") # type: ignore class TestResourceAnnotations: @@ -193,3 +192,41 @@ def test_audience_validation(self): # Invalid roles should raise validation error with pytest.raises(Exception): # Pydantic validation error Annotations(audience=["invalid_role"]) # type: ignore + + +class TestResourceMetadata: + """Test metadata field on base Resource class.""" + + def test_resource_with_metadata(self): + """Test that Resource base class accepts meta parameter.""" + + def dummy_func() -> str: # pragma: no cover + return "data" + + metadata = {"version": "1.0", "category": "test"} + + resource = FunctionResource( + uri="resource://test", + name="test", + fn=dummy_func, + meta=metadata, + ) + + assert resource.meta is not None + assert resource.meta == metadata + assert resource.meta["version"] == "1.0" + assert resource.meta["category"] == "test" + + def test_resource_without_metadata(self): + """Test that meta field defaults to None.""" + + def dummy_func() -> str: # pragma: no cover + return "data" + + resource = FunctionResource( + uri="resource://test", + name="test", + fn=dummy_func, + ) + + assert resource.meta is None diff --git a/tests/server/fastmcp/test_elicitation.py b/tests/server/fastmcp/test_elicitation.py index 597b291785..efed572e4b 100644 --- a/tests/server/fastmcp/test_elicitation.py +++ b/tests/server/fastmcp/test_elicitation.py @@ -1,18 +1,15 @@ -""" -Test the elicitation feature using stdio transport. -""" +"""Test the elicitation feature using stdio transport.""" from typing import Any import pytest from pydantic import BaseModel, Field -from mcp import types +from mcp import Client, types from mcp.client.session import ClientSession, ElicitationFnT from mcp.server.fastmcp import Context, FastMCP from mcp.server.session import ServerSession from mcp.shared.context import RequestContext -from mcp.shared.memory import create_connected_server_and_client_session from mcp.types import ElicitRequestParams, ElicitResult, TextContent @@ -47,12 +44,8 @@ async def call_tool_and_assert( text_contains: list[str] | None = None, ): """Helper to create session, call tool, and assert result.""" - async with create_connected_server_and_client_session( - mcp._mcp_server, elicitation_callback=elicitation_callback - ) as client_session: - await client_session.initialize() - - result = await client_session.call_tool(tool_name, args) + async with Client(mcp, elicitation_callback=elicitation_callback) as client: + result = await client.call_tool(tool_name, args) assert len(result.content) == 1 assert isinstance(result.content[0], TextContent) @@ -134,14 +127,10 @@ async def elicitation_callback( ): # pragma: no cover return ElicitResult(action="accept", content={}) - async with create_connected_server_and_client_session( - mcp._mcp_server, elicitation_callback=elicitation_callback - ) as client_session: - await client_session.initialize() - + async with Client(mcp, elicitation_callback=elicitation_callback) as client: # Test both invalid schemas for tool_name, field_name in [("invalid_list", "numbers"), ("nested_model", "nested")]: - result = await client_session.call_tool(tool_name, {}) + result = await client.call_tool(tool_name, {}) assert len(result.content) == 1 assert isinstance(result.content[0], TextContent) assert "Validation failed as expected" in result.content[0].text @@ -290,7 +279,7 @@ async def defaults_tool(ctx: Context[ServerSession, None]) -> str: async def 
callback_schema_verify(context: RequestContext[ClientSession, None], params: ElicitRequestParams): # Verify the schema includes defaults assert isinstance(params, types.ElicitRequestFormParams), "Expected form mode elicitation" - schema = params.requestedSchema + schema = params.requested_schema props = schema["properties"] assert props["name"]["default"] == "Guest" diff --git a/tests/server/fastmcp/test_func_metadata.py b/tests/server/fastmcp/test_func_metadata.py index 61e524290e..8d3ac6ec50 100644 --- a/tests/server/fastmcp/test_func_metadata.py +++ b/tests/server/fastmcp/test_func_metadata.py @@ -5,13 +5,14 @@ # pyright: reportUnknownLambdaType=false from collections.abc import Callable from dataclasses import dataclass -from typing import Annotated, Any, Final, TypedDict +from typing import Annotated, Any, Final, NamedTuple, TypedDict import annotated_types import pytest from dirty_equals import IsPartialDict from pydantic import BaseModel, Field +from mcp.server.fastmcp.exceptions import InvalidSignature from mcp.server.fastmcp.utilities.func_metadata import func_metadata from mcp.types import CallToolResult @@ -447,8 +448,7 @@ def test_complex_function_json_schema(): def test_str_vs_int(): - """ - Test that string values are kept as strings even when they contain numbers, + """Test that string values are kept as strings even when they contain numbers, while numbers are parsed correctly. """ @@ -462,8 +462,7 @@ def func_with_str_and_int(a: str, b: int): # pragma: no cover def test_str_annotation_preserves_json_string(): - """ - Regression test for PR #1113: Ensure that when a parameter is annotated as str, + """Regression test for PR #1113: Ensure that when a parameter is annotated as str, valid JSON strings are NOT parsed into Python objects. This test would fail before the fix (JSON string would be parsed to dict) @@ -513,8 +512,7 @@ def process_json_config(config: str, enabled: bool = True) -> str: # pragma: no @pytest.mark.anyio async def test_str_annotation_runtime_validation(): - """ - Regression test for PR #1113: Test runtime validation with string parameters + """Regression test for PR #1113: Test runtime validation with string parameters containing valid JSON to ensure they are passed as strings, not parsed objects. 
""" @@ -558,7 +556,6 @@ def handle_json_payload(payload: str, strict_mode: bool = False) -> str: # prag def test_structured_output_requires_return_annotation(): """Test that structured_output=True requires a return annotation""" - from mcp.server.fastmcp.exceptions import InvalidSignature def func_no_annotation(): # pragma: no cover return "hello" @@ -850,7 +847,7 @@ class PersonClass(BaseModel): name: str def func_returning_annotated_tool_call_result() -> Annotated[CallToolResult, PersonClass]: # pragma: no cover - return CallToolResult(content=[], structuredContent={"name": "Brandon"}) + return CallToolResult(content=[], structured_content={"name": "Brandon"}) meta = func_metadata(func_returning_annotated_tool_call_result) @@ -870,7 +867,7 @@ class PersonClass(BaseModel): name: str def func_returning_annotated_tool_call_result() -> Annotated[CallToolResult, PersonClass]: # pragma: no cover - return CallToolResult(content=[], structuredContent={"person": "Brandon"}) + return CallToolResult(content=[], structured_content={"person": "Brandon"}) meta = func_metadata(func_returning_annotated_tool_call_result) @@ -881,8 +878,6 @@ def func_returning_annotated_tool_call_result() -> Annotated[CallToolResult, Per def test_tool_call_result_in_optional_is_rejected(): """Test that Optional[CallToolResult] raises InvalidSignature""" - from mcp.server.fastmcp.exceptions import InvalidSignature - def func_optional_call_tool_result() -> CallToolResult | None: # pragma: no cover return CallToolResult(content=[]) @@ -896,8 +891,6 @@ def func_optional_call_tool_result() -> CallToolResult | None: # pragma: no cov def test_tool_call_result_in_union_is_rejected(): """Test that Union[str, CallToolResult] raises InvalidSignature""" - from mcp.server.fastmcp.exceptions import InvalidSignature - def func_union_call_tool_result() -> str | CallToolResult: # pragma: no cover return CallToolResult(content=[]) @@ -910,7 +903,6 @@ def func_union_call_tool_result() -> str | CallToolResult: # pragma: no cover def test_tool_call_result_in_pipe_union_is_rejected(): """Test that str | CallToolResult raises InvalidSignature""" - from mcp.server.fastmcp.exceptions import InvalidSignature def func_pipe_union_call_tool_result() -> str | CallToolResult: # pragma: no cover return CallToolResult(content=[]) @@ -985,9 +977,6 @@ def func_nested() -> PersonWithAddress: # pragma: no cover def test_structured_output_unserializable_type_error(): """Test error when structured_output=True is used with unserializable types""" - from typing import NamedTuple - - from mcp.server.fastmcp.exceptions import InvalidSignature # Test with a class that has non-serializable default values class ConfigWithCallable: @@ -1185,8 +1174,6 @@ def func_with_reserved_json( # pragma: no cover def test_disallowed_type_qualifier(): - from mcp.server.fastmcp.exceptions import InvalidSignature - def func_disallowed_qualifier() -> Final[int]: # type: ignore pass # pragma: no cover diff --git a/tests/server/fastmcp/test_integration.py b/tests/server/fastmcp/test_integration.py index 70948bd7e2..5f7caf7aca 100644 --- a/tests/server/fastmcp/test_integration.py +++ b/tests/server/fastmcp/test_integration.py @@ -1,5 +1,4 @@ -""" -Integration tests for FastMCP server functionality. +"""Integration tests for FastMCP server functionality. These tests validate the proper functioning of FastMCP features using focused, single-feature servers across different transports (SSE and StreamableHTTP). 
@@ -51,8 +50,10 @@ NotificationParams, ProgressNotification, ProgressNotificationParams, + PromptReference, ReadResourceResult, ResourceListChangedNotification, + ResourceTemplateReference, ServerNotification, ServerRequest, TextContent, @@ -76,14 +77,14 @@ async def handle_generic_notification( ) -> None: """Handle any server notification and route to appropriate handler.""" if isinstance(message, ServerNotification): # pragma: no branch - if isinstance(message.root, ProgressNotification): - self.progress_notifications.append(message.root.params) - elif isinstance(message.root, LoggingMessageNotification): - self.log_messages.append(message.root.params) - elif isinstance(message.root, ResourceListChangedNotification): - self.resource_notifications.append(message.root.params) - elif isinstance(message.root, ToolListChangedNotification): # pragma: no cover - self.tool_notifications.append(message.root.params) + if isinstance(message, ProgressNotification): + self.progress_notifications.append(message.params) + elif isinstance(message, LoggingMessageNotification): + self.log_messages.append(message.params) + elif isinstance(message, ResourceListChangedNotification): + self.resource_notifications.append(message.params) + elif isinstance(message, ToolListChangedNotification): # pragma: no cover + self.tool_notifications.append(message.params) # Common fixtures @@ -258,7 +259,7 @@ async def test_basic_tools(server_transport: str, server_url: str) -> None: # Test initialization result = await session.initialize() assert isinstance(result, InitializeResult) - assert result.serverInfo.name == "Tool Example" + assert result.server_info.name == "Tool Example" assert result.capabilities.tools is not None # Test sum tool @@ -295,7 +296,7 @@ async def test_basic_resources(server_transport: str, server_url: str) -> None: # Test initialization result = await session.initialize() assert isinstance(result, InitializeResult) - assert result.serverInfo.name == "Resource Example" + assert result.server_info.name == "Resource Example" assert result.capabilities.resources is not None # Test document resource @@ -336,7 +337,7 @@ async def test_basic_prompts(server_transport: str, server_url: str) -> None: # Test initialization result = await session.initialize() assert isinstance(result, InitializeResult) - assert result.serverInfo.name == "Prompt Example" + assert result.server_info.name == "Prompt Example" assert result.capabilities.prompts is not None # Test review_code prompt @@ -396,7 +397,7 @@ async def message_handler(message: RequestResponder[ServerRequest, ClientResult] # Test initialization result = await session.initialize() assert isinstance(result, InitializeResult) - assert result.serverInfo.name == "Progress Example" + assert result.server_info.name == "Progress Example" # Test progress callback progress_updates = [] @@ -449,7 +450,7 @@ async def test_sampling(server_transport: str, server_url: str) -> None: # Test initialization result = await session.initialize() assert isinstance(result, InitializeResult) - assert result.serverInfo.name == "Sampling Example" + assert result.server_info.name == "Sampling Example" assert result.capabilities.tools is not None # Test sampling tool @@ -480,7 +481,7 @@ async def test_elicitation(server_transport: str, server_url: str) -> None: # Test initialization result = await session.initialize() assert isinstance(result, InitializeResult) - assert result.serverInfo.name == "Elicitation Example" + assert result.server_info.name == "Elicitation Example" # Test 
booking with unavailable date (triggers elicitation) booking_result = await session.call_tool( @@ -537,7 +538,7 @@ async def message_handler(message: RequestResponder[ServerRequest, ClientResult] # Test initialization result = await session.initialize() assert isinstance(result, InitializeResult) - assert result.serverInfo.name == "Notifications Example" + assert result.server_info.name == "Notifications Example" # Call tool that generates notifications tool_result = await session.call_tool("process_data", {"data": "test_data"}) @@ -578,13 +579,11 @@ async def test_completion(server_transport: str, server_url: str) -> None: # Test initialization result = await session.initialize() assert isinstance(result, InitializeResult) - assert result.serverInfo.name == "Example" + assert result.server_info.name == "Example" assert result.capabilities.resources is not None assert result.capabilities.prompts is not None # Test resource completion - from mcp.types import ResourceTemplateReference - completion_result = await session.complete( ref=ResourceTemplateReference(type="ref/resource", uri="github://repos/{owner}/{repo}"), argument={"name": "repo", "value": ""}, @@ -600,8 +599,6 @@ async def test_completion(server_transport: str, server_url: str) -> None: assert "specification" in completion_result.completion.values # Test prompt completion - from mcp.types import PromptReference - completion_result = await session.complete( ref=PromptReference(type="ref/prompt", name="review_code"), argument={"name": "language", "value": "py"}, @@ -635,7 +632,7 @@ async def test_fastmcp_quickstart(server_transport: str, server_url: str) -> Non # Test initialization result = await session.initialize() assert isinstance(result, InitializeResult) - assert result.serverInfo.name == "Demo" + assert result.server_info.name == "Demo" # Test add tool tool_result = await session.call_tool("add", {"a": 10, "b": 20}) @@ -644,8 +641,6 @@ async def test_fastmcp_quickstart(server_transport: str, server_url: str) -> Non assert tool_result.content[0].text == "30" # Test greeting resource directly - from pydantic import AnyUrl - resource_result = await session.read_resource(AnyUrl("greeting://Alice")) assert len(resource_result.contents) == 1 assert isinstance(resource_result.contents[0], TextResourceContents) @@ -673,7 +668,7 @@ async def test_structured_output(server_transport: str, server_url: str) -> None # Test initialization result = await session.initialize() assert isinstance(result, InitializeResult) - assert result.serverInfo.name == "Structured Output Example" + assert result.server_info.name == "Structured Output Example" # Test get_weather tool weather_result = await session.call_tool("get_weather", {"city": "New York"}) diff --git a/tests/server/fastmcp/test_parameter_descriptions.py b/tests/server/fastmcp/test_parameter_descriptions.py index 9f2386894c..340ca71603 100644 --- a/tests/server/fastmcp/test_parameter_descriptions.py +++ b/tests/server/fastmcp/test_parameter_descriptions.py @@ -23,7 +23,7 @@ def greet( tool = tools[0] # Check that parameter descriptions are present in the schema - properties = tool.inputSchema["properties"] + properties = tool.input_schema["properties"] assert "name" in properties assert properties["name"]["description"] == "The name to greet" assert "title" in properties diff --git a/tests/server/fastmcp/test_server.py b/tests/server/fastmcp/test_server.py index 3935f3bd13..6d1cee58ef 100644 --- a/tests/server/fastmcp/test_server.py +++ b/tests/server/fastmcp/test_server.py @@ -1,123 
+1,72 @@ import base64 from pathlib import Path -from typing import TYPE_CHECKING, Any +from typing import Any from unittest.mock import patch import pytest -from pydantic import AnyUrl, BaseModel +from pydantic import BaseModel +from starlette.applications import Starlette from starlette.routing import Mount, Route +from mcp.client import Client from mcp.server.fastmcp import Context, FastMCP +from mcp.server.fastmcp.exceptions import ToolError from mcp.server.fastmcp.prompts.base import Message, UserMessage from mcp.server.fastmcp.resources import FileResource, FunctionResource from mcp.server.fastmcp.utilities.types import Audio, Image from mcp.server.session import ServerSession from mcp.server.transport_security import TransportSecuritySettings from mcp.shared.exceptions import McpError -from mcp.shared.memory import ( - create_connected_server_and_client_session as client_session, -) from mcp.types import ( AudioContent, BlobResourceContents, ContentBlock, EmbeddedResource, + Icon, ImageContent, TextContent, TextResourceContents, ) -if TYPE_CHECKING: - from mcp.server.fastmcp import Context - class TestServer: @pytest.mark.anyio async def test_create_server(self): - mcp = FastMCP(instructions="Server instructions") + mcp = FastMCP( + title="FastMCP Server", + description="Server description", + instructions="Server instructions", + website_url="https://example.com/mcp_server", + version="1.0", + icons=[Icon(src="https://example.com/icon.png", mime_type="image/png", sizes=["48x48", "96x96"])], + ) assert mcp.name == "FastMCP" + assert mcp.title == "FastMCP Server" + assert mcp.description == "Server description" assert mcp.instructions == "Server instructions" + assert mcp.website_url == "https://example.com/mcp_server" + assert mcp.version == "1.0" + assert isinstance(mcp.icons, list) + assert len(mcp.icons) == 1 + assert mcp.icons[0].src == "https://example.com/icon.png" @pytest.mark.anyio - async def test_normalize_path(self): - """Test path normalization for mount paths.""" - mcp = FastMCP() - - # Test root path - assert mcp._normalize_path("/", "/messages/") == "/messages/" - - # Test path with trailing slash - assert mcp._normalize_path("/github/", "/messages/") == "/github/messages/" - - # Test path without trailing slash - assert mcp._normalize_path("/github", "/messages/") == "/github/messages/" + async def test_sse_app_returns_starlette_app(self): + """Test that sse_app returns a Starlette application with correct routes.""" + mcp = FastMCP("test") + # Use host="0.0.0.0" to avoid auto DNS protection + app = mcp.sse_app(host="0.0.0.0") - # Test endpoint without leading slash - assert mcp._normalize_path("/github", "messages/") == "/github/messages/" - - # Test both with trailing/leading slashes - assert mcp._normalize_path("/api/", "/v1/") == "/api/v1/" - - @pytest.mark.anyio - async def test_sse_app_with_mount_path(self): - """Test SSE app creation with different mount paths.""" - # Test with default mount path - mcp = FastMCP() - with patch.object(mcp, "_normalize_path", return_value="/messages/") as mock_normalize: - mcp.sse_app() - # Verify _normalize_path was called with correct args - mock_normalize.assert_called_once_with("/", "/messages/") - - # Test with custom mount path in settings - mcp = FastMCP() - mcp.settings.mount_path = "/custom" - with patch.object(mcp, "_normalize_path", return_value="/custom/messages/") as mock_normalize: - mcp.sse_app() - # Verify _normalize_path was called with correct args - mock_normalize.assert_called_once_with("/custom", 
"/messages/") - - # Test with mount_path parameter - mcp = FastMCP() - with patch.object(mcp, "_normalize_path", return_value="/param/messages/") as mock_normalize: - mcp.sse_app(mount_path="/param") - # Verify _normalize_path was called with correct args - mock_normalize.assert_called_once_with("/param", "/messages/") - - @pytest.mark.anyio - async def test_starlette_routes_with_mount_path(self): - """Test that Starlette routes are correctly configured with mount path.""" - # Test with mount path in settings - mcp = FastMCP() - mcp.settings.mount_path = "/api" - app = mcp.sse_app() - - # Find routes by type - sse_routes = [r for r in app.routes if isinstance(r, Route)] - mount_routes = [r for r in app.routes if isinstance(r, Mount)] + assert isinstance(app, Starlette) # Verify routes exist - assert len(sse_routes) == 1, "Should have one SSE route" - assert len(mount_routes) == 1, "Should have one mount route" - - # Verify path values - assert sse_routes[0].path == "/sse", "SSE route path should be /sse" - assert mount_routes[0].path == "/messages", "Mount route path should be /messages" - - # Test with mount path as parameter - mcp = FastMCP() - app = mcp.sse_app(mount_path="/param") - - # Find routes by type sse_routes = [r for r in app.routes if isinstance(r, Route)] mount_routes = [r for r in app.routes if isinstance(r, Mount)] - # Verify routes exist assert len(sse_routes) == 1, "Should have one SSE route" assert len(mount_routes) == 1, "Should have one mount route" - - # Verify path values - assert sse_routes[0].path == "/sse", "SSE route path should be /sse" - assert mount_routes[0].path == "/messages", "Mount route path should be /messages" + assert sse_routes[0].path == "/sse" + assert mount_routes[0].path == "/messages" @pytest.mark.anyio async def test_non_ascii_description(self): @@ -128,7 +77,7 @@ async def test_non_ascii_description(self): def hello_world(name: str = "世界") -> str: return f"¡Hola, {name}! 
👋" - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: tools = await client.list_tools() assert len(tools.tools) == 1 tool = tools.tools[0] @@ -185,49 +134,64 @@ def get_data(x: str) -> str: # pragma: no cover class TestDnsRebindingProtection: - """Tests for automatic DNS rebinding protection on localhost.""" - - def test_auto_enabled_for_127_0_0_1(self): - """DNS rebinding protection should auto-enable for host=127.0.0.1.""" - mcp = FastMCP(host="127.0.0.1") - assert mcp.settings.transport_security is not None - assert mcp.settings.transport_security.enable_dns_rebinding_protection is True - assert "127.0.0.1:*" in mcp.settings.transport_security.allowed_hosts - assert "localhost:*" in mcp.settings.transport_security.allowed_hosts - assert "http://127.0.0.1:*" in mcp.settings.transport_security.allowed_origins - assert "http://localhost:*" in mcp.settings.transport_security.allowed_origins - - def test_auto_enabled_for_localhost(self): - """DNS rebinding protection should auto-enable for host=localhost.""" - mcp = FastMCP(host="localhost") - assert mcp.settings.transport_security is not None - assert mcp.settings.transport_security.enable_dns_rebinding_protection is True - assert "127.0.0.1:*" in mcp.settings.transport_security.allowed_hosts - assert "localhost:*" in mcp.settings.transport_security.allowed_hosts - - def test_auto_enabled_for_ipv6_localhost(self): - """DNS rebinding protection should auto-enable for host=::1 (IPv6 localhost).""" - mcp = FastMCP(host="::1") - assert mcp.settings.transport_security is not None - assert mcp.settings.transport_security.enable_dns_rebinding_protection is True - assert "[::1]:*" in mcp.settings.transport_security.allowed_hosts - assert "http://[::1]:*" in mcp.settings.transport_security.allowed_origins - - def test_not_auto_enabled_for_other_hosts(self): - """DNS rebinding protection should NOT auto-enable for other hosts.""" - mcp = FastMCP(host="0.0.0.0") - assert mcp.settings.transport_security is None - - def test_explicit_settings_not_overridden(self): - """Explicit transport_security settings should not be overridden.""" + """Tests for automatic DNS rebinding protection on localhost. + + DNS rebinding protection is now configured in sse_app() and streamable_http_app() + based on the host parameter passed to those methods. 
+ """ + + def test_auto_enabled_for_127_0_0_1_sse(self): + """DNS rebinding protection should auto-enable for host=127.0.0.1 in SSE app.""" + mcp = FastMCP() + # Call sse_app with host=127.0.0.1 to trigger auto-config + # We can't directly inspect the transport_security, but we can verify + # the app is created without error + app = mcp.sse_app(host="127.0.0.1") + assert app is not None + + def test_auto_enabled_for_127_0_0_1_streamable_http(self): + """DNS rebinding protection should auto-enable for host=127.0.0.1 in StreamableHTTP app.""" + mcp = FastMCP() + app = mcp.streamable_http_app(host="127.0.0.1") + assert app is not None + + def test_auto_enabled_for_localhost_sse(self): + """DNS rebinding protection should auto-enable for host=localhost in SSE app.""" + mcp = FastMCP() + app = mcp.sse_app(host="localhost") + assert app is not None + + def test_auto_enabled_for_ipv6_localhost_sse(self): + """DNS rebinding protection should auto-enable for host=::1 (IPv6 localhost) in SSE app.""" + mcp = FastMCP() + app = mcp.sse_app(host="::1") + assert app is not None + + def test_not_auto_enabled_for_other_hosts_sse(self): + """DNS rebinding protection should NOT auto-enable for other hosts in SSE app.""" + mcp = FastMCP() + app = mcp.sse_app(host="0.0.0.0") + assert app is not None + + def test_explicit_settings_not_overridden_sse(self): + """Explicit transport_security settings should not be overridden in SSE app.""" + custom_settings = TransportSecuritySettings( + enable_dns_rebinding_protection=False, + ) + mcp = FastMCP() + # Explicit transport_security passed to sse_app should be used as-is + app = mcp.sse_app(host="127.0.0.1", transport_security=custom_settings) + assert app is not None + + def test_explicit_settings_not_overridden_streamable_http(self): + """Explicit transport_security settings should not be overridden in StreamableHTTP app.""" custom_settings = TransportSecuritySettings( enable_dns_rebinding_protection=False, ) - mcp = FastMCP(host="127.0.0.1", transport_security=custom_settings) - # Settings are copied by pydantic, so check values not identity - assert mcp.settings.transport_security is not None - assert mcp.settings.transport_security.enable_dns_rebinding_protection is False - assert mcp.settings.transport_security.allowed_hosts == [] + mcp = FastMCP() + # Explicit transport_security passed to streamable_http_app should be used as-is + app = mcp.streamable_http_app(host="127.0.0.1", transport_security=custom_settings) + assert app is not None def tool_fn(x: int, y: int) -> int: @@ -249,8 +213,8 @@ def audio_tool_fn(path: str) -> Audio: def mixed_content_tool_fn() -> list[ContentBlock]: return [ TextContent(type="text", text="Hello"), - ImageContent(type="image", data="abc", mimeType="image/png"), - AudioContent(type="audio", data="def", mimeType="audio/wav"), + ImageContent(type="image", data="abc", mime_type="image/png"), + AudioContent(type="audio", data="def", mime_type="audio/wav"), ] @@ -266,7 +230,7 @@ async def test_add_tool(self): async def test_list_tools(self): mcp = FastMCP() mcp.add_tool(tool_fn) - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: tools = await client.list_tools() assert len(tools.tools) == 1 @@ -274,7 +238,7 @@ async def test_list_tools(self): async def test_call_tool(self): mcp = FastMCP() mcp.add_tool(tool_fn) - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: result = await client.call_tool("my_tool", {"arg1": "value"}) assert not hasattr(result, 
"error") assert len(result.content) > 0 @@ -283,52 +247,52 @@ async def test_call_tool(self): async def test_tool_exception_handling(self): mcp = FastMCP() mcp.add_tool(error_tool_fn) - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: result = await client.call_tool("error_tool_fn", {}) assert len(result.content) == 1 content = result.content[0] assert isinstance(content, TextContent) assert "Test error" in content.text - assert result.isError is True + assert result.is_error is True @pytest.mark.anyio async def test_tool_error_handling(self): mcp = FastMCP() mcp.add_tool(error_tool_fn) - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: result = await client.call_tool("error_tool_fn", {}) assert len(result.content) == 1 content = result.content[0] assert isinstance(content, TextContent) assert "Test error" in content.text - assert result.isError is True + assert result.is_error is True @pytest.mark.anyio async def test_tool_error_details(self): """Test that exception details are properly formatted in the response""" mcp = FastMCP() mcp.add_tool(error_tool_fn) - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: result = await client.call_tool("error_tool_fn", {}) content = result.content[0] assert isinstance(content, TextContent) assert isinstance(content.text, str) assert "Test error" in content.text - assert result.isError is True + assert result.is_error is True @pytest.mark.anyio async def test_tool_return_value_conversion(self): mcp = FastMCP() mcp.add_tool(tool_fn) - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: result = await client.call_tool("tool_fn", {"x": 1, "y": 2}) assert len(result.content) == 1 content = result.content[0] assert isinstance(content, TextContent) assert content.text == "3" # Check structured content - int return type should have structured output - assert result.structuredContent is not None - assert result.structuredContent == {"result": 3} + assert result.structured_content is not None + assert result.structured_content == {"result": 3} @pytest.mark.anyio async def test_tool_image_helper(self, tmp_path: Path): @@ -338,18 +302,18 @@ async def test_tool_image_helper(self, tmp_path: Path): mcp = FastMCP() mcp.add_tool(image_tool_fn) - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: result = await client.call_tool("image_tool_fn", {"path": str(image_path)}) assert len(result.content) == 1 content = result.content[0] assert isinstance(content, ImageContent) assert content.type == "image" - assert content.mimeType == "image/png" + assert content.mime_type == "image/png" # Verify base64 encoding decoded = base64.b64decode(content.data) assert decoded == b"fake png data" # Check structured content - Image return type should NOT have structured output - assert result.structuredContent is None + assert result.structured_content is None @pytest.mark.anyio async def test_tool_audio_helper(self, tmp_path: Path): @@ -359,18 +323,18 @@ async def test_tool_audio_helper(self, tmp_path: Path): mcp = FastMCP() mcp.add_tool(audio_tool_fn) - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: result = await client.call_tool("audio_tool_fn", {"path": str(audio_path)}) assert len(result.content) == 1 content = result.content[0] assert isinstance(content, AudioContent) assert content.type == "audio" - assert content.mimeType == "audio/wav" + 
assert content.mime_type == "audio/wav" # Verify base64 encoding decoded = base64.b64decode(content.data) assert decoded == b"fake wav data" # Check structured content - Image return type should NOT have structured output - assert result.structuredContent is None + assert result.structured_content is None @pytest.mark.parametrize( "filename,expected_mime_type", @@ -394,13 +358,13 @@ async def test_tool_audio_suffix_detection(self, tmp_path: Path, filename: str, audio_path = tmp_path / filename audio_path.write_bytes(b"fake audio data") - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: result = await client.call_tool("audio_tool_fn", {"path": str(audio_path)}) assert len(result.content) == 1 content = result.content[0] assert isinstance(content, AudioContent) assert content.type == "audio" - assert content.mimeType == expected_mime_type + assert content.mime_type == expected_mime_type # Verify base64 encoding decoded = base64.b64decode(content.data) assert decoded == b"fake audio data" @@ -409,21 +373,21 @@ async def test_tool_audio_suffix_detection(self, tmp_path: Path, filename: str, async def test_tool_mixed_content(self): mcp = FastMCP() mcp.add_tool(mixed_content_tool_fn) - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: result = await client.call_tool("mixed_content_tool_fn", {}) assert len(result.content) == 3 content1, content2, content3 = result.content assert isinstance(content1, TextContent) assert content1.text == "Hello" assert isinstance(content2, ImageContent) - assert content2.mimeType == "image/png" + assert content2.mime_type == "image/png" assert content2.data == "abc" assert isinstance(content3, AudioContent) - assert content3.mimeType == "audio/wav" + assert content3.mime_type == "audio/wav" assert content3.data == "def" - assert result.structuredContent is not None - assert "result" in result.structuredContent - structured_result = result.structuredContent["result"] + assert result.structured_content is not None + assert "result" in result.structured_content + structured_result = result.structured_content["result"] assert len(structured_result) == 3 expected_content = [ @@ -461,7 +425,7 @@ def mixed_list_fn() -> list: # type: ignore mcp = FastMCP() mcp.add_tool(mixed_list_fn) # type: ignore - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: result = await client.call_tool("mixed_list_fn", {}) assert len(result.content) == 5 # Check text conversion @@ -471,12 +435,12 @@ def mixed_list_fn() -> list: # type: ignore # Check image conversion content2 = result.content[1] assert isinstance(content2, ImageContent) - assert content2.mimeType == "image/png" + assert content2.mime_type == "image/png" assert base64.b64decode(content2.data) == b"test image data" # Check audio conversion content3 = result.content[2] assert isinstance(content3, AudioContent) - assert content3.mimeType == "audio/wav" + assert content3.mime_type == "audio/wav" assert base64.b64decode(content3.data) == b"test audio data" # Check dict conversion content4 = result.content[3] @@ -487,7 +451,7 @@ def mixed_list_fn() -> list: # type: ignore assert isinstance(content5, TextContent) assert content5.text == "direct content" # Check structured content - untyped list with Image objects should NOT have structured output - assert result.structuredContent is None + assert result.structured_content is None @pytest.mark.anyio async def test_tool_structured_output_basemodel(self): @@ -505,20 
+469,20 @@ def get_user(user_id: int) -> UserOutput: mcp = FastMCP() mcp.add_tool(get_user) - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: # Check that the tool has outputSchema tools = await client.list_tools() tool = next(t for t in tools.tools if t.name == "get_user") - assert tool.outputSchema is not None - assert tool.outputSchema["type"] == "object" - assert "name" in tool.outputSchema["properties"] - assert "age" in tool.outputSchema["properties"] + assert tool.output_schema is not None + assert tool.output_schema["type"] == "object" + assert "name" in tool.output_schema["properties"] + assert "age" in tool.output_schema["properties"] # Call the tool and check structured output result = await client.call_tool("get_user", {"user_id": 123}) - assert result.isError is False - assert result.structuredContent is not None - assert result.structuredContent == {"name": "John Doe", "age": 30, "active": True} + assert result.is_error is False + assert result.structured_content is not None + assert result.structured_content == {"name": "John Doe", "age": 30, "active": True} # Content should be JSON serialized version assert len(result.content) == 1 assert isinstance(result.content[0], TextContent) @@ -535,21 +499,21 @@ def calculate_sum(a: int, b: int) -> int: mcp = FastMCP() mcp.add_tool(calculate_sum) - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: # Check that the tool has outputSchema tools = await client.list_tools() tool = next(t for t in tools.tools if t.name == "calculate_sum") - assert tool.outputSchema is not None + assert tool.output_schema is not None # Primitive types are wrapped - assert tool.outputSchema["type"] == "object" - assert "result" in tool.outputSchema["properties"] - assert tool.outputSchema["properties"]["result"]["type"] == "integer" + assert tool.output_schema["type"] == "object" + assert "result" in tool.output_schema["properties"] + assert tool.output_schema["properties"]["result"]["type"] == "integer" # Call the tool result = await client.call_tool("calculate_sum", {"a": 5, "b": 7}) - assert result.isError is False - assert result.structuredContent is not None - assert result.structuredContent == {"result": 12} + assert result.is_error is False + assert result.structured_content is not None + assert result.structured_content == {"result": 12} @pytest.mark.anyio async def test_tool_structured_output_list(self): @@ -562,11 +526,11 @@ def get_numbers() -> list[int]: mcp = FastMCP() mcp.add_tool(get_numbers) - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: result = await client.call_tool("get_numbers", {}) - assert result.isError is False - assert result.structuredContent is not None - assert result.structuredContent == {"result": [1, 2, 3, 4, 5]} + assert result.is_error is False + assert result.structured_content is not None + assert result.structured_content == {"result": [1, 2, 3, 4, 5]} @pytest.mark.anyio async def test_tool_structured_output_server_side_validation_error(self): @@ -578,10 +542,10 @@ def get_numbers() -> list[int]: mcp = FastMCP() mcp.add_tool(get_numbers) - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: result = await client.call_tool("get_numbers", {}) - assert result.isError is True - assert result.structuredContent is None + assert result.is_error is True + assert result.structured_content is None assert len(result.content) == 1 assert isinstance(result.content[0], 
TextContent) @@ -602,21 +566,22 @@ def get_metadata() -> dict[str, Any]: mcp = FastMCP() mcp.add_tool(get_metadata) - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: # Check schema tools = await client.list_tools() tool = next(t for t in tools.tools if t.name == "get_metadata") - assert tool.outputSchema is not None - assert tool.outputSchema["type"] == "object" + assert tool.output_schema is not None + assert tool.output_schema["type"] == "object" # dict[str, Any] should have minimal schema assert ( - "additionalProperties" not in tool.outputSchema or tool.outputSchema.get("additionalProperties") is True + "additionalProperties" not in tool.output_schema + or tool.output_schema.get("additionalProperties") is True ) # Call tool result = await client.call_tool("get_metadata", {}) - assert result.isError is False - assert result.structuredContent is not None + assert result.is_error is False + assert result.structured_content is not None expected = { "version": "1.0.0", "enabled": True, @@ -624,7 +589,7 @@ def get_metadata() -> dict[str, Any]: "tags": ["production", "stable"], "config": {"nested": {"value": 123}}, } - assert result.structuredContent == expected + assert result.structured_content == expected @pytest.mark.anyio async def test_tool_structured_output_dict_str_typed(self): @@ -637,18 +602,18 @@ def get_settings() -> dict[str, str]: mcp = FastMCP() mcp.add_tool(get_settings) - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: # Check schema tools = await client.list_tools() tool = next(t for t in tools.tools if t.name == "get_settings") - assert tool.outputSchema is not None - assert tool.outputSchema["type"] == "object" - assert tool.outputSchema["additionalProperties"]["type"] == "string" + assert tool.output_schema is not None + assert tool.output_schema["type"] == "object" + assert tool.output_schema["additionalProperties"]["type"] == "string" # Call tool result = await client.call_tool("get_settings", {}) - assert result.isError is False - assert result.structuredContent == {"theme": "dark", "language": "en", "timezone": "UTC"} + assert result.is_error is False + assert result.structured_content == {"theme": "dark", "language": "en", "timezone": "UTC"} @pytest.mark.anyio async def test_remove_tool(self): @@ -668,8 +633,6 @@ async def test_remove_tool(self): @pytest.mark.anyio async def test_remove_nonexistent_tool(self): """Test that removing a non-existent tool raises ToolError.""" - from mcp.server.fastmcp.exceptions import ToolError - mcp = FastMCP() with pytest.raises(ToolError, match="Unknown tool: nonexistent"): @@ -683,7 +646,7 @@ async def test_remove_tool_and_list(self): mcp.add_tool(error_tool_fn) # Verify both tools exist - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: tools = await client.list_tools() assert len(tools.tools) == 2 tool_names = [t.name for t in tools.tools] @@ -694,7 +657,7 @@ async def test_remove_tool_and_list(self): mcp.remove_tool("tool_fn") # Verify only one tool remains - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: tools = await client.list_tools() assert len(tools.tools) == 1 assert tools.tools[0].name == "error_tool_fn" @@ -706,9 +669,9 @@ async def test_remove_tool_and_call(self): mcp.add_tool(tool_fn) # Verify tool works before removal - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: result = await client.call_tool("tool_fn", 
{"x": 1, "y": 2}) - assert not result.isError + assert not result.is_error content = result.content[0] assert isinstance(content, TextContent) assert content.text == "3" @@ -717,9 +680,9 @@ async def test_remove_tool_and_call(self): mcp.remove_tool("tool_fn") # Verify calling removed tool returns an error - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: result = await client.call_tool("tool_fn", {"x": 1, "y": 2}) - assert result.isError + assert result.is_error content = result.content[0] assert isinstance(content, TextContent) assert "Unknown tool" in content.text @@ -733,11 +696,15 @@ async def test_text_resource(self): def get_text(): return "Hello, world!" - resource = FunctionResource(uri=AnyUrl("resource://test"), name="test", fn=get_text) + resource = FunctionResource(uri="resource://test", name="test", fn=get_text) mcp.add_resource(resource) - async with client_session(mcp._mcp_server) as client: - result = await client.read_resource(AnyUrl("resource://test")) + async with Client(mcp) as client: + result = await client.read_resource("resource://test") + + async with Client(mcp) as client: + result = await client.read_resource("resource://test") + assert isinstance(result.contents[0], TextResourceContents) assert result.contents[0].text == "Hello, world!" @@ -749,15 +716,19 @@ def get_binary(): return b"Binary data" resource = FunctionResource( - uri=AnyUrl("resource://binary"), + uri="resource://binary", name="binary", fn=get_binary, mime_type="application/octet-stream", ) mcp.add_resource(resource) - async with client_session(mcp._mcp_server) as client: - result = await client.read_resource(AnyUrl("resource://binary")) + async with Client(mcp) as client: + result = await client.read_resource("resource://binary") + + async with Client(mcp) as client: + result = await client.read_resource("resource://binary") + assert isinstance(result.contents[0], BlobResourceContents) assert result.contents[0].blob == base64.b64encode(b"Binary data").decode() @@ -769,11 +740,15 @@ async def test_file_resource_text(self, tmp_path: Path): text_file = tmp_path / "test.txt" text_file.write_text("Hello from file!") - resource = FileResource(uri=AnyUrl("file://test.txt"), name="test.txt", path=text_file) + resource = FileResource(uri="file://test.txt", name="test.txt", path=text_file) mcp.add_resource(resource) - async with client_session(mcp._mcp_server) as client: - result = await client.read_resource(AnyUrl("file://test.txt")) + async with Client(mcp) as client: + result = await client.read_resource("file://test.txt") + + async with Client(mcp) as client: + result = await client.read_resource("file://test.txt") + assert isinstance(result.contents[0], TextResourceContents) assert result.contents[0].text == "Hello from file!" 
@@ -786,15 +761,19 @@ async def test_file_resource_binary(self, tmp_path: Path): binary_file.write_bytes(b"Binary file data") resource = FileResource( - uri=AnyUrl("file://test.bin"), + uri="file://test.bin", name="test.bin", path=binary_file, mime_type="application/octet-stream", ) mcp.add_resource(resource) - async with client_session(mcp._mcp_server) as client: - result = await client.read_resource(AnyUrl("file://test.bin")) + async with Client(mcp) as client: + result = await client.read_resource("file://test.bin") + + async with Client(mcp) as client: + result = await client.read_resource("file://test.bin") + assert isinstance(result.contents[0], BlobResourceContents) assert result.contents[0].blob == base64.b64encode(b"Binary file data").decode() @@ -807,14 +786,14 @@ def get_data() -> str: # pragma: no cover """get_data returns a string""" return "Hello, world!" - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: resources = await client.list_resources() assert len(resources.resources) == 1 resource = resources.resources[0] assert resource.description == "get_data returns a string" - assert resource.uri == AnyUrl("function://test") + assert resource.uri == "function://test" assert resource.name == "test_get_data" - assert resource.mimeType == "text/plain" + assert resource.mime_type == "text/plain" class TestServerResourceTemplates: @@ -859,8 +838,12 @@ async def test_resource_matching_params(self): def get_data(name: str) -> str: return f"Data for {name}" - async with client_session(mcp._mcp_server) as client: - result = await client.read_resource(AnyUrl("resource://test/data")) + async with Client(mcp) as client: + result = await client.read_resource("resource://test/data") + + async with Client(mcp) as client: + result = await client.read_resource("resource://test/data") + assert isinstance(result.contents[0], TextResourceContents) assert result.contents[0].text == "Data for test" @@ -884,8 +867,12 @@ async def test_resource_multiple_params(self): def get_data(org: str, repo: str) -> str: return f"Data for {org}/{repo}" - async with client_session(mcp._mcp_server) as client: - result = await client.read_resource(AnyUrl("resource://cursor/fastmcp/data")) + async with Client(mcp) as client: + result = await client.read_resource("resource://cursor/fastmcp/data") + + async with Client(mcp) as client: + result = await client.read_resource("resource://cursor/fastmcp/data") + assert isinstance(result.contents[0], TextResourceContents) assert result.contents[0].text == "Data for cursor/fastmcp" @@ -907,8 +894,12 @@ def get_data_mismatched(org: str, repo_2: str) -> str: # pragma: no cover def get_static_data() -> str: return "Static data" - async with client_session(mcp._mcp_server) as client: - result = await client.read_resource(AnyUrl("resource://static")) + async with Client(mcp) as client: + result = await client.read_resource("resource://static") + + async with Client(mcp) as client: + result = await client.read_resource("resource://static") + assert isinstance(result.contents[0], TextResourceContents) assert result.contents[0].text == "Static data" @@ -944,15 +935,90 @@ def get_csv(user: str) -> str: assert len(templates) == 1 template = templates[0] - assert hasattr(template, "mimeType") - assert template.mimeType == "text/csv" + assert hasattr(template, "mime_type") + assert template.mime_type == "text/csv" + + async with Client(mcp) as client: + result = await client.read_resource("resource://bob/csv") + + async with Client(mcp) as client: 
+ result = await client.read_resource("resource://bob/csv") - async with client_session(mcp._mcp_server) as client: - result = await client.read_resource(AnyUrl("resource://bob/csv")) assert isinstance(result.contents[0], TextResourceContents) assert result.contents[0].text == "csv for bob" +class TestServerResourceMetadata: + """Test FastMCP @resource decorator meta parameter for list operations. + + Meta flows: @resource decorator -> resource/template storage -> list_resources/list_resource_templates. + Note: read_resource does NOT pass meta to protocol response (lowlevel/server.py only extracts content/mime_type). + """ + + @pytest.mark.anyio + async def test_resource_decorator_with_metadata(self): + """Test that @resource decorator accepts and passes meta parameter.""" + # Tests static resource flow: decorator -> FunctionResource -> list_resources (server.py:544,635,361) + mcp = FastMCP() + + metadata = {"ui": {"component": "file-viewer"}, "priority": "high"} + + @mcp.resource("resource://config", meta=metadata) + def get_config() -> str: # pragma: no cover + return '{"debug": false}' + + resources = await mcp.list_resources() + assert len(resources) == 1 + assert resources[0].meta is not None + assert resources[0].meta == metadata + assert resources[0].meta["ui"]["component"] == "file-viewer" + assert resources[0].meta["priority"] == "high" + + @pytest.mark.anyio + async def test_resource_template_decorator_with_metadata(self): + """Test that @resource decorator passes meta to templates.""" + # Tests template resource flow: decorator -> add_template() -> list_resource_templates (server.py:544,622,377) + mcp = FastMCP() + + metadata = {"api_version": "v2", "deprecated": False} + + @mcp.resource("resource://{city}/weather", meta=metadata) + def get_weather(city: str) -> str: # pragma: no cover + return f"Weather for {city}" + + templates = await mcp.list_resource_templates() + assert len(templates) == 1 + assert templates[0].meta is not None + assert templates[0].meta == metadata + assert templates[0].meta["api_version"] == "v2" + + @pytest.mark.anyio + async def test_read_resource_returns_meta(self): + """Test that read_resource includes meta in response.""" + # Tests end-to-end: Resource.meta -> ReadResourceContents.meta -> protocol _meta (lowlevel/server.py:341,371) + mcp = FastMCP() + + metadata = {"version": "1.0", "category": "config"} + + @mcp.resource("resource://data", meta=metadata) + def get_data() -> str: + return "test data" + + async with Client(mcp) as client: + result = await client.read_resource("resource://data") + + async with Client(mcp) as client: + result = await client.read_resource("resource://data") + + # Verify content and metadata in protocol response + assert isinstance(result.contents[0], TextResourceContents) + assert result.contents[0].text == "test data" + assert result.contents[0].meta is not None + assert result.contents[0].meta == metadata + assert result.contents[0].meta["version"] == "1.0" + assert result.contents[0].meta["category"] == "config" + + class TestContextInjection: """Test context injection in tools, resources, and prompts.""" @@ -977,7 +1043,7 @@ def tool_with_context(x: int, ctx: Context[ServerSession, None]) -> str: return f"Request {ctx.request_id}: {x}" mcp.add_tool(tool_with_context) - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: result = await client.call_tool("tool_with_context", {"x": 42}) assert len(result.content) == 1 content = result.content[0] @@ -995,7 +1061,7 @@ async def 
async_tool(x: int, ctx: Context[ServerSession, None]) -> str: return f"Async request {ctx.request_id}: {x}" mcp.add_tool(async_tool) - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: result = await client.call_tool("async_tool", {"x": 42}) assert len(result.content) == 1 content = result.content[0] @@ -1018,7 +1084,7 @@ async def logging_tool(msg: str, ctx: Context[ServerSession, None]) -> str: mcp.add_tool(logging_tool) with patch("mcp.server.session.ServerSession.send_log_message") as mock_log: - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: result = await client.call_tool("logging_tool", {"msg": "test"}) assert len(result.content) == 1 content = result.content[0] @@ -1060,7 +1126,7 @@ def no_context(x: int) -> int: return x * 2 mcp.add_tool(no_context) - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: result = await client.call_tool("no_context", {"x": 21}) assert len(result.content) == 1 content = result.content[0] @@ -1084,7 +1150,7 @@ async def tool_with_resource(ctx: Context[ServerSession, None]) -> str: r = r_list[0] return f"Read resource: {r.content} with mime type {r.mime_type}" - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: result = await client.call_tool("tool_with_resource", {}) assert len(result.content) == 1 content = result.content[0] @@ -1110,8 +1176,13 @@ def resource_with_context(name: str, ctx: Context[ServerSession, None]) -> str: assert template.context_kwarg == "ctx" # Test via client - async with client_session(mcp._mcp_server) as client: - result = await client.read_resource(AnyUrl("resource://context/test")) + + async with Client(mcp) as client: + result = await client.read_resource("resource://context/test") + + async with Client(mcp) as client: + result = await client.read_resource("resource://context/test") + assert len(result.contents) == 1 content = result.contents[0] assert isinstance(content, TextResourceContents) @@ -1135,8 +1206,13 @@ def resource_no_context(name: str) -> str: assert template.context_kwarg is None # Test via client - async with client_session(mcp._mcp_server) as client: - result = await client.read_resource(AnyUrl("resource://nocontext/test")) + + async with Client(mcp) as client: + result = await client.read_resource("resource://nocontext/test") + + async with Client(mcp) as client: + result = await client.read_resource("resource://nocontext/test") + assert len(result.contents) == 1 content = result.contents[0] assert isinstance(content, TextResourceContents) @@ -1160,8 +1236,13 @@ def resource_custom_ctx(id: str, my_ctx: Context[ServerSession, None]) -> str: assert template.context_kwarg == "my_ctx" # Test via client - async with client_session(mcp._mcp_server) as client: - result = await client.read_resource(AnyUrl("resource://custom/123")) + + async with Client(mcp) as client: + result = await client.read_resource("resource://custom/123") + + async with Client(mcp) as client: + result = await client.read_resource("resource://custom/123") + assert len(result.contents) == 1 content = result.contents[0] assert isinstance(content, TextResourceContents) @@ -1183,7 +1264,7 @@ def prompt_with_context(text: str, ctx: Context[ServerSession, None]) -> str: assert len(prompts) == 1 # Test via client - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: # Try calling without passing ctx explicitly result = await 
client.get_prompt("prompt_with_ctx", {"text": "test"}) # If this succeeds, check if context was injected @@ -1203,7 +1284,7 @@ def prompt_no_context(text: str) -> str: return f"Prompt '{text}' works" # Test via client - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: result = await client.get_prompt("prompt_no_ctx", {"text": "test"}) assert len(result.messages) == 1 message = result.messages[0] @@ -1282,7 +1363,7 @@ async def test_list_prompts(self): def fn(name: str, optional: str = "default") -> str: # pragma: no cover return f"Hello, {name}!" - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: result = await client.list_prompts() assert result.prompts is not None assert len(result.prompts) == 1 @@ -1304,7 +1385,7 @@ async def test_get_prompt(self): def fn(name: str) -> str: return f"Hello, {name}!" - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: result = await client.get_prompt("fn", {"name": "World"}) assert len(result.messages) == 1 message = result.messages[0] @@ -1322,7 +1403,7 @@ async def test_get_prompt_with_description(self): def fn(name: str) -> str: return f"Hello, {name}!" - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: result = await client.get_prompt("fn", {"name": "World"}) assert result.description == "Test prompt description" @@ -1335,7 +1416,7 @@ async def test_get_prompt_without_description(self): def fn(name: str) -> str: return f"Hello, {name}!" - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: result = await client.get_prompt("fn", {"name": "World"}) assert result.description == "" @@ -1349,7 +1430,7 @@ def fn(name: str) -> str: """This is the function docstring.""" return f"Hello, {name}!" - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: result = await client.get_prompt("fn", {"name": "World"}) assert result.description == "This is the function docstring." @@ -1364,14 +1445,14 @@ def fn() -> Message: content=EmbeddedResource( type="resource", resource=TextResourceContents( - uri=AnyUrl("file://file.txt"), + uri="file://file.txt", text="File contents", - mimeType="text/plain", + mime_type="text/plain", ), ) ) - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: result = await client.get_prompt("fn") assert len(result.messages) == 1 message = result.messages[0] @@ -1381,13 +1462,13 @@ def fn() -> Message: resource = content.resource assert isinstance(resource, TextResourceContents) assert resource.text == "File contents" - assert resource.mimeType == "text/plain" + assert resource.mime_type == "text/plain" @pytest.mark.anyio async def test_get_unknown_prompt(self): """Test error when getting unknown prompt.""" mcp = FastMCP() - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: with pytest.raises(McpError, match="Unknown prompt"): await client.get_prompt("unknown") @@ -1400,7 +1481,7 @@ async def test_get_prompt_missing_args(self): def prompt_fn(name: str) -> str: # pragma: no cover return f"Hello, {name}!" 
- async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: with pytest.raises(McpError, match="Missing required arguments"): await client.get_prompt("prompt_fn") @@ -1408,14 +1489,11 @@ def prompt_fn(name: str) -> str: # pragma: no cover def test_streamable_http_no_redirect() -> None: """Test that streamable HTTP routes are correctly configured.""" mcp = FastMCP() + # streamable_http_path defaults to "/mcp" app = mcp.streamable_http_app() # Find routes by type - streamable_http_app creates Route objects, not Mount objects - streamable_routes = [ - r - for r in app.routes - if isinstance(r, Route) and hasattr(r, "path") and r.path == mcp.settings.streamable_http_path - ] + streamable_routes = [r for r in app.routes if isinstance(r, Route) and hasattr(r, "path") and r.path == "/mcp"] # Verify routes exist assert len(streamable_routes) == 1, "Should have one streamable route" diff --git a/tests/server/fastmcp/test_title.py b/tests/server/fastmcp/test_title.py index 7cac570123..2cb1173b3e 100644 --- a/tests/server/fastmcp/test_title.py +++ b/tests/server/fastmcp/test_title.py @@ -1,15 +1,38 @@ """Integration tests for title field functionality.""" import pytest -from pydantic import AnyUrl +from mcp import Client from mcp.server.fastmcp import FastMCP from mcp.server.fastmcp.resources import FunctionResource -from mcp.shared.memory import create_connected_server_and_client_session from mcp.shared.metadata_utils import get_display_name from mcp.types import Prompt, Resource, ResourceTemplate, Tool, ToolAnnotations +@pytest.mark.anyio +async def test_server_name_title_description_version(): + """Test that server title and description are set and retrievable correctly.""" + mcp = FastMCP( + name="TestServer", + title="Test Server Title", + description="This is a test server description.", + version="1.0", + ) + + assert mcp.title == "Test Server Title" + assert mcp.description == "This is a test server description." + assert mcp.version == "1.0" + + # Start server and connect client + async with Client(mcp) as client: + # Access initialization result from session + init_result = await client.session.initialize() + assert init_result.server_info.name == "TestServer" + assert init_result.server_info.title == "Test Server Title" + assert init_result.server_info.description == "This is a test server description." 
+ assert init_result.server_info.version == "1.0" + + @pytest.mark.anyio async def test_tool_title_precedence(): """Test that tool title precedence works correctly: title > annotations.title > name.""" @@ -38,9 +61,7 @@ def tool_with_both(message: str) -> str: # pragma: no cover return message # Start server and connect client - async with create_connected_server_and_client_session(mcp._mcp_server) as client: - await client.initialize() - + async with Client(mcp) as client: # List tools tools_result = await client.list_tools() tools = {tool.name: tool for tool in tools_result.tools} @@ -82,9 +103,7 @@ def titled_prompt(topic: str) -> str: # pragma: no cover return f"Tell me about {topic}" # Start server and connect client - async with create_connected_server_and_client_session(mcp._mcp_server) as client: - await client.initialize() - + async with Client(mcp) as client: # List prompts prompts_result = await client.list_prompts() prompts = {prompt.name: prompt for prompt in prompts_result.prompts} @@ -111,7 +130,7 @@ def get_basic_data() -> str: # pragma: no cover return "Basic data" basic_resource = FunctionResource( - uri=AnyUrl("resource://basic"), + uri="resource://basic", name="basic_resource", description="Basic resource", fn=get_basic_data, @@ -123,7 +142,7 @@ def get_titled_data() -> str: # pragma: no cover return "Titled data" titled_resource = FunctionResource( - uri=AnyUrl("resource://titled"), + uri="resource://titled", name="titled_resource", title="User-Friendly Resource", description="Resource with title", @@ -142,9 +161,7 @@ def titled_dynamic_resource(id: str) -> str: # pragma: no cover return f"Data for {id}" # Start server and connect client - async with create_connected_server_and_client_session(mcp._mcp_server) as client: - await client.initialize() - + async with Client(mcp) as client: # List resources resources_result = await client.list_resources() resources = {str(res.uri): res for res in resources_result.resources} @@ -162,7 +179,7 @@ def titled_dynamic_resource(id: str) -> str: # pragma: no cover # List resource templates templates_result = await client.list_resource_templates() - templates = {tpl.uriTemplate: tpl for tpl in templates_result.resourceTemplates} + templates = {tpl.uri_template: tpl for tpl in templates_result.resource_templates} # Verify dynamic resource template assert "resource://dynamic/{id}" in templates @@ -181,25 +198,25 @@ async def test_get_display_name_utility(): """Test the get_display_name utility function.""" # Test tool precedence: title > annotations.title > name - tool_name_only = Tool(name="test_tool", inputSchema={}) + tool_name_only = Tool(name="test_tool", input_schema={}) assert get_display_name(tool_name_only) == "test_tool" - tool_with_title = Tool(name="test_tool", title="Test Tool", inputSchema={}) + tool_with_title = Tool(name="test_tool", title="Test Tool", input_schema={}) assert get_display_name(tool_with_title) == "Test Tool" - tool_with_annotations = Tool(name="test_tool", inputSchema={}, annotations=ToolAnnotations(title="Annotated Tool")) + tool_with_annotations = Tool(name="test_tool", input_schema={}, annotations=ToolAnnotations(title="Annotated Tool")) assert get_display_name(tool_with_annotations) == "Annotated Tool" tool_with_both = Tool( - name="test_tool", title="Primary Title", inputSchema={}, annotations=ToolAnnotations(title="Secondary Title") + name="test_tool", title="Primary Title", input_schema={}, annotations=ToolAnnotations(title="Secondary Title") ) assert get_display_name(tool_with_both) == "Primary 
Title" # Test other types: title > name - resource = Resource(uri=AnyUrl("file://test"), name="test_res") + resource = Resource(uri="file://test", name="test_res") assert get_display_name(resource) == "test_res" - resource_with_title = Resource(uri=AnyUrl("file://test"), name="test_res", title="Test Resource") + resource_with_title = Resource(uri="file://test", name="test_res", title="Test Resource") assert get_display_name(resource_with_title) == "Test Resource" prompt = Prompt(name="test_prompt") @@ -208,8 +225,8 @@ async def test_get_display_name_utility(): prompt_with_title = Prompt(name="test_prompt", title="Test Prompt") assert get_display_name(prompt_with_title) == "Test Prompt" - template = ResourceTemplate(uriTemplate="file://{id}", name="test_template") + template = ResourceTemplate(uri_template="file://{id}", name="test_template") assert get_display_name(template) == "test_template" - template_with_title = ResourceTemplate(uriTemplate="file://{id}", name="test_template", title="Test Template") + template_with_title = ResourceTemplate(uri_template="file://{id}", name="test_template", title="Test Template") assert get_display_name(template_with_title) == "Test Template" diff --git a/tests/server/fastmcp/test_tool_manager.py b/tests/server/fastmcp/test_tool_manager.py index d83d484744..b09ae7de15 100644 --- a/tests/server/fastmcp/test_tool_manager.py +++ b/tests/server/fastmcp/test_tool_manager.py @@ -426,8 +426,8 @@ def read_data(path: str) -> str: # pragma: no cover annotations = ToolAnnotations( title="File Reader", - readOnlyHint=True, - openWorldHint=False, + read_only_hint=True, + open_world_hint=False, ) manager = ToolManager() @@ -435,8 +435,8 @@ def read_data(path: str) -> str: # pragma: no cover assert tool.annotations is not None assert tool.annotations.title == "File Reader" - assert tool.annotations.readOnlyHint is True - assert tool.annotations.openWorldHint is False + assert tool.annotations.read_only_hint is True + assert tool.annotations.open_world_hint is False @pytest.mark.anyio async def test_tool_annotations_in_fastmcp(self): @@ -444,7 +444,7 @@ async def test_tool_annotations_in_fastmcp(self): app = FastMCP() - @app.tool(annotations=ToolAnnotations(title="Echo Tool", readOnlyHint=True)) + @app.tool(annotations=ToolAnnotations(title="Echo Tool", read_only_hint=True)) def echo(message: str) -> str: # pragma: no cover """Echo a message back.""" return message @@ -453,7 +453,7 @@ def echo(message: str) -> str: # pragma: no cover assert len(tools) == 1 assert tools[0].annotations is not None assert tools[0].annotations.title == "Echo Tool" - assert tools[0].annotations.readOnlyHint is True + assert tools[0].annotations.read_only_hint is True class TestStructuredOutput: @@ -794,7 +794,7 @@ async def test_metadata_with_annotations(self): app = FastMCP() metadata = {"custom": "value"} - annotations = ToolAnnotations(title="Combined Tool", readOnlyHint=True) + annotations = ToolAnnotations(title="Combined Tool", read_only_hint=True) @app.tool(meta=metadata, annotations=annotations) def combined_tool(data: str) -> str: # pragma: no cover @@ -806,7 +806,7 @@ def combined_tool(data: str) -> str: # pragma: no cover assert tools[0].meta == metadata assert tools[0].annotations is not None assert tools[0].annotations.title == "Combined Tool" - assert tools[0].annotations.readOnlyHint is True + assert tools[0].annotations.read_only_hint is True class TestRemoveTools: diff --git a/tests/server/fastmcp/test_url_elicitation.py b/tests/server/fastmcp/test_url_elicitation.py index 
a4d3b2e643..cade2aa564 100644 --- a/tests/server/fastmcp/test_url_elicitation.py +++ b/tests/server/fastmcp/test_url_elicitation.py @@ -2,14 +2,14 @@ import anyio import pytest +from pydantic import BaseModel, Field -from mcp import types +from mcp import Client, types from mcp.client.session import ClientSession -from mcp.server.elicitation import CancelledElicitation, DeclinedElicitation +from mcp.server.elicitation import CancelledElicitation, DeclinedElicitation, elicit_url from mcp.server.fastmcp import Context, FastMCP from mcp.server.session import ServerSession from mcp.shared.context import RequestContext -from mcp.shared.memory import create_connected_server_and_client_session from mcp.types import ElicitRequestParams, ElicitResult, TextContent @@ -32,16 +32,12 @@ async def request_api_key(ctx: Context[ServerSession, None]) -> str: async def elicitation_callback(context: RequestContext[ClientSession, None], params: ElicitRequestParams): assert params.mode == "url" assert params.url == "https://example.com/api_key_setup" - assert params.elicitationId == "test-elicitation-001" + assert params.elicitation_id == "test-elicitation-001" assert params.message == "Please provide your API key to continue." return ElicitResult(action="accept") - async with create_connected_server_and_client_session( - mcp._mcp_server, elicitation_callback=elicitation_callback - ) as client_session: - await client_session.initialize() - - result = await client_session.call_tool("request_api_key", {}) + async with Client(mcp, elicitation_callback=elicitation_callback) as client: + result = await client.call_tool("request_api_key", {}) assert len(result.content) == 1 assert isinstance(result.content[0], TextContent) assert result.content[0].text == "User accept" @@ -66,12 +62,8 @@ async def elicitation_callback(context: RequestContext[ClientSession, None], par assert params.mode == "url" return ElicitResult(action="decline") - async with create_connected_server_and_client_session( - mcp._mcp_server, elicitation_callback=elicitation_callback - ) as client_session: - await client_session.initialize() - - result = await client_session.call_tool("oauth_flow", {}) + async with Client(mcp, elicitation_callback=elicitation_callback) as client: + result = await client.call_tool("oauth_flow", {}) assert len(result.content) == 1 assert isinstance(result.content[0], TextContent) assert result.content[0].text == "User decline authorization" @@ -96,12 +88,8 @@ async def elicitation_callback(context: RequestContext[ClientSession, None], par assert params.mode == "url" return ElicitResult(action="cancel") - async with create_connected_server_and_client_session( - mcp._mcp_server, elicitation_callback=elicitation_callback - ) as client_session: - await client_session.initialize() - - result = await client_session.call_tool("payment_flow", {}) + async with Client(mcp, elicitation_callback=elicitation_callback) as client: + result = await client.call_tool("payment_flow", {}) assert len(result.content) == 1 assert isinstance(result.content[0], TextContent) assert result.content[0].text == "User cancel payment" @@ -110,8 +98,6 @@ async def elicitation_callback(context: RequestContext[ClientSession, None], par @pytest.mark.anyio async def test_url_elicitation_helper_function(): """Test the elicit_url helper function.""" - from mcp.server.elicitation import elicit_url - mcp = FastMCP(name="URLElicitationHelperServer") @mcp.tool(description="Tool using elicit_url helper") @@ -128,12 +114,8 @@ async def setup_credentials(ctx: 
Context[ServerSession, None]) -> str: async def elicitation_callback(context: RequestContext[ClientSession, None], params: ElicitRequestParams): return ElicitResult(action="accept") - async with create_connected_server_and_client_session( - mcp._mcp_server, elicitation_callback=elicitation_callback - ) as client_session: - await client_session.initialize() - - result = await client_session.call_tool("setup_credentials", {}) + async with Client(mcp, elicitation_callback=elicitation_callback) as client: + result = await client.call_tool("setup_credentials", {}) assert len(result.content) == 1 assert isinstance(result.content[0], TextContent) assert result.content[0].text == "AcceptedUrlElicitation" @@ -160,18 +142,14 @@ async def elicitation_callback(context: RequestContext[ClientSession, None], par # Verify that this is URL mode assert params.mode == "url" assert isinstance(params, types.ElicitRequestURLParams) - # URL params have url and elicitationId, not requestedSchema + # URL params have url and elicitation_id, not requested_schema assert params.url == "https://example.com/test" - assert params.elicitationId == "test-001" + assert params.elicitation_id == "test-001" # Return without content - this is correct for URL mode return ElicitResult(action="accept") - async with create_connected_server_and_client_session( - mcp._mcp_server, elicitation_callback=elicitation_callback - ) as client_session: - await client_session.initialize() - - result = await client_session.call_tool("check_url_response", {}) + async with Client(mcp, elicitation_callback=elicitation_callback) as client: + result = await client.call_tool("check_url_response", {}) assert len(result.content) == 1 assert isinstance(result.content[0], TextContent) assert "Content: None" in result.content[0].text @@ -180,8 +158,6 @@ async def elicitation_callback(context: RequestContext[ClientSession, None], par @pytest.mark.anyio async def test_form_mode_still_works(): """Ensure form mode elicitation still works after SEP 1036.""" - from pydantic import BaseModel, Field - mcp = FastMCP(name="FormModeBackwardCompatServer") class NameSchema(BaseModel): @@ -199,16 +175,12 @@ async def elicitation_callback(context: RequestContext[ClientSession, None], par # Verify form mode parameters assert params.mode == "form" assert isinstance(params, types.ElicitRequestFormParams) - # Form params have requestedSchema, not url/elicitationId - assert params.requestedSchema is not None + # Form params have requested_schema, not url/elicitation_id + assert params.requested_schema is not None return ElicitResult(action="accept", content={"name": "Alice"}) - async with create_connected_server_and_client_session( - mcp._mcp_server, elicitation_callback=elicitation_callback - ) as client_session: - await client_session.initialize() - - result = await client_session.call_tool("ask_name", {}) + async with Client(mcp, elicitation_callback=elicitation_callback) as client: + result = await client.call_tool("ask_name", {}) assert len(result.content) == 1 assert isinstance(result.content[0], TextContent) assert result.content[0].text == "Hello, Alice!" 
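The hunks in this file all apply the same migration seen throughout the patch: the in-memory create_connected_server_and_client_session(mcp._mcp_server, ...) helper plus an explicit initialize() call is replaced by the Client(mcp, ...) async context manager, which appears to take care of session initialization on entry. A minimal sketch of that wiring, assuming the Client constructor and elicitation_callback keyword used in these tests; the test name, server name, and echo tool below are illustrative only:

import pytest

from mcp import Client, types
from mcp.client.session import ClientSession
from mcp.server.fastmcp import FastMCP
from mcp.shared.context import RequestContext
from mcp.types import ElicitRequestParams, ElicitResult


@pytest.mark.anyio
async def test_client_wiring_sketch():
    # Illustrative server with a plain tool; no elicitation is triggered here.
    mcp = FastMCP(name="SketchServer")

    @mcp.tool(description="Echo a message back")
    def echo(message: str) -> str:
        return message

    # Callback signature as used throughout this file; it only runs if the
    # server actually issues an elicitation request.
    async def elicitation_callback(
        context: RequestContext[ClientSession, None], params: ElicitRequestParams
    ) -> ElicitResult:
        return ElicitResult(action="accept")

    # Client(...) stands in for create_connected_server_and_client_session(
    # mcp._mcp_server, elicitation_callback=...) plus `await session.initialize()`.
    async with Client(mcp, elicitation_callback=elicitation_callback) as client:
        result = await client.call_tool("echo", {"message": "hi"})
        assert isinstance(result.content[0], types.TextContent)
        assert result.content[0].text == "hi"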
@@ -238,12 +210,8 @@ async def trigger_elicitation(ctx: Context[ServerSession, None]) -> str: async def elicitation_callback(context: RequestContext[ClientSession, None], params: ElicitRequestParams): return ElicitResult(action="accept") # pragma: no cover - async with create_connected_server_and_client_session( - mcp._mcp_server, elicitation_callback=elicitation_callback - ) as client_session: - await client_session.initialize() - - result = await client_session.call_tool("trigger_elicitation", {}) + async with Client(mcp, elicitation_callback=elicitation_callback) as client: + result = await client.call_tool("trigger_elicitation", {}) assert len(result.content) == 1 assert isinstance(result.content[0], TextContent) assert result.content[0].text == "Elicitation completed" @@ -267,8 +235,6 @@ async def test_url_elicitation_required_error_code(): @pytest.mark.anyio async def test_elicit_url_typed_results(): """Test that elicit_url returns properly typed result objects.""" - from mcp.server.elicitation import elicit_url - mcp = FastMCP(name="TypedResultsServer") @mcp.tool(description="Test declined result") @@ -301,12 +267,8 @@ async def test_cancel(ctx: Context[ServerSession, None]) -> str: async def decline_callback(context: RequestContext[ClientSession, None], params: ElicitRequestParams): return ElicitResult(action="decline") - async with create_connected_server_and_client_session( - mcp._mcp_server, elicitation_callback=decline_callback - ) as client_session: - await client_session.initialize() - - result = await client_session.call_tool("test_decline", {}) + async with Client(mcp, elicitation_callback=decline_callback) as client: + result = await client.call_tool("test_decline", {}) assert len(result.content) == 1 assert isinstance(result.content[0], TextContent) assert result.content[0].text == "Declined" @@ -315,12 +277,8 @@ async def decline_callback(context: RequestContext[ClientSession, None], params: async def cancel_callback(context: RequestContext[ClientSession, None], params: ElicitRequestParams): return ElicitResult(action="cancel") - async with create_connected_server_and_client_session( - mcp._mcp_server, elicitation_callback=cancel_callback - ) as client_session: - await client_session.initialize() - - result = await client_session.call_tool("test_cancel", {}) + async with Client(mcp, elicitation_callback=cancel_callback) as client: + result = await client.call_tool("test_cancel", {}) assert len(result.content) == 1 assert isinstance(result.content[0], TextContent) assert result.content[0].text == "Cancelled" @@ -329,8 +287,6 @@ async def cancel_callback(context: RequestContext[ClientSession, None], params: @pytest.mark.anyio async def test_deprecated_elicit_method(): """Test the deprecated elicit() method for backward compatibility.""" - from pydantic import BaseModel, Field - mcp = FastMCP(name="DeprecatedElicitServer") class EmailSchema(BaseModel): @@ -341,7 +297,7 @@ async def use_deprecated_elicit(ctx: Context[ServerSession, None]) -> str: # Use the deprecated elicit() method which should call elicit_form() result = await ctx.session.elicit( message="Enter your email", - requestedSchema=EmailSchema.model_json_schema(), + requested_schema=EmailSchema.model_json_schema(), ) if result.action == "accept" and result.content: @@ -351,15 +307,11 @@ async def use_deprecated_elicit(ctx: Context[ServerSession, None]) -> str: async def elicitation_callback(context: RequestContext[ClientSession, None], params: ElicitRequestParams): # Verify this is form mode assert params.mode == 
"form" - assert params.requestedSchema is not None + assert params.requested_schema is not None return ElicitResult(action="accept", content={"email": "test@example.com"}) - async with create_connected_server_and_client_session( - mcp._mcp_server, elicitation_callback=elicitation_callback - ) as client_session: - await client_session.initialize() - - result = await client_session.call_tool("use_deprecated_elicit", {}) + async with Client(mcp, elicitation_callback=elicitation_callback) as client: + result = await client.call_tool("use_deprecated_elicit", {}) assert len(result.content) == 1 assert isinstance(result.content[0], TextContent) assert result.content[0].text == "Email: test@example.com" @@ -382,13 +334,10 @@ async def direct_elicit_url(ctx: Context[ServerSession, None]) -> str: async def elicitation_callback(context: RequestContext[ClientSession, None], params: ElicitRequestParams): assert params.mode == "url" - assert params.elicitationId == "ctx-test-001" + assert params.elicitation_id == "ctx-test-001" return ElicitResult(action="accept") - async with create_connected_server_and_client_session( - mcp._mcp_server, elicitation_callback=elicitation_callback - ) as client_session: - await client_session.initialize() - result = await client_session.call_tool("direct_elicit_url", {}) + async with Client(mcp, elicitation_callback=elicitation_callback) as client: + result = await client.call_tool("direct_elicit_url", {}) assert isinstance(result.content[0], TextContent) assert result.content[0].text == "Result: accept" diff --git a/tests/server/fastmcp/test_url_elicitation_error_throw.py b/tests/server/fastmcp/test_url_elicitation_error_throw.py index 2d7eda4ab4..cacc0b741c 100644 --- a/tests/server/fastmcp/test_url_elicitation_error_throw.py +++ b/tests/server/fastmcp/test_url_elicitation_error_throw.py @@ -2,11 +2,10 @@ import pytest -from mcp import types +from mcp import Client, types from mcp.server.fastmcp import Context, FastMCP from mcp.server.session import ServerSession from mcp.shared.exceptions import McpError, UrlElicitationRequiredError -from mcp.shared.memory import create_connected_server_and_client_session @pytest.mark.anyio @@ -23,17 +22,15 @@ async def connect_service(service_name: str, ctx: Context[ServerSession, None]) mode="url", message=f"Authorization required to connect to {service_name}", url=f"https://{service_name}.example.com/oauth/authorize", - elicitationId=f"{service_name}-auth-001", + elicitation_id=f"{service_name}-auth-001", ) ] ) - async with create_connected_server_and_client_session(mcp._mcp_server) as client_session: - await client_session.initialize() - + async with Client(mcp) as client: # Call the tool - it should raise McpError with URL_ELICITATION_REQUIRED code with pytest.raises(McpError) as exc_info: - await client_session.call_tool("connect_service", {"service_name": "github"}) + await client.call_tool("connect_service", {"service_name": "github"}) # Verify the error details error = exc_info.value.error @@ -63,23 +60,21 @@ async def multi_auth(ctx: Context[ServerSession, None]) -> str: mode="url", message="GitHub authorization required", url="https://github.example.com/oauth", - elicitationId="github-auth", + elicitation_id="github-auth", ), types.ElicitRequestURLParams( mode="url", message="Google Drive authorization required", url="https://drive.google.com/oauth", - elicitationId="gdrive-auth", + elicitation_id="gdrive-auth", ), ] ) - async with create_connected_server_and_client_session(mcp._mcp_server) as client_session: - await 
client_session.initialize() - + async with Client(mcp) as client: # Call the tool and catch the error with pytest.raises(McpError) as exc_info: - await client_session.call_tool("multi_auth", {}) + await client.call_tool("multi_auth", {}) # Reconstruct the typed error mcp_error = exc_info.value @@ -89,25 +84,23 @@ async def multi_auth(ctx: Context[ServerSession, None]) -> str: # Verify the reconstructed error has both elicitations assert len(url_error.elicitations) == 2 - assert url_error.elicitations[0].elicitationId == "github-auth" - assert url_error.elicitations[1].elicitationId == "gdrive-auth" + assert url_error.elicitations[0].elicitation_id == "github-auth" + assert url_error.elicitations[1].elicitation_id == "gdrive-auth" @pytest.mark.anyio async def test_normal_exceptions_still_return_error_result(): - """Test that normal exceptions still return CallToolResult with isError=True.""" + """Test that normal exceptions still return CallToolResult with is_error=True.""" mcp = FastMCP(name="NormalErrorServer") @mcp.tool(description="A tool that raises a normal exception") async def failing_tool(ctx: Context[ServerSession, None]) -> str: raise ValueError("Something went wrong") - async with create_connected_server_and_client_session(mcp._mcp_server) as client_session: - await client_session.initialize() - + async with Client(mcp) as client: # Normal exceptions should be returned as error results, not McpError - result = await client_session.call_tool("failing_tool", {}) - assert result.isError is True + result = await client.call_tool("failing_tool", {}) + assert result.is_error is True assert len(result.content) == 1 assert isinstance(result.content[0], types.TextContent) assert "Something went wrong" in result.content[0].text diff --git a/tests/server/lowlevel/test_helper_types.py b/tests/server/lowlevel/test_helper_types.py new file mode 100644 index 0000000000..27a8081b62 --- /dev/null +++ b/tests/server/lowlevel/test_helper_types.py @@ -0,0 +1,60 @@ +"""Test helper_types.py meta field. + +These tests verify the changes made to helper_types.py:11 where we added: + meta: dict[str, Any] | None = field(default=None) + +ReadResourceContents is the return type for resource read handlers. It's used internally +by the low-level server to package resource content before sending it over the MCP protocol. +""" + +from mcp.server.lowlevel.helper_types import ReadResourceContents + + +class TestReadResourceContentsMetadata: + """Test ReadResourceContents meta field. + + ReadResourceContents is an internal helper type used by the low-level MCP server. + When a resource is read, the server creates a ReadResourceContents instance that + contains the content, mime type, and now metadata. The low-level server then + extracts the meta field and includes it in the protocol response as _meta. 
+ """ + + def test_read_resource_contents_with_metadata(self): + """Test that ReadResourceContents accepts meta parameter.""" + # Bridge between Resource.meta and MCP protocol _meta field (helper_types.py:11) + metadata = {"version": "1.0", "cached": True} + + contents = ReadResourceContents( + content="test content", + mime_type="text/plain", + meta=metadata, + ) + + assert contents.meta is not None + assert contents.meta == metadata + assert contents.meta["version"] == "1.0" + assert contents.meta["cached"] is True + + def test_read_resource_contents_without_metadata(self): + """Test that ReadResourceContents meta defaults to None.""" + # Ensures backward compatibility - meta defaults to None, _meta omitted from protocol (helper_types.py:11) + contents = ReadResourceContents( + content="test content", + mime_type="text/plain", + ) + + assert contents.meta is None + + def test_read_resource_contents_with_bytes(self): + """Test that ReadResourceContents works with bytes content and meta.""" + # Verifies meta works with both str and bytes content (binary resources like images, PDFs) + metadata = {"encoding": "utf-8"} + + contents = ReadResourceContents( + content=b"binary content", + mime_type="application/octet-stream", + meta=metadata, + ) + + assert contents.content == b"binary content" + assert contents.meta == metadata diff --git a/tests/server/lowlevel/test_server_listing.py b/tests/server/lowlevel/test_server_listing.py index 23ac7e4519..6bf4cddb39 100644 --- a/tests/server/lowlevel/test_server_listing.py +++ b/tests/server/lowlevel/test_server_listing.py @@ -3,7 +3,6 @@ import warnings import pytest -from pydantic import AnyUrl from mcp.server import Server from mcp.types import ( @@ -42,8 +41,8 @@ async def handle_list_prompts() -> list[Prompt]: result = await handler(request) assert isinstance(result, ServerResult) - assert isinstance(result.root, ListPromptsResult) - assert result.root.prompts == test_prompts + assert isinstance(result, ListPromptsResult) + assert result.prompts == test_prompts @pytest.mark.anyio @@ -52,8 +51,8 @@ async def test_list_resources_basic() -> None: server = Server("test") test_resources = [ - Resource(uri=AnyUrl("file:///test1.txt"), name="Test 1"), - Resource(uri=AnyUrl("file:///test2.txt"), name="Test 2"), + Resource(uri="file:///test1.txt", name="Test 1"), + Resource(uri="file:///test2.txt", name="Test 2"), ] with warnings.catch_warnings(): @@ -68,8 +67,8 @@ async def handle_list_resources() -> list[Resource]: result = await handler(request) assert isinstance(result, ServerResult) - assert isinstance(result.root, ListResourcesResult) - assert result.root.resources == test_resources + assert isinstance(result, ListResourcesResult) + assert result.resources == test_resources @pytest.mark.anyio @@ -81,7 +80,7 @@ async def test_list_tools_basic() -> None: Tool( name="tool1", description="First tool", - inputSchema={ + input_schema={ "type": "object", "properties": { "message": {"type": "string"}, @@ -92,7 +91,7 @@ async def test_list_tools_basic() -> None: Tool( name="tool2", description="Second tool", - inputSchema={ + input_schema={ "type": "object", "properties": { "count": {"type": "number"}, @@ -115,8 +114,8 @@ async def handle_list_tools() -> list[Tool]: result = await handler(request) assert isinstance(result, ServerResult) - assert isinstance(result.root, ListToolsResult) - assert result.root.tools == test_tools + assert isinstance(result, ListToolsResult) + assert result.tools == test_tools @pytest.mark.anyio @@ -136,8 +135,8 @@ async def 
handle_list_prompts() -> list[Prompt]: result = await handler(request) assert isinstance(result, ServerResult) - assert isinstance(result.root, ListPromptsResult) - assert result.root.prompts == [] + assert isinstance(result, ListPromptsResult) + assert result.prompts == [] @pytest.mark.anyio @@ -157,8 +156,8 @@ async def handle_list_resources() -> list[Resource]: result = await handler(request) assert isinstance(result, ServerResult) - assert isinstance(result.root, ListResourcesResult) - assert result.root.resources == [] + assert isinstance(result, ListResourcesResult) + assert result.resources == [] @pytest.mark.anyio @@ -178,5 +177,5 @@ async def handle_list_tools() -> list[Tool]: result = await handler(request) assert isinstance(result, ServerResult) - assert isinstance(result.root, ListToolsResult) - assert result.root.tools == [] + assert isinstance(result, ListToolsResult) + assert result.tools == [] diff --git a/tests/server/lowlevel/test_server_pagination.py b/tests/server/lowlevel/test_server_pagination.py index 8d64dd5253..081fb262ab 100644 --- a/tests/server/lowlevel/test_server_pagination.py +++ b/tests/server/lowlevel/test_server_pagination.py @@ -25,7 +25,7 @@ async def test_list_prompts_pagination() -> None: async def handle_list_prompts(request: ListPromptsRequest) -> ListPromptsResult: nonlocal received_request received_request = request - return ListPromptsResult(prompts=[], nextCursor="next") + return ListPromptsResult(prompts=[], next_cursor="next") handler = server.request_handlers[ListPromptsRequest] @@ -57,7 +57,7 @@ async def test_list_resources_pagination() -> None: async def handle_list_resources(request: ListResourcesRequest) -> ListResourcesResult: nonlocal received_request received_request = request - return ListResourcesResult(resources=[], nextCursor="next") + return ListResourcesResult(resources=[], next_cursor="next") handler = server.request_handlers[ListResourcesRequest] @@ -91,7 +91,7 @@ async def test_list_tools_pagination() -> None: async def handle_list_tools(request: ListToolsRequest) -> ListToolsResult: nonlocal received_request received_request = request - return ListToolsResult(tools=[], nextCursor="next") + return ListToolsResult(tools=[], next_cursor="next") handler = server.request_handlers[ListToolsRequest] diff --git a/tests/server/test_cancel_handling.py b/tests/server/test_cancel_handling.py index 47c49bb62b..98f34df465 100644 --- a/tests/server/test_cancel_handling.py +++ b/tests/server/test_cancel_handling.py @@ -6,17 +6,15 @@ import pytest import mcp.types as types +from mcp import Client from mcp.server.lowlevel.server import Server from mcp.shared.exceptions import McpError -from mcp.shared.memory import create_connected_server_and_client_session from mcp.types import ( CallToolRequest, CallToolRequestParams, CallToolResult, CancelledNotification, CancelledNotificationParams, - ClientNotification, - ClientRequest, Tool, ) @@ -38,7 +36,7 @@ async def handle_list_tools() -> list[Tool]: Tool( name="test_tool", description="Tool for testing", - inputSchema={}, + input_schema={}, ) ] @@ -54,16 +52,12 @@ async def handle_call_tool(name: str, arguments: dict[str, Any] | None) -> list[ return [types.TextContent(type="text", text=f"Call number: {call_count}")] raise ValueError(f"Unknown tool: {name}") # pragma: no cover - async with create_connected_server_and_client_session(server) as client: + async with Client(server) as client: # First request (will be cancelled) async def first_request(): try: - await client.send_request( - 
ClientRequest( - CallToolRequest( - params=CallToolRequestParams(name="test_tool", arguments={}), - ) - ), + await client.session.send_request( + CallToolRequest(params=CallToolRequestParams(name="test_tool", arguments={})), CallToolResult, ) pytest.fail("First request should have been cancelled") # pragma: no cover @@ -79,26 +73,14 @@ async def first_request(): # Cancel it assert first_request_id is not None - await client.send_notification( - ClientNotification( - CancelledNotification( - params=CancelledNotificationParams( - requestId=first_request_id, - reason="Testing server recovery", - ), - ) + await client.session.send_notification( + CancelledNotification( + params=CancelledNotificationParams(request_id=first_request_id, reason="Testing server recovery"), ) ) # Second request (should work normally) - result = await client.send_request( - ClientRequest( - CallToolRequest( - params=CallToolRequestParams(name="test_tool", arguments={}), - ) - ), - CallToolResult, - ) + result = await client.call_tool("test_tool", {}) # Verify second request completed successfully assert len(result.content) == 1 diff --git a/tests/server/test_completion_with_context.py b/tests/server/test_completion_with_context.py index eb9604791a..19c591340d 100644 --- a/tests/server/test_completion_with_context.py +++ b/tests/server/test_completion_with_context.py @@ -1,13 +1,11 @@ -""" -Tests for completion handler with context functionality. -""" +"""Tests for completion handler with context functionality.""" from typing import Any import pytest +from mcp import Client from mcp.server.lowlevel import Server -from mcp.shared.memory import create_connected_server_and_client_session from mcp.types import ( Completion, CompletionArgument, @@ -36,9 +34,9 @@ async def handle_completion( received_args["context"] = context # Return test completion - return Completion(values=["test-completion"], total=1, hasMore=False) + return Completion(values=["test-completion"], total=1, has_more=False) - async with create_connected_server_and_client_session(server) as client: + async with Client(server) as client: # Test with context result = await client.complete( ref=ResourceTemplateReference(type="ref/resource", uri="test://resource/{param}"), @@ -68,9 +66,9 @@ async def handle_completion( nonlocal context_was_none context_was_none = context is None - return Completion(values=["no-context-completion"], total=1, hasMore=False) + return Completion(values=["no-context-completion"], total=1, has_more=False) - async with create_connected_server_and_client_session(server) as client: + async with Client(server) as client: # Test without context result = await client.complete( ref=PromptReference(type="ref/prompt", name="test-prompt"), argument={"name": "arg", "value": "val"} @@ -97,19 +95,19 @@ async def handle_completion( if ref.uri == "db://{database}/{table}": if argument.name == "database": # Complete database names - return Completion(values=["users_db", "products_db", "analytics_db"], total=3, hasMore=False) + return Completion(values=["users_db", "products_db", "analytics_db"], total=3, has_more=False) elif argument.name == "table": # Complete table names based on selected database if context and context.arguments: db = context.arguments.get("database") if db == "users_db": - return Completion(values=["users", "sessions", "permissions"], total=3, hasMore=False) + return Completion(values=["users", "sessions", "permissions"], total=3, has_more=False) elif db == "products_db": # pragma: no cover - return 
Completion(values=["products", "categories", "inventory"], total=3, hasMore=False) + return Completion(values=["products", "categories", "inventory"], total=3, has_more=False) - return Completion(values=[], total=0, hasMore=False) # pragma: no cover + return Completion(values=[], total=0, has_more=False) # pragma: no cover - async with create_connected_server_and_client_session(server) as client: + async with Client(server) as client: # First, complete database db_result = await client.complete( ref=ResourceTemplateReference(type="ref/resource", uri="db://{database}/{table}"), @@ -156,11 +154,11 @@ async def handle_completion( # Normal completion if context is provided db = context.arguments.get("database") if db == "test_db": # pragma: no cover - return Completion(values=["users", "orders", "products"], total=3, hasMore=False) + return Completion(values=["users", "orders", "products"], total=3, has_more=False) - return Completion(values=[], total=0, hasMore=False) # pragma: no cover + return Completion(values=[], total=0, has_more=False) # pragma: no cover - async with create_connected_server_and_client_session(server) as client: + async with Client(server) as client: # Try to complete table without database context - should raise error with pytest.raises(Exception) as exc_info: await client.complete( diff --git a/tests/server/test_lifespan.py b/tests/server/test_lifespan.py index 9d73fd47a0..caeb0530d5 100644 --- a/tests/server/test_lifespan.py +++ b/tests/server/test_lifespan.py @@ -76,19 +76,17 @@ async def run_server(): # Initialize the server params = InitializeRequestParams( - protocolVersion="2024-11-05", + protocol_version="2024-11-05", capabilities=ClientCapabilities(), - clientInfo=Implementation(name="test-client", version="0.1.0"), + client_info=Implementation(name="test-client", version="0.1.0"), ) await send_stream1.send( SessionMessage( - JSONRPCMessage( - root=JSONRPCRequest( - jsonrpc="2.0", - id=1, - method="initialize", - params=TypeAdapter(InitializeRequestParams).dump_python(params), - ) + JSONRPCRequest( + jsonrpc="2.0", + id=1, + method="initialize", + params=TypeAdapter(InitializeRequestParams).dump_python(params), ) ) ) @@ -96,27 +94,16 @@ async def run_server(): response = response.message # Send initialized notification - await send_stream1.send( - SessionMessage( - JSONRPCMessage( - root=JSONRPCNotification( - jsonrpc="2.0", - method="notifications/initialized", - ) - ) - ) - ) + await send_stream1.send(SessionMessage(JSONRPCNotification(jsonrpc="2.0", method="notifications/initialized"))) # Call the tool to verify lifespan context await send_stream1.send( SessionMessage( - JSONRPCMessage( - root=JSONRPCRequest( - jsonrpc="2.0", - id=2, - method="tools/call", - params={"name": "check_lifespan", "arguments": {}}, - ) + JSONRPCRequest( + jsonrpc="2.0", + id=2, + method="tools/call", + params={"name": "check_lifespan", "arguments": {}}, ) ) ) @@ -125,8 +112,8 @@ async def run_server(): response = await receive_stream2.receive() response = response.message assert isinstance(response, JSONRPCMessage) - assert isinstance(response.root, JSONRPCResponse) - assert response.root.result["content"][0]["text"] == "true" + assert isinstance(response, JSONRPCResponse) + assert response.result["content"][0]["text"] == "true" # Cancel server task tg.cancel_scope.cancel() @@ -162,13 +149,7 @@ def check_lifespan(ctx: Context[ServerSession, None]) -> bool: return True # Run server in background task - async with ( - anyio.create_task_group() as tg, - send_stream1, - 
receive_stream1, - send_stream2, - receive_stream2, - ): + async with anyio.create_task_group() as tg, send_stream1, receive_stream1, send_stream2, receive_stream2: async def run_server(): await server._mcp_server.run( @@ -182,19 +163,17 @@ async def run_server(): # Initialize the server params = InitializeRequestParams( - protocolVersion="2024-11-05", + protocol_version="2024-11-05", capabilities=ClientCapabilities(), - clientInfo=Implementation(name="test-client", version="0.1.0"), + client_info=Implementation(name="test-client", version="0.1.0"), ) await send_stream1.send( SessionMessage( - JSONRPCMessage( - root=JSONRPCRequest( - jsonrpc="2.0", - id=1, - method="initialize", - params=TypeAdapter(InitializeRequestParams).dump_python(params), - ) + JSONRPCRequest( + jsonrpc="2.0", + id=1, + method="initialize", + params=TypeAdapter(InitializeRequestParams).dump_python(params), ) ) ) @@ -202,27 +181,16 @@ async def run_server(): response = response.message # Send initialized notification - await send_stream1.send( - SessionMessage( - JSONRPCMessage( - root=JSONRPCNotification( - jsonrpc="2.0", - method="notifications/initialized", - ) - ) - ) - ) + await send_stream1.send(SessionMessage(JSONRPCNotification(jsonrpc="2.0", method="notifications/initialized"))) # Call the tool to verify lifespan context await send_stream1.send( SessionMessage( - JSONRPCMessage( - root=JSONRPCRequest( - jsonrpc="2.0", - id=2, - method="tools/call", - params={"name": "check_lifespan", "arguments": {}}, - ) + JSONRPCRequest( + jsonrpc="2.0", + id=2, + method="tools/call", + params={"name": "check_lifespan", "arguments": {}}, ) ) ) @@ -231,8 +199,8 @@ async def run_server(): response = await receive_stream2.receive() response = response.message assert isinstance(response, JSONRPCMessage) - assert isinstance(response.root, JSONRPCResponse) - assert response.root.result["content"][0]["text"] == "true" + assert isinstance(response, JSONRPCResponse) + assert response.result["content"][0]["text"] == "true" # Cancel server task tg.cancel_scope.cancel() diff --git a/tests/server/test_lowlevel_exception_handling.py b/tests/server/test_lowlevel_exception_handling.py index 5d4c3347f6..4767ea1177 100644 --- a/tests/server/test_lowlevel_exception_handling.py +++ b/tests/server/test_lowlevel_exception_handling.py @@ -60,7 +60,7 @@ async def test_normal_message_handling_not_affected(): # Create a mock RequestResponder responder = Mock(spec=RequestResponder) - responder.request = types.ClientRequest(root=types.PingRequest(method="ping")) + responder.request = types.PingRequest(method="ping") responder.__enter__ = Mock(return_value=responder) responder.__exit__ = Mock(return_value=None) diff --git a/tests/server/test_lowlevel_input_validation.py b/tests/server/test_lowlevel_input_validation.py index 47cb57232d..eb644938ff 100644 --- a/tests/server/test_lowlevel_input_validation.py +++ b/tests/server/test_lowlevel_input_validation.py @@ -70,7 +70,8 @@ async def run_server(): async with anyio.create_task_group() as tg: async def handle_messages(): - async for message in server_session.incoming_messages: + # TODO(Marcelo): Drop the pragma once https://github.com/coveragepy/coveragepy/issues/1987 is fixed. 
+ async for message in server_session.incoming_messages: # pragma: no cover await server._handle_message(message, server_session, {}, False) tg.start_soon(handle_messages) @@ -102,7 +103,7 @@ def create_add_tool() -> Tool: return Tool( name="add", description="Add two numbers", - inputSchema={ + input_schema={ "type": "object", "properties": { "a": {"type": "number"}, @@ -132,7 +133,7 @@ async def test_callback(client_session: ClientSession) -> CallToolResult: # Verify results assert result is not None - assert not result.isError + assert not result.is_error assert len(result.content) == 1 assert result.content[0].type == "text" assert isinstance(result.content[0], TextContent) @@ -154,7 +155,7 @@ async def test_callback(client_session: ClientSession) -> CallToolResult: # Verify results assert result is not None - assert result.isError + assert result.is_error assert len(result.content) == 1 assert result.content[0].type == "text" assert isinstance(result.content[0], TextContent) @@ -177,7 +178,7 @@ async def test_callback(client_session: ClientSession) -> CallToolResult: # Verify results assert result is not None - assert result.isError + assert result.is_error assert len(result.content) == 1 assert result.content[0].type == "text" assert isinstance(result.content[0], TextContent) @@ -192,7 +193,7 @@ async def test_cache_refresh_on_missing_tool(): Tool( name="multiply", description="Multiply two numbers", - inputSchema={ + input_schema={ "type": "object", "properties": { "x": {"type": "number"}, @@ -219,7 +220,7 @@ async def test_callback(client_session: ClientSession) -> CallToolResult: # Verify results - should work because cache will be refreshed assert result is not None - assert not result.isError + assert not result.is_error assert len(result.content) == 1 assert result.content[0].type == "text" assert isinstance(result.content[0], TextContent) @@ -233,7 +234,7 @@ async def test_enum_constraint_validation(): Tool( name="greet", description="Greet someone", - inputSchema={ + input_schema={ "type": "object", "properties": { "name": {"type": "string"}, @@ -255,7 +256,7 @@ async def test_callback(client_session: ClientSession) -> CallToolResult: # Verify results assert result is not None - assert result.isError + assert result.is_error assert len(result.content) == 1 assert result.content[0].type == "text" assert isinstance(result.content[0], TextContent) @@ -270,7 +271,7 @@ async def test_tool_not_in_list_logs_warning(caplog: pytest.LogCaptureFixture): Tool( name="add", description="Add two numbers", - inputSchema={ + input_schema={ "type": "object", "properties": { "a": {"type": "number"}, @@ -299,7 +300,7 @@ async def test_callback(client_session: ClientSession) -> CallToolResult: # Verify results - should succeed because validation is skipped for unknown tools assert result is not None - assert not result.isError + assert not result.is_error assert len(result.content) == 1 assert result.content[0].type == "text" assert isinstance(result.content[0], TextContent) diff --git a/tests/server/test_lowlevel_output_validation.py b/tests/server/test_lowlevel_output_validation.py index f735445212..3b1b7236b5 100644 --- a/tests/server/test_lowlevel_output_validation.py +++ b/tests/server/test_lowlevel_output_validation.py @@ -71,7 +71,8 @@ async def run_server(): async with anyio.create_task_group() as tg: async def handle_messages(): - async for message in server_session.incoming_messages: + # TODO(Marcelo): Drop the pragma once https://github.com/coveragepy/coveragepy/issues/1987 is fixed. 
+ async for message in server_session.incoming_messages: # pragma: no cover await server._handle_message(message, server_session, {}, False) tg.start_soon(handle_messages) @@ -105,7 +106,7 @@ async def test_content_only_without_output_schema(): Tool( name="echo", description="Echo a message", - inputSchema={ + input_schema={ "type": "object", "properties": { "message": {"type": "string"}, @@ -129,12 +130,12 @@ async def test_callback(client_session: ClientSession) -> CallToolResult: # Verify results assert result is not None - assert not result.isError + assert not result.is_error assert len(result.content) == 1 assert result.content[0].type == "text" assert isinstance(result.content[0], TextContent) assert result.content[0].text == "Echo: Hello" - assert result.structuredContent is None + assert result.structured_content is None @pytest.mark.anyio @@ -144,7 +145,7 @@ async def test_dict_only_without_output_schema(): Tool( name="get_info", description="Get structured information", - inputSchema={ + input_schema={ "type": "object", "properties": {}, }, @@ -165,13 +166,13 @@ async def test_callback(client_session: ClientSession) -> CallToolResult: # Verify results assert result is not None - assert not result.isError + assert not result.is_error assert len(result.content) == 1 assert result.content[0].type == "text" assert isinstance(result.content[0], TextContent) # Check that the content is the JSON serialization assert json.loads(result.content[0].text) == {"status": "ok", "data": {"value": 42}} - assert result.structuredContent == {"status": "ok", "data": {"value": 42}} + assert result.structured_content == {"status": "ok", "data": {"value": 42}} @pytest.mark.anyio @@ -181,7 +182,7 @@ async def test_both_content_and_dict_without_output_schema(): Tool( name="process", description="Process data", - inputSchema={ + input_schema={ "type": "object", "properties": {}, }, @@ -204,12 +205,12 @@ async def test_callback(client_session: ClientSession) -> CallToolResult: # Verify results assert result is not None - assert not result.isError + assert not result.is_error assert len(result.content) == 1 assert result.content[0].type == "text" assert isinstance(result.content[0], TextContent) assert result.content[0].text == "Processing complete" - assert result.structuredContent == {"result": "success", "count": 10} + assert result.structured_content == {"result": "success", "count": 10} @pytest.mark.anyio @@ -219,11 +220,11 @@ async def test_content_only_with_output_schema_error(): Tool( name="structured_tool", description="Tool expecting structured output", - inputSchema={ + input_schema={ "type": "object", "properties": {}, }, - outputSchema={ + output_schema={ "type": "object", "properties": { "result": {"type": "string"}, @@ -244,7 +245,7 @@ async def test_callback(client_session: ClientSession) -> CallToolResult: # Verify error assert result is not None - assert result.isError + assert result.is_error assert len(result.content) == 1 assert result.content[0].type == "text" assert isinstance(result.content[0], TextContent) @@ -258,7 +259,7 @@ async def test_valid_dict_with_output_schema(): Tool( name="calc", description="Calculate result", - inputSchema={ + input_schema={ "type": "object", "properties": { "x": {"type": "number"}, @@ -266,7 +267,7 @@ async def test_valid_dict_with_output_schema(): }, "required": ["x", "y"], }, - outputSchema={ + output_schema={ "type": "object", "properties": { "sum": {"type": "number"}, @@ -292,12 +293,12 @@ async def test_callback(client_session: ClientSession) -> 
CallToolResult: # Verify results assert result is not None - assert not result.isError + assert not result.is_error assert len(result.content) == 1 assert result.content[0].type == "text" # Check JSON serialization assert json.loads(result.content[0].text) == {"sum": 7, "product": 12} - assert result.structuredContent == {"sum": 7, "product": 12} + assert result.structured_content == {"sum": 7, "product": 12} @pytest.mark.anyio @@ -307,11 +308,11 @@ async def test_invalid_dict_with_output_schema(): Tool( name="user_info", description="Get user information", - inputSchema={ + input_schema={ "type": "object", "properties": {}, }, - outputSchema={ + output_schema={ "type": "object", "properties": { "name": {"type": "string"}, @@ -336,7 +337,7 @@ async def test_callback(client_session: ClientSession) -> CallToolResult: # Verify error assert result is not None - assert result.isError + assert result.is_error assert len(result.content) == 1 assert result.content[0].type == "text" assert isinstance(result.content[0], TextContent) @@ -351,14 +352,14 @@ async def test_both_content_and_valid_dict_with_output_schema(): Tool( name="analyze", description="Analyze data", - inputSchema={ + input_schema={ "type": "object", "properties": { "text": {"type": "string"}, }, "required": ["text"], }, - outputSchema={ + output_schema={ "type": "object", "properties": { "sentiment": {"type": "string", "enum": ["positive", "negative", "neutral"]}, @@ -384,11 +385,11 @@ async def test_callback(client_session: ClientSession) -> CallToolResult: # Verify results assert result is not None - assert not result.isError + assert not result.is_error assert len(result.content) == 1 assert result.content[0].type == "text" assert result.content[0].text == "Analysis of: Great job!" - assert result.structuredContent == {"sentiment": "positive", "confidence": 0.95} + assert result.structured_content == {"sentiment": "positive", "confidence": 0.95} @pytest.mark.anyio @@ -398,7 +399,7 @@ async def test_tool_call_result(): Tool( name="get_info", description="Get structured information", - inputSchema={ + input_schema={ "type": "object", "properties": {}, }, @@ -410,7 +411,7 @@ async def call_tool_handler(name: str, arguments: dict[str, Any]) -> CallToolRes if name == "get_info": return CallToolResult( content=[TextContent(type="text", text="Results calculated")], - structuredContent={"status": "ok", "data": {"value": 42}}, + structured_content={"status": "ok", "data": {"value": 42}}, _meta={"some": "metadata"}, ) else: # pragma: no cover @@ -423,12 +424,12 @@ async def test_callback(client_session: ClientSession) -> CallToolResult: # Verify results assert result is not None - assert not result.isError + assert not result.is_error assert len(result.content) == 1 assert result.content[0].type == "text" assert result.content[0].text == "Results calculated" assert isinstance(result.content[0], TextContent) - assert result.structuredContent == {"status": "ok", "data": {"value": 42}} + assert result.structured_content == {"status": "ok", "data": {"value": 42}} assert result.meta == {"some": "metadata"} @@ -439,11 +440,11 @@ async def test_output_schema_type_validation(): Tool( name="stats", description="Get statistics", - inputSchema={ + input_schema={ "type": "object", "properties": {}, }, - outputSchema={ + output_schema={ "type": "object", "properties": { "count": {"type": "integer"}, @@ -469,7 +470,7 @@ async def test_callback(client_session: ClientSession) -> CallToolResult: # Verify error assert result is not None - assert 
result.isError + assert result.is_error assert len(result.content) == 1 assert result.content[0].type == "text" assert "Output validation error:" in result.content[0].text diff --git a/tests/server/test_lowlevel_tool_annotations.py b/tests/server/test_lowlevel_tool_annotations.py index f812c48777..614ca2dce5 100644 --- a/tests/server/test_lowlevel_tool_annotations.py +++ b/tests/server/test_lowlevel_tool_annotations.py @@ -25,7 +25,7 @@ async def list_tools(): # pragma: no cover Tool( name="echo", description="Echo a message back", - inputSchema={ + input_schema={ "type": "object", "properties": { "message": {"type": "string"}, @@ -34,7 +34,7 @@ async def list_tools(): # pragma: no cover }, annotations=ToolAnnotations( title="Echo Tool", - readOnlyHint=True, + read_only_hint=True, ), ) ] @@ -67,7 +67,8 @@ async def run_server(): async with anyio.create_task_group() as tg: async def handle_messages(): - async for message in server_session.incoming_messages: + # TODO(Marcelo): Drop the pragma once https://github.com/coveragepy/coveragepy/issues/1987 is fixed. + async for message in server_session.incoming_messages: # pragma: no cover await server._handle_message(message, server_session, {}, False) tg.start_soon(handle_messages) @@ -97,4 +98,4 @@ async def handle_messages(): assert tools_result.tools[0].name == "echo" assert tools_result.tools[0].annotations is not None assert tools_result.tools[0].annotations.title == "Echo Tool" - assert tools_result.tools[0].annotations.readOnlyHint is True + assert tools_result.tools[0].annotations.read_only_hint is True diff --git a/tests/server/test_read_resource.py b/tests/server/test_read_resource.py index c31b90c557..10349846cc 100644 --- a/tests/server/test_read_resource.py +++ b/tests/server/test_read_resource.py @@ -3,7 +3,6 @@ from tempfile import NamedTemporaryFile import pytest -from pydantic import AnyUrl, FileUrl import mcp.types as types from mcp.server.lowlevel.server import ReadResourceContents, Server @@ -27,7 +26,7 @@ async def test_read_resource_text(temp_file: Path): server = Server("test") @server.read_resource() - async def read_resource(uri: AnyUrl) -> Iterable[ReadResourceContents]: + async def read_resource(uri: str) -> Iterable[ReadResourceContents]: return [ReadResourceContents(content="Hello World", mime_type="text/plain")] # Get the handler directly from the server @@ -35,18 +34,18 @@ async def read_resource(uri: AnyUrl) -> Iterable[ReadResourceContents]: # Create a request request = types.ReadResourceRequest( - params=types.ReadResourceRequestParams(uri=FileUrl(temp_file.as_uri())), + params=types.ReadResourceRequestParams(uri=temp_file.as_uri()), ) # Call the handler result = await handler(request) - assert isinstance(result.root, types.ReadResourceResult) - assert len(result.root.contents) == 1 + assert isinstance(result, types.ReadResourceResult) + assert len(result.contents) == 1 - content = result.root.contents[0] + content = result.contents[0] assert isinstance(content, types.TextResourceContents) assert content.text == "Hello World" - assert content.mimeType == "text/plain" + assert content.mime_type == "text/plain" @pytest.mark.anyio @@ -54,7 +53,7 @@ async def test_read_resource_binary(temp_file: Path): server = Server("test") @server.read_resource() - async def read_resource(uri: AnyUrl) -> Iterable[ReadResourceContents]: + async def read_resource(uri: str) -> Iterable[ReadResourceContents]: return [ReadResourceContents(content=b"Hello World", mime_type="application/octet-stream")] # Get the handler directly from 
the server @@ -62,17 +61,17 @@ async def read_resource(uri: AnyUrl) -> Iterable[ReadResourceContents]: # Create a request request = types.ReadResourceRequest( - params=types.ReadResourceRequestParams(uri=FileUrl(temp_file.as_uri())), + params=types.ReadResourceRequestParams(uri=temp_file.as_uri()), ) # Call the handler result = await handler(request) - assert isinstance(result.root, types.ReadResourceResult) - assert len(result.root.contents) == 1 + assert isinstance(result, types.ReadResourceResult) + assert len(result.contents) == 1 - content = result.root.contents[0] + content = result.contents[0] assert isinstance(content, types.BlobResourceContents) - assert content.mimeType == "application/octet-stream" + assert content.mime_type == "application/octet-stream" @pytest.mark.anyio @@ -80,7 +79,7 @@ async def test_read_resource_default_mime(temp_file: Path): server = Server("test") @server.read_resource() - async def read_resource(uri: AnyUrl) -> Iterable[ReadResourceContents]: + async def read_resource(uri: str) -> Iterable[ReadResourceContents]: return [ ReadResourceContents( content="Hello World", @@ -93,15 +92,15 @@ async def read_resource(uri: AnyUrl) -> Iterable[ReadResourceContents]: # Create a request request = types.ReadResourceRequest( - params=types.ReadResourceRequestParams(uri=FileUrl(temp_file.as_uri())), + params=types.ReadResourceRequestParams(uri=temp_file.as_uri()), ) # Call the handler result = await handler(request) - assert isinstance(result.root, types.ReadResourceResult) - assert len(result.root.contents) == 1 + assert isinstance(result, types.ReadResourceResult) + assert len(result.contents) == 1 - content = result.root.contents[0] + content = result.contents[0] assert isinstance(content, types.TextResourceContents) assert content.text == "Hello World" - assert content.mimeType == "text/plain" + assert content.mime_type == "text/plain" diff --git a/tests/server/test_session.py b/tests/server/test_session.py index 34f9c6e28e..5de9882223 100644 --- a/tests/server/test_session.py +++ b/tests/server/test_session.py @@ -60,7 +60,7 @@ async def run_server(): raise message if isinstance(message, ClientNotification) and isinstance( - message.root, InitializedNotification + message, InitializedNotification ): # pragma: no branch received_initialized = True return @@ -101,7 +101,7 @@ async def list_prompts() -> list[Prompt]: # pragma: no cover return [] caps = server.get_capabilities(notification_options, experimental_capabilities) - assert caps.prompts == PromptsCapability(listChanged=False) + assert caps.prompts == PromptsCapability(list_changed=False) assert caps.resources is None assert caps.completions is None @@ -111,8 +111,8 @@ async def list_resources() -> list[Resource]: # pragma: no cover return [] caps = server.get_capabilities(notification_options, experimental_capabilities) - assert caps.prompts == PromptsCapability(listChanged=False) - assert caps.resources == ResourcesCapability(subscribe=False, listChanged=False) + assert caps.prompts == PromptsCapability(list_changed=False) + assert caps.resources == ResourcesCapability(subscribe=False, list_changed=False) assert caps.completions is None # Add a complete handler @@ -127,8 +127,8 @@ async def complete( # pragma: no cover ) caps = server.get_capabilities(notification_options, experimental_capabilities) - assert caps.prompts == PromptsCapability(listChanged=False) - assert caps.resources == ResourcesCapability(subscribe=False, listChanged=False) + assert caps.prompts == PromptsCapability(list_changed=False) + 
assert caps.resources == ResourcesCapability(subscribe=False, list_changed=False) assert caps.completions == CompletionsCapability() @@ -158,7 +158,7 @@ async def run_server(): raise message if isinstance(message, types.ClientNotification) and isinstance( - message.root, InitializedNotification + message, InitializedNotification ): # pragma: no branch received_initialized = True return @@ -169,41 +169,32 @@ async def mock_client(): # Send initialization request with older protocol version (2024-11-05) await client_to_server_send.send( SessionMessage( - types.JSONRPCMessage( - types.JSONRPCRequest( - jsonrpc="2.0", - id=1, - method="initialize", - params=types.InitializeRequestParams( - protocolVersion="2024-11-05", - capabilities=types.ClientCapabilities(), - clientInfo=types.Implementation(name="test-client", version="1.0.0"), - ).model_dump(by_alias=True, mode="json", exclude_none=True), - ) + types.JSONRPCRequest( + jsonrpc="2.0", + id=1, + method="initialize", + params=types.InitializeRequestParams( + protocol_version="2024-11-05", + capabilities=types.ClientCapabilities(), + client_info=types.Implementation(name="test-client", version="1.0.0"), + ).model_dump(by_alias=True, mode="json", exclude_none=True), ) ) ) # Wait for the initialize response init_response_message = await server_to_client_receive.receive() - assert isinstance(init_response_message.message.root, types.JSONRPCResponse) - result_data = init_response_message.message.root.result + assert isinstance(init_response_message.message, types.JSONRPCResponse) + result_data = init_response_message.message.result init_result = types.InitializeResult.model_validate(result_data) # Check that the server responded with the requested protocol version - received_protocol_version = init_result.protocolVersion + received_protocol_version = init_result.protocol_version assert received_protocol_version == "2024-11-05" # Send initialized notification await client_to_server_send.send( - SessionMessage( - types.JSONRPCMessage( - types.JSONRPCNotification( - jsonrpc="2.0", - method="notifications/initialized", - ) - ) - ) + SessionMessage(types.JSONRPCNotification(jsonrpc="2.0", method="notifications/initialized")) ) async with ( @@ -245,35 +236,25 @@ async def run_server(): # We should receive a ping request before initialization if isinstance(message, RequestResponder) and isinstance( - message.request.root, types.PingRequest + message.request, types.PingRequest ): # pragma: no branch # Respond to the ping with message: - await message.respond(types.ServerResult(types.EmptyResult())) + await message.respond(types.EmptyResult()) return async def mock_client(): nonlocal ping_response_received, ping_response_id # Send ping request before any initialization - await client_to_server_send.send( - SessionMessage( - types.JSONRPCMessage( - types.JSONRPCRequest( - jsonrpc="2.0", - id=42, - method="ping", - ) - ) - ) - ) + await client_to_server_send.send(SessionMessage(types.JSONRPCRequest(jsonrpc="2.0", id=42, method="ping"))) # Wait for the ping response ping_response_message = await server_to_client_receive.receive() - assert isinstance(ping_response_message.message.root, types.JSONRPCResponse) + assert isinstance(ping_response_message.message, types.JSONRPCResponse) ping_response_received = True - ping_response_id = ping_response_message.message.root.id + ping_response_id = ping_response_message.message.id async with ( client_to_server_send, @@ -312,17 +293,17 @@ async def test_create_message_tool_result_validation(): ) as session: # Set up 
client params with sampling.tools capability for the test session._client_params = types.InitializeRequestParams( - protocolVersion=types.LATEST_PROTOCOL_VERSION, + protocol_version=types.LATEST_PROTOCOL_VERSION, capabilities=types.ClientCapabilities( sampling=types.SamplingCapability(tools=types.SamplingToolsCapability()) ), - clientInfo=types.Implementation(name="test", version="1.0"), + client_info=types.Implementation(name="test", version="1.0"), ) - tool = types.Tool(name="test_tool", inputSchema={"type": "object"}) + tool = types.Tool(name="test_tool", input_schema={"type": "object"}) text = types.TextContent(type="text", text="hello") tool_use = types.ToolUseContent(type="tool_use", id="call_1", name="test_tool", input={}) - tool_result = types.ToolResultContent(type="tool_result", toolUseId="call_1", content=[]) + tool_result = types.ToolResultContent(type="tool_result", tool_use_id="call_1", content=[]) # Case 1: tool_result mixed with other content with pytest.raises(ValueError, match="only tool_result content"): @@ -363,7 +344,7 @@ async def test_create_message_tool_result_validation(): types.SamplingMessage(role="assistant", content=tool_use), types.SamplingMessage( role="user", - content=types.ToolResultContent(type="tool_result", toolUseId="wrong_id", content=[]), + content=types.ToolResultContent(type="tool_result", tool_use_id="wrong_id", content=[]), ), ], max_tokens=100, @@ -410,11 +391,9 @@ async def test_create_message_tool_result_validation(): # Case 8: empty messages list - skips validation entirely # Covers the `if messages:` branch (line 280->302) - with anyio.move_on_after(0.01): - await session.create_message( - messages=[], - max_tokens=100, - ) + # TODO(Marcelo): Drop the pragma once https://github.com/coveragepy/coveragepy/issues/1987 is fixed. 
+ with anyio.move_on_after(0.01): # pragma: no cover + await session.create_message(messages=[], max_tokens=100) @pytest.mark.anyio @@ -440,12 +419,12 @@ async def test_create_message_without_tools_capability(): ) as session: # Set up client params WITHOUT sampling.tools capability session._client_params = types.InitializeRequestParams( - protocolVersion=types.LATEST_PROTOCOL_VERSION, + protocol_version=types.LATEST_PROTOCOL_VERSION, capabilities=types.ClientCapabilities(sampling=types.SamplingCapability()), - clientInfo=types.Implementation(name="test", version="1.0"), + client_info=types.Implementation(name="test", version="1.0"), ) - tool = types.Tool(name="test_tool", inputSchema={"type": "object"}) + tool = types.Tool(name="test_tool", input_schema={"type": "object"}) text = types.TextContent(type="text", text="hello") # Should raise McpError when tools are provided but client lacks capability @@ -495,22 +474,14 @@ async def mock_client(): # Try to send a non-ping request before initialization await client_to_server_send.send( - SessionMessage( - types.JSONRPCMessage( - types.JSONRPCRequest( - jsonrpc="2.0", - id=1, - method="prompts/list", - ) - ) - ) + SessionMessage(types.JSONRPCRequest(jsonrpc="2.0", id=1, method="prompts/list")) ) # Wait for the error response error_message = await server_to_client_receive.receive() - if isinstance(error_message.message.root, types.JSONRPCError): # pragma: no branch + if isinstance(error_message.message, types.JSONRPCError): # pragma: no branch error_response_received = True - error_code = error_message.message.root.error.code + error_code = error_message.message.error.code async with ( client_to_server_send, diff --git a/tests/server/test_session_race_condition.py b/tests/server/test_session_race_condition.py index b5388167ad..18c6b5fc6a 100644 --- a/tests/server/test_session_race_condition.py +++ b/tests/server/test_session_race_condition.py @@ -1,5 +1,4 @@ -""" -Test for race condition fix in initialization flow. +"""Test for race condition fix in initialization flow. This test verifies that requests can be processed immediately after responding to InitializeRequest, without waiting for InitializedNotification. @@ -20,8 +19,7 @@ @pytest.mark.anyio async def test_request_immediately_after_initialize_response(): - """ - Test that requests are accepted immediately after initialize response. + """Test that requests are accepted immediately after initialize response. This reproduces the race condition in stateful HTTP mode where: 1. 
Client sends InitializeRequest @@ -49,7 +47,7 @@ async def run_server(): server_name="test-server", server_version="1.0.0", capabilities=ServerCapabilities( - tools=types.ToolsCapability(listChanged=False), + tools=types.ToolsCapability(list_changed=False), ), ), ) as server_session: @@ -59,27 +57,25 @@ async def run_server(): # Handle tools/list request if isinstance(message, RequestResponder): - if isinstance(message.request.root, types.ListToolsRequest): # pragma: no branch + if isinstance(message.request, types.ListToolsRequest): # pragma: no branch tools_list_success = True # Respond with a tool list with message: await message.respond( - types.ServerResult( - types.ListToolsResult( - tools=[ - Tool( - name="example_tool", - description="An example tool", - inputSchema={"type": "object", "properties": {}}, - ) - ] - ) + types.ListToolsResult( + tools=[ + Tool( + name="example_tool", + description="An example tool", + input_schema={"type": "object", "properties": {}}, + ) + ] ) ) # Handle InitializedNotification if isinstance(message, types.ClientNotification): - if isinstance(message.root, types.InitializedNotification): # pragma: no branch + if isinstance(message, types.InitializedNotification): # pragma: no branch # Done - exit gracefully return @@ -89,54 +85,35 @@ async def mock_client(): # Step 1: Send InitializeRequest await client_to_server_send.send( SessionMessage( - types.JSONRPCMessage( - types.JSONRPCRequest( - jsonrpc="2.0", - id=1, - method="initialize", - params=types.InitializeRequestParams( - protocolVersion=types.LATEST_PROTOCOL_VERSION, - capabilities=types.ClientCapabilities(), - clientInfo=types.Implementation(name="test-client", version="1.0.0"), - ).model_dump(by_alias=True, mode="json", exclude_none=True), - ) + types.JSONRPCRequest( + jsonrpc="2.0", + id=1, + method="initialize", + params=types.InitializeRequestParams( + protocol_version=types.LATEST_PROTOCOL_VERSION, + capabilities=types.ClientCapabilities(), + client_info=types.Implementation(name="test-client", version="1.0.0"), + ).model_dump(by_alias=True, mode="json", exclude_none=True), ) ) ) # Step 2: Wait for InitializeResult init_msg = await server_to_client_receive.receive() - assert isinstance(init_msg.message.root, types.JSONRPCResponse) + assert isinstance(init_msg.message, types.JSONRPCResponse) # Step 3: Immediately send tools/list BEFORE InitializedNotification # This is the race condition scenario - await client_to_server_send.send( - SessionMessage( - types.JSONRPCMessage( - types.JSONRPCRequest( - jsonrpc="2.0", - id=2, - method="tools/list", - ) - ) - ) - ) + await client_to_server_send.send(SessionMessage(types.JSONRPCRequest(jsonrpc="2.0", id=2, method="tools/list"))) # Step 4: Check the response tools_msg = await server_to_client_receive.receive() - if isinstance(tools_msg.message.root, types.JSONRPCError): # pragma: no cover - error_received = tools_msg.message.root.error.message + if isinstance(tools_msg.message, types.JSONRPCError): # pragma: no cover + error_received = tools_msg.message.error.message # Step 5: Send InitializedNotification await client_to_server_send.send( - SessionMessage( - types.JSONRPCMessage( - types.JSONRPCNotification( - jsonrpc="2.0", - method="notifications/initialized", - ) - ) - ) + SessionMessage(types.JSONRPCNotification(jsonrpc="2.0", method="notifications/initialized")) ) async with ( diff --git a/tests/server/test_stateless_mode.py b/tests/server/test_stateless_mode.py new file mode 100644 index 0000000000..2a40d60982 --- /dev/null +++ 
b/tests/server/test_stateless_mode.py @@ -0,0 +1,177 @@ +"""Tests for stateless HTTP mode limitations. + +Stateless HTTP mode does not support server-to-client requests because there +is no persistent connection for bidirectional communication. These tests verify +that appropriate errors are raised when attempting to use unsupported features. + +See: https://github.com/modelcontextprotocol/python-sdk/issues/1097 +""" + +from collections.abc import AsyncGenerator +from typing import Any + +import anyio +import pytest + +import mcp.types as types +from mcp.server.models import InitializationOptions +from mcp.server.session import ServerSession +from mcp.shared.exceptions import StatelessModeNotSupported +from mcp.shared.message import SessionMessage +from mcp.types import ServerCapabilities + + +@pytest.fixture +async def stateless_session() -> AsyncGenerator[ServerSession, None]: + """Create a stateless ServerSession for testing.""" + server_to_client_send, server_to_client_receive = anyio.create_memory_object_stream[SessionMessage](1) + client_to_server_send, client_to_server_receive = anyio.create_memory_object_stream[SessionMessage | Exception](1) + + init_options = InitializationOptions( + server_name="test", + server_version="0.1.0", + capabilities=ServerCapabilities(), + ) + + async with ( + client_to_server_send, + client_to_server_receive, + server_to_client_send, + server_to_client_receive, + ): + async with ServerSession( + client_to_server_receive, + server_to_client_send, + init_options, + stateless=True, + ) as session: + yield session + + +@pytest.mark.anyio +async def test_list_roots_fails_in_stateless_mode(stateless_session: ServerSession): + """Test that list_roots raises StatelessModeNotSupported in stateless mode.""" + with pytest.raises(StatelessModeNotSupported, match="list_roots"): + await stateless_session.list_roots() + + +@pytest.mark.anyio +async def test_create_message_fails_in_stateless_mode(stateless_session: ServerSession): + """Test that create_message raises StatelessModeNotSupported in stateless mode.""" + with pytest.raises(StatelessModeNotSupported, match="sampling"): + await stateless_session.create_message( + messages=[ + types.SamplingMessage( + role="user", + content=types.TextContent(type="text", text="hello"), + ) + ], + max_tokens=100, + ) + + +@pytest.mark.anyio +async def test_elicit_form_fails_in_stateless_mode(stateless_session: ServerSession): + """Test that elicit_form raises StatelessModeNotSupported in stateless mode.""" + with pytest.raises(StatelessModeNotSupported, match="elicitation"): + await stateless_session.elicit_form( + message="Please provide input", + requested_schema={"type": "object", "properties": {}}, + ) + + +@pytest.mark.anyio +async def test_elicit_url_fails_in_stateless_mode(stateless_session: ServerSession): + """Test that elicit_url raises StatelessModeNotSupported in stateless mode.""" + with pytest.raises(StatelessModeNotSupported, match="elicitation"): + await stateless_session.elicit_url( + message="Please authenticate", + url="https://example.com/auth", + elicitation_id="test-123", + ) + + +@pytest.mark.anyio +async def test_elicit_deprecated_fails_in_stateless_mode(stateless_session: ServerSession): + """Test that the deprecated elicit method also fails in stateless mode.""" + with pytest.raises(StatelessModeNotSupported, match="elicitation"): + await stateless_session.elicit( + message="Please provide input", + requested_schema={"type": "object", "properties": {}}, + ) + + +@pytest.mark.anyio +async def 
test_stateless_error_message_is_actionable(stateless_session: ServerSession): + """Test that the error message provides actionable guidance.""" + with pytest.raises(StatelessModeNotSupported) as exc_info: + await stateless_session.list_roots() + + error_message = str(exc_info.value) + # Should mention it's stateless mode + assert "stateless HTTP mode" in error_message + # Should explain why it doesn't work + assert "server-to-client requests" in error_message + # Should tell user how to fix it + assert "stateless_http=False" in error_message + + +@pytest.mark.anyio +async def test_exception_has_method_attribute(stateless_session: ServerSession): + """Test that the exception has a method attribute for programmatic access.""" + with pytest.raises(StatelessModeNotSupported) as exc_info: + await stateless_session.list_roots() + + assert exc_info.value.method == "list_roots" + + +@pytest.fixture +async def stateful_session() -> AsyncGenerator[ServerSession, None]: + """Create a stateful ServerSession for testing.""" + server_to_client_send, server_to_client_receive = anyio.create_memory_object_stream[SessionMessage](1) + client_to_server_send, client_to_server_receive = anyio.create_memory_object_stream[SessionMessage | Exception](1) + + init_options = InitializationOptions( + server_name="test", + server_version="0.1.0", + capabilities=ServerCapabilities(), + ) + + async with ( + client_to_server_send, + client_to_server_receive, + server_to_client_send, + server_to_client_receive, + ): + async with ServerSession( + client_to_server_receive, + server_to_client_send, + init_options, + stateless=False, + ) as session: + yield session + + +@pytest.mark.anyio +async def test_stateful_mode_does_not_raise_stateless_error( + stateful_session: ServerSession, monkeypatch: pytest.MonkeyPatch +): + """Test that StatelessModeNotSupported is not raised in stateful mode. + + We mock send_request to avoid blocking on I/O while still verifying + that the stateless check passes. 
+ """ + send_request_called = False + + async def mock_send_request(*_: Any, **__: Any) -> types.ListRootsResult: + nonlocal send_request_called + send_request_called = True + return types.ListRootsResult(roots=[]) + + monkeypatch.setattr(stateful_session, "send_request", mock_send_request) + + # This should NOT raise StatelessModeNotSupported + result = await stateful_session.list_roots() + + assert send_request_called + assert isinstance(result, types.ListRootsResult) diff --git a/tests/server/test_stdio.py b/tests/server/test_stdio.py index 13cdde3d61..9a7ddaab40 100644 --- a/tests/server/test_stdio.py +++ b/tests/server/test_stdio.py @@ -5,7 +5,7 @@ from mcp.server.stdio import stdio_server from mcp.shared.message import SessionMessage -from mcp.types import JSONRPCMessage, JSONRPCRequest, JSONRPCResponse +from mcp.types import JSONRPCMessage, JSONRPCRequest, JSONRPCResponse, jsonrpc_message_adapter @pytest.mark.anyio @@ -14,8 +14,8 @@ async def test_stdio_server(): stdout = io.StringIO() messages = [ - JSONRPCMessage(root=JSONRPCRequest(jsonrpc="2.0", id=1, method="ping")), - JSONRPCMessage(root=JSONRPCResponse(jsonrpc="2.0", id=2, result={})), + JSONRPCRequest(jsonrpc="2.0", id=1, method="ping"), + JSONRPCResponse(jsonrpc="2.0", id=2, result={}), ] for message in messages: @@ -37,13 +37,13 @@ async def test_stdio_server(): # Verify received messages assert len(received_messages) == 2 - assert received_messages[0] == JSONRPCMessage(root=JSONRPCRequest(jsonrpc="2.0", id=1, method="ping")) - assert received_messages[1] == JSONRPCMessage(root=JSONRPCResponse(jsonrpc="2.0", id=2, result={})) + assert received_messages[0] == JSONRPCRequest(jsonrpc="2.0", id=1, method="ping") + assert received_messages[1] == JSONRPCResponse(jsonrpc="2.0", id=2, result={}) # Test sending responses from the server responses = [ - JSONRPCMessage(root=JSONRPCRequest(jsonrpc="2.0", id=3, method="ping")), - JSONRPCMessage(root=JSONRPCResponse(jsonrpc="2.0", id=4, result={})), + JSONRPCRequest(jsonrpc="2.0", id=3, method="ping"), + JSONRPCResponse(jsonrpc="2.0", id=4, result={}), ] async with write_stream: @@ -55,7 +55,7 @@ async def test_stdio_server(): output_lines = stdout.readlines() assert len(output_lines) == 2 - received_responses = [JSONRPCMessage.model_validate_json(line.strip()) for line in output_lines] + received_responses = [jsonrpc_message_adapter.validate_json(line.strip()) for line in output_lines] assert len(received_responses) == 2 - assert received_responses[0] == JSONRPCMessage(root=JSONRPCRequest(jsonrpc="2.0", id=3, method="ping")) - assert received_responses[1] == JSONRPCMessage(root=JSONRPCResponse(jsonrpc="2.0", id=4, result={})) + assert received_responses[0] == JSONRPCRequest(jsonrpc="2.0", id=3, method="ping") + assert received_responses[1] == JSONRPCResponse(jsonrpc="2.0", id=4, result={}) diff --git a/tests/server/test_streamable_http_manager.py b/tests/server/test_streamable_http_manager.py index 6fcf08aa00..af1b23619f 100644 --- a/tests/server/test_streamable_http_manager.py +++ b/tests/server/test_streamable_http_manager.py @@ -1,5 +1,6 @@ """Tests for StreamableHTTPSessionManager.""" +import json from typing import Any from unittest.mock import AsyncMock, patch @@ -11,6 +12,7 @@ from mcp.server.lowlevel import Server from mcp.server.streamable_http import MCP_SESSION_ID_HEADER, StreamableHTTPServerTransport from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from mcp.types import INVALID_REQUEST @pytest.mark.anyio @@ -262,3 +264,52 @@ async def 
mock_receive(): # Verify internal state is cleaned up assert len(transport._request_streams) == 0, "Transport should have no active request streams" + + +@pytest.mark.anyio +async def test_unknown_session_id_returns_404(): + """Test that requests with unknown session IDs return HTTP 404 per MCP spec.""" + app = Server("test-unknown-session") + manager = StreamableHTTPSessionManager(app=app) + + async with manager.run(): + sent_messages: list[Message] = [] + response_body = b"" + + async def mock_send(message: Message): + nonlocal response_body + sent_messages.append(message) + if message["type"] == "http.response.body": + response_body += message.get("body", b"") + + # Request with a non-existent session ID + scope = { + "type": "http", + "method": "POST", + "path": "/mcp", + "headers": [ + (b"content-type", b"application/json"), + (b"accept", b"application/json, text/event-stream"), + (b"mcp-session-id", b"non-existent-session-id"), + ], + } + + async def mock_receive(): + return {"type": "http.request", "body": b"{}", "more_body": False} # pragma: no cover + + await manager.handle_request(scope, mock_receive, mock_send) + + # Find the response start message + response_start = next( + (msg for msg in sent_messages if msg["type"] == "http.response.start"), + None, + ) + assert response_start is not None, "Should have sent a response" + assert response_start["status"] == 404, "Should return HTTP 404 for unknown session ID" + + # Verify JSON-RPC error format + error_data = json.loads(response_body) + assert error_data["jsonrpc"] == "2.0" + assert error_data["id"] == "server-error" + assert error_data["error"]["code"] == INVALID_REQUEST + assert error_data["error"]["message"] == "Session not found" diff --git a/tests/server/test_validation.py b/tests/server/test_validation.py index 56044460df..11c61d93b7 100644 --- a/tests/server/test_validation.py +++ b/tests/server/test_validation.py @@ -53,7 +53,7 @@ def test_no_error_when_tools_none(self) -> None: def test_raises_when_tools_provided_but_no_capability(self) -> None: """Raises McpError when tools provided but client doesn't support.""" - tool = Tool(name="test", inputSchema={"type": "object"}) + tool = Tool(name="test", input_schema={"type": "object"}) with pytest.raises(McpError) as exc_info: validate_sampling_tools(None, [tool], None) assert "sampling tools capability" in str(exc_info.value) @@ -67,7 +67,7 @@ def test_raises_when_tool_choice_provided_but_no_capability(self) -> None: def test_no_error_when_capability_present(self) -> None: """No error when client has sampling.tools capability.""" caps = ClientCapabilities(sampling=SamplingCapability(tools=SamplingToolsCapability())) - tool = Tool(name="test", inputSchema={"type": "object"}) + tool = Tool(name="test", input_schema={"type": "object"}) validate_sampling_tools(caps, [tool], ToolChoice(mode="auto")) # Should not raise @@ -92,7 +92,7 @@ def test_raises_when_tool_result_mixed_with_other_content(self) -> None: SamplingMessage( role="user", content=[ - ToolResultContent(type="tool_result", toolUseId="123"), + ToolResultContent(type="tool_result", tool_use_id="123"), TextContent(type="text", text="also this"), ], ), @@ -105,7 +105,7 @@ def test_raises_when_tool_result_without_previous_tool_use(self) -> None: messages = [ SamplingMessage( role="user", - content=ToolResultContent(type="tool_result", toolUseId="123"), + content=ToolResultContent(type="tool_result", tool_use_id="123"), ), ] with pytest.raises(ValueError, match="previous message containing tool_use"): @@ -120,7 +120,7 @@ 
def test_raises_when_tool_result_ids_dont_match_tool_use(self) -> None: ), SamplingMessage( role="user", - content=ToolResultContent(type="tool_result", toolUseId="tool-2"), + content=ToolResultContent(type="tool_result", tool_use_id="tool-2"), ), ] with pytest.raises(ValueError, match="do not match"): @@ -135,7 +135,7 @@ def test_no_error_when_tool_result_matches_tool_use(self) -> None: ), SamplingMessage( role="user", - content=ToolResultContent(type="tool_result", toolUseId="tool-1"), + content=ToolResultContent(type="tool_result", tool_use_id="tool-1"), ), ] validate_tool_use_result_messages(messages) # Should not raise diff --git a/tests/shared/__init__.py b/tests/shared/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/shared/test_auth_utils.py b/tests/shared/test_auth_utils.py index 5b12dc6775..d658385cb9 100644 --- a/tests/shared/test_auth_utils.py +++ b/tests/shared/test_auth_utils.py @@ -1,5 +1,7 @@ """Tests for OAuth 2.0 Resource Indicators utilities.""" +from pydantic import HttpUrl + from mcp.shared.auth_utils import check_resource_allowed, resource_url_from_server_url @@ -37,8 +39,6 @@ def test_lowercase_scheme_and_host(self): def test_handles_pydantic_urls(self): """Should handle Pydantic URL types.""" - from pydantic import HttpUrl - url = HttpUrl("https://example.com/path") assert resource_url_from_server_url(url) == "https://example.com/path" diff --git a/tests/shared/test_exceptions.py b/tests/shared/test_exceptions.py index 8845dfe781..1a42e7aefe 100644 --- a/tests/shared/test_exceptions.py +++ b/tests/shared/test_exceptions.py @@ -15,14 +15,14 @@ def test_create_with_single_elicitation(self) -> None: mode="url", message="Auth required", url="https://example.com/auth", - elicitationId="test-123", + elicitation_id="test-123", ) error = UrlElicitationRequiredError([elicitation]) assert error.error.code == URL_ELICITATION_REQUIRED assert error.error.message == "URL elicitation required" assert len(error.elicitations) == 1 - assert error.elicitations[0].elicitationId == "test-123" + assert error.elicitations[0].elicitation_id == "test-123" def test_create_with_multiple_elicitations(self) -> None: """Test creating error with multiple elicitations uses plural message.""" @@ -31,13 +31,13 @@ def test_create_with_multiple_elicitations(self) -> None: mode="url", message="Auth 1", url="https://example.com/auth1", - elicitationId="test-1", + elicitation_id="test-1", ), ElicitRequestURLParams( mode="url", message="Auth 2", url="https://example.com/auth2", - elicitationId="test-2", + elicitation_id="test-2", ), ] error = UrlElicitationRequiredError(elicitations) @@ -51,7 +51,7 @@ def test_custom_message(self) -> None: mode="url", message="Auth required", url="https://example.com/auth", - elicitationId="test-123", + elicitation_id="test-123", ) error = UrlElicitationRequiredError([elicitation], message="Custom message") @@ -77,7 +77,7 @@ def test_from_error_data(self) -> None: error = UrlElicitationRequiredError.from_error(error_data) assert len(error.elicitations) == 1 - assert error.elicitations[0].elicitationId == "test-123" + assert error.elicitations[0].elicitation_id == "test-123" assert error.elicitations[0].url == "https://example.com/auth" def test_from_error_data_wrong_code(self) -> None: @@ -99,7 +99,7 @@ def test_serialization_roundtrip(self) -> None: mode="url", message="Auth required", url="https://example.com/auth", - elicitationId="test-123", + elicitation_id="test-123", ) ] ) @@ -110,7 +110,7 @@ def test_serialization_roundtrip(self) -> 
None: # Reconstruct reconstructed = UrlElicitationRequiredError.from_error(error_data) - assert reconstructed.elicitations[0].elicitationId == original.elicitations[0].elicitationId + assert reconstructed.elicitations[0].elicitation_id == original.elicitations[0].elicitation_id assert reconstructed.elicitations[0].url == original.elicitations[0].url assert reconstructed.elicitations[0].message == original.elicitations[0].message @@ -120,7 +120,7 @@ def test_error_data_contains_elicitations(self) -> None: mode="url", message="Please authenticate", url="https://example.com/oauth", - elicitationId="oauth-flow-1", + elicitation_id="oauth-flow-1", ) error = UrlElicitationRequiredError([elicitation]) @@ -138,7 +138,7 @@ def test_inherits_from_mcp_error(self) -> None: mode="url", message="Auth required", url="https://example.com/auth", - elicitationId="test-123", + elicitation_id="test-123", ) error = UrlElicitationRequiredError([elicitation]) @@ -151,7 +151,7 @@ def test_exception_message(self) -> None: mode="url", message="Auth required", url="https://example.com/auth", - elicitationId="test-123", + elicitation_id="test-123", ) error = UrlElicitationRequiredError([elicitation]) diff --git a/tests/shared/test_memory.py b/tests/shared/test_memory.py index ca4368e9f8..31238b9ffd 100644 --- a/tests/shared/test_memory.py +++ b/tests/shared/test_memory.py @@ -1,10 +1,7 @@ import pytest -from pydantic import AnyUrl -from typing_extensions import AsyncGenerator -from mcp.client.session import ClientSession +from mcp import Client from mcp.server import Server -from mcp.shared.memory import create_connected_server_and_client_session from mcp.types import EmptyResult, Resource @@ -16,7 +13,7 @@ def mcp_server() -> Server: async def handle_list_resources(): # pragma: no cover return [ Resource( - uri=AnyUrl("memory://test"), + uri="memory://test", name="Test Resource", description="A test resource", ) @@ -25,18 +22,9 @@ async def handle_list_resources(): # pragma: no cover return server -@pytest.fixture -async def client_connected_to_server( - mcp_server: Server, -) -> AsyncGenerator[ClientSession, None]: - async with create_connected_server_and_client_session(mcp_server) as client_session: - yield client_session - - @pytest.mark.anyio -async def test_memory_server_and_client_connection( - client_connected_to_server: ClientSession, -): +async def test_memory_server_and_client_connection(mcp_server: Server): """Shows how a client and server can communicate over memory streams.""" - response = await client_connected_to_server.send_ping() - assert isinstance(response, EmptyResult) + async with Client(mcp_server) as client: + response = await client.send_ping() + assert isinstance(response, EmptyResult) diff --git a/tests/shared/test_progress_notifications.py b/tests/shared/test_progress_notifications.py index 1552711d2e..d65622822f 100644 --- a/tests/shared/test_progress_notifications.py +++ b/tests/shared/test_progress_notifications.py @@ -5,15 +5,16 @@ import pytest import mcp.types as types +from mcp import Client from mcp.client.session import ClientSession from mcp.server import Server from mcp.server.lowlevel import NotificationOptions from mcp.server.models import InitializationOptions from mcp.server.session import ServerSession from mcp.shared.context import RequestContext -from mcp.shared.memory import create_connected_server_and_client_session +from mcp.shared.message import SessionMessage from mcp.shared.progress import progress -from mcp.shared.session import BaseSession, RequestResponder, 
SessionMessage +from mcp.shared.session import BaseSession, RequestResponder @pytest.mark.anyio @@ -79,7 +80,7 @@ async def handle_list_tools() -> list[types.Tool]: types.Tool( name="test_tool", description="A tool that sends progress notifications None: + def mock_log_exception(msg: str, *args: Any, **kwargs: Any) -> None: logged_errors.append(msg % args if args else msg) # Create a progress callback that raises an exception @@ -362,22 +363,17 @@ async def handle_list_tools() -> list[types.Tool]: types.Tool( name="progress_tool", description="A tool that sends progress notifications", - inputSchema={}, + input_schema={}, ) ] # Test with mocked logging - with patch("mcp.shared.session.logging.error", side_effect=mock_log_error): - async with create_connected_server_and_client_session(server) as client_session: - # Send a request with a failing progress callback - result = await client_session.send_request( - types.ClientRequest( - types.CallToolRequest( - method="tools/call", - params=types.CallToolRequestParams(name="progress_tool", arguments={}), - ) - ), - types.CallToolResult, + with patch("mcp.shared.session.logging.exception", side_effect=mock_log_exception): + async with Client(server) as client: + # Call tool with a failing progress callback + result = await client.call_tool( + "progress_tool", + arguments={}, progress_callback=failing_progress_callback, ) diff --git a/tests/shared/test_session.py b/tests/shared/test_session.py index e609397e5e..89fe18ebbc 100644 --- a/tests/shared/test_session.py +++ b/tests/shared/test_session.py @@ -1,101 +1,78 @@ -from collections.abc import AsyncGenerator from typing import Any import anyio import pytest import mcp.types as types +from mcp import Client from mcp.client.session import ClientSession from mcp.server.lowlevel.server import Server from mcp.shared.exceptions import McpError -from mcp.shared.memory import create_client_server_memory_streams, create_connected_server_and_client_session +from mcp.shared.memory import create_client_server_memory_streams from mcp.shared.message import SessionMessage from mcp.types import ( CancelledNotification, CancelledNotificationParams, - ClientNotification, - ClientRequest, EmptyResult, ErrorData, JSONRPCError, - JSONRPCMessage, JSONRPCRequest, JSONRPCResponse, TextContent, ) -@pytest.fixture -def mcp_server() -> Server: - return Server(name="test server") - - -@pytest.fixture -async def client_connected_to_server( - mcp_server: Server, -) -> AsyncGenerator[ClientSession, None]: - async with create_connected_server_and_client_session(mcp_server) as client_session: - yield client_session - - @pytest.mark.anyio -async def test_in_flight_requests_cleared_after_completion( - client_connected_to_server: ClientSession, -): +async def test_in_flight_requests_cleared_after_completion(): """Verify that _in_flight is empty after all requests complete.""" - # Send a request and wait for response - response = await client_connected_to_server.send_ping() - assert isinstance(response, EmptyResult) + server = Server(name="test server") + async with Client(server) as client: + # Send a request and wait for response + response = await client.send_ping() + assert isinstance(response, EmptyResult) - # Verify _in_flight is empty - assert len(client_connected_to_server._in_flight) == 0 + # Verify _in_flight is empty + assert len(client.session._in_flight) == 0 @pytest.mark.anyio async def test_request_cancellation(): """Test that requests can be cancelled while in-flight.""" - # The tool is already registered in the 
fixture - ev_tool_called = anyio.Event() ev_cancelled = anyio.Event() request_id = None - # Start the request in a separate task so we can cancel it - def make_server() -> Server: - server = Server(name="TestSessionServer") - - # Register the tool handler - @server.call_tool() - async def handle_call_tool(name: str, arguments: dict[str, Any] | None) -> list[TextContent]: - nonlocal request_id, ev_tool_called - if name == "slow_tool": - request_id = server.request_context.request_id - ev_tool_called.set() - await anyio.sleep(10) # Long enough to ensure we can cancel - return [] # pragma: no cover - raise ValueError(f"Unknown tool: {name}") # pragma: no cover - - # Register the tool so it shows up in list_tools - @server.list_tools() - async def handle_list_tools() -> list[types.Tool]: - return [ - types.Tool( - name="slow_tool", - description="A slow tool that takes 10 seconds to complete", - inputSchema={}, - ) - ] - - return server + # Create a server with a slow tool + server = Server(name="TestSessionServer") + + # Register the tool handler + @server.call_tool() + async def handle_call_tool(name: str, arguments: dict[str, Any] | None) -> list[TextContent]: + nonlocal request_id, ev_tool_called + if name == "slow_tool": + request_id = server.request_context.request_id + ev_tool_called.set() + await anyio.sleep(10) # Long enough to ensure we can cancel + return [] # pragma: no cover + raise ValueError(f"Unknown tool: {name}") # pragma: no cover + + # Register the tool so it shows up in list_tools + @server.list_tools() + async def handle_list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="slow_tool", + description="A slow tool that takes 10 seconds to complete", + input_schema={}, + ) + ] - async def make_request(client_session: ClientSession): + async def make_request(client: Client): nonlocal ev_cancelled try: - await client_session.send_request( - ClientRequest( - types.CallToolRequest( - params=types.CallToolRequestParams(name="slow_tool", arguments={}), - ) + await client.session.send_request( + types.CallToolRequest( + params=types.CallToolRequestParams(name="slow_tool", arguments={}), ), types.CallToolResult, ) @@ -105,9 +82,9 @@ async def make_request(client_session: ClientSession): assert "Request cancelled" in str(e) ev_cancelled.set() - async with create_connected_server_and_client_session(make_server()) as client_session: - async with anyio.create_task_group() as tg: - tg.start_soon(make_request, client_session) + async with Client(server) as client: + async with anyio.create_task_group() as tg: # pragma: no branch + tg.start_soon(make_request, client) # Wait for the request to be in-flight with anyio.fail_after(1): # Timeout after 1 second @@ -115,23 +92,19 @@ async def make_request(client_session: ClientSession): # Send cancellation notification assert request_id is not None - await client_session.send_notification( - ClientNotification( - CancelledNotification( - params=CancelledNotificationParams(requestId=request_id), - ) - ) + await client.session.send_notification( + CancelledNotification(params=CancelledNotificationParams(request_id=request_id)) ) # Give cancellation time to process - with anyio.fail_after(1): + # TODO(Marcelo): Drop the pragma once https://github.com/coveragepy/coveragepy/issues/1987 is fixed. 
+ with anyio.fail_after(1): # pragma: no cover await ev_cancelled.wait() @pytest.mark.anyio async def test_response_id_type_mismatch_string_to_int(): - """ - Test that responses with string IDs are correctly matched to requests sent with + """Test that responses with string IDs are correctly matched to requests sent with integer IDs. This handles the case where a server returns "id": "0" (string) but the client @@ -148,7 +121,7 @@ async def mock_server(): """Receive a request and respond with a string ID instead of integer.""" message = await server_read.receive() assert isinstance(message, SessionMessage) - root = message.message.root + root = message.message assert isinstance(root, JSONRPCRequest) # Get the original request ID (which is an integer) request_id = root.id @@ -160,7 +133,7 @@ async def mock_server(): id=str(request_id), # Convert to string to simulate mismatch result={}, ) - await server_write.send(SessionMessage(message=JSONRPCMessage(response))) + await server_write.send(SessionMessage(message=response)) async def make_request(client_session: ClientSession): nonlocal result_holder @@ -176,7 +149,8 @@ async def make_request(client_session: ClientSession): tg.start_soon(mock_server) tg.start_soon(make_request, client_session) - with anyio.fail_after(2): + # TODO(Marcelo): Drop the pragma once https://github.com/coveragepy/coveragepy/issues/1987 is fixed. + with anyio.fail_after(2): # pragma: no cover await ev_response_received.wait() assert len(result_holder) == 1 @@ -185,8 +159,7 @@ async def make_request(client_session: ClientSession): @pytest.mark.anyio async def test_error_response_id_type_mismatch_string_to_int(): - """ - Test that error responses with string IDs are correctly matched to requests + """Test that error responses with string IDs are correctly matched to requests sent with integer IDs. This handles the case where a server returns an error with "id": "0" (string) @@ -203,7 +176,7 @@ async def mock_server(): """Receive a request and respond with an error using a string ID.""" message = await server_read.receive() assert isinstance(message, SessionMessage) - root = message.message.root + root = message.message assert isinstance(root, JSONRPCRequest) request_id = root.id assert isinstance(request_id, int) @@ -214,7 +187,7 @@ async def mock_server(): id=str(request_id), # Convert to string to simulate mismatch error=ErrorData(code=-32600, message="Test error"), ) - await server_write.send(SessionMessage(message=JSONRPCMessage(error_response))) + await server_write.send(SessionMessage(message=error_response)) async def make_request(client_session: ClientSession): nonlocal error_holder @@ -232,7 +205,8 @@ async def make_request(client_session: ClientSession): tg.start_soon(mock_server) tg.start_soon(make_request, client_session) - with anyio.fail_after(2): + # TODO(Marcelo): Drop the pragma once https://github.com/coveragepy/coveragepy/issues/1987 is fixed. + with anyio.fail_after(2): # pragma: no cover await ev_error_received.wait() assert len(error_holder) == 1 @@ -241,8 +215,7 @@ async def make_request(client_session: ClientSession): @pytest.mark.anyio async def test_response_id_non_numeric_string_no_match(): - """ - Test that responses with non-numeric string IDs don't incorrectly match + """Test that responses with non-numeric string IDs don't incorrectly match integer request IDs. 
If a server returns "id": "abc" (non-numeric string), it should not match @@ -265,17 +238,15 @@ async def mock_server(): id="not_a_number", # Non-numeric string result={}, ) - await server_write.send(SessionMessage(message=JSONRPCMessage(response))) + await server_write.send(SessionMessage(message=response)) async def make_request(client_session: ClientSession): try: # Use a short timeout since we expect this to fail - from datetime import timedelta - await client_session.send_request( - ClientRequest(types.PingRequest()), + types.PingRequest(), types.EmptyResult, - request_read_timeout_seconds=timedelta(seconds=0.5), + request_read_timeout_seconds=0.5, ) pytest.fail("Expected timeout") # pragma: no cover except McpError as e: @@ -289,15 +260,14 @@ async def make_request(client_session: ClientSession): tg.start_soon(mock_server) tg.start_soon(make_request, client_session) - with anyio.fail_after(2): + # TODO(Marcelo): Drop the pragma once https://github.com/coveragepy/coveragepy/issues/1987 is fixed. + with anyio.fail_after(2): # pragma: no cover await ev_timeout.wait() @pytest.mark.anyio async def test_connection_closed(): - """ - Test that pending requests are cancelled when the connection is closed remotely. - """ + """Test that pending requests are cancelled when the connection is closed remotely.""" ev_closed = anyio.Event() ev_response = anyio.Event() @@ -335,7 +305,8 @@ async def mock_server(): tg.start_soon(make_request, client_session) tg.start_soon(mock_server) - with anyio.fail_after(1): + # TODO(Marcelo): Drop the pragma once https://github.com/coveragepy/coveragepy/issues/1987 is fixed. + with anyio.fail_after(1): # pragma: no cover await ev_closed.wait() - with anyio.fail_after(1): + with anyio.fail_after(1): # pragma: no cover await ev_response.wait() diff --git a/tests/shared/test_sse.py b/tests/shared/test_sse.py index 7604450f81..fb006424c6 100644 --- a/tests/shared/test_sse.py +++ b/tests/shared/test_sse.py @@ -5,6 +5,7 @@ from collections.abc import AsyncGenerator, Generator from typing import Any from unittest.mock import AsyncMock, MagicMock, Mock, patch +from urllib.parse import urlparse import anyio import httpx @@ -12,7 +13,6 @@ import uvicorn from httpx_sse import ServerSentEvent from inline_snapshot import snapshot -from pydantic import AnyUrl from starlette.applications import Starlette from starlette.requests import Request from starlette.responses import Response @@ -61,13 +61,14 @@ def __init__(self): super().__init__(SERVER_NAME) @self.read_resource() - async def handle_read_resource(uri: AnyUrl) -> str | bytes: - if uri.scheme == "foobar": - return f"Read {uri.host}" - elif uri.scheme == "slow": + async def handle_read_resource(uri: str) -> str | bytes: + parsed = urlparse(uri) + if parsed.scheme == "foobar": + return f"Read {parsed.netloc}" + if parsed.scheme == "slow": # Simulate a slow resource await anyio.sleep(2.0) - return f"Slow response from {uri.host}" + return f"Slow response from {parsed.netloc}" raise McpError(error=ErrorData(code=404, message="OOPS! 
no resource with that URI was found")) @@ -77,7 +78,7 @@ async def handle_list_tools() -> list[Tool]: Tool( name="test_tool", description="A test tool", - inputSchema={"type": "object", "properties": {}}, + input_schema={"type": "object", "properties": {}}, ) ] @@ -183,7 +184,7 @@ async def test_sse_client_basic_connection(server: None, server_url: str) -> Non # Test initialization result = await session.initialize() assert isinstance(result, InitializeResult) - assert result.serverInfo.name == SERVER_NAME + assert result.server_info.name == SERVER_NAME # Test ping ping_result = await session.send_ping() @@ -254,7 +255,7 @@ async def test_sse_client_happy_request_and_response( initialized_sse_client_session: ClientSession, ) -> None: session = initialized_sse_client_session - response = await session.read_resource(uri=AnyUrl("foobar://should-work")) + response = await session.read_resource(uri="foobar://should-work") assert len(response.contents) == 1 assert isinstance(response.contents[0], TextResourceContents) assert response.contents[0].text == "Read should-work" @@ -266,7 +267,7 @@ async def test_sse_client_exception_handling( ) -> None: session = initialized_sse_client_session with pytest.raises(McpError, match="OOPS! no resource with that URI was found"): - await session.read_resource(uri=AnyUrl("xxx://will-not-work")) + await session.read_resource(uri="xxx://will-not-work") @pytest.mark.anyio @@ -277,12 +278,12 @@ async def test_sse_client_timeout( # pragma: no cover session = initialized_sse_client_session # sanity check that normal, fast responses are working - response = await session.read_resource(uri=AnyUrl("foobar://1")) + response = await session.read_resource(uri="foobar://1") assert isinstance(response, ReadResourceResult) with anyio.move_on_after(3): with pytest.raises(McpError, match="Read timed out"): - response = await session.read_resource(uri=AnyUrl("slow://2")) + response = await session.read_resource(uri="slow://2") # we should receive an error here return @@ -329,7 +330,7 @@ async def test_sse_client_basic_connection_mounted_app(mounted_server: None, ser # Test initialization result = await session.initialize() assert isinstance(result, InitializeResult) - assert result.serverInfo.name == SERVER_NAME + assert result.server_info.name == SERVER_NAME # Test ping ping_result = await session.send_ping() @@ -365,12 +366,12 @@ async def handle_list_tools() -> list[Tool]: Tool( name="echo_headers", description="Echoes request headers", - inputSchema={"type": "object", "properties": {}}, + input_schema={"type": "object", "properties": {}}, ), Tool( name="echo_context", description="Echoes request context", - inputSchema={ + input_schema={ "type": "object", "properties": {"request_id": {"type": "string"}}, "required": ["request_id"], @@ -502,12 +503,12 @@ def test_sse_message_id_coercion(): See for more details. 
""" json_message = '{"jsonrpc": "2.0", "id": "123", "method": "ping", "params": null}' - msg = types.JSONRPCMessage.model_validate_json(json_message) - assert msg == snapshot(types.JSONRPCMessage(root=types.JSONRPCRequest(method="ping", jsonrpc="2.0", id="123"))) + msg = types.JSONRPCRequest.model_validate_json(json_message) + assert msg == snapshot(types.JSONRPCRequest(method="ping", jsonrpc="2.0", id="123")) json_message = '{"jsonrpc": "2.0", "id": 123, "method": "ping", "params": null}' - msg = types.JSONRPCMessage.model_validate_json(json_message) - assert msg == snapshot(types.JSONRPCMessage(root=types.JSONRPCRequest(method="ping", jsonrpc="2.0", id=123))) + msg = types.JSONRPCRequest.model_validate_json(json_message) + assert msg == snapshot(types.JSONRPCRequest(method="ping", jsonrpc="2.0", id=123)) @pytest.mark.parametrize( @@ -557,9 +558,9 @@ async def test_sse_client_handles_empty_keepalive_pings() -> None: """ # Build a proper JSON-RPC response using types (not hardcoded strings) init_result = InitializeResult( - protocolVersion="2024-11-05", + protocol_version="2024-11-05", capabilities=ServerCapabilities(), - serverInfo=Implementation(name="test", version="1.0"), + server_info=Implementation(name="test", version="1.0"), ) response = JSONRPCResponse( jsonrpc="2.0", @@ -600,5 +601,5 @@ async def mock_aiter_sse() -> AsyncGenerator[ServerSentEvent, None]: msg = await read_stream.receive() # If we get here without error, the empty message was skipped successfully assert not isinstance(msg, Exception) - assert isinstance(msg.message.root, types.JSONRPCResponse) - assert msg.message.root.id == 1 + assert isinstance(msg.message, types.JSONRPCResponse) + assert msg.message.id == 1 diff --git a/tests/shared/test_streamable_http.py b/tests/shared/test_streamable_http.py index 731dd20dd3..ed86f9860e 100644 --- a/tests/shared/test_streamable_http.py +++ b/tests/shared/test_streamable_http.py @@ -1,17 +1,19 @@ -""" -Tests for the StreamableHTTP server and client transport. +"""Tests for the StreamableHTTP server and client transport. Contains tests for both server and client sides of the StreamableHTTP transport. 
""" +from __future__ import annotations as _annotations + import json import multiprocessing import socket import time +import traceback from collections.abc import Generator -from datetime import timedelta from typing import Any from unittest.mock import MagicMock +from urllib.parse import urlparse import anyio import httpx @@ -19,18 +21,13 @@ import requests import uvicorn from httpx_sse import ServerSentEvent -from pydantic import AnyUrl from starlette.applications import Starlette from starlette.requests import Request from starlette.routing import Mount import mcp.types as types from mcp.client.session import ClientSession -from mcp.client.streamable_http import ( - StreamableHTTPTransport, - streamable_http_client, - streamablehttp_client, # pyright: ignore[reportDeprecated] -) +from mcp.client.streamable_http import StreamableHTTPTransport, streamable_http_client from mcp.server import Server from mcp.server.streamable_http import ( MCP_PROTOCOL_VERSION_HEADER, @@ -52,7 +49,6 @@ from mcp.shared.session import RequestResponder from mcp.types import ( InitializeResult, - JSONRPCMessage, JSONRPCRequest, TextContent, TextResourceContents, @@ -140,13 +136,14 @@ def __init__(self): self._lock = None # Will be initialized in async context @self.read_resource() - async def handle_read_resource(uri: AnyUrl) -> str | bytes: - if uri.scheme == "foobar": - return f"Read {uri.host}" - elif uri.scheme == "slow": + async def handle_read_resource(uri: str) -> str | bytes: + parsed = urlparse(uri) + if parsed.scheme == "foobar": + return f"Read {parsed.netloc}" + if parsed.scheme == "slow": # Simulate a slow resource await anyio.sleep(2.0) - return f"Slow response from {uri.host}" + return f"Slow response from {parsed.netloc}" raise ValueError(f"Unknown resource: {uri}") @@ -156,47 +153,47 @@ async def handle_list_tools() -> list[Tool]: Tool( name="test_tool", description="A test tool", - inputSchema={"type": "object", "properties": {}}, + input_schema={"type": "object", "properties": {}}, ), Tool( name="test_tool_with_standalone_notification", description="A test tool that sends a notification", - inputSchema={"type": "object", "properties": {}}, + input_schema={"type": "object", "properties": {}}, ), Tool( name="long_running_with_checkpoints", description="A long-running tool that sends periodic notifications", - inputSchema={"type": "object", "properties": {}}, + input_schema={"type": "object", "properties": {}}, ), Tool( name="test_sampling_tool", description="A tool that triggers server-side sampling", - inputSchema={"type": "object", "properties": {}}, + input_schema={"type": "object", "properties": {}}, ), Tool( name="wait_for_lock_with_notification", description="A tool that sends a notification and waits for lock", - inputSchema={"type": "object", "properties": {}}, + input_schema={"type": "object", "properties": {}}, ), Tool( name="release_lock", description="A tool that releases the lock", - inputSchema={"type": "object", "properties": {}}, + input_schema={"type": "object", "properties": {}}, ), Tool( name="tool_with_stream_close", description="A tool that closes SSE stream mid-operation", - inputSchema={"type": "object", "properties": {}}, + input_schema={"type": "object", "properties": {}}, ), Tool( name="tool_with_multiple_notifications_and_close", description="Tool that sends notification1, closes stream, sends notification2, notification3", - inputSchema={"type": "object", "properties": {}}, + input_schema={"type": "object", "properties": {}}, ), Tool( 
name="tool_with_multiple_stream_closes", description="Tool that closes SSE stream multiple times during execution", - inputSchema={ + input_schema={ "type": "object", "properties": { "checkpoints": {"type": "integer", "default": 3}, @@ -207,7 +204,7 @@ async def handle_list_tools() -> list[Tool]: Tool( name="tool_with_standalone_stream_close", description="Tool that closes standalone GET stream mid-operation", - inputSchema={"type": "object", "properties": {}}, + input_schema={"type": "object", "properties": {}}, ), ] @@ -217,7 +214,7 @@ async def handle_call_tool(name: str, args: dict[str, Any]) -> list[TextContent] # When the tool is called, send a notification to test GET stream if name == "test_tool_with_standalone_notification": - await ctx.session.send_resource_updated(uri=AnyUrl("http://test_resource")) + await ctx.session.send_resource_updated(uri="http://test_resource") return [TextContent(type="text", text=f"Called {name}")] elif name == "long_running_with_checkpoints": @@ -371,7 +368,7 @@ async def handle_call_tool(name: str, args: dict[str, Any]) -> list[TextContent] elif name == "tool_with_standalone_stream_close": # Test for GET stream reconnection # 1. Send unsolicited notification via GET stream (no related_request_id) - await ctx.session.send_resource_updated(uri=AnyUrl("http://notification_1")) + await ctx.session.send_resource_updated(uri="http://notification_1") # Small delay to ensure notification is flushed before closing await anyio.sleep(0.1) @@ -384,7 +381,7 @@ async def handle_call_tool(name: str, args: dict[str, Any]) -> list[TextContent] await anyio.sleep(1.5) # 4. Send another notification on the new GET stream connection - await ctx.session.send_resource_updated(uri=AnyUrl("http://notification_2")) + await ctx.session.send_resource_updated(uri="http://notification_2") return [TextContent(type="text", text="Standalone stream close test done")] @@ -464,8 +461,6 @@ def run_server( try: server.run() except Exception: - import traceback - traceback.print_exc() @@ -1006,15 +1001,15 @@ async def test_streamable_http_client_basic_connection(basic_server: None, basic # Test initialization result = await session.initialize() assert isinstance(result, InitializeResult) - assert result.serverInfo.name == SERVER_NAME + assert result.server_info.name == SERVER_NAME @pytest.mark.anyio async def test_streamable_http_client_resource_read(initialized_client_session: ClientSession): """Test client resource read functionality.""" - response = await initialized_client_session.read_resource(uri=AnyUrl("foobar://test-resource")) + response = await initialized_client_session.read_resource(uri="foobar://test-resource") assert len(response.contents) == 1 - assert response.contents[0].uri == AnyUrl("foobar://test-resource") + assert response.contents[0].uri == "foobar://test-resource" assert isinstance(response.contents[0], TextResourceContents) assert response.contents[0].text == "Read test-resource" @@ -1038,7 +1033,7 @@ async def test_streamable_http_client_tool_invocation(initialized_client_session async def test_streamable_http_client_error_handling(initialized_client_session: ClientSession): """Test error handling in client.""" with pytest.raises(McpError) as exc_info: - await initialized_client_session.read_resource(uri=AnyUrl("unknown://test-error")) + await initialized_client_session.read_resource(uri="unknown://test-error") assert exc_info.value.error.code == 0 assert "Unknown resource: unknown://test-error" in exc_info.value.error.message @@ -1064,7 +1059,7 @@ async def 
test_streamable_http_client_session_persistence(basic_server: None, ba assert len(tools.tools) == 10 # Read a resource - resource = await session.read_resource(uri=AnyUrl("foobar://test-persist")) + resource = await session.read_resource(uri="foobar://test-persist") assert isinstance(resource.contents[0], TextResourceContents) is True content = resource.contents[0] assert isinstance(content, TextResourceContents) @@ -1086,7 +1081,7 @@ async def test_streamable_http_client_json_response(json_response_server: None, # Initialize the session result = await session.initialize() assert isinstance(result, InitializeResult) - assert result.serverInfo.name == SERVER_NAME + assert result.server_info.name == SERVER_NAME # Check tool listing tools = await session.list_tools() @@ -1102,8 +1097,6 @@ async def test_streamable_http_client_json_response(json_response_server: None, @pytest.mark.anyio async def test_streamable_http_client_get_stream(basic_server: None, basic_server_url: str): """Test GET stream functionality for server-initiated messages.""" - import mcp.types as types - notifications_received: list[types.ServerNotification] = [] # Define message handler to capture notifications @@ -1132,8 +1125,8 @@ async def message_handler( # pragma: no branch # Verify the notification is a ResourceUpdatedNotification resource_update_found = False for notif in notifications_received: - if isinstance(notif.root, types.ResourceUpdatedNotification): # pragma: no branch - assert str(notif.root.params.uri) == "http://test_resource/" + if isinstance(notif, types.ResourceUpdatedNotification): # pragma: no branch + assert str(notif.params.uri) == "http://test_resource" resource_update_found = True assert resource_update_found, "ResourceUpdatedNotification not received via GET stream" @@ -1174,10 +1167,7 @@ async def test_streamable_http_client_session_termination(basic_server: None, ba ): async with ClientSession(read_stream, write_stream) as session: # pragma: no branch # Attempt to make a request after termination - with pytest.raises( # pragma: no branch - McpError, - match="Session terminated", - ): + with pytest.raises(McpError, match="Session terminated"): # pragma: no branch await session.list_tools() @@ -1266,8 +1256,8 @@ async def message_handler( # pragma: no branch if isinstance(message, types.ServerNotification): # pragma: no branch captured_notifications.append(message) # Look for our first notification - if isinstance(message.root, types.LoggingMessageNotification): # pragma: no branch - if message.root.params.data == "First notification before lock": + if isinstance(message, types.LoggingMessageNotification): # pragma: no branch + if message.params.data == "First notification before lock": nonlocal first_notification_received first_notification_received = True @@ -1288,7 +1278,7 @@ async def on_resumption_token_update(token: str) -> None: captured_session_id = get_session_id() assert captured_session_id is not None # Capture the negotiated protocol version - captured_protocol_version = result.protocolVersion + captured_protocol_version = result.protocol_version # Start the tool that will wait on lock in a task async with anyio.create_task_group() as tg: @@ -1298,12 +1288,8 @@ async def run_tool(): on_resumption_token_update=on_resumption_token_update, ) await session.send_request( - types.ClientRequest( - types.CallToolRequest( - params=types.CallToolRequestParams( - name="wait_for_lock_with_notification", arguments={} - ), - ) + types.CallToolRequest( + 
params=types.CallToolRequestParams(name="wait_for_lock_with_notification", arguments={}), ), types.CallToolResult, metadata=metadata, @@ -1320,8 +1306,8 @@ async def run_tool(): # Verify we received exactly one notification assert len(captured_notifications) == 1 # pragma: no cover - assert isinstance(captured_notifications[0].root, types.LoggingMessageNotification) # pragma: no cover - assert captured_notifications[0].root.params.data == "First notification before lock" # pragma: no cover + assert isinstance(captured_notifications[0], types.LoggingMessageNotification) # pragma: no cover + assert captured_notifications[0].params.data == "First notification before lock" # pragma: no cover # Clear notifications for the second phase captured_notifications = [] # pragma: no cover @@ -1341,11 +1327,7 @@ async def run_tool(): ): async with ClientSession(read_stream, write_stream, message_handler=message_handler) as session: result = await session.send_request( - types.ClientRequest( - types.CallToolRequest( - params=types.CallToolRequestParams(name="release_lock", arguments={}), - ) - ), + types.CallToolRequest(params=types.CallToolRequestParams(name="release_lock", arguments={})), types.CallToolResult, ) metadata = ClientMessageMetadata( @@ -1353,10 +1335,8 @@ async def run_tool(): ) result = await session.send_request( - types.ClientRequest( - types.CallToolRequest( - params=types.CallToolRequestParams(name="wait_for_lock_with_notification", arguments={}), - ) + types.CallToolRequest( + params=types.CallToolRequestParams(name="wait_for_lock_with_notification", arguments={}), ), types.CallToolResult, metadata=metadata, @@ -1368,8 +1348,8 @@ async def run_tool(): # We should have received the remaining notifications assert len(captured_notifications) == 1 - assert isinstance(captured_notifications[0].root, types.LoggingMessageNotification) # pragma: no cover - assert captured_notifications[0].root.params.data == "Second notification after lock" # pragma: no cover + assert isinstance(captured_notifications[0], types.LoggingMessageNotification) # pragma: no cover + assert captured_notifications[0].params.data == "Second notification after lock" # pragma: no cover @pytest.mark.anyio @@ -1397,7 +1377,7 @@ async def sampling_callback( text=f"Received message from server: {message_received}", ), model="test-model", - stopReason="endTurn", + stop_reason="endTurn", ) # Create client with sampling callback @@ -1441,12 +1421,12 @@ async def handle_list_tools() -> list[Tool]: Tool( name="echo_headers", description="Echo request headers from context", - inputSchema={"type": "object", "properties": {}}, + input_schema={"type": "object", "properties": {}}, ), Tool( name="echo_context", description="Echo request context with custom data", - inputSchema={ + input_schema={ "type": "object", "properties": { "request_id": {"type": "string"}, @@ -1555,7 +1535,7 @@ async def test_streamablehttp_request_context_propagation(context_aware_server: async with ClientSession(read_stream, write_stream) as session: # pragma: no branch result = await session.initialize() assert isinstance(result, InitializeResult) - assert result.serverInfo.name == "ContextAwareServer" + assert result.server_info.name == "ContextAwareServer" # Call the tool that echoes headers back tool_result = await session.call_tool("echo_headers", {}) @@ -1621,7 +1601,7 @@ async def test_client_includes_protocol_version_header_after_init(context_aware_ async with ClientSession(read_stream, write_stream) as session: # Initialize and get the negotiated 
version init_result = await session.initialize() - negotiated_version = init_result.protocolVersion + negotiated_version = init_result.protocol_version # Call a tool that echoes headers to verify the header is present tool_result = await session.call_tool("echo_headers", {}) @@ -1729,11 +1709,7 @@ async def test_client_crash_handled(basic_server: None, basic_server_url: str): # Simulate bad client that crashes after init async def bad_client(): """Client that triggers ClosedResourceError""" - async with streamable_http_client(f"{basic_server_url}/mcp") as ( - read_stream, - write_stream, - _, - ): + async with streamable_http_client(f"{basic_server_url}/mcp") as (read_stream, write_stream, _): async with ClientSession(read_stream, write_stream) as session: await session.initialize() raise Exception("client crash") @@ -1747,11 +1723,7 @@ async def bad_client(): await anyio.sleep(0.1) # Try a good client, it should still be able to connect and list tools - async with streamable_http_client(f"{basic_server_url}/mcp") as ( - read_stream, - write_stream, - _, - ): + async with streamable_http_client(f"{basic_server_url}/mcp") as (read_stream, write_stream, _): async with ClientSession(read_stream, write_stream) as session: result = await session.initialize() assert isinstance(result, InitializeResult) @@ -1873,7 +1845,7 @@ async def test_close_sse_stream_callback_not_provided_for_old_protocol_version() ) # Create a mock message and request - mock_message = JSONRPCMessage(root=JSONRPCRequest(jsonrpc="2.0", id="test-1", method="tools/list")) + mock_message = JSONRPCRequest(jsonrpc="2.0", id="test-1", method="tools/list") mock_request = MagicMock() # Call _create_session_message with OLD protocol version @@ -1920,11 +1892,7 @@ async def on_resumption_token_update(token: str) -> None: on_resumption_token_update=on_resumption_token_update, ) result = await session.send_request( - types.ClientRequest( - types.CallToolRequest( - params=types.CallToolRequestParams(name="test_tool", arguments={}), - ) - ), + types.CallToolRequest(params=types.CallToolRequestParams(name="test_tool", arguments={})), types.CallToolResult, metadata=metadata, ) @@ -1982,8 +1950,8 @@ async def message_handler( if isinstance(message, Exception): # pragma: no branch return # pragma: no cover if isinstance(message, types.ServerNotification): # pragma: no branch - if isinstance(message.root, types.LoggingMessageNotification): # pragma: no branch - captured_notifications.append(str(message.root.params.data)) + if isinstance(message, types.LoggingMessageNotification): # pragma: no branch + captured_notifications.append(str(message.params.data)) async with streamable_http_client(f"{server_url}/mcp") as ( read_stream, @@ -2058,8 +2026,8 @@ async def message_handler( if isinstance(message, Exception): # pragma: no branch return # pragma: no cover if isinstance(message, types.ServerNotification): # pragma: no branch - if isinstance(message.root, types.LoggingMessageNotification): # pragma: no branch - all_notifications.append(str(message.root.params.data)) + if isinstance(message, types.LoggingMessageNotification): # pragma: no branch + all_notifications.append(str(message.params.data)) async with streamable_http_client(f"{server_url}/mcp") as ( read_stream, @@ -2106,8 +2074,8 @@ async def message_handler( if isinstance(message, Exception): # pragma: no branch return # pragma: no cover if isinstance(message, types.ServerNotification): # pragma: no branch - if isinstance(message.root, types.LoggingMessageNotification): # pragma: no 
branch - notification_data.append(str(message.root.params.data)) + if isinstance(message, types.LoggingMessageNotification): # pragma: no branch + notification_data.append(str(message.params.data)) async with streamable_http_client(f"{server_url}/mcp") as ( read_stream, @@ -2168,15 +2136,13 @@ async def on_resumption_token(token: str) -> None: # Use send_request with metadata to track resumption tokens metadata = ClientMessageMetadata(on_resumption_token_update=on_resumption_token) result = await session.send_request( - types.ClientRequest( - types.CallToolRequest( - method="tools/call", - params=types.CallToolRequestParams( - name="tool_with_multiple_stream_closes", - # retry_interval=500ms, so sleep 600ms to ensure reconnect completes - arguments={"checkpoints": 3, "sleep_time": 0.6}, - ), - ) + types.CallToolRequest( + method="tools/call", + params=types.CallToolRequestParams( + name="tool_with_multiple_stream_closes", + # retry_interval=500ms, so sleep 600ms to ensure reconnect completes + arguments={"checkpoints": 3, "sleep_time": 0.6}, + ), ), types.CallToolResult, metadata=metadata, @@ -2197,8 +2163,7 @@ async def on_resumption_token(token: str) -> None: async def test_standalone_get_stream_reconnection( event_server: tuple[SimpleEventStore, str], ) -> None: - """ - Test that standalone GET stream automatically reconnects after server closes it. + """Test that standalone GET stream automatically reconnects after server closes it. Verifies: 1. Client receives notification 1 via GET stream @@ -2218,8 +2183,8 @@ async def message_handler( if isinstance(message, Exception): return # pragma: no cover if isinstance(message, types.ServerNotification): # pragma: no branch - if isinstance(message.root, types.ResourceUpdatedNotification): # pragma: no branch - received_notifications.append(str(message.root.params.uri)) + if isinstance(message, types.ResourceUpdatedNotification): # pragma: no branch + received_notifications.append(str(message.params.uri)) async with streamable_http_client(f"{server_url}/mcp") as ( read_stream, @@ -2246,10 +2211,10 @@ async def message_handler( assert result.content[0].text == "Standalone stream close test done" # Verify both notifications were received - assert "http://notification_1/" in received_notifications, ( + assert "http://notification_1" in received_notifications, ( f"Should receive notification 1 (sent before GET stream close), got: {received_notifications}" ) - assert "http://notification_2/" in received_notifications, ( + assert "http://notification_2" in received_notifications, ( f"Should receive notification 2 after reconnect, got: {received_notifications}" ) @@ -2361,36 +2326,3 @@ async def test_streamable_http_client_preserves_custom_with_mcp_headers( assert "content-type" in headers_data assert headers_data["content-type"] == "application/json" - - -@pytest.mark.anyio -async def test_streamable_http_transport_deprecated_params_ignored(basic_server: None, basic_server_url: str) -> None: - """Test that deprecated parameters passed to StreamableHTTPTransport are properly ignored.""" - with pytest.warns(DeprecationWarning): - transport = StreamableHTTPTransport( # pyright: ignore[reportDeprecated] - url=f"{basic_server_url}/mcp", - headers={"X-Should-Be-Ignored": "ignored"}, - timeout=999, - sse_read_timeout=timedelta(seconds=999), - auth=None, - ) - - headers = transport._prepare_headers() - assert "X-Should-Be-Ignored" not in headers - assert headers["accept"] == "application/json, text/event-stream" - assert headers["content-type"] == 
"application/json" - - -@pytest.mark.anyio -async def test_streamablehttp_client_deprecation_warning(basic_server: None, basic_server_url: str) -> None: - """Test that the old streamablehttp_client() function issues a deprecation warning.""" - with pytest.warns(DeprecationWarning, match="Use `streamable_http_client` instead"): - async with streamablehttp_client(f"{basic_server_url}/mcp") as ( # pyright: ignore[reportDeprecated] - read_stream, - write_stream, - _, - ): - async with ClientSession(read_stream, write_stream) as session: # pragma: no branch - await session.initialize() - tools = await session.list_tools() - assert len(tools.tools) > 0 diff --git a/tests/shared/test_ws.py b/tests/shared/test_ws.py index f093cb4927..06b56c63c7 100644 --- a/tests/shared/test_ws.py +++ b/tests/shared/test_ws.py @@ -3,6 +3,7 @@ import time from collections.abc import AsyncGenerator, Generator from typing import Any +from urllib.parse import urlparse import anyio import pytest @@ -49,13 +50,14 @@ def __init__(self): super().__init__(SERVER_NAME) @self.read_resource() - async def handle_read_resource(uri: AnyUrl) -> str | bytes: - if uri.scheme == "foobar": - return f"Read {uri.host}" - elif uri.scheme == "slow": + async def handle_read_resource(uri: str) -> str | bytes: + parsed = urlparse(uri) + if parsed.scheme == "foobar": + return f"Read {parsed.netloc}" + elif parsed.scheme == "slow": # Simulate a slow resource await anyio.sleep(2.0) - return f"Slow response from {uri.host}" + return f"Slow response from {parsed.netloc}" raise McpError(error=ErrorData(code=404, message="OOPS! no resource with that URI was found")) @@ -65,7 +67,7 @@ async def handle_list_tools() -> list[Tool]: Tool( name="test_tool", description="A test tool", - inputSchema={"type": "object", "properties": {}}, + input_schema={"type": "object", "properties": {}}, ) ] @@ -132,7 +134,7 @@ async def initialized_ws_client_session(server: None, server_url: str) -> AsyncG # Test initialization result = await session.initialize() assert isinstance(result, InitializeResult) - assert result.serverInfo.name == SERVER_NAME + assert result.server_info.name == SERVER_NAME # Test ping ping_result = await session.send_ping() @@ -150,7 +152,7 @@ async def test_ws_client_basic_connection(server: None, server_url: str) -> None # Test initialization result = await session.initialize() assert isinstance(result, InitializeResult) - assert result.serverInfo.name == SERVER_NAME + assert result.server_info.name == SERVER_NAME # Test ping ping_result = await session.send_ping() diff --git a/tests/test_examples.py b/tests/test_examples.py index 6f5464e394..187cda3218 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -6,11 +6,13 @@ # pyright: reportUnknownMemberType=false import sys +from pathlib import Path import pytest +from pydantic import AnyUrl from pytest_examples import CodeExample, EvalExample, find_examples -from mcp.shared.memory import create_connected_server_and_client_session as client_session +from mcp import Client from mcp.types import TextContent, TextResourceContents @@ -19,7 +21,7 @@ async def test_simple_echo(): """Test the simple echo server""" from examples.fastmcp.simple_echo import mcp - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: result = await client.call_tool("echo", {"text": "hello"}) assert len(result.content) == 1 content = result.content[0] @@ -32,7 +34,7 @@ async def test_complex_inputs(): """Test the complex inputs server""" from 
examples.fastmcp.complex_inputs import mcp - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: tank = {"shrimp": [{"name": "bob"}, {"name": "alice"}]} result = await client.call_tool("name_shrimp", {"tank": tank, "extra_names": ["charlie"]}) assert len(result.content) == 3 @@ -49,14 +51,14 @@ async def test_direct_call_tool_result_return(): """Test the CallToolResult echo server""" from examples.fastmcp.direct_call_tool_result_return import mcp - async with client_session(mcp._mcp_server) as client: + async with Client(mcp) as client: result = await client.call_tool("echo", {"text": "hello"}) assert len(result.content) == 1 content = result.content[0] assert isinstance(content, TextContent) assert content.text == "hello" - assert result.structuredContent - assert result.structuredContent["text"] == "hello" + assert result.structured_content + assert result.structured_content["text"] == "hello" assert isinstance(result.meta, dict) assert result.meta["some"] == "metadata" @@ -64,18 +66,14 @@ async def test_direct_call_tool_result_return(): @pytest.mark.anyio async def test_desktop(monkeypatch: pytest.MonkeyPatch): """Test the desktop server""" - from pathlib import Path - - from pydantic import AnyUrl - - from examples.fastmcp.desktop import mcp - # Mock desktop directory listing mock_files = [Path("/fake/path/file1.txt"), Path("/fake/path/file2.txt")] monkeypatch.setattr(Path, "iterdir", lambda self: mock_files) # type: ignore[reportUnknownArgumentType] monkeypatch.setattr(Path, "home", lambda: Path("/fake/home")) - async with client_session(mcp._mcp_server) as client: + from examples.fastmcp.desktop import mcp + + async with Client(mcp) as client: # Test the sum function result = await client.call_tool("sum", {"a": 1, "b": 2}) assert len(result.content) == 1 diff --git a/tests/test_types.py b/tests/test_types.py index 1c16c3cc6e..f424efdbf7 100644 --- a/tests/test_types.py +++ b/tests/test_types.py @@ -5,14 +5,12 @@ from mcp.types import ( LATEST_PROTOCOL_VERSION, ClientCapabilities, - ClientRequest, CreateMessageRequestParams, CreateMessageResult, CreateMessageResultWithTools, Implementation, InitializeRequest, InitializeRequestParams, - JSONRPCMessage, JSONRPCRequest, ListToolsResult, SamplingCapability, @@ -22,6 +20,8 @@ ToolChoice, ToolResultContent, ToolUseContent, + client_request_adapter, + jsonrpc_message_adapter, ) @@ -38,28 +38,27 @@ async def test_jsonrpc_request(): }, } - request = JSONRPCMessage.model_validate(json_data) - assert isinstance(request.root, JSONRPCRequest) - ClientRequest.model_validate(request.model_dump(by_alias=True, exclude_none=True)) + request = jsonrpc_message_adapter.validate_python(json_data) + assert isinstance(request, JSONRPCRequest) + client_request_adapter.validate_python(request.model_dump(by_alias=True, exclude_none=True)) - assert request.root.jsonrpc == "2.0" - assert request.root.id == 1 - assert request.root.method == "initialize" - assert request.root.params is not None - assert request.root.params["protocolVersion"] == LATEST_PROTOCOL_VERSION + assert request.jsonrpc == "2.0" + assert request.id == 1 + assert request.method == "initialize" + assert request.params is not None + assert request.params["protocolVersion"] == LATEST_PROTOCOL_VERSION @pytest.mark.anyio async def test_method_initialization(): - """ - Test that the method is automatically set on object creation. + """Test that the method is automatically set on object creation. 
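# A minimal sketch of the in-memory Client usage adopted above in tests/test_examples.py;
# `server` stands for any FastMCP instance such as examples.fastmcp.simple_echo.mcp and
# is illustrative only, not part of this change.

from mcp import Client
from mcp.types import TextContent


async def call_echo(server) -> None:
    # Client(server) opens a connected in-memory session against the FastMCP server.
    async with Client(server) as client:
        result = await client.call_tool("echo", {"text": "hello"})
        content = result.content[0]
        assert isinstance(content, TextContent)
        assert content.text == "hello"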
Testing just for InitializeRequest to keep the test simple, but should be set for other types as well. """ initialize_request = InitializeRequest( params=InitializeRequestParams( - protocolVersion=LATEST_PROTOCOL_VERSION, + protocol_version=LATEST_PROTOCOL_VERSION, capabilities=ClientCapabilities(), - clientInfo=Implementation( + client_info=Implementation( name="mcp", version="0.1.0", ), @@ -68,7 +67,7 @@ async def test_method_initialization(): assert initialize_request.method == "initialize", "method should be set to 'initialize'" assert initialize_request.params is not None - assert initialize_request.params.protocolVersion == LATEST_PROTOCOL_VERSION + assert initialize_request.params.protocol_version == LATEST_PROTOCOL_VERSION @pytest.mark.anyio @@ -105,9 +104,9 @@ async def test_tool_result_content(): tool_result = ToolResultContent.model_validate(tool_result_data) assert tool_result.type == "tool_result" - assert tool_result.toolUseId == "call_abc123" + assert tool_result.tool_use_id == "call_abc123" assert len(tool_result.content) == 1 - assert tool_result.isError is False + assert tool_result.is_error is False # Test with empty content (should default to []) minimal_result_data = {"type": "tool_result", "toolUseId": "call_xyz"} @@ -221,21 +220,21 @@ async def test_create_message_request_params_with_tools(): tool = Tool( name="get_weather", description="Get weather information", - inputSchema={"type": "object", "properties": {"location": {"type": "string"}}}, + input_schema={"type": "object", "properties": {"location": {"type": "string"}}}, ) params = CreateMessageRequestParams( messages=[SamplingMessage(role="user", content=TextContent(type="text", text="What's the weather?"))], - maxTokens=1000, + max_tokens=1000, tools=[tool], - toolChoice=ToolChoice(mode="auto"), + tool_choice=ToolChoice(mode="auto"), ) assert params.tools is not None assert len(params.tools) == 1 assert params.tools[0].name == "get_weather" - assert params.toolChoice is not None - assert params.toolChoice.mode == "auto" + assert params.tool_choice is not None + assert params.tool_choice.mode == "auto" @pytest.mark.anyio @@ -252,7 +251,7 @@ async def test_create_message_result_with_tool_use(): result = CreateMessageResultWithTools.model_validate(result_data) assert result.role == "assistant" assert isinstance(result.content, ToolUseContent) - assert result.stopReason == "toolUse" + assert result.stop_reason == "toolUse" assert result.model == "claude-3" # Test content_as_list with single content (covers else branch) @@ -276,7 +275,7 @@ async def test_create_message_result_basic(): assert result.role == "assistant" assert isinstance(result.content, TextContent) assert result.content.text == "Hello!" 
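# A short sketch of the snake_case sampling fields exercised in the test_types.py hunks
# above (input_schema, max_tokens, tool_choice); values mirror the test and are not an
# addition to this change.

from mcp.types import (
    CreateMessageRequestParams,
    SamplingMessage,
    TextContent,
    Tool,
    ToolChoice,
)

weather_tool = Tool(
    name="get_weather",
    description="Get weather information",
    input_schema={"type": "object", "properties": {"location": {"type": "string"}}},
)
params = CreateMessageRequestParams(
    messages=[SamplingMessage(role="user", content=TextContent(type="text", text="What's the weather?"))],
    max_tokens=1000,
    tools=[weather_tool],
    tool_choice=ToolChoice(mode="auto"),
)
assert params.tool_choice is not None and params.tool_choice.mode == "auto"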
- assert result.stopReason == "endTurn" + assert result.stop_reason == "endTurn" assert result.model == "claude-3" @@ -322,13 +321,13 @@ def test_tool_preserves_json_schema_2020_12_fields(): "additionalProperties": False, } - tool = Tool(name="test_tool", description="A test tool", inputSchema=input_schema) + tool = Tool(name="test_tool", description="A test tool", input_schema=input_schema) # Verify fields are preserved in the model - assert tool.inputSchema["$schema"] == "https://json-schema.org/draft/2020-12/schema" - assert "$defs" in tool.inputSchema - assert "address" in tool.inputSchema["$defs"] - assert tool.inputSchema["additionalProperties"] is False + assert tool.input_schema["$schema"] == "https://json-schema.org/draft/2020-12/schema" + assert "$defs" in tool.input_schema + assert "address" in tool.input_schema["$defs"] + assert tool.input_schema["additionalProperties"] is False # Verify fields survive serialization round-trip serialized = tool.model_dump(mode="json", by_alias=True) @@ -358,6 +357,6 @@ def test_list_tools_result_preserves_json_schema_2020_12_fields(): result = ListToolsResult.model_validate(raw_response) tool = result.tools[0] - assert tool.inputSchema["$schema"] == "https://json-schema.org/draft/2020-12/schema" - assert "$defs" in tool.inputSchema - assert tool.inputSchema["additionalProperties"] is False + assert tool.input_schema["$schema"] == "https://json-schema.org/draft/2020-12/schema" + assert "$defs" in tool.input_schema + assert tool.input_schema["additionalProperties"] is False diff --git a/uv.lock b/uv.lock index 757709acdf..5d36da2e3a 100644 --- a/uv.lock +++ b/uv.lock @@ -1,11 +1,14 @@ version = 1 revision = 3 requires-python = ">=3.10" +resolution-markers = [ + "python_full_version >= '3.14'", + "python_full_version < '3.14'", +] [manifest] members = [ "mcp", - "mcp-conformance-auth-client", "mcp-everything-server", "mcp-simple-auth", "mcp-simple-auth-client", @@ -125,34 +128,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/09/71/54e999902aed72baf26bca0d50781b01838251a462612966e9fc4891eadd/black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717", size = 207646, upload-time = "2025-01-29T04:15:38.082Z" }, ] -[[package]] -name = "cairocffi" -version = "1.7.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cffi" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/70/c5/1a4dc131459e68a173cbdab5fad6b524f53f9c1ef7861b7698e998b837cc/cairocffi-1.7.1.tar.gz", hash = "sha256:2e48ee864884ec4a3a34bfa8c9ab9999f688286eb714a15a43ec9d068c36557b", size = 88096, upload-time = "2024-06-18T10:56:06.741Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/93/d8/ba13451aa6b745c49536e87b6bf8f629b950e84bd0e8308f7dc6883b67e2/cairocffi-1.7.1-py3-none-any.whl", hash = "sha256:9803a0e11f6c962f3b0ae2ec8ba6ae45e957a146a004697a1ac1bbf16b073b3f", size = 75611, upload-time = "2024-06-18T10:55:59.489Z" }, -] - -[[package]] -name = "cairosvg" -version = "2.8.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cairocffi" }, - { name = "cssselect2" }, - { name = "defusedxml" }, - { name = "pillow" }, - { name = "tinycss2" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ab/b9/5106168bd43d7cd8b7cc2a2ee465b385f14b63f4c092bb89eee2d48c8e67/cairosvg-2.8.2.tar.gz", hash = "sha256:07cbf4e86317b27a92318a4cac2a4bb37a5e9c1b8a27355d06874b22f85bef9f", size = 8398590, upload-time = "2025-05-15T06:56:32.653Z" } -wheels = 
[ - { url = "https://files.pythonhosted.org/packages/67/48/816bd4aaae93dbf9e408c58598bc32f4a8c65f4b86ab560864cb3ee60adb/cairosvg-2.8.2-py3-none-any.whl", hash = "sha256:eab46dad4674f33267a671dce39b64be245911c901c70d65d2b7b0821e852bf5", size = 45773, upload-time = "2025-05-15T06:56:28.552Z" }, -] - [[package]] name = "certifi" version = "2025.8.3" @@ -331,101 +306,101 @@ wheels = [ [[package]] name = "coverage" -version = "7.10.7" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/51/26/d22c300112504f5f9a9fd2297ce33c35f3d353e4aeb987c8419453b2a7c2/coverage-7.10.7.tar.gz", hash = "sha256:f4ab143ab113be368a3e9b795f9cd7906c5ef407d6173fe9675a902e1fffc239", size = 827704, upload-time = "2025-09-21T20:03:56.815Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/6c/3a3f7a46888e69d18abe3ccc6fe4cb16cccb1e6a2f99698931dafca489e6/coverage-7.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fc04cc7a3db33664e0c2d10eb8990ff6b3536f6842c9590ae8da4c614b9ed05a", size = 217987, upload-time = "2025-09-21T20:00:57.218Z" }, - { url = "https://files.pythonhosted.org/packages/03/94/952d30f180b1a916c11a56f5c22d3535e943aa22430e9e3322447e520e1c/coverage-7.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e201e015644e207139f7e2351980feb7040e6f4b2c2978892f3e3789d1c125e5", size = 218388, upload-time = "2025-09-21T20:01:00.081Z" }, - { url = "https://files.pythonhosted.org/packages/50/2b/9e0cf8ded1e114bcd8b2fd42792b57f1c4e9e4ea1824cde2af93a67305be/coverage-7.10.7-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:240af60539987ced2c399809bd34f7c78e8abe0736af91c3d7d0e795df633d17", size = 245148, upload-time = "2025-09-21T20:01:01.768Z" }, - { url = "https://files.pythonhosted.org/packages/19/20/d0384ac06a6f908783d9b6aa6135e41b093971499ec488e47279f5b846e6/coverage-7.10.7-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8421e088bc051361b01c4b3a50fd39a4b9133079a2229978d9d30511fd05231b", size = 246958, upload-time = "2025-09-21T20:01:03.355Z" }, - { url = "https://files.pythonhosted.org/packages/60/83/5c283cff3d41285f8eab897651585db908a909c572bdc014bcfaf8a8b6ae/coverage-7.10.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6be8ed3039ae7f7ac5ce058c308484787c86e8437e72b30bf5e88b8ea10f3c87", size = 248819, upload-time = "2025-09-21T20:01:04.968Z" }, - { url = "https://files.pythonhosted.org/packages/60/22/02eb98fdc5ff79f423e990d877693e5310ae1eab6cb20ae0b0b9ac45b23b/coverage-7.10.7-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e28299d9f2e889e6d51b1f043f58d5f997c373cc12e6403b90df95b8b047c13e", size = 245754, upload-time = "2025-09-21T20:01:06.321Z" }, - { url = "https://files.pythonhosted.org/packages/b4/bc/25c83bcf3ad141b32cd7dc45485ef3c01a776ca3aa8ef0a93e77e8b5bc43/coverage-7.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c4e16bd7761c5e454f4efd36f345286d6f7c5fa111623c355691e2755cae3b9e", size = 246860, upload-time = "2025-09-21T20:01:07.605Z" }, - { url = "https://files.pythonhosted.org/packages/3c/b7/95574702888b58c0928a6e982038c596f9c34d52c5e5107f1eef729399b5/coverage-7.10.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b1c81d0e5e160651879755c9c675b974276f135558cf4ba79fee7b8413a515df", size = 244877, upload-time = "2025-09-21T20:01:08.829Z" }, - { url = 
"https://files.pythonhosted.org/packages/47/b6/40095c185f235e085df0e0b158f6bd68cc6e1d80ba6c7721dc81d97ec318/coverage-7.10.7-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:606cc265adc9aaedcc84f1f064f0e8736bc45814f15a357e30fca7ecc01504e0", size = 245108, upload-time = "2025-09-21T20:01:10.527Z" }, - { url = "https://files.pythonhosted.org/packages/c8/50/4aea0556da7a4b93ec9168420d170b55e2eb50ae21b25062513d020c6861/coverage-7.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:10b24412692df990dbc34f8fb1b6b13d236ace9dfdd68df5b28c2e39cafbba13", size = 245752, upload-time = "2025-09-21T20:01:11.857Z" }, - { url = "https://files.pythonhosted.org/packages/6a/28/ea1a84a60828177ae3b100cb6723838523369a44ec5742313ed7db3da160/coverage-7.10.7-cp310-cp310-win32.whl", hash = "sha256:b51dcd060f18c19290d9b8a9dd1e0181538df2ce0717f562fff6cf74d9fc0b5b", size = 220497, upload-time = "2025-09-21T20:01:13.459Z" }, - { url = "https://files.pythonhosted.org/packages/fc/1a/a81d46bbeb3c3fd97b9602ebaa411e076219a150489bcc2c025f151bd52d/coverage-7.10.7-cp310-cp310-win_amd64.whl", hash = "sha256:3a622ac801b17198020f09af3eaf45666b344a0d69fc2a6ffe2ea83aeef1d807", size = 221392, upload-time = "2025-09-21T20:01:14.722Z" }, - { url = "https://files.pythonhosted.org/packages/d2/5d/c1a17867b0456f2e9ce2d8d4708a4c3a089947d0bec9c66cdf60c9e7739f/coverage-7.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a609f9c93113be646f44c2a0256d6ea375ad047005d7f57a5c15f614dc1b2f59", size = 218102, upload-time = "2025-09-21T20:01:16.089Z" }, - { url = "https://files.pythonhosted.org/packages/54/f0/514dcf4b4e3698b9a9077f084429681bf3aad2b4a72578f89d7f643eb506/coverage-7.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:65646bb0359386e07639c367a22cf9b5bf6304e8630b565d0626e2bdf329227a", size = 218505, upload-time = "2025-09-21T20:01:17.788Z" }, - { url = "https://files.pythonhosted.org/packages/20/f6/9626b81d17e2a4b25c63ac1b425ff307ecdeef03d67c9a147673ae40dc36/coverage-7.10.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5f33166f0dfcce728191f520bd2692914ec70fac2713f6bf3ce59c3deacb4699", size = 248898, upload-time = "2025-09-21T20:01:19.488Z" }, - { url = "https://files.pythonhosted.org/packages/b0/ef/bd8e719c2f7417ba03239052e099b76ea1130ac0cbb183ee1fcaa58aaff3/coverage-7.10.7-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:35f5e3f9e455bb17831876048355dca0f758b6df22f49258cb5a91da23ef437d", size = 250831, upload-time = "2025-09-21T20:01:20.817Z" }, - { url = "https://files.pythonhosted.org/packages/a5/b6/bf054de41ec948b151ae2b79a55c107f5760979538f5fb80c195f2517718/coverage-7.10.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4da86b6d62a496e908ac2898243920c7992499c1712ff7c2b6d837cc69d9467e", size = 252937, upload-time = "2025-09-21T20:01:22.171Z" }, - { url = "https://files.pythonhosted.org/packages/0f/e5/3860756aa6f9318227443c6ce4ed7bf9e70bb7f1447a0353f45ac5c7974b/coverage-7.10.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6b8b09c1fad947c84bbbc95eca841350fad9cbfa5a2d7ca88ac9f8d836c92e23", size = 249021, upload-time = "2025-09-21T20:01:23.907Z" }, - { url = "https://files.pythonhosted.org/packages/26/0f/bd08bd042854f7fd07b45808927ebcce99a7ed0f2f412d11629883517ac2/coverage-7.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4376538f36b533b46f8971d3a3e63464f2c7905c9800db97361c43a2b14792ab", size = 250626, upload-time = 
"2025-09-21T20:01:25.721Z" }, - { url = "https://files.pythonhosted.org/packages/8e/a7/4777b14de4abcc2e80c6b1d430f5d51eb18ed1d75fca56cbce5f2db9b36e/coverage-7.10.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:121da30abb574f6ce6ae09840dae322bef734480ceafe410117627aa54f76d82", size = 248682, upload-time = "2025-09-21T20:01:27.105Z" }, - { url = "https://files.pythonhosted.org/packages/34/72/17d082b00b53cd45679bad682fac058b87f011fd8b9fe31d77f5f8d3a4e4/coverage-7.10.7-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:88127d40df529336a9836870436fc2751c339fbaed3a836d42c93f3e4bd1d0a2", size = 248402, upload-time = "2025-09-21T20:01:28.629Z" }, - { url = "https://files.pythonhosted.org/packages/81/7a/92367572eb5bdd6a84bfa278cc7e97db192f9f45b28c94a9ca1a921c3577/coverage-7.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ba58bbcd1b72f136080c0bccc2400d66cc6115f3f906c499013d065ac33a4b61", size = 249320, upload-time = "2025-09-21T20:01:30.004Z" }, - { url = "https://files.pythonhosted.org/packages/2f/88/a23cc185f6a805dfc4fdf14a94016835eeb85e22ac3a0e66d5e89acd6462/coverage-7.10.7-cp311-cp311-win32.whl", hash = "sha256:972b9e3a4094b053a4e46832b4bc829fc8a8d347160eb39d03f1690316a99c14", size = 220536, upload-time = "2025-09-21T20:01:32.184Z" }, - { url = "https://files.pythonhosted.org/packages/fe/ef/0b510a399dfca17cec7bc2f05ad8bd78cf55f15c8bc9a73ab20c5c913c2e/coverage-7.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:a7b55a944a7f43892e28ad4bc0561dfd5f0d73e605d1aa5c3c976b52aea121d2", size = 221425, upload-time = "2025-09-21T20:01:33.557Z" }, - { url = "https://files.pythonhosted.org/packages/51/7f/023657f301a276e4ba1850f82749bc136f5a7e8768060c2e5d9744a22951/coverage-7.10.7-cp311-cp311-win_arm64.whl", hash = "sha256:736f227fb490f03c6488f9b6d45855f8e0fd749c007f9303ad30efab0e73c05a", size = 220103, upload-time = "2025-09-21T20:01:34.929Z" }, - { url = "https://files.pythonhosted.org/packages/13/e4/eb12450f71b542a53972d19117ea5a5cea1cab3ac9e31b0b5d498df1bd5a/coverage-7.10.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7bb3b9ddb87ef7725056572368040c32775036472d5a033679d1fa6c8dc08417", size = 218290, upload-time = "2025-09-21T20:01:36.455Z" }, - { url = "https://files.pythonhosted.org/packages/37/66/593f9be12fc19fb36711f19a5371af79a718537204d16ea1d36f16bd78d2/coverage-7.10.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:18afb24843cbc175687225cab1138c95d262337f5473512010e46831aa0c2973", size = 218515, upload-time = "2025-09-21T20:01:37.982Z" }, - { url = "https://files.pythonhosted.org/packages/66/80/4c49f7ae09cafdacc73fbc30949ffe77359635c168f4e9ff33c9ebb07838/coverage-7.10.7-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:399a0b6347bcd3822be369392932884b8216d0944049ae22925631a9b3d4ba4c", size = 250020, upload-time = "2025-09-21T20:01:39.617Z" }, - { url = "https://files.pythonhosted.org/packages/a6/90/a64aaacab3b37a17aaedd83e8000142561a29eb262cede42d94a67f7556b/coverage-7.10.7-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314f2c326ded3f4b09be11bc282eb2fc861184bc95748ae67b360ac962770be7", size = 252769, upload-time = "2025-09-21T20:01:41.341Z" }, - { url = "https://files.pythonhosted.org/packages/98/2e/2dda59afd6103b342e096f246ebc5f87a3363b5412609946c120f4e7750d/coverage-7.10.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c41e71c9cfb854789dee6fc51e46743a6d138b1803fab6cb860af43265b42ea6", size = 253901, upload-time = 
"2025-09-21T20:01:43.042Z" }, - { url = "https://files.pythonhosted.org/packages/53/dc/8d8119c9051d50f3119bb4a75f29f1e4a6ab9415cd1fa8bf22fcc3fb3b5f/coverage-7.10.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc01f57ca26269c2c706e838f6422e2a8788e41b3e3c65e2f41148212e57cd59", size = 250413, upload-time = "2025-09-21T20:01:44.469Z" }, - { url = "https://files.pythonhosted.org/packages/98/b3/edaff9c5d79ee4d4b6d3fe046f2b1d799850425695b789d491a64225d493/coverage-7.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a6442c59a8ac8b85812ce33bc4d05bde3fb22321fa8294e2a5b487c3505f611b", size = 251820, upload-time = "2025-09-21T20:01:45.915Z" }, - { url = "https://files.pythonhosted.org/packages/11/25/9a0728564bb05863f7e513e5a594fe5ffef091b325437f5430e8cfb0d530/coverage-7.10.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:78a384e49f46b80fb4c901d52d92abe098e78768ed829c673fbb53c498bef73a", size = 249941, upload-time = "2025-09-21T20:01:47.296Z" }, - { url = "https://files.pythonhosted.org/packages/e0/fd/ca2650443bfbef5b0e74373aac4df67b08180d2f184b482c41499668e258/coverage-7.10.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:5e1e9802121405ede4b0133aa4340ad8186a1d2526de5b7c3eca519db7bb89fb", size = 249519, upload-time = "2025-09-21T20:01:48.73Z" }, - { url = "https://files.pythonhosted.org/packages/24/79/f692f125fb4299b6f963b0745124998ebb8e73ecdfce4ceceb06a8c6bec5/coverage-7.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d41213ea25a86f69efd1575073d34ea11aabe075604ddf3d148ecfec9e1e96a1", size = 251375, upload-time = "2025-09-21T20:01:50.529Z" }, - { url = "https://files.pythonhosted.org/packages/5e/75/61b9bbd6c7d24d896bfeec57acba78e0f8deac68e6baf2d4804f7aae1f88/coverage-7.10.7-cp312-cp312-win32.whl", hash = "sha256:77eb4c747061a6af8d0f7bdb31f1e108d172762ef579166ec84542f711d90256", size = 220699, upload-time = "2025-09-21T20:01:51.941Z" }, - { url = "https://files.pythonhosted.org/packages/ca/f3/3bf7905288b45b075918d372498f1cf845b5b579b723c8fd17168018d5f5/coverage-7.10.7-cp312-cp312-win_amd64.whl", hash = "sha256:f51328ffe987aecf6d09f3cd9d979face89a617eacdaea43e7b3080777f647ba", size = 221512, upload-time = "2025-09-21T20:01:53.481Z" }, - { url = "https://files.pythonhosted.org/packages/5c/44/3e32dbe933979d05cf2dac5e697c8599cfe038aaf51223ab901e208d5a62/coverage-7.10.7-cp312-cp312-win_arm64.whl", hash = "sha256:bda5e34f8a75721c96085903c6f2197dc398c20ffd98df33f866a9c8fd95f4bf", size = 220147, upload-time = "2025-09-21T20:01:55.2Z" }, - { url = "https://files.pythonhosted.org/packages/9a/94/b765c1abcb613d103b64fcf10395f54d69b0ef8be6a0dd9c524384892cc7/coverage-7.10.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:981a651f543f2854abd3b5fcb3263aac581b18209be49863ba575de6edf4c14d", size = 218320, upload-time = "2025-09-21T20:01:56.629Z" }, - { url = "https://files.pythonhosted.org/packages/72/4f/732fff31c119bb73b35236dd333030f32c4bfe909f445b423e6c7594f9a2/coverage-7.10.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:73ab1601f84dc804f7812dc297e93cd99381162da39c47040a827d4e8dafe63b", size = 218575, upload-time = "2025-09-21T20:01:58.203Z" }, - { url = "https://files.pythonhosted.org/packages/87/02/ae7e0af4b674be47566707777db1aa375474f02a1d64b9323e5813a6cdd5/coverage-7.10.7-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a8b6f03672aa6734e700bbcd65ff050fd19cddfec4b031cc8cf1c6967de5a68e", size = 249568, upload-time = "2025-09-21T20:01:59.748Z" }, - { url = 
"https://files.pythonhosted.org/packages/a2/77/8c6d22bf61921a59bce5471c2f1f7ac30cd4ac50aadde72b8c48d5727902/coverage-7.10.7-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10b6ba00ab1132a0ce4428ff68cf50a25efd6840a42cdf4239c9b99aad83be8b", size = 252174, upload-time = "2025-09-21T20:02:01.192Z" }, - { url = "https://files.pythonhosted.org/packages/b1/20/b6ea4f69bbb52dac0aebd62157ba6a9dddbfe664f5af8122dac296c3ee15/coverage-7.10.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c79124f70465a150e89340de5963f936ee97097d2ef76c869708c4248c63ca49", size = 253447, upload-time = "2025-09-21T20:02:02.701Z" }, - { url = "https://files.pythonhosted.org/packages/f9/28/4831523ba483a7f90f7b259d2018fef02cb4d5b90bc7c1505d6e5a84883c/coverage-7.10.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:69212fbccdbd5b0e39eac4067e20a4a5256609e209547d86f740d68ad4f04911", size = 249779, upload-time = "2025-09-21T20:02:04.185Z" }, - { url = "https://files.pythonhosted.org/packages/a7/9f/4331142bc98c10ca6436d2d620c3e165f31e6c58d43479985afce6f3191c/coverage-7.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7ea7c6c9d0d286d04ed3541747e6597cbe4971f22648b68248f7ddcd329207f0", size = 251604, upload-time = "2025-09-21T20:02:06.034Z" }, - { url = "https://files.pythonhosted.org/packages/ce/60/bda83b96602036b77ecf34e6393a3836365481b69f7ed7079ab85048202b/coverage-7.10.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b9be91986841a75042b3e3243d0b3cb0b2434252b977baaf0cd56e960fe1e46f", size = 249497, upload-time = "2025-09-21T20:02:07.619Z" }, - { url = "https://files.pythonhosted.org/packages/5f/af/152633ff35b2af63977edd835d8e6430f0caef27d171edf2fc76c270ef31/coverage-7.10.7-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:b281d5eca50189325cfe1f365fafade89b14b4a78d9b40b05ddd1fc7d2a10a9c", size = 249350, upload-time = "2025-09-21T20:02:10.34Z" }, - { url = "https://files.pythonhosted.org/packages/9d/71/d92105d122bd21cebba877228990e1646d862e34a98bb3374d3fece5a794/coverage-7.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:99e4aa63097ab1118e75a848a28e40d68b08a5e19ce587891ab7fd04475e780f", size = 251111, upload-time = "2025-09-21T20:02:12.122Z" }, - { url = "https://files.pythonhosted.org/packages/a2/9e/9fdb08f4bf476c912f0c3ca292e019aab6712c93c9344a1653986c3fd305/coverage-7.10.7-cp313-cp313-win32.whl", hash = "sha256:dc7c389dce432500273eaf48f410b37886be9208b2dd5710aaf7c57fd442c698", size = 220746, upload-time = "2025-09-21T20:02:13.919Z" }, - { url = "https://files.pythonhosted.org/packages/b1/b1/a75fd25df44eab52d1931e89980d1ada46824c7a3210be0d3c88a44aaa99/coverage-7.10.7-cp313-cp313-win_amd64.whl", hash = "sha256:cac0fdca17b036af3881a9d2729a850b76553f3f716ccb0360ad4dbc06b3b843", size = 221541, upload-time = "2025-09-21T20:02:15.57Z" }, - { url = "https://files.pythonhosted.org/packages/14/3a/d720d7c989562a6e9a14b2c9f5f2876bdb38e9367126d118495b89c99c37/coverage-7.10.7-cp313-cp313-win_arm64.whl", hash = "sha256:4b6f236edf6e2f9ae8fcd1332da4e791c1b6ba0dc16a2dc94590ceccb482e546", size = 220170, upload-time = "2025-09-21T20:02:17.395Z" }, - { url = "https://files.pythonhosted.org/packages/bb/22/e04514bf2a735d8b0add31d2b4ab636fc02370730787c576bb995390d2d5/coverage-7.10.7-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a0ec07fd264d0745ee396b666d47cef20875f4ff2375d7c4f58235886cc1ef0c", size = 219029, upload-time = "2025-09-21T20:02:18.936Z" }, - { url = 
"https://files.pythonhosted.org/packages/11/0b/91128e099035ece15da3445d9015e4b4153a6059403452d324cbb0a575fa/coverage-7.10.7-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:dd5e856ebb7bfb7672b0086846db5afb4567a7b9714b8a0ebafd211ec7ce6a15", size = 219259, upload-time = "2025-09-21T20:02:20.44Z" }, - { url = "https://files.pythonhosted.org/packages/8b/51/66420081e72801536a091a0c8f8c1f88a5c4bf7b9b1bdc6222c7afe6dc9b/coverage-7.10.7-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f57b2a3c8353d3e04acf75b3fed57ba41f5c0646bbf1d10c7c282291c97936b4", size = 260592, upload-time = "2025-09-21T20:02:22.313Z" }, - { url = "https://files.pythonhosted.org/packages/5d/22/9b8d458c2881b22df3db5bb3e7369e63d527d986decb6c11a591ba2364f7/coverage-7.10.7-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1ef2319dd15a0b009667301a3f84452a4dc6fddfd06b0c5c53ea472d3989fbf0", size = 262768, upload-time = "2025-09-21T20:02:24.287Z" }, - { url = "https://files.pythonhosted.org/packages/f7/08/16bee2c433e60913c610ea200b276e8eeef084b0d200bdcff69920bd5828/coverage-7.10.7-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83082a57783239717ceb0ad584de3c69cf581b2a95ed6bf81ea66034f00401c0", size = 264995, upload-time = "2025-09-21T20:02:26.133Z" }, - { url = "https://files.pythonhosted.org/packages/20/9d/e53eb9771d154859b084b90201e5221bca7674ba449a17c101a5031d4054/coverage-7.10.7-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:50aa94fb1fb9a397eaa19c0d5ec15a5edd03a47bf1a3a6111a16b36e190cff65", size = 259546, upload-time = "2025-09-21T20:02:27.716Z" }, - { url = "https://files.pythonhosted.org/packages/ad/b0/69bc7050f8d4e56a89fb550a1577d5d0d1db2278106f6f626464067b3817/coverage-7.10.7-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2120043f147bebb41c85b97ac45dd173595ff14f2a584f2963891cbcc3091541", size = 262544, upload-time = "2025-09-21T20:02:29.216Z" }, - { url = "https://files.pythonhosted.org/packages/ef/4b/2514b060dbd1bc0aaf23b852c14bb5818f244c664cb16517feff6bb3a5ab/coverage-7.10.7-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2fafd773231dd0378fdba66d339f84904a8e57a262f583530f4f156ab83863e6", size = 260308, upload-time = "2025-09-21T20:02:31.226Z" }, - { url = "https://files.pythonhosted.org/packages/54/78/7ba2175007c246d75e496f64c06e94122bdb914790a1285d627a918bd271/coverage-7.10.7-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:0b944ee8459f515f28b851728ad224fa2d068f1513ef6b7ff1efafeb2185f999", size = 258920, upload-time = "2025-09-21T20:02:32.823Z" }, - { url = "https://files.pythonhosted.org/packages/c0/b3/fac9f7abbc841409b9a410309d73bfa6cfb2e51c3fada738cb607ce174f8/coverage-7.10.7-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4b583b97ab2e3efe1b3e75248a9b333bd3f8b0b1b8e5b45578e05e5850dfb2c2", size = 261434, upload-time = "2025-09-21T20:02:34.86Z" }, - { url = "https://files.pythonhosted.org/packages/ee/51/a03bec00d37faaa891b3ff7387192cef20f01604e5283a5fabc95346befa/coverage-7.10.7-cp313-cp313t-win32.whl", hash = "sha256:2a78cd46550081a7909b3329e2266204d584866e8d97b898cd7fb5ac8d888b1a", size = 221403, upload-time = "2025-09-21T20:02:37.034Z" }, - { url = "https://files.pythonhosted.org/packages/53/22/3cf25d614e64bf6d8e59c7c669b20d6d940bb337bdee5900b9ca41c820bb/coverage-7.10.7-cp313-cp313t-win_amd64.whl", hash = "sha256:33a5e6396ab684cb43dc7befa386258acb2d7fae7f67330ebb85ba4ea27938eb", size = 222469, upload-time = 
"2025-09-21T20:02:39.011Z" }, - { url = "https://files.pythonhosted.org/packages/49/a1/00164f6d30d8a01c3c9c48418a7a5be394de5349b421b9ee019f380df2a0/coverage-7.10.7-cp313-cp313t-win_arm64.whl", hash = "sha256:86b0e7308289ddde73d863b7683f596d8d21c7d8664ce1dee061d0bcf3fbb4bb", size = 220731, upload-time = "2025-09-21T20:02:40.939Z" }, - { url = "https://files.pythonhosted.org/packages/23/9c/5844ab4ca6a4dd97a1850e030a15ec7d292b5c5cb93082979225126e35dd/coverage-7.10.7-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b06f260b16ead11643a5a9f955bd4b5fd76c1a4c6796aeade8520095b75de520", size = 218302, upload-time = "2025-09-21T20:02:42.527Z" }, - { url = "https://files.pythonhosted.org/packages/f0/89/673f6514b0961d1f0e20ddc242e9342f6da21eaba3489901b565c0689f34/coverage-7.10.7-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:212f8f2e0612778f09c55dd4872cb1f64a1f2b074393d139278ce902064d5b32", size = 218578, upload-time = "2025-09-21T20:02:44.468Z" }, - { url = "https://files.pythonhosted.org/packages/05/e8/261cae479e85232828fb17ad536765c88dd818c8470aca690b0ac6feeaa3/coverage-7.10.7-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3445258bcded7d4aa630ab8296dea4d3f15a255588dd535f980c193ab6b95f3f", size = 249629, upload-time = "2025-09-21T20:02:46.503Z" }, - { url = "https://files.pythonhosted.org/packages/82/62/14ed6546d0207e6eda876434e3e8475a3e9adbe32110ce896c9e0c06bb9a/coverage-7.10.7-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb45474711ba385c46a0bfe696c695a929ae69ac636cda8f532be9e8c93d720a", size = 252162, upload-time = "2025-09-21T20:02:48.689Z" }, - { url = "https://files.pythonhosted.org/packages/ff/49/07f00db9ac6478e4358165a08fb41b469a1b053212e8a00cb02f0d27a05f/coverage-7.10.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:813922f35bd800dca9994c5971883cbc0d291128a5de6b167c7aa697fcf59360", size = 253517, upload-time = "2025-09-21T20:02:50.31Z" }, - { url = "https://files.pythonhosted.org/packages/a2/59/c5201c62dbf165dfbc91460f6dbbaa85a8b82cfa6131ac45d6c1bfb52deb/coverage-7.10.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:93c1b03552081b2a4423091d6fb3787265b8f86af404cff98d1b5342713bdd69", size = 249632, upload-time = "2025-09-21T20:02:51.971Z" }, - { url = "https://files.pythonhosted.org/packages/07/ae/5920097195291a51fb00b3a70b9bbd2edbfe3c84876a1762bd1ef1565ebc/coverage-7.10.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:cc87dd1b6eaf0b848eebb1c86469b9f72a1891cb42ac7adcfbce75eadb13dd14", size = 251520, upload-time = "2025-09-21T20:02:53.858Z" }, - { url = "https://files.pythonhosted.org/packages/b9/3c/a815dde77a2981f5743a60b63df31cb322c944843e57dbd579326625a413/coverage-7.10.7-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:39508ffda4f343c35f3236fe8d1a6634a51f4581226a1262769d7f970e73bffe", size = 249455, upload-time = "2025-09-21T20:02:55.807Z" }, - { url = "https://files.pythonhosted.org/packages/aa/99/f5cdd8421ea656abefb6c0ce92556709db2265c41e8f9fc6c8ae0f7824c9/coverage-7.10.7-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:925a1edf3d810537c5a3abe78ec5530160c5f9a26b1f4270b40e62cc79304a1e", size = 249287, upload-time = "2025-09-21T20:02:57.784Z" }, - { url = "https://files.pythonhosted.org/packages/c3/7a/e9a2da6a1fc5d007dd51fca083a663ab930a8c4d149c087732a5dbaa0029/coverage-7.10.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = 
"sha256:2c8b9a0636f94c43cd3576811e05b89aa9bc2d0a85137affc544ae5cb0e4bfbd", size = 250946, upload-time = "2025-09-21T20:02:59.431Z" }, - { url = "https://files.pythonhosted.org/packages/ef/5b/0b5799aa30380a949005a353715095d6d1da81927d6dbed5def2200a4e25/coverage-7.10.7-cp314-cp314-win32.whl", hash = "sha256:b7b8288eb7cdd268b0304632da8cb0bb93fadcfec2fe5712f7b9cc8f4d487be2", size = 221009, upload-time = "2025-09-21T20:03:01.324Z" }, - { url = "https://files.pythonhosted.org/packages/da/b0/e802fbb6eb746de006490abc9bb554b708918b6774b722bb3a0e6aa1b7de/coverage-7.10.7-cp314-cp314-win_amd64.whl", hash = "sha256:1ca6db7c8807fb9e755d0379ccc39017ce0a84dcd26d14b5a03b78563776f681", size = 221804, upload-time = "2025-09-21T20:03:03.4Z" }, - { url = "https://files.pythonhosted.org/packages/9e/e8/71d0c8e374e31f39e3389bb0bd19e527d46f00ea8571ec7ec8fd261d8b44/coverage-7.10.7-cp314-cp314-win_arm64.whl", hash = "sha256:097c1591f5af4496226d5783d036bf6fd6cd0cbc132e071b33861de756efb880", size = 220384, upload-time = "2025-09-21T20:03:05.111Z" }, - { url = "https://files.pythonhosted.org/packages/62/09/9a5608d319fa3eba7a2019addeacb8c746fb50872b57a724c9f79f146969/coverage-7.10.7-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:a62c6ef0d50e6de320c270ff91d9dd0a05e7250cac2a800b7784bae474506e63", size = 219047, upload-time = "2025-09-21T20:03:06.795Z" }, - { url = "https://files.pythonhosted.org/packages/f5/6f/f58d46f33db9f2e3647b2d0764704548c184e6f5e014bef528b7f979ef84/coverage-7.10.7-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:9fa6e4dd51fe15d8738708a973470f67a855ca50002294852e9571cdbd9433f2", size = 219266, upload-time = "2025-09-21T20:03:08.495Z" }, - { url = "https://files.pythonhosted.org/packages/74/5c/183ffc817ba68e0b443b8c934c8795553eb0c14573813415bd59941ee165/coverage-7.10.7-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8fb190658865565c549b6b4706856d6a7b09302c797eb2cf8e7fe9dabb043f0d", size = 260767, upload-time = "2025-09-21T20:03:10.172Z" }, - { url = "https://files.pythonhosted.org/packages/0f/48/71a8abe9c1ad7e97548835e3cc1adbf361e743e9d60310c5f75c9e7bf847/coverage-7.10.7-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:affef7c76a9ef259187ef31599a9260330e0335a3011732c4b9effa01e1cd6e0", size = 262931, upload-time = "2025-09-21T20:03:11.861Z" }, - { url = "https://files.pythonhosted.org/packages/84/fd/193a8fb132acfc0a901f72020e54be5e48021e1575bb327d8ee1097a28fd/coverage-7.10.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e16e07d85ca0cf8bafe5f5d23a0b850064e8e945d5677492b06bbe6f09cc699", size = 265186, upload-time = "2025-09-21T20:03:13.539Z" }, - { url = "https://files.pythonhosted.org/packages/b1/8f/74ecc30607dd95ad50e3034221113ccb1c6d4e8085cc761134782995daae/coverage-7.10.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:03ffc58aacdf65d2a82bbeb1ffe4d01ead4017a21bfd0454983b88ca73af94b9", size = 259470, upload-time = "2025-09-21T20:03:15.584Z" }, - { url = "https://files.pythonhosted.org/packages/0f/55/79ff53a769f20d71b07023ea115c9167c0bb56f281320520cf64c5298a96/coverage-7.10.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1b4fd784344d4e52647fd7857b2af5b3fbe6c239b0b5fa63e94eb67320770e0f", size = 262626, upload-time = "2025-09-21T20:03:17.673Z" }, - { url = 
"https://files.pythonhosted.org/packages/88/e2/dac66c140009b61ac3fc13af673a574b00c16efdf04f9b5c740703e953c0/coverage-7.10.7-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:0ebbaddb2c19b71912c6f2518e791aa8b9f054985a0769bdb3a53ebbc765c6a1", size = 260386, upload-time = "2025-09-21T20:03:19.36Z" }, - { url = "https://files.pythonhosted.org/packages/a2/f1/f48f645e3f33bb9ca8a496bc4a9671b52f2f353146233ebd7c1df6160440/coverage-7.10.7-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:a2d9a3b260cc1d1dbdb1c582e63ddcf5363426a1a68faa0f5da28d8ee3c722a0", size = 258852, upload-time = "2025-09-21T20:03:21.007Z" }, - { url = "https://files.pythonhosted.org/packages/bb/3b/8442618972c51a7affeead957995cfa8323c0c9bcf8fa5a027421f720ff4/coverage-7.10.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a3cc8638b2480865eaa3926d192e64ce6c51e3d29c849e09d5b4ad95efae5399", size = 261534, upload-time = "2025-09-21T20:03:23.12Z" }, - { url = "https://files.pythonhosted.org/packages/b2/dc/101f3fa3a45146db0cb03f5b4376e24c0aac818309da23e2de0c75295a91/coverage-7.10.7-cp314-cp314t-win32.whl", hash = "sha256:67f8c5cbcd3deb7a60b3345dffc89a961a484ed0af1f6f73de91705cc6e31235", size = 221784, upload-time = "2025-09-21T20:03:24.769Z" }, - { url = "https://files.pythonhosted.org/packages/4c/a1/74c51803fc70a8a40d7346660379e144be772bab4ac7bb6e6b905152345c/coverage-7.10.7-cp314-cp314t-win_amd64.whl", hash = "sha256:e1ed71194ef6dea7ed2d5cb5f7243d4bcd334bfb63e59878519be558078f848d", size = 222905, upload-time = "2025-09-21T20:03:26.93Z" }, - { url = "https://files.pythonhosted.org/packages/12/65/f116a6d2127df30bcafbceef0302d8a64ba87488bf6f73a6d8eebf060873/coverage-7.10.7-cp314-cp314t-win_arm64.whl", hash = "sha256:7fe650342addd8524ca63d77b2362b02345e5f1a093266787d210c70a50b471a", size = 220922, upload-time = "2025-09-21T20:03:28.672Z" }, - { url = "https://files.pythonhosted.org/packages/ec/16/114df1c291c22cac3b0c127a73e0af5c12ed7bbb6558d310429a0ae24023/coverage-7.10.7-py3-none-any.whl", hash = "sha256:f7941f6f2fe6dd6807a1208737b8a0cbcf1cc6d7b07d24998ad2d63590868260", size = 209952, upload-time = "2025-09-21T20:03:53.918Z" }, +version = "7.13.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/23/f9/e92df5e07f3fc8d4c7f9a0f146ef75446bf870351cd37b788cf5897f8079/coverage-7.13.1.tar.gz", hash = "sha256:b7593fe7eb5feaa3fbb461ac79aac9f9fc0387a5ca8080b0c6fe2ca27b091afd", size = 825862, upload-time = "2025-12-28T15:42:56.969Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2d/9a/3742e58fd04b233df95c012ee9f3dfe04708a5e1d32613bd2d47d4e1be0d/coverage-7.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e1fa280b3ad78eea5be86f94f461c04943d942697e0dac889fa18fff8f5f9147", size = 218633, upload-time = "2025-12-28T15:40:10.165Z" }, + { url = "https://files.pythonhosted.org/packages/7e/45/7e6bdc94d89cd7c8017ce735cf50478ddfe765d4fbf0c24d71d30ea33d7a/coverage-7.13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c3d8c679607220979434f494b139dfb00131ebf70bb406553d69c1ff01a5c33d", size = 219147, upload-time = "2025-12-28T15:40:12.069Z" }, + { url = "https://files.pythonhosted.org/packages/f7/38/0d6a258625fd7f10773fe94097dc16937a5f0e3e0cdf3adef67d3ac6baef/coverage-7.13.1-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:339dc63b3eba969067b00f41f15ad161bf2946613156fb131266d8debc8e44d0", size = 245894, upload-time = "2025-12-28T15:40:13.556Z" }, + { url = 
"https://files.pythonhosted.org/packages/27/58/409d15ea487986994cbd4d06376e9860e9b157cfbfd402b1236770ab8dd2/coverage-7.13.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:db622b999ffe49cb891f2fff3b340cdc2f9797d01a0a202a0973ba2562501d90", size = 247721, upload-time = "2025-12-28T15:40:15.37Z" }, + { url = "https://files.pythonhosted.org/packages/da/bf/6e8056a83fd7a96c93341f1ffe10df636dd89f26d5e7b9ca511ce3bcf0df/coverage-7.13.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1443ba9acbb593fa7c1c29e011d7c9761545fe35e7652e85ce7f51a16f7e08d", size = 249585, upload-time = "2025-12-28T15:40:17.226Z" }, + { url = "https://files.pythonhosted.org/packages/f4/15/e1daff723f9f5959acb63cbe35b11203a9df77ee4b95b45fffd38b318390/coverage-7.13.1-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c832ec92c4499ac463186af72f9ed4d8daec15499b16f0a879b0d1c8e5cf4a3b", size = 246597, upload-time = "2025-12-28T15:40:19.028Z" }, + { url = "https://files.pythonhosted.org/packages/74/a6/1efd31c5433743a6ddbc9d37ac30c196bb07c7eab3d74fbb99b924c93174/coverage-7.13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:562ec27dfa3f311e0db1ba243ec6e5f6ab96b1edfcfc6cf86f28038bc4961ce6", size = 247626, upload-time = "2025-12-28T15:40:20.846Z" }, + { url = "https://files.pythonhosted.org/packages/6d/9f/1609267dd3e749f57fdd66ca6752567d1c13b58a20a809dc409b263d0b5f/coverage-7.13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4de84e71173d4dada2897e5a0e1b7877e5eefbfe0d6a44edee6ce31d9b8ec09e", size = 245629, upload-time = "2025-12-28T15:40:22.397Z" }, + { url = "https://files.pythonhosted.org/packages/e2/f6/6815a220d5ec2466383d7cc36131b9fa6ecbe95c50ec52a631ba733f306a/coverage-7.13.1-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:a5a68357f686f8c4d527a2dc04f52e669c2fc1cbde38f6f7eb6a0e58cbd17cae", size = 245901, upload-time = "2025-12-28T15:40:23.836Z" }, + { url = "https://files.pythonhosted.org/packages/ac/58/40576554cd12e0872faf6d2c0eb3bc85f71d78427946ddd19ad65201e2c0/coverage-7.13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:77cc258aeb29a3417062758975521eae60af6f79e930d6993555eeac6a8eac29", size = 246505, upload-time = "2025-12-28T15:40:25.421Z" }, + { url = "https://files.pythonhosted.org/packages/3b/77/9233a90253fba576b0eee81707b5781d0e21d97478e5377b226c5b096c0f/coverage-7.13.1-cp310-cp310-win32.whl", hash = "sha256:bb4f8c3c9a9f34423dba193f241f617b08ffc63e27f67159f60ae6baf2dcfe0f", size = 221257, upload-time = "2025-12-28T15:40:27.217Z" }, + { url = "https://files.pythonhosted.org/packages/e0/43/e842ff30c1a0a623ec80db89befb84a3a7aad7bfe44a6ea77d5a3e61fedd/coverage-7.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:c8e2706ceb622bc63bac98ebb10ef5da80ed70fbd8a7999a5076de3afaef0fb1", size = 222191, upload-time = "2025-12-28T15:40:28.916Z" }, + { url = "https://files.pythonhosted.org/packages/b4/9b/77baf488516e9ced25fc215a6f75d803493fc3f6a1a1227ac35697910c2a/coverage-7.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a55d509a1dc5a5b708b5dad3b5334e07a16ad4c2185e27b40e4dba796ab7f88", size = 218755, upload-time = "2025-12-28T15:40:30.812Z" }, + { url = "https://files.pythonhosted.org/packages/d7/cd/7ab01154e6eb79ee2fab76bf4d89e94c6648116557307ee4ebbb85e5c1bf/coverage-7.13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4d010d080c4888371033baab27e47c9df7d6fb28d0b7b7adf85a4a49be9298b3", size = 219257, upload-time = "2025-12-28T15:40:32.333Z" }, + { url = 
"https://files.pythonhosted.org/packages/01/d5/b11ef7863ffbbdb509da0023fad1e9eda1c0eaea61a6d2ea5b17d4ac706e/coverage-7.13.1-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d938b4a840fb1523b9dfbbb454f652967f18e197569c32266d4d13f37244c3d9", size = 249657, upload-time = "2025-12-28T15:40:34.1Z" }, + { url = "https://files.pythonhosted.org/packages/f7/7c/347280982982383621d29b8c544cf497ae07ac41e44b1ca4903024131f55/coverage-7.13.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bf100a3288f9bb7f919b87eb84f87101e197535b9bd0e2c2b5b3179633324fee", size = 251581, upload-time = "2025-12-28T15:40:36.131Z" }, + { url = "https://files.pythonhosted.org/packages/82/f6/ebcfed11036ade4c0d75fa4453a6282bdd225bc073862766eec184a4c643/coverage-7.13.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef6688db9bf91ba111ae734ba6ef1a063304a881749726e0d3575f5c10a9facf", size = 253691, upload-time = "2025-12-28T15:40:37.626Z" }, + { url = "https://files.pythonhosted.org/packages/02/92/af8f5582787f5d1a8b130b2dcba785fa5e9a7a8e121a0bb2220a6fdbdb8a/coverage-7.13.1-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0b609fc9cdbd1f02e51f67f51e5aee60a841ef58a68d00d5ee2c0faf357481a3", size = 249799, upload-time = "2025-12-28T15:40:39.47Z" }, + { url = "https://files.pythonhosted.org/packages/24/aa/0e39a2a3b16eebf7f193863323edbff38b6daba711abaaf807d4290cf61a/coverage-7.13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c43257717611ff5e9a1d79dce8e47566235ebda63328718d9b65dd640bc832ef", size = 251389, upload-time = "2025-12-28T15:40:40.954Z" }, + { url = "https://files.pythonhosted.org/packages/73/46/7f0c13111154dc5b978900c0ccee2e2ca239b910890e674a77f1363d483e/coverage-7.13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e09fbecc007f7b6afdfb3b07ce5bd9f8494b6856dd4f577d26c66c391b829851", size = 249450, upload-time = "2025-12-28T15:40:42.489Z" }, + { url = "https://files.pythonhosted.org/packages/ac/ca/e80da6769e8b669ec3695598c58eef7ad98b0e26e66333996aee6316db23/coverage-7.13.1-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:a03a4f3a19a189919c7055098790285cc5c5b0b3976f8d227aea39dbf9f8bfdb", size = 249170, upload-time = "2025-12-28T15:40:44.279Z" }, + { url = "https://files.pythonhosted.org/packages/af/18/9e29baabdec1a8644157f572541079b4658199cfd372a578f84228e860de/coverage-7.13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3820778ea1387c2b6a818caec01c63adc5b3750211af6447e8dcfb9b6f08dbba", size = 250081, upload-time = "2025-12-28T15:40:45.748Z" }, + { url = "https://files.pythonhosted.org/packages/00/f8/c3021625a71c3b2f516464d322e41636aea381018319050a8114105872ee/coverage-7.13.1-cp311-cp311-win32.whl", hash = "sha256:ff10896fa55167371960c5908150b434b71c876dfab97b69478f22c8b445ea19", size = 221281, upload-time = "2025-12-28T15:40:47.232Z" }, + { url = "https://files.pythonhosted.org/packages/27/56/c216625f453df6e0559ed666d246fcbaaa93f3aa99eaa5080cea1229aa3d/coverage-7.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:a998cc0aeeea4c6d5622a3754da5a493055d2d95186bad877b0a34ea6e6dbe0a", size = 222215, upload-time = "2025-12-28T15:40:49.19Z" }, + { url = "https://files.pythonhosted.org/packages/5c/9a/be342e76f6e531cae6406dc46af0d350586f24d9b67fdfa6daee02df71af/coverage-7.13.1-cp311-cp311-win_arm64.whl", hash = "sha256:fea07c1a39a22614acb762e3fbbb4011f65eedafcb2948feeef641ac78b4ee5c", size = 220886, upload-time = "2025-12-28T15:40:51.067Z" }, + { 
url = "https://files.pythonhosted.org/packages/ce/8a/87af46cccdfa78f53db747b09f5f9a21d5fc38d796834adac09b30a8ce74/coverage-7.13.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6f34591000f06e62085b1865c9bc5f7858df748834662a51edadfd2c3bfe0dd3", size = 218927, upload-time = "2025-12-28T15:40:52.814Z" }, + { url = "https://files.pythonhosted.org/packages/82/a8/6e22fdc67242a4a5a153f9438d05944553121c8f4ba70cb072af4c41362e/coverage-7.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b67e47c5595b9224599016e333f5ec25392597a89d5744658f837d204e16c63e", size = 219288, upload-time = "2025-12-28T15:40:54.262Z" }, + { url = "https://files.pythonhosted.org/packages/d0/0a/853a76e03b0f7c4375e2ca025df45c918beb367f3e20a0a8e91967f6e96c/coverage-7.13.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3e7b8bd70c48ffb28461ebe092c2345536fb18bbbf19d287c8913699735f505c", size = 250786, upload-time = "2025-12-28T15:40:56.059Z" }, + { url = "https://files.pythonhosted.org/packages/ea/b4/694159c15c52b9f7ec7adf49d50e5f8ee71d3e9ef38adb4445d13dd56c20/coverage-7.13.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c223d078112e90dc0e5c4e35b98b9584164bea9fbbd221c0b21c5241f6d51b62", size = 253543, upload-time = "2025-12-28T15:40:57.585Z" }, + { url = "https://files.pythonhosted.org/packages/96/b2/7f1f0437a5c855f87e17cf5d0dc35920b6440ff2b58b1ba9788c059c26c8/coverage-7.13.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:794f7c05af0763b1bbd1b9e6eff0e52ad068be3b12cd96c87de037b01390c968", size = 254635, upload-time = "2025-12-28T15:40:59.443Z" }, + { url = "https://files.pythonhosted.org/packages/e9/d1/73c3fdb8d7d3bddd9473c9c6a2e0682f09fc3dfbcb9c3f36412a7368bcab/coverage-7.13.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0642eae483cc8c2902e4af7298bf886d605e80f26382124cddc3967c2a3df09e", size = 251202, upload-time = "2025-12-28T15:41:01.328Z" }, + { url = "https://files.pythonhosted.org/packages/66/3c/f0edf75dcc152f145d5598329e864bbbe04ab78660fe3e8e395f9fff010f/coverage-7.13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9f5e772ed5fef25b3de9f2008fe67b92d46831bd2bc5bdc5dd6bfd06b83b316f", size = 252566, upload-time = "2025-12-28T15:41:03.319Z" }, + { url = "https://files.pythonhosted.org/packages/17/b3/e64206d3c5f7dcbceafd14941345a754d3dbc78a823a6ed526e23b9cdaab/coverage-7.13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:45980ea19277dc0a579e432aef6a504fe098ef3a9032ead15e446eb0f1191aee", size = 250711, upload-time = "2025-12-28T15:41:06.411Z" }, + { url = "https://files.pythonhosted.org/packages/dc/ad/28a3eb970a8ef5b479ee7f0c484a19c34e277479a5b70269dc652b730733/coverage-7.13.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:e4f18eca6028ffa62adbd185a8f1e1dd242f2e68164dba5c2b74a5204850b4cf", size = 250278, upload-time = "2025-12-28T15:41:08.285Z" }, + { url = "https://files.pythonhosted.org/packages/54/e3/c8f0f1a93133e3e1291ca76cbb63565bd4b5c5df63b141f539d747fff348/coverage-7.13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f8dca5590fec7a89ed6826fce625595279e586ead52e9e958d3237821fbc750c", size = 252154, upload-time = "2025-12-28T15:41:09.969Z" }, + { url = "https://files.pythonhosted.org/packages/d0/bf/9939c5d6859c380e405b19e736321f1c7d402728792f4c752ad1adcce005/coverage-7.13.1-cp312-cp312-win32.whl", hash = "sha256:ff86d4e85188bba72cfb876df3e11fa243439882c55957184af44a35bd5880b7", size = 221487, upload-time = 
"2025-12-28T15:41:11.468Z" }, + { url = "https://files.pythonhosted.org/packages/fa/dc/7282856a407c621c2aad74021680a01b23010bb8ebf427cf5eacda2e876f/coverage-7.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:16cc1da46c04fb0fb128b4dc430b78fa2aba8a6c0c9f8eb391fd5103409a6ac6", size = 222299, upload-time = "2025-12-28T15:41:13.386Z" }, + { url = "https://files.pythonhosted.org/packages/10/79/176a11203412c350b3e9578620013af35bcdb79b651eb976f4a4b32044fa/coverage-7.13.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d9bc218650022a768f3775dd7fdac1886437325d8d295d923ebcfef4892ad5c", size = 220941, upload-time = "2025-12-28T15:41:14.975Z" }, + { url = "https://files.pythonhosted.org/packages/a3/a4/e98e689347a1ff1a7f67932ab535cef82eb5e78f32a9e4132e114bbb3a0a/coverage-7.13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cb237bfd0ef4d5eb6a19e29f9e528ac67ac3be932ea6b44fb6cc09b9f3ecff78", size = 218951, upload-time = "2025-12-28T15:41:16.653Z" }, + { url = "https://files.pythonhosted.org/packages/32/33/7cbfe2bdc6e2f03d6b240d23dc45fdaf3fd270aaf2d640be77b7f16989ab/coverage-7.13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1dcb645d7e34dcbcc96cd7c132b1fc55c39263ca62eb961c064eb3928997363b", size = 219325, upload-time = "2025-12-28T15:41:18.609Z" }, + { url = "https://files.pythonhosted.org/packages/59/f6/efdabdb4929487baeb7cb2a9f7dac457d9356f6ad1b255be283d58b16316/coverage-7.13.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3d42df8201e00384736f0df9be2ced39324c3907607d17d50d50116c989d84cd", size = 250309, upload-time = "2025-12-28T15:41:20.629Z" }, + { url = "https://files.pythonhosted.org/packages/12/da/91a52516e9d5aea87d32d1523f9cdcf7a35a3b298e6be05d6509ba3cfab2/coverage-7.13.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fa3edde1aa8807de1d05934982416cb3ec46d1d4d91e280bcce7cca01c507992", size = 252907, upload-time = "2025-12-28T15:41:22.257Z" }, + { url = "https://files.pythonhosted.org/packages/75/38/f1ea837e3dc1231e086db1638947e00d264e7e8c41aa8ecacf6e1e0c05f4/coverage-7.13.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9edd0e01a343766add6817bc448408858ba6b489039eaaa2018474e4001651a4", size = 254148, upload-time = "2025-12-28T15:41:23.87Z" }, + { url = "https://files.pythonhosted.org/packages/7f/43/f4f16b881aaa34954ba446318dea6b9ed5405dd725dd8daac2358eda869a/coverage-7.13.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:985b7836931d033570b94c94713c6dba5f9d3ff26045f72c3e5dbc5fe3361e5a", size = 250515, upload-time = "2025-12-28T15:41:25.437Z" }, + { url = "https://files.pythonhosted.org/packages/84/34/8cba7f00078bd468ea914134e0144263194ce849ec3baad187ffb6203d1c/coverage-7.13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ffed1e4980889765c84a5d1a566159e363b71d6b6fbaf0bebc9d3c30bc016766", size = 252292, upload-time = "2025-12-28T15:41:28.459Z" }, + { url = "https://files.pythonhosted.org/packages/8c/a4/cffac66c7652d84ee4ac52d3ccb94c015687d3b513f9db04bfcac2ac800d/coverage-7.13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8842af7f175078456b8b17f1b73a0d16a65dcbdc653ecefeb00a56b3c8c298c4", size = 250242, upload-time = "2025-12-28T15:41:30.02Z" }, + { url = "https://files.pythonhosted.org/packages/f4/78/9a64d462263dde416f3c0067efade7b52b52796f489b1037a95b0dc389c9/coverage-7.13.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:ccd7a6fca48ca9c131d9b0a2972a581e28b13416fc313fb98b6d24a03ce9a398", size = 
250068, upload-time = "2025-12-28T15:41:32.007Z" }, + { url = "https://files.pythonhosted.org/packages/69/c8/a8994f5fece06db7c4a97c8fc1973684e178599b42e66280dded0524ef00/coverage-7.13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0403f647055de2609be776965108447deb8e384fe4a553c119e3ff6bfbab4784", size = 251846, upload-time = "2025-12-28T15:41:33.946Z" }, + { url = "https://files.pythonhosted.org/packages/cc/f7/91fa73c4b80305c86598a2d4e54ba22df6bf7d0d97500944af7ef155d9f7/coverage-7.13.1-cp313-cp313-win32.whl", hash = "sha256:549d195116a1ba1e1ae2f5ca143f9777800f6636eab917d4f02b5310d6d73461", size = 221512, upload-time = "2025-12-28T15:41:35.519Z" }, + { url = "https://files.pythonhosted.org/packages/45/0b/0768b4231d5a044da8f75e097a8714ae1041246bb765d6b5563bab456735/coverage-7.13.1-cp313-cp313-win_amd64.whl", hash = "sha256:5899d28b5276f536fcf840b18b61a9fce23cc3aec1d114c44c07fe94ebeaa500", size = 222321, upload-time = "2025-12-28T15:41:37.371Z" }, + { url = "https://files.pythonhosted.org/packages/9b/b8/bdcb7253b7e85157282450262008f1366aa04663f3e3e4c30436f596c3e2/coverage-7.13.1-cp313-cp313-win_arm64.whl", hash = "sha256:868a2fae76dfb06e87291bcbd4dcbcc778a8500510b618d50496e520bd94d9b9", size = 220949, upload-time = "2025-12-28T15:41:39.553Z" }, + { url = "https://files.pythonhosted.org/packages/70/52/f2be52cc445ff75ea8397948c96c1b4ee14f7f9086ea62fc929c5ae7b717/coverage-7.13.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:67170979de0dacac3f3097d02b0ad188d8edcea44ccc44aaa0550af49150c7dc", size = 219643, upload-time = "2025-12-28T15:41:41.567Z" }, + { url = "https://files.pythonhosted.org/packages/47/79/c85e378eaa239e2edec0c5523f71542c7793fe3340954eafb0bc3904d32d/coverage-7.13.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f80e2bb21bfab56ed7405c2d79d34b5dc0bc96c2c1d2a067b643a09fb756c43a", size = 219997, upload-time = "2025-12-28T15:41:43.418Z" }, + { url = "https://files.pythonhosted.org/packages/fe/9b/b1ade8bfb653c0bbce2d6d6e90cc6c254cbb99b7248531cc76253cb4da6d/coverage-7.13.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f83351e0f7dcdb14d7326c3d8d8c4e915fa685cbfdc6281f9470d97a04e9dfe4", size = 261296, upload-time = "2025-12-28T15:41:45.207Z" }, + { url = "https://files.pythonhosted.org/packages/1f/af/ebf91e3e1a2473d523e87e87fd8581e0aa08741b96265730e2d79ce78d8d/coverage-7.13.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb3f6562e89bad0110afbe64e485aac2462efdce6232cdec7862a095dc3412f6", size = 263363, upload-time = "2025-12-28T15:41:47.163Z" }, + { url = "https://files.pythonhosted.org/packages/c4/8b/fb2423526d446596624ac7fde12ea4262e66f86f5120114c3cfd0bb2befa/coverage-7.13.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77545b5dcda13b70f872c3b5974ac64c21d05e65b1590b441c8560115dc3a0d1", size = 265783, upload-time = "2025-12-28T15:41:49.03Z" }, + { url = "https://files.pythonhosted.org/packages/9b/26/ef2adb1e22674913b89f0fe7490ecadcef4a71fa96f5ced90c60ec358789/coverage-7.13.1-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a4d240d260a1aed814790bbe1f10a5ff31ce6c21bc78f0da4a1e8268d6c80dbd", size = 260508, upload-time = "2025-12-28T15:41:51.035Z" }, + { url = "https://files.pythonhosted.org/packages/ce/7d/f0f59b3404caf662e7b5346247883887687c074ce67ba453ea08c612b1d5/coverage-7.13.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:d2287ac9360dec3837bfdad969963a5d073a09a85d898bd86bea82aa8876ef3c", size = 263357, upload-time = "2025-12-28T15:41:52.631Z" }, + { url = "https://files.pythonhosted.org/packages/1a/b1/29896492b0b1a047604d35d6fa804f12818fa30cdad660763a5f3159e158/coverage-7.13.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0d2c11f3ea4db66b5cbded23b20185c35066892c67d80ec4be4bab257b9ad1e0", size = 260978, upload-time = "2025-12-28T15:41:54.589Z" }, + { url = "https://files.pythonhosted.org/packages/48/f2/971de1238a62e6f0a4128d37adadc8bb882ee96afbe03ff1570291754629/coverage-7.13.1-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:3fc6a169517ca0d7ca6846c3c5392ef2b9e38896f61d615cb75b9e7134d4ee1e", size = 259877, upload-time = "2025-12-28T15:41:56.263Z" }, + { url = "https://files.pythonhosted.org/packages/6a/fc/0474efcbb590ff8628830e9aaec5f1831594874360e3251f1fdec31d07a3/coverage-7.13.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d10a2ed46386e850bb3de503a54f9fe8192e5917fcbb143bfef653a9355e9a53", size = 262069, upload-time = "2025-12-28T15:41:58.093Z" }, + { url = "https://files.pythonhosted.org/packages/88/4f/3c159b7953db37a7b44c0eab8a95c37d1aa4257c47b4602c04022d5cb975/coverage-7.13.1-cp313-cp313t-win32.whl", hash = "sha256:75a6f4aa904301dab8022397a22c0039edc1f51e90b83dbd4464b8a38dc87842", size = 222184, upload-time = "2025-12-28T15:41:59.763Z" }, + { url = "https://files.pythonhosted.org/packages/58/a5/6b57d28f81417f9335774f20679d9d13b9a8fb90cd6160957aa3b54a2379/coverage-7.13.1-cp313-cp313t-win_amd64.whl", hash = "sha256:309ef5706e95e62578cda256b97f5e097916a2c26247c287bbe74794e7150df2", size = 223250, upload-time = "2025-12-28T15:42:01.52Z" }, + { url = "https://files.pythonhosted.org/packages/81/7c/160796f3b035acfbb58be80e02e484548595aa67e16a6345e7910ace0a38/coverage-7.13.1-cp313-cp313t-win_arm64.whl", hash = "sha256:92f980729e79b5d16d221038dbf2e8f9a9136afa072f9d5d6ed4cb984b126a09", size = 221521, upload-time = "2025-12-28T15:42:03.275Z" }, + { url = "https://files.pythonhosted.org/packages/aa/8e/ba0e597560c6563fc0adb902fda6526df5d4aa73bb10adf0574d03bd2206/coverage-7.13.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:97ab3647280d458a1f9adb85244e81587505a43c0c7cff851f5116cd2814b894", size = 218996, upload-time = "2025-12-28T15:42:04.978Z" }, + { url = "https://files.pythonhosted.org/packages/6b/8e/764c6e116f4221dc7aa26c4061181ff92edb9c799adae6433d18eeba7a14/coverage-7.13.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8f572d989142e0908e6acf57ad1b9b86989ff057c006d13b76c146ec6a20216a", size = 219326, upload-time = "2025-12-28T15:42:06.691Z" }, + { url = "https://files.pythonhosted.org/packages/4f/a6/6130dc6d8da28cdcbb0f2bf8865aeca9b157622f7c0031e48c6cf9a0e591/coverage-7.13.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d72140ccf8a147e94274024ff6fd8fb7811354cf7ef88b1f0a988ebaa5bc774f", size = 250374, upload-time = "2025-12-28T15:42:08.786Z" }, + { url = "https://files.pythonhosted.org/packages/82/2b/783ded568f7cd6b677762f780ad338bf4b4750205860c17c25f7c708995e/coverage-7.13.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d3c9f051b028810f5a87c88e5d6e9af3c0ff32ef62763bf15d29f740453ca909", size = 252882, upload-time = "2025-12-28T15:42:10.515Z" }, + { url = "https://files.pythonhosted.org/packages/cd/b2/9808766d082e6a4d59eb0cc881a57fc1600eb2c5882813eefff8254f71b5/coverage-7.13.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:f398ba4df52d30b1763f62eed9de5620dcde96e6f491f4c62686736b155aa6e4", size = 254218, upload-time = "2025-12-28T15:42:12.208Z" }, + { url = "https://files.pythonhosted.org/packages/44/ea/52a985bb447c871cb4d2e376e401116520991b597c85afdde1ea9ef54f2c/coverage-7.13.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:132718176cc723026d201e347f800cd1a9e4b62ccd3f82476950834dad501c75", size = 250391, upload-time = "2025-12-28T15:42:14.21Z" }, + { url = "https://files.pythonhosted.org/packages/7f/1d/125b36cc12310718873cfc8209ecfbc1008f14f4f5fa0662aa608e579353/coverage-7.13.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9e549d642426e3579b3f4b92d0431543b012dcb6e825c91619d4e93b7363c3f9", size = 252239, upload-time = "2025-12-28T15:42:16.292Z" }, + { url = "https://files.pythonhosted.org/packages/6a/16/10c1c164950cade470107f9f14bbac8485f8fb8515f515fca53d337e4a7f/coverage-7.13.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:90480b2134999301eea795b3a9dbf606c6fbab1b489150c501da84a959442465", size = 250196, upload-time = "2025-12-28T15:42:18.54Z" }, + { url = "https://files.pythonhosted.org/packages/2a/c6/cd860fac08780c6fd659732f6ced1b40b79c35977c1356344e44d72ba6c4/coverage-7.13.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:e825dbb7f84dfa24663dd75835e7257f8882629fc11f03ecf77d84a75134b864", size = 250008, upload-time = "2025-12-28T15:42:20.365Z" }, + { url = "https://files.pythonhosted.org/packages/f0/3a/a8c58d3d38f82a5711e1e0a67268362af48e1a03df27c03072ac30feefcf/coverage-7.13.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:623dcc6d7a7ba450bbdbeedbaa0c42b329bdae16491af2282f12a7e809be7eb9", size = 251671, upload-time = "2025-12-28T15:42:22.114Z" }, + { url = "https://files.pythonhosted.org/packages/f0/bc/fd4c1da651d037a1e3d53e8cb3f8182f4b53271ffa9a95a2e211bacc0349/coverage-7.13.1-cp314-cp314-win32.whl", hash = "sha256:6e73ebb44dca5f708dc871fe0b90cf4cff1a13f9956f747cc87b535a840386f5", size = 221777, upload-time = "2025-12-28T15:42:23.919Z" }, + { url = "https://files.pythonhosted.org/packages/4b/50/71acabdc8948464c17e90b5ffd92358579bd0910732c2a1c9537d7536aa6/coverage-7.13.1-cp314-cp314-win_amd64.whl", hash = "sha256:be753b225d159feb397bd0bf91ae86f689bad0da09d3b301478cd39b878ab31a", size = 222592, upload-time = "2025-12-28T15:42:25.619Z" }, + { url = "https://files.pythonhosted.org/packages/f7/c8/a6fb943081bb0cc926499c7907731a6dc9efc2cbdc76d738c0ab752f1a32/coverage-7.13.1-cp314-cp314-win_arm64.whl", hash = "sha256:228b90f613b25ba0019361e4ab81520b343b622fc657daf7e501c4ed6a2366c0", size = 221169, upload-time = "2025-12-28T15:42:27.629Z" }, + { url = "https://files.pythonhosted.org/packages/16/61/d5b7a0a0e0e40d62e59bc8c7aa1afbd86280d82728ba97f0673b746b78e2/coverage-7.13.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:60cfb538fe9ef86e5b2ab0ca8fc8d62524777f6c611dcaf76dc16fbe9b8e698a", size = 219730, upload-time = "2025-12-28T15:42:29.306Z" }, + { url = "https://files.pythonhosted.org/packages/a3/2c/8881326445fd071bb49514d1ce97d18a46a980712b51fee84f9ab42845b4/coverage-7.13.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:57dfc8048c72ba48a8c45e188d811e5efd7e49b387effc8fb17e97936dde5bf6", size = 220001, upload-time = "2025-12-28T15:42:31.319Z" }, + { url = "https://files.pythonhosted.org/packages/b5/d7/50de63af51dfa3a7f91cc37ad8fcc1e244b734232fbc8b9ab0f3c834a5cd/coverage-7.13.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3f2f725aa3e909b3c5fdb8192490bdd8e1495e85906af74fe6e34a2a77ba0673", size = 
261370, upload-time = "2025-12-28T15:42:32.992Z" }, + { url = "https://files.pythonhosted.org/packages/e1/2c/d31722f0ec918fd7453b2758312729f645978d212b410cd0f7c2aed88a94/coverage-7.13.1-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9ee68b21909686eeb21dfcba2c3b81fee70dcf38b140dcd5aa70680995fa3aa5", size = 263485, upload-time = "2025-12-28T15:42:34.759Z" }, + { url = "https://files.pythonhosted.org/packages/fa/7a/2c114fa5c5fc08ba0777e4aec4c97e0b4a1afcb69c75f1f54cff78b073ab/coverage-7.13.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:724b1b270cb13ea2e6503476e34541a0b1f62280bc997eab443f87790202033d", size = 265890, upload-time = "2025-12-28T15:42:36.517Z" }, + { url = "https://files.pythonhosted.org/packages/65/d9/f0794aa1c74ceabc780fe17f6c338456bbc4e96bd950f2e969f48ac6fb20/coverage-7.13.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:916abf1ac5cf7eb16bc540a5bf75c71c43a676f5c52fcb9fe75a2bd75fb944e8", size = 260445, upload-time = "2025-12-28T15:42:38.646Z" }, + { url = "https://files.pythonhosted.org/packages/49/23/184b22a00d9bb97488863ced9454068c79e413cb23f472da6cbddc6cfc52/coverage-7.13.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:776483fd35b58d8afe3acbd9988d5de592ab6da2d2a865edfdbc9fdb43e7c486", size = 263357, upload-time = "2025-12-28T15:42:40.788Z" }, + { url = "https://files.pythonhosted.org/packages/7d/bd/58af54c0c9199ea4190284f389005779d7daf7bf3ce40dcd2d2b2f96da69/coverage-7.13.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b6f3b96617e9852703f5b633ea01315ca45c77e879584f283c44127f0f1ec564", size = 260959, upload-time = "2025-12-28T15:42:42.808Z" }, + { url = "https://files.pythonhosted.org/packages/4b/2a/6839294e8f78a4891bf1df79d69c536880ba2f970d0ff09e7513d6e352e9/coverage-7.13.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:bd63e7b74661fed317212fab774e2a648bc4bb09b35f25474f8e3325d2945cd7", size = 259792, upload-time = "2025-12-28T15:42:44.818Z" }, + { url = "https://files.pythonhosted.org/packages/ba/c3/528674d4623283310ad676c5af7414b9850ab6d55c2300e8aa4b945ec554/coverage-7.13.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:933082f161bbb3e9f90d00990dc956120f608cdbcaeea15c4d897f56ef4fe416", size = 262123, upload-time = "2025-12-28T15:42:47.108Z" }, + { url = "https://files.pythonhosted.org/packages/06/c5/8c0515692fb4c73ac379d8dc09b18eaf0214ecb76ea6e62467ba7a1556ff/coverage-7.13.1-cp314-cp314t-win32.whl", hash = "sha256:18be793c4c87de2965e1c0f060f03d9e5aff66cfeae8e1dbe6e5b88056ec153f", size = 222562, upload-time = "2025-12-28T15:42:49.144Z" }, + { url = "https://files.pythonhosted.org/packages/05/0e/c0a0c4678cb30dac735811db529b321d7e1c9120b79bd728d4f4d6b010e9/coverage-7.13.1-cp314-cp314t-win_amd64.whl", hash = "sha256:0e42e0ec0cd3e0d851cb3c91f770c9301f48647cb2877cb78f74bdaa07639a79", size = 223670, upload-time = "2025-12-28T15:42:51.218Z" }, + { url = "https://files.pythonhosted.org/packages/f5/5f/b177aa0011f354abf03a8f30a85032686d290fdeed4222b27d36b4372a50/coverage-7.13.1-cp314-cp314t-win_arm64.whl", hash = "sha256:eaecf47ef10c72ece9a2a92118257da87e460e113b83cc0d2905cbbe931792b4", size = 221707, upload-time = "2025-12-28T15:42:53.034Z" }, + { url = "https://files.pythonhosted.org/packages/cc/48/d9f421cb8da5afaa1a64570d9989e00fb7955e6acddc5a12979f7666ef60/coverage-7.13.1-py3-none-any.whl", hash = "sha256:2016745cb3ba554469d02819d78958b571792bb68e31302610e898f80dd3a573", size = 210722, upload-time = 
"2025-12-28T15:42:54.901Z" }, ] [package.optional-dependencies] @@ -480,28 +455,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f6/34/31a1604c9a9ade0fdab61eb48570e09a796f4d9836121266447b0eaf7feb/cryptography-45.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:e357286c1b76403dd384d938f93c46b2b058ed4dfcdce64a770f0537ed3feb6f", size = 3331106, upload-time = "2025-07-02T13:06:18.058Z" }, ] -[[package]] -name = "cssselect2" -version = "0.8.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "tinycss2" }, - { name = "webencodings" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/9f/86/fd7f58fc498b3166f3a7e8e0cddb6e620fe1da35b02248b1bd59e95dbaaa/cssselect2-0.8.0.tar.gz", hash = "sha256:7674ffb954a3b46162392aee2a3a0aedb2e14ecf99fcc28644900f4e6e3e9d3a", size = 35716, upload-time = "2025-03-05T14:46:07.988Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0f/e7/aa315e6a749d9b96c2504a1ba0ba031ba2d0517e972ce22682e3fccecb09/cssselect2-0.8.0-py3-none-any.whl", hash = "sha256:46fc70ebc41ced7a32cd42d58b1884d72ade23d21e5a4eaaf022401c13f0e76e", size = 15454, upload-time = "2025-03-05T14:46:06.463Z" }, -] - -[[package]] -name = "defusedxml" -version = "0.7.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0f/d5/c66da9b79e5bdb124974bfe172b4daf3c984ebd9c2a06e2b8a4dc7331c72/defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69", size = 75520, upload-time = "2021-03-08T10:59:26.269Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61", size = 25604, upload-time = "2021-03-08T10:59:24.45Z" }, -] - [[package]] name = "dirty-equals" version = "0.9.0" @@ -771,7 +724,8 @@ dependencies = [ { name = "httpx" }, { name = "httpx-sse" }, { name = "jsonschema" }, - { name = "pydantic" }, + { name = "pydantic", version = "2.11.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.14'" }, + { name = "pydantic", version = "2.12.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.14'" }, { name = "pydantic-settings" }, { name = "pyjwt", extra = ["crypto"] }, { name = "python-multipart" }, @@ -779,7 +733,8 @@ dependencies = [ { name = "sse-starlette" }, { name = "starlette" }, { name = "typing-extensions" }, - { name = "typing-inspection" }, + { name = "typing-inspection", version = "0.4.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.14'" }, + { name = "typing-inspection", version = "0.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.14'" }, { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, ] @@ -800,6 +755,8 @@ dev = [ { name = "coverage", extra = ["toml"] }, { name = "dirty-equals" }, { name = "inline-snapshot" }, + { name = "mcp", extra = ["cli", "ws"] }, + { name = "pillow" }, { name = "pyright" }, { name = "pytest" }, { name = "pytest-examples" }, @@ -812,7 +769,7 @@ dev = [ docs = [ { name = "mkdocs" }, { name = "mkdocs-glightbox" }, - { name = "mkdocs-material", extra = ["imaging"] }, + { name = "mkdocs-material" }, { name = "mkdocstrings-python" }, ] @@ -822,15 +779,17 @@ requires-dist = [ { name = "httpx", specifier = ">=0.27.1" }, { name = "httpx-sse", 
specifier = ">=0.4" }, { name = "jsonschema", specifier = ">=4.20.0" }, - { name = "pydantic", specifier = ">=2.11.0,<3.0.0" }, + { name = "pydantic", marker = "python_full_version < '3.14'", specifier = ">=2.11.0" }, + { name = "pydantic", marker = "python_full_version >= '3.14'", specifier = ">=2.12.0" }, { name = "pydantic-settings", specifier = ">=2.5.2" }, { name = "pyjwt", extras = ["crypto"], specifier = ">=2.10.1" }, { name = "python-dotenv", marker = "extra == 'cli'", specifier = ">=1.0.0" }, { name = "python-multipart", specifier = ">=0.0.9" }, - { name = "pywin32", marker = "sys_platform == 'win32'", specifier = ">=310" }, + { name = "pywin32", marker = "sys_platform == 'win32'", specifier = ">=311" }, { name = "rich", marker = "extra == 'rich'", specifier = ">=13.9.4" }, { name = "sse-starlette", specifier = ">=1.6.1" }, - { name = "starlette", specifier = ">=0.27" }, + { name = "starlette", marker = "python_full_version < '3.14'", specifier = ">=0.27" }, + { name = "starlette", marker = "python_full_version >= '3.14'", specifier = ">=0.48.0" }, { name = "typer", marker = "extra == 'cli'", specifier = ">=0.16.0" }, { name = "typing-extensions", specifier = ">=4.9.0" }, { name = "typing-inspection", specifier = ">=0.4.1" }, @@ -841,9 +800,11 @@ provides-extras = ["cli", "rich", "ws"] [package.metadata.requires-dev] dev = [ - { name = "coverage", extras = ["toml"], specifier = "==7.10.7" }, + { name = "coverage", extras = ["toml"], specifier = ">=7.13.1" }, { name = "dirty-equals", specifier = ">=0.9.0" }, { name = "inline-snapshot", specifier = ">=0.23.0" }, + { name = "mcp", extras = ["cli", "ws"], editable = "." }, + { name = "pillow", specifier = ">=12.0" }, { name = "pyright", specifier = ">=1.1.400" }, { name = "pytest", specifier = ">=8.3.4" }, { name = "pytest-examples", specifier = ">=0.0.14" }, @@ -856,37 +817,8 @@ dev = [ docs = [ { name = "mkdocs", specifier = ">=1.6.1" }, { name = "mkdocs-glightbox", specifier = ">=0.4.0" }, - { name = "mkdocs-material", extras = ["imaging"], specifier = ">=9.5.45" }, - { name = "mkdocstrings-python", specifier = ">=1.12.2" }, -] - -[[package]] -name = "mcp-conformance-auth-client" -version = "0.1.0" -source = { editable = "examples/clients/conformance-auth-client" } -dependencies = [ - { name = "httpx" }, - { name = "mcp" }, -] - -[package.dev-dependencies] -dev = [ - { name = "pyright" }, - { name = "pytest" }, - { name = "ruff" }, -] - -[package.metadata] -requires-dist = [ - { name = "httpx", specifier = ">=0.28.1" }, - { name = "mcp", editable = "." 
}, -] - -[package.metadata.requires-dev] -dev = [ - { name = "pyright", specifier = ">=1.1.379" }, - { name = "pytest", specifier = ">=8.3.3" }, - { name = "ruff", specifier = ">=0.6.9" }, + { name = "mkdocs-material", specifier = ">=9.5.45" }, + { name = "mkdocstrings-python", specifier = ">=2.0.1" }, ] [[package]] @@ -935,7 +867,8 @@ dependencies = [ { name = "click" }, { name = "httpx" }, { name = "mcp" }, - { name = "pydantic" }, + { name = "pydantic", version = "2.11.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.14'" }, + { name = "pydantic", version = "2.12.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.14'" }, { name = "pydantic-settings" }, { name = "sse-starlette" }, { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, @@ -1527,12 +1460,11 @@ wheels = [ [[package]] name = "mkdocs-material" -version = "9.6.19" +version = "9.7.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "babel" }, { name = "backrefs" }, - { name = "click" }, { name = "colorama" }, { name = "jinja2" }, { name = "markdown" }, @@ -1543,15 +1475,9 @@ dependencies = [ { name = "pymdown-extensions" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/44/94/eb0fca39b19c2251b16bc759860a50f232655c4377116fa9c0e7db11b82c/mkdocs_material-9.6.19.tar.gz", hash = "sha256:80e7b3f9acabfee9b1f68bd12c26e59c865b3d5bbfb505fd1344e970db02c4aa", size = 4038202, upload-time = "2025-09-07T17:46:40.468Z" } +sdist = { url = "https://files.pythonhosted.org/packages/27/e2/2ffc356cd72f1473d07c7719d82a8f2cbd261666828614ecb95b12169f41/mkdocs_material-9.7.1.tar.gz", hash = "sha256:89601b8f2c3e6c6ee0a918cc3566cb201d40bf37c3cd3c2067e26fadb8cce2b8", size = 4094392, upload-time = "2025-12-18T09:49:00.308Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/02/23/a2551d1038bedc2771366f65ff3680bb3a89674cd7ca6140850c859f1f71/mkdocs_material-9.6.19-py3-none-any.whl", hash = "sha256:7492d2ac81952a467ca8a10cac915d6ea5c22876932f44b5a0f4f8e7d68ac06f", size = 9240205, upload-time = "2025-09-07T17:46:36.484Z" }, -] - -[package.optional-dependencies] -imaging = [ - { name = "cairosvg" }, - { name = "pillow" }, + { url = "https://files.pythonhosted.org/packages/3e/32/ed071cb721aca8c227718cffcf7bd539620e9799bbf2619e90c757bfd030/mkdocs_material-9.7.1-py3-none-any.whl", hash = "sha256:3f6100937d7d731f87f1e3e3b021c97f7239666b9ba1151ab476cabb96c60d5c", size = 9297166, upload-time = "2025-12-18T09:48:56.664Z" }, ] [[package]] @@ -1582,7 +1508,7 @@ wheels = [ [[package]] name = "mkdocstrings-python" -version = "1.18.2" +version = "2.0.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "griffe" }, @@ -1590,9 +1516,9 @@ dependencies = [ { name = "mkdocstrings" }, { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/95/ae/58ab2bfbee2792e92a98b97e872f7c003deb903071f75d8d83aa55db28fa/mkdocstrings_python-1.18.2.tar.gz", hash = "sha256:4ad536920a07b6336f50d4c6d5603316fafb1172c5c882370cbbc954770ad323", size = 207972, upload-time = "2025-08-28T16:11:19.847Z" } +sdist = { url = "https://files.pythonhosted.org/packages/24/75/d30af27a2906f00eb90143470272376d728521997800f5dce5b340ba35bc/mkdocstrings_python-2.0.1.tar.gz", hash = "sha256:843a562221e6a471fefdd4b45cc6c22d2607ccbad632879234fa9692e9cf7732", size = 199345, upload-time = "2025-12-03T14:26:11.755Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/d5/8f/ce008599d9adebf33ed144e7736914385e8537f5fc686fdb7cceb8c22431/mkdocstrings_python-1.18.2-py3-none-any.whl", hash = "sha256:944fe6deb8f08f33fa936d538233c4036e9f53e840994f6146e8e94eb71b600d", size = 138215, upload-time = "2025-08-28T16:11:18.176Z" }, + { url = "https://files.pythonhosted.org/packages/81/06/c5f8deba7d2cbdfa7967a716ae801aa9ca5f734b8f54fd473ef77a088dbe/mkdocstrings_python-2.0.1-py3-none-any.whl", hash = "sha256:66ecff45c5f8b71bf174e11d49afc845c2dfc7fc0ab17a86b6b337e0f24d8d90", size = 105055, upload-time = "2025-12-03T14:26:10.184Z" }, ] [[package]] @@ -1654,104 +1580,100 @@ wheels = [ [[package]] name = "pillow" -version = "11.3.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f3/0d/d0d6dea55cd152ce3d6767bb38a8fc10e33796ba4ba210cbab9354b6d238/pillow-11.3.0.tar.gz", hash = "sha256:3828ee7586cd0b2091b6209e5ad53e20d0649bbe87164a459d0676e035e8f523", size = 47113069, upload-time = "2025-07-01T09:16:30.666Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4c/5d/45a3553a253ac8763f3561371432a90bdbe6000fbdcf1397ffe502aa206c/pillow-11.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b9c17fd4ace828b3003dfd1e30bff24863e0eb59b535e8f80194d9cc7ecf860", size = 5316554, upload-time = "2025-07-01T09:13:39.342Z" }, - { url = "https://files.pythonhosted.org/packages/7c/c8/67c12ab069ef586a25a4a79ced553586748fad100c77c0ce59bb4983ac98/pillow-11.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:65dc69160114cdd0ca0f35cb434633c75e8e7fad4cf855177a05bf38678f73ad", size = 4686548, upload-time = "2025-07-01T09:13:41.835Z" }, - { url = "https://files.pythonhosted.org/packages/2f/bd/6741ebd56263390b382ae4c5de02979af7f8bd9807346d068700dd6d5cf9/pillow-11.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7107195ddc914f656c7fc8e4a5e1c25f32e9236ea3ea860f257b0436011fddd0", size = 5859742, upload-time = "2025-07-03T13:09:47.439Z" }, - { url = "https://files.pythonhosted.org/packages/ca/0b/c412a9e27e1e6a829e6ab6c2dca52dd563efbedf4c9c6aa453d9a9b77359/pillow-11.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cc3e831b563b3114baac7ec2ee86819eb03caa1a2cef0b481a5675b59c4fe23b", size = 7633087, upload-time = "2025-07-03T13:09:51.796Z" }, - { url = "https://files.pythonhosted.org/packages/59/9d/9b7076aaf30f5dd17e5e5589b2d2f5a5d7e30ff67a171eb686e4eecc2adf/pillow-11.3.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f1f182ebd2303acf8c380a54f615ec883322593320a9b00438eb842c1f37ae50", size = 5963350, upload-time = "2025-07-01T09:13:43.865Z" }, - { url = "https://files.pythonhosted.org/packages/f0/16/1a6bf01fb622fb9cf5c91683823f073f053005c849b1f52ed613afcf8dae/pillow-11.3.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4445fa62e15936a028672fd48c4c11a66d641d2c05726c7ec1f8ba6a572036ae", size = 6631840, upload-time = "2025-07-01T09:13:46.161Z" }, - { url = "https://files.pythonhosted.org/packages/7b/e6/6ff7077077eb47fde78739e7d570bdcd7c10495666b6afcd23ab56b19a43/pillow-11.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:71f511f6b3b91dd543282477be45a033e4845a40278fa8dcdbfdb07109bf18f9", size = 6074005, upload-time = "2025-07-01T09:13:47.829Z" }, - { url = "https://files.pythonhosted.org/packages/c3/3a/b13f36832ea6d279a697231658199e0a03cd87ef12048016bdcc84131601/pillow-11.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:040a5b691b0713e1f6cbe222e0f4f74cd233421e105850ae3b3c0ceda520f42e", size = 6708372, upload-time = "2025-07-01T09:13:52.145Z" }, - { url = "https://files.pythonhosted.org/packages/6c/e4/61b2e1a7528740efbc70b3d581f33937e38e98ef3d50b05007267a55bcb2/pillow-11.3.0-cp310-cp310-win32.whl", hash = "sha256:89bd777bc6624fe4115e9fac3352c79ed60f3bb18651420635f26e643e3dd1f6", size = 6277090, upload-time = "2025-07-01T09:13:53.915Z" }, - { url = "https://files.pythonhosted.org/packages/a9/d3/60c781c83a785d6afbd6a326ed4d759d141de43aa7365725cbcd65ce5e54/pillow-11.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:19d2ff547c75b8e3ff46f4d9ef969a06c30ab2d4263a9e287733aa8b2429ce8f", size = 6985988, upload-time = "2025-07-01T09:13:55.699Z" }, - { url = "https://files.pythonhosted.org/packages/9f/28/4f4a0203165eefb3763939c6789ba31013a2e90adffb456610f30f613850/pillow-11.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:819931d25e57b513242859ce1876c58c59dc31587847bf74cfe06b2e0cb22d2f", size = 2422899, upload-time = "2025-07-01T09:13:57.497Z" }, - { url = "https://files.pythonhosted.org/packages/db/26/77f8ed17ca4ffd60e1dcd220a6ec6d71210ba398cfa33a13a1cd614c5613/pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722", size = 5316531, upload-time = "2025-07-01T09:13:59.203Z" }, - { url = "https://files.pythonhosted.org/packages/cb/39/ee475903197ce709322a17a866892efb560f57900d9af2e55f86db51b0a5/pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288", size = 4686560, upload-time = "2025-07-01T09:14:01.101Z" }, - { url = "https://files.pythonhosted.org/packages/d5/90/442068a160fd179938ba55ec8c97050a612426fae5ec0a764e345839f76d/pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d", size = 5870978, upload-time = "2025-07-03T13:09:55.638Z" }, - { url = "https://files.pythonhosted.org/packages/13/92/dcdd147ab02daf405387f0218dcf792dc6dd5b14d2573d40b4caeef01059/pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494", size = 7641168, upload-time = "2025-07-03T13:10:00.37Z" }, - { url = "https://files.pythonhosted.org/packages/6e/db/839d6ba7fd38b51af641aa904e2960e7a5644d60ec754c046b7d2aee00e5/pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58", size = 5973053, upload-time = "2025-07-01T09:14:04.491Z" }, - { url = "https://files.pythonhosted.org/packages/f2/2f/d7675ecae6c43e9f12aa8d58b6012683b20b6edfbdac7abcb4e6af7a3784/pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f", size = 6640273, upload-time = "2025-07-01T09:14:06.235Z" }, - { url = "https://files.pythonhosted.org/packages/45/ad/931694675ede172e15b2ff03c8144a0ddaea1d87adb72bb07655eaffb654/pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e", size = 6082043, upload-time = "2025-07-01T09:14:07.978Z" }, - { url = "https://files.pythonhosted.org/packages/3a/04/ba8f2b11fc80d2dd462d7abec16351b45ec99cbbaea4387648a44190351a/pillow-11.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:932c754c2d51ad2b2271fd01c3d121daaa35e27efae2a616f77bf164bc0b3e94", size = 6715516, upload-time = "2025-07-01T09:14:10.233Z" }, - { url = "https://files.pythonhosted.org/packages/48/59/8cd06d7f3944cc7d892e8533c56b0acb68399f640786313275faec1e3b6f/pillow-11.3.0-cp311-cp311-win32.whl", hash = "sha256:b4b8f3efc8d530a1544e5962bd6b403d5f7fe8b9e08227c6b255f98ad82b4ba0", size = 6274768, upload-time = "2025-07-01T09:14:11.921Z" }, - { url = "https://files.pythonhosted.org/packages/f1/cc/29c0f5d64ab8eae20f3232da8f8571660aa0ab4b8f1331da5c2f5f9a938e/pillow-11.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:1a992e86b0dd7aeb1f053cd506508c0999d710a8f07b4c791c63843fc6a807ac", size = 6986055, upload-time = "2025-07-01T09:14:13.623Z" }, - { url = "https://files.pythonhosted.org/packages/c6/df/90bd886fabd544c25addd63e5ca6932c86f2b701d5da6c7839387a076b4a/pillow-11.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd", size = 2423079, upload-time = "2025-07-01T09:14:15.268Z" }, - { url = "https://files.pythonhosted.org/packages/40/fe/1bc9b3ee13f68487a99ac9529968035cca2f0a51ec36892060edcc51d06a/pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4", size = 5278800, upload-time = "2025-07-01T09:14:17.648Z" }, - { url = "https://files.pythonhosted.org/packages/2c/32/7e2ac19b5713657384cec55f89065fb306b06af008cfd87e572035b27119/pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69", size = 4686296, upload-time = "2025-07-01T09:14:19.828Z" }, - { url = "https://files.pythonhosted.org/packages/8e/1e/b9e12bbe6e4c2220effebc09ea0923a07a6da1e1f1bfbc8d7d29a01ce32b/pillow-11.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d", size = 5871726, upload-time = "2025-07-03T13:10:04.448Z" }, - { url = "https://files.pythonhosted.org/packages/8d/33/e9200d2bd7ba00dc3ddb78df1198a6e80d7669cce6c2bdbeb2530a74ec58/pillow-11.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6", size = 7644652, upload-time = "2025-07-03T13:10:10.391Z" }, - { url = "https://files.pythonhosted.org/packages/41/f1/6f2427a26fc683e00d985bc391bdd76d8dd4e92fac33d841127eb8fb2313/pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7", size = 5977787, upload-time = "2025-07-01T09:14:21.63Z" }, - { url = "https://files.pythonhosted.org/packages/e4/c9/06dd4a38974e24f932ff5f98ea3c546ce3f8c995d3f0985f8e5ba48bba19/pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024", size = 6645236, upload-time = "2025-07-01T09:14:23.321Z" }, - { url = "https://files.pythonhosted.org/packages/40/e7/848f69fb79843b3d91241bad658e9c14f39a32f71a301bcd1d139416d1be/pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809", size = 6086950, upload-time = "2025-07-01T09:14:25.237Z" }, - { url = "https://files.pythonhosted.org/packages/0b/1a/7cff92e695a2a29ac1958c2a0fe4c0b2393b60aac13b04a4fe2735cad52d/pillow-11.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:6be31e3fc9a621e071bc17bb7de63b85cbe0bfae91bb0363c893cbe67247780d", size = 6723358, upload-time = "2025-07-01T09:14:27.053Z" }, - { url = "https://files.pythonhosted.org/packages/26/7d/73699ad77895f69edff76b0f332acc3d497f22f5d75e5360f78cbcaff248/pillow-11.3.0-cp312-cp312-win32.whl", hash = "sha256:7b161756381f0918e05e7cb8a371fff367e807770f8fe92ecb20d905d0e1c149", size = 6275079, upload-time = "2025-07-01T09:14:30.104Z" }, - { url = "https://files.pythonhosted.org/packages/8c/ce/e7dfc873bdd9828f3b6e5c2bbb74e47a98ec23cc5c74fc4e54462f0d9204/pillow-11.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a6444696fce635783440b7f7a9fc24b3ad10a9ea3f0ab66c5905be1c19ccf17d", size = 6986324, upload-time = "2025-07-01T09:14:31.899Z" }, - { url = "https://files.pythonhosted.org/packages/16/8f/b13447d1bf0b1f7467ce7d86f6e6edf66c0ad7cf44cf5c87a37f9bed9936/pillow-11.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:2aceea54f957dd4448264f9bf40875da0415c83eb85f55069d89c0ed436e3542", size = 2423067, upload-time = "2025-07-01T09:14:33.709Z" }, - { url = "https://files.pythonhosted.org/packages/1e/93/0952f2ed8db3a5a4c7a11f91965d6184ebc8cd7cbb7941a260d5f018cd2d/pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:1c627742b539bba4309df89171356fcb3cc5a9178355b2727d1b74a6cf155fbd", size = 2128328, upload-time = "2025-07-01T09:14:35.276Z" }, - { url = "https://files.pythonhosted.org/packages/4b/e8/100c3d114b1a0bf4042f27e0f87d2f25e857e838034e98ca98fe7b8c0a9c/pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:30b7c02f3899d10f13d7a48163c8969e4e653f8b43416d23d13d1bbfdc93b9f8", size = 2170652, upload-time = "2025-07-01T09:14:37.203Z" }, - { url = "https://files.pythonhosted.org/packages/aa/86/3f758a28a6e381758545f7cdb4942e1cb79abd271bea932998fc0db93cb6/pillow-11.3.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:7859a4cc7c9295f5838015d8cc0a9c215b77e43d07a25e460f35cf516df8626f", size = 2227443, upload-time = "2025-07-01T09:14:39.344Z" }, - { url = "https://files.pythonhosted.org/packages/01/f4/91d5b3ffa718df2f53b0dc109877993e511f4fd055d7e9508682e8aba092/pillow-11.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec1ee50470b0d050984394423d96325b744d55c701a439d2bd66089bff963d3c", size = 5278474, upload-time = "2025-07-01T09:14:41.843Z" }, - { url = "https://files.pythonhosted.org/packages/f9/0e/37d7d3eca6c879fbd9dba21268427dffda1ab00d4eb05b32923d4fbe3b12/pillow-11.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7db51d222548ccfd274e4572fdbf3e810a5e66b00608862f947b163e613b67dd", size = 4686038, upload-time = "2025-07-01T09:14:44.008Z" }, - { url = "https://files.pythonhosted.org/packages/ff/b0/3426e5c7f6565e752d81221af9d3676fdbb4f352317ceafd42899aaf5d8a/pillow-11.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2d6fcc902a24ac74495df63faad1884282239265c6839a0a6416d33faedfae7e", size = 5864407, upload-time = "2025-07-03T13:10:15.628Z" }, - { url = "https://files.pythonhosted.org/packages/fc/c1/c6c423134229f2a221ee53f838d4be9d82bab86f7e2f8e75e47b6bf6cd77/pillow-11.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f0f5d8f4a08090c6d6d578351a2b91acf519a54986c055af27e7a93feae6d3f1", size = 7639094, upload-time = "2025-07-03T13:10:21.857Z" }, - { url = "https://files.pythonhosted.org/packages/ba/c9/09e6746630fe6372c67c648ff9deae52a2bc20897d51fa293571977ceb5d/pillow-11.3.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:c37d8ba9411d6003bba9e518db0db0c58a680ab9fe5179f040b0463644bc9805", size = 5973503, upload-time = "2025-07-01T09:14:45.698Z" }, - { url = "https://files.pythonhosted.org/packages/d5/1c/a2a29649c0b1983d3ef57ee87a66487fdeb45132df66ab30dd37f7dbe162/pillow-11.3.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13f87d581e71d9189ab21fe0efb5a23e9f28552d5be6979e84001d3b8505abe8", size = 6642574, upload-time = "2025-07-01T09:14:47.415Z" }, - { url = "https://files.pythonhosted.org/packages/36/de/d5cc31cc4b055b6c6fd990e3e7f0f8aaf36229a2698501bcb0cdf67c7146/pillow-11.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:023f6d2d11784a465f09fd09a34b150ea4672e85fb3d05931d89f373ab14abb2", size = 6084060, upload-time = "2025-07-01T09:14:49.636Z" }, - { url = "https://files.pythonhosted.org/packages/d5/ea/502d938cbaeec836ac28a9b730193716f0114c41325db428e6b280513f09/pillow-11.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:45dfc51ac5975b938e9809451c51734124e73b04d0f0ac621649821a63852e7b", size = 6721407, upload-time = "2025-07-01T09:14:51.962Z" }, - { url = "https://files.pythonhosted.org/packages/45/9c/9c5e2a73f125f6cbc59cc7087c8f2d649a7ae453f83bd0362ff7c9e2aee2/pillow-11.3.0-cp313-cp313-win32.whl", hash = "sha256:a4d336baed65d50d37b88ca5b60c0fa9d81e3a87d4a7930d3880d1624d5b31f3", size = 6273841, upload-time = "2025-07-01T09:14:54.142Z" }, - { url = "https://files.pythonhosted.org/packages/23/85/397c73524e0cd212067e0c969aa245b01d50183439550d24d9f55781b776/pillow-11.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0bce5c4fd0921f99d2e858dc4d4d64193407e1b99478bc5cacecba2311abde51", size = 6978450, upload-time = "2025-07-01T09:14:56.436Z" }, - { url = "https://files.pythonhosted.org/packages/17/d2/622f4547f69cd173955194b78e4d19ca4935a1b0f03a302d655c9f6aae65/pillow-11.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:1904e1264881f682f02b7f8167935cce37bc97db457f8e7849dc3a6a52b99580", size = 2423055, upload-time = "2025-07-01T09:14:58.072Z" }, - { url = "https://files.pythonhosted.org/packages/dd/80/a8a2ac21dda2e82480852978416cfacd439a4b490a501a288ecf4fe2532d/pillow-11.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4c834a3921375c48ee6b9624061076bc0a32a60b5532b322cc0ea64e639dd50e", size = 5281110, upload-time = "2025-07-01T09:14:59.79Z" }, - { url = "https://files.pythonhosted.org/packages/44/d6/b79754ca790f315918732e18f82a8146d33bcd7f4494380457ea89eb883d/pillow-11.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e05688ccef30ea69b9317a9ead994b93975104a677a36a8ed8106be9260aa6d", size = 4689547, upload-time = "2025-07-01T09:15:01.648Z" }, - { url = "https://files.pythonhosted.org/packages/49/20/716b8717d331150cb00f7fdd78169c01e8e0c219732a78b0e59b6bdb2fd6/pillow-11.3.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1019b04af07fc0163e2810167918cb5add8d74674b6267616021ab558dc98ced", size = 5901554, upload-time = "2025-07-03T13:10:27.018Z" }, - { url = "https://files.pythonhosted.org/packages/74/cf/a9f3a2514a65bb071075063a96f0a5cf949c2f2fce683c15ccc83b1c1cab/pillow-11.3.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f944255db153ebb2b19c51fe85dd99ef0ce494123f21b9db4877ffdfc5590c7c", size = 7669132, upload-time = "2025-07-03T13:10:33.01Z" }, - { url = "https://files.pythonhosted.org/packages/98/3c/da78805cbdbee9cb43efe8261dd7cc0b4b93f2ac79b676c03159e9db2187/pillow-11.3.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:1f85acb69adf2aaee8b7da124efebbdb959a104db34d3a2cb0f3793dbae422a8", size = 6005001, upload-time = "2025-07-01T09:15:03.365Z" }, - { url = "https://files.pythonhosted.org/packages/6c/fa/ce044b91faecf30e635321351bba32bab5a7e034c60187fe9698191aef4f/pillow-11.3.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05f6ecbeff5005399bb48d198f098a9b4b6bdf27b8487c7f38ca16eeb070cd59", size = 6668814, upload-time = "2025-07-01T09:15:05.655Z" }, - { url = "https://files.pythonhosted.org/packages/7b/51/90f9291406d09bf93686434f9183aba27b831c10c87746ff49f127ee80cb/pillow-11.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a7bc6e6fd0395bc052f16b1a8670859964dbd7003bd0af2ff08342eb6e442cfe", size = 6113124, upload-time = "2025-07-01T09:15:07.358Z" }, - { url = "https://files.pythonhosted.org/packages/cd/5a/6fec59b1dfb619234f7636d4157d11fb4e196caeee220232a8d2ec48488d/pillow-11.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:83e1b0161c9d148125083a35c1c5a89db5b7054834fd4387499e06552035236c", size = 6747186, upload-time = "2025-07-01T09:15:09.317Z" }, - { url = "https://files.pythonhosted.org/packages/49/6b/00187a044f98255225f172de653941e61da37104a9ea60e4f6887717e2b5/pillow-11.3.0-cp313-cp313t-win32.whl", hash = "sha256:2a3117c06b8fb646639dce83694f2f9eac405472713fcb1ae887469c0d4f6788", size = 6277546, upload-time = "2025-07-01T09:15:11.311Z" }, - { url = "https://files.pythonhosted.org/packages/e8/5c/6caaba7e261c0d75bab23be79f1d06b5ad2a2ae49f028ccec801b0e853d6/pillow-11.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:857844335c95bea93fb39e0fa2726b4d9d758850b34075a7e3ff4f4fa3aa3b31", size = 6985102, upload-time = "2025-07-01T09:15:13.164Z" }, - { url = "https://files.pythonhosted.org/packages/f3/7e/b623008460c09a0cb38263c93b828c666493caee2eb34ff67f778b87e58c/pillow-11.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:8797edc41f3e8536ae4b10897ee2f637235c94f27404cac7297f7b607dd0716e", size = 2424803, upload-time = "2025-07-01T09:15:15.695Z" }, - { url = "https://files.pythonhosted.org/packages/73/f4/04905af42837292ed86cb1b1dabe03dce1edc008ef14c473c5c7e1443c5d/pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d9da3df5f9ea2a89b81bb6087177fb1f4d1c7146d583a3fe5c672c0d94e55e12", size = 5278520, upload-time = "2025-07-01T09:15:17.429Z" }, - { url = "https://files.pythonhosted.org/packages/41/b0/33d79e377a336247df6348a54e6d2a2b85d644ca202555e3faa0cf811ecc/pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0b275ff9b04df7b640c59ec5a3cb113eefd3795a8df80bac69646ef699c6981a", size = 4686116, upload-time = "2025-07-01T09:15:19.423Z" }, - { url = "https://files.pythonhosted.org/packages/49/2d/ed8bc0ab219ae8768f529597d9509d184fe8a6c4741a6864fea334d25f3f/pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0743841cabd3dba6a83f38a92672cccbd69af56e3e91777b0ee7f4dba4385632", size = 5864597, upload-time = "2025-07-03T13:10:38.404Z" }, - { url = "https://files.pythonhosted.org/packages/b5/3d/b932bb4225c80b58dfadaca9d42d08d0b7064d2d1791b6a237f87f661834/pillow-11.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2465a69cf967b8b49ee1b96d76718cd98c4e925414ead59fdf75cf0fd07df673", size = 7638246, upload-time = "2025-07-03T13:10:44.987Z" }, - { url = "https://files.pythonhosted.org/packages/09/b5/0487044b7c096f1b48f0d7ad416472c02e0e4bf6919541b111efd3cae690/pillow-11.3.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:41742638139424703b4d01665b807c6468e23e699e8e90cffefe291c5832b027", size = 5973336, upload-time = "2025-07-01T09:15:21.237Z" }, - { url = "https://files.pythonhosted.org/packages/a8/2d/524f9318f6cbfcc79fbc004801ea6b607ec3f843977652fdee4857a7568b/pillow-11.3.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93efb0b4de7e340d99057415c749175e24c8864302369e05914682ba642e5d77", size = 6642699, upload-time = "2025-07-01T09:15:23.186Z" }, - { url = "https://files.pythonhosted.org/packages/6f/d2/a9a4f280c6aefedce1e8f615baaa5474e0701d86dd6f1dede66726462bbd/pillow-11.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7966e38dcd0fa11ca390aed7c6f20454443581d758242023cf36fcb319b1a874", size = 6083789, upload-time = "2025-07-01T09:15:25.1Z" }, - { url = "https://files.pythonhosted.org/packages/fe/54/86b0cd9dbb683a9d5e960b66c7379e821a19be4ac5810e2e5a715c09a0c0/pillow-11.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:98a9afa7b9007c67ed84c57c9e0ad86a6000da96eaa638e4f8abe5b65ff83f0a", size = 6720386, upload-time = "2025-07-01T09:15:27.378Z" }, - { url = "https://files.pythonhosted.org/packages/e7/95/88efcaf384c3588e24259c4203b909cbe3e3c2d887af9e938c2022c9dd48/pillow-11.3.0-cp314-cp314-win32.whl", hash = "sha256:02a723e6bf909e7cea0dac1b0e0310be9d7650cd66222a5f1c571455c0a45214", size = 6370911, upload-time = "2025-07-01T09:15:29.294Z" }, - { url = "https://files.pythonhosted.org/packages/2e/cc/934e5820850ec5eb107e7b1a72dd278140731c669f396110ebc326f2a503/pillow-11.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:a418486160228f64dd9e9efcd132679b7a02a5f22c982c78b6fc7dab3fefb635", size = 7117383, upload-time = "2025-07-01T09:15:31.128Z" }, - { url = "https://files.pythonhosted.org/packages/d6/e9/9c0a616a71da2a5d163aa37405e8aced9a906d574b4a214bede134e731bc/pillow-11.3.0-cp314-cp314-win_arm64.whl", hash = "sha256:155658efb5e044669c08896c0c44231c5e9abcaadbc5cd3648df2f7c0b96b9a6", size = 2511385, upload-time = "2025-07-01T09:15:33.328Z" }, - { url = "https://files.pythonhosted.org/packages/1a/33/c88376898aff369658b225262cd4f2659b13e8178e7534df9e6e1fa289f6/pillow-11.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:59a03cdf019efbfeeed910bf79c7c93255c3d54bc45898ac2a4140071b02b4ae", size = 5281129, upload-time = "2025-07-01T09:15:35.194Z" }, - { url = "https://files.pythonhosted.org/packages/1f/70/d376247fb36f1844b42910911c83a02d5544ebd2a8bad9efcc0f707ea774/pillow-11.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f8a5827f84d973d8636e9dc5764af4f0cf2318d26744b3d902931701b0d46653", size = 4689580, upload-time = "2025-07-01T09:15:37.114Z" }, - { url = "https://files.pythonhosted.org/packages/eb/1c/537e930496149fbac69efd2fc4329035bbe2e5475b4165439e3be9cb183b/pillow-11.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ee92f2fd10f4adc4b43d07ec5e779932b4eb3dbfbc34790ada5a6669bc095aa6", size = 5902860, upload-time = "2025-07-03T13:10:50.248Z" }, - { url = "https://files.pythonhosted.org/packages/bd/57/80f53264954dcefeebcf9dae6e3eb1daea1b488f0be8b8fef12f79a3eb10/pillow-11.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c96d333dcf42d01f47b37e0979b6bd73ec91eae18614864622d9b87bbd5bbf36", size = 7670694, upload-time = "2025-07-03T13:10:56.432Z" }, - { url = "https://files.pythonhosted.org/packages/70/ff/4727d3b71a8578b4587d9c276e90efad2d6fe0335fd76742a6da08132e8c/pillow-11.3.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:4c96f993ab8c98460cd0c001447bff6194403e8b1d7e149ade5f00594918128b", size = 6005888, upload-time = "2025-07-01T09:15:39.436Z" }, - { url = "https://files.pythonhosted.org/packages/05/ae/716592277934f85d3be51d7256f3636672d7b1abfafdc42cf3f8cbd4b4c8/pillow-11.3.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41342b64afeba938edb034d122b2dda5db2139b9a4af999729ba8818e0056477", size = 6670330, upload-time = "2025-07-01T09:15:41.269Z" }, - { url = "https://files.pythonhosted.org/packages/e7/bb/7fe6cddcc8827b01b1a9766f5fdeb7418680744f9082035bdbabecf1d57f/pillow-11.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:068d9c39a2d1b358eb9f245ce7ab1b5c3246c7c8c7d9ba58cfa5b43146c06e50", size = 6114089, upload-time = "2025-07-01T09:15:43.13Z" }, - { url = "https://files.pythonhosted.org/packages/8b/f5/06bfaa444c8e80f1a8e4bff98da9c83b37b5be3b1deaa43d27a0db37ef84/pillow-11.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a1bc6ba083b145187f648b667e05a2534ecc4b9f2784c2cbe3089e44868f2b9b", size = 6748206, upload-time = "2025-07-01T09:15:44.937Z" }, - { url = "https://files.pythonhosted.org/packages/f0/77/bc6f92a3e8e6e46c0ca78abfffec0037845800ea38c73483760362804c41/pillow-11.3.0-cp314-cp314t-win32.whl", hash = "sha256:118ca10c0d60b06d006be10a501fd6bbdfef559251ed31b794668ed569c87e12", size = 6377370, upload-time = "2025-07-01T09:15:46.673Z" }, - { url = "https://files.pythonhosted.org/packages/4a/82/3a721f7d69dca802befb8af08b7c79ebcab461007ce1c18bd91a5d5896f9/pillow-11.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:8924748b688aa210d79883357d102cd64690e56b923a186f35a82cbc10f997db", size = 7121500, upload-time = "2025-07-01T09:15:48.512Z" }, - { url = "https://files.pythonhosted.org/packages/89/c7/5572fa4a3f45740eaab6ae86fcdf7195b55beac1371ac8c619d880cfe948/pillow-11.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:79ea0d14d3ebad43ec77ad5272e6ff9bba5b679ef73375ea760261207fa8e0aa", size = 2512835, upload-time = "2025-07-01T09:15:50.399Z" }, - { url = "https://files.pythonhosted.org/packages/6f/8b/209bd6b62ce8367f47e68a218bffac88888fdf2c9fcf1ecadc6c3ec1ebc7/pillow-11.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3cee80663f29e3843b68199b9d6f4f54bd1d4a6b59bdd91bceefc51238bcb967", size = 5270556, upload-time = "2025-07-01T09:16:09.961Z" }, - { url = "https://files.pythonhosted.org/packages/2e/e6/231a0b76070c2cfd9e260a7a5b504fb72da0a95279410fa7afd99d9751d6/pillow-11.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b5f56c3f344f2ccaf0dd875d3e180f631dc60a51b314295a3e681fe8cf851fbe", size = 4654625, upload-time = "2025-07-01T09:16:11.913Z" }, - { url = "https://files.pythonhosted.org/packages/13/f4/10cf94fda33cb12765f2397fc285fa6d8eb9c29de7f3185165b702fc7386/pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e67d793d180c9df62f1f40aee3accca4829d3794c95098887edc18af4b8b780c", size = 4874207, upload-time = "2025-07-03T13:11:10.201Z" }, - { url = "https://files.pythonhosted.org/packages/72/c9/583821097dc691880c92892e8e2d41fe0a5a3d6021f4963371d2f6d57250/pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d000f46e2917c705e9fb93a3606ee4a819d1e3aa7a9b442f6444f07e77cf5e25", size = 6583939, upload-time = "2025-07-03T13:11:15.68Z" }, - { url = "https://files.pythonhosted.org/packages/3b/8e/5c9d410f9217b12320efc7c413e72693f48468979a013ad17fd690397b9a/pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:527b37216b6ac3a12d7838dc3bd75208ec57c1c6d11ef01902266a5a0c14fc27", size = 4957166, upload-time = "2025-07-01T09:16:13.74Z" }, - { url = "https://files.pythonhosted.org/packages/62/bb/78347dbe13219991877ffb3a91bf09da8317fbfcd4b5f9140aeae020ad71/pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:be5463ac478b623b9dd3937afd7fb7ab3d79dd290a28e2b6df292dc75063eb8a", size = 5581482, upload-time = "2025-07-01T09:16:16.107Z" }, - { url = "https://files.pythonhosted.org/packages/d9/28/1000353d5e61498aaeaaf7f1e4b49ddb05f2c6575f9d4f9f914a3538b6e1/pillow-11.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8dc70ca24c110503e16918a658b869019126ecfe03109b754c402daff12b3d9f", size = 6984596, upload-time = "2025-07-01T09:16:18.07Z" }, - { url = "https://files.pythonhosted.org/packages/9e/e3/6fa84033758276fb31da12e5fb66ad747ae83b93c67af17f8c6ff4cc8f34/pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6", size = 5270566, upload-time = "2025-07-01T09:16:19.801Z" }, - { url = "https://files.pythonhosted.org/packages/5b/ee/e8d2e1ab4892970b561e1ba96cbd59c0d28cf66737fc44abb2aec3795a4e/pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438", size = 4654618, upload-time = "2025-07-01T09:16:21.818Z" }, - { url = "https://files.pythonhosted.org/packages/f2/6d/17f80f4e1f0761f02160fc433abd4109fa1548dcfdca46cfdadaf9efa565/pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3", size = 4874248, upload-time = "2025-07-03T13:11:20.738Z" }, - { url = "https://files.pythonhosted.org/packages/de/5f/c22340acd61cef960130585bbe2120e2fd8434c214802f07e8c03596b17e/pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c", size = 6583963, upload-time = "2025-07-03T13:11:26.283Z" }, - { url = "https://files.pythonhosted.org/packages/31/5e/03966aedfbfcbb4d5f8aa042452d3361f325b963ebbadddac05b122e47dd/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361", size = 4957170, upload-time = "2025-07-01T09:16:23.762Z" }, - { url = "https://files.pythonhosted.org/packages/cc/2d/e082982aacc927fc2cab48e1e731bdb1643a1406acace8bed0900a61464e/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7", size = 5581505, upload-time = "2025-07-01T09:16:25.593Z" }, - { url = "https://files.pythonhosted.org/packages/34/e7/ae39f538fd6844e982063c3a5e4598b8ced43b9633baa3a85ef33af8c05c/pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8", size = 6984598, upload-time = "2025-07-01T09:16:27.732Z" }, +version = "12.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d0/02/d52c733a2452ef1ffcc123b68e6606d07276b0e358db70eabad7e40042b7/pillow-12.1.0.tar.gz", hash = "sha256:5c5ae0a06e9ea030ab786b0251b32c7e4ce10e58d983c0d5c56029455180b5b9", size = 46977283, upload-time = "2026-01-02T09:13:29.892Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/fe/41/f73d92b6b883a579e79600d391f2e21cb0df767b2714ecbd2952315dfeef/pillow-12.1.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:fb125d860738a09d363a88daa0f59c4533529a90e564785e20fe875b200b6dbd", size = 5304089, upload-time = "2026-01-02T09:10:24.953Z" }, + { url = "https://files.pythonhosted.org/packages/94/55/7aca2891560188656e4a91ed9adba305e914a4496800da6b5c0a15f09edf/pillow-12.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cad302dc10fac357d3467a74a9561c90609768a6f73a1923b0fd851b6486f8b0", size = 4657815, upload-time = "2026-01-02T09:10:27.063Z" }, + { url = "https://files.pythonhosted.org/packages/e9/d2/b28221abaa7b4c40b7dba948f0f6a708bd7342c4d47ce342f0ea39643974/pillow-12.1.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a40905599d8079e09f25027423aed94f2823adaf2868940de991e53a449e14a8", size = 6222593, upload-time = "2026-01-02T09:10:29.115Z" }, + { url = "https://files.pythonhosted.org/packages/71/b8/7a61fb234df6a9b0b479f69e66901209d89ff72a435b49933f9122f94cac/pillow-12.1.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:92a7fe4225365c5e3a8e598982269c6d6698d3e783b3b1ae979e7819f9cd55c1", size = 8027579, upload-time = "2026-01-02T09:10:31.182Z" }, + { url = "https://files.pythonhosted.org/packages/ea/51/55c751a57cc524a15a0e3db20e5cde517582359508d62305a627e77fd295/pillow-12.1.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f10c98f49227ed8383d28174ee95155a675c4ed7f85e2e573b04414f7e371bda", size = 6335760, upload-time = "2026-01-02T09:10:33.02Z" }, + { url = "https://files.pythonhosted.org/packages/dc/7c/60e3e6f5e5891a1a06b4c910f742ac862377a6fe842f7184df4a274ce7bf/pillow-12.1.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8637e29d13f478bc4f153d8daa9ffb16455f0a6cb287da1b432fdad2bfbd66c7", size = 7027127, upload-time = "2026-01-02T09:10:35.009Z" }, + { url = "https://files.pythonhosted.org/packages/06/37/49d47266ba50b00c27ba63a7c898f1bb41a29627ced8c09e25f19ebec0ff/pillow-12.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:21e686a21078b0f9cb8c8a961d99e6a4ddb88e0fc5ea6e130172ddddc2e5221a", size = 6449896, upload-time = "2026-01-02T09:10:36.793Z" }, + { url = "https://files.pythonhosted.org/packages/f9/e5/67fd87d2913902462cd9b79c6211c25bfe95fcf5783d06e1367d6d9a741f/pillow-12.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2415373395a831f53933c23ce051021e79c8cd7979822d8cc478547a3f4da8ef", size = 7151345, upload-time = "2026-01-02T09:10:39.064Z" }, + { url = "https://files.pythonhosted.org/packages/bd/15/f8c7abf82af68b29f50d77c227e7a1f87ce02fdc66ded9bf603bc3b41180/pillow-12.1.0-cp310-cp310-win32.whl", hash = "sha256:e75d3dba8fc1ddfec0cd752108f93b83b4f8d6ab40e524a95d35f016b9683b09", size = 6325568, upload-time = "2026-01-02T09:10:41.035Z" }, + { url = "https://files.pythonhosted.org/packages/d4/24/7d1c0e160b6b5ac2605ef7d8be537e28753c0db5363d035948073f5513d7/pillow-12.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:64efdf00c09e31efd754448a383ea241f55a994fd079866b92d2bbff598aad91", size = 7032367, upload-time = "2026-01-02T09:10:43.09Z" }, + { url = "https://files.pythonhosted.org/packages/f4/03/41c038f0d7a06099254c60f618d0ec7be11e79620fc23b8e85e5b31d9a44/pillow-12.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:f188028b5af6b8fb2e9a76ac0f841a575bd1bd396e46ef0840d9b88a48fdbcea", size = 2452345, upload-time = "2026-01-02T09:10:44.795Z" }, + { url = 
"https://files.pythonhosted.org/packages/43/c4/bf8328039de6cc22182c3ef007a2abfbbdab153661c0a9aa78af8d706391/pillow-12.1.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:a83e0850cb8f5ac975291ebfc4170ba481f41a28065277f7f735c202cd8e0af3", size = 5304057, upload-time = "2026-01-02T09:10:46.627Z" }, + { url = "https://files.pythonhosted.org/packages/43/06/7264c0597e676104cc22ca73ee48f752767cd4b1fe084662620b17e10120/pillow-12.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b6e53e82ec2db0717eabb276aa56cf4e500c9a7cec2c2e189b55c24f65a3e8c0", size = 4657811, upload-time = "2026-01-02T09:10:49.548Z" }, + { url = "https://files.pythonhosted.org/packages/72/64/f9189e44474610daf83da31145fa56710b627b5c4c0b9c235e34058f6b31/pillow-12.1.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:40a8e3b9e8773876d6e30daed22f016509e3987bab61b3b7fe309d7019a87451", size = 6232243, upload-time = "2026-01-02T09:10:51.62Z" }, + { url = "https://files.pythonhosted.org/packages/ef/30/0df458009be6a4caca4ca2c52975e6275c387d4e5c95544e34138b41dc86/pillow-12.1.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:800429ac32c9b72909c671aaf17ecd13110f823ddb7db4dfef412a5587c2c24e", size = 8037872, upload-time = "2026-01-02T09:10:53.446Z" }, + { url = "https://files.pythonhosted.org/packages/e4/86/95845d4eda4f4f9557e25381d70876aa213560243ac1a6d619c46caaedd9/pillow-12.1.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0b022eaaf709541b391ee069f0022ee5b36c709df71986e3f7be312e46f42c84", size = 6345398, upload-time = "2026-01-02T09:10:55.426Z" }, + { url = "https://files.pythonhosted.org/packages/5c/1f/8e66ab9be3aaf1435bc03edd1ebdf58ffcd17f7349c1d970cafe87af27d9/pillow-12.1.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f345e7bc9d7f368887c712aa5054558bad44d2a301ddf9248599f4161abc7c0", size = 7034667, upload-time = "2026-01-02T09:10:57.11Z" }, + { url = "https://files.pythonhosted.org/packages/f9/f6/683b83cb9b1db1fb52b87951b1c0b99bdcfceaa75febf11406c19f82cb5e/pillow-12.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d70347c8a5b7ccd803ec0c85c8709f036e6348f1e6a5bf048ecd9c64d3550b8b", size = 6458743, upload-time = "2026-01-02T09:10:59.331Z" }, + { url = "https://files.pythonhosted.org/packages/9a/7d/de833d63622538c1d58ce5395e7c6cb7e7dce80decdd8bde4a484e095d9f/pillow-12.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1fcc52d86ce7a34fd17cb04e87cfdb164648a3662a6f20565910a99653d66c18", size = 7159342, upload-time = "2026-01-02T09:11:01.82Z" }, + { url = "https://files.pythonhosted.org/packages/8c/40/50d86571c9e5868c42b81fe7da0c76ca26373f3b95a8dd675425f4a92ec1/pillow-12.1.0-cp311-cp311-win32.whl", hash = "sha256:3ffaa2f0659e2f740473bcf03c702c39a8d4b2b7ffc629052028764324842c64", size = 6328655, upload-time = "2026-01-02T09:11:04.556Z" }, + { url = "https://files.pythonhosted.org/packages/6c/af/b1d7e301c4cd26cd45d4af884d9ee9b6fab893b0ad2450d4746d74a6968c/pillow-12.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:806f3987ffe10e867bab0ddad45df1148a2b98221798457fa097ad85d6e8bc75", size = 7031469, upload-time = "2026-01-02T09:11:06.538Z" }, + { url = "https://files.pythonhosted.org/packages/48/36/d5716586d887fb2a810a4a61518a327a1e21c8b7134c89283af272efe84b/pillow-12.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:9f5fefaca968e700ad1a4a9de98bf0869a94e397fe3524c4c9450c1445252304", size = 2452515, upload-time = "2026-01-02T09:11:08.226Z" }, + { url = 
"https://files.pythonhosted.org/packages/20/31/dc53fe21a2f2996e1b7d92bf671cdb157079385183ef7c1ae08b485db510/pillow-12.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a332ac4ccb84b6dde65dbace8431f3af08874bf9770719d32a635c4ef411b18b", size = 5262642, upload-time = "2026-01-02T09:11:10.138Z" }, + { url = "https://files.pythonhosted.org/packages/ab/c1/10e45ac9cc79419cedf5121b42dcca5a50ad2b601fa080f58c22fb27626e/pillow-12.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:907bfa8a9cb790748a9aa4513e37c88c59660da3bcfffbd24a7d9e6abf224551", size = 4657464, upload-time = "2026-01-02T09:11:12.319Z" }, + { url = "https://files.pythonhosted.org/packages/ad/26/7b82c0ab7ef40ebede7a97c72d473bda5950f609f8e0c77b04af574a0ddb/pillow-12.1.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:efdc140e7b63b8f739d09a99033aa430accce485ff78e6d311973a67b6bf3208", size = 6234878, upload-time = "2026-01-02T09:11:14.096Z" }, + { url = "https://files.pythonhosted.org/packages/76/25/27abc9792615b5e886ca9411ba6637b675f1b77af3104710ac7353fe5605/pillow-12.1.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bef9768cab184e7ae6e559c032e95ba8d07b3023c289f79a2bd36e8bf85605a5", size = 8044868, upload-time = "2026-01-02T09:11:15.903Z" }, + { url = "https://files.pythonhosted.org/packages/0a/ea/f200a4c36d836100e7bc738fc48cd963d3ba6372ebc8298a889e0cfc3359/pillow-12.1.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:742aea052cf5ab5034a53c3846165bc3ce88d7c38e954120db0ab867ca242661", size = 6349468, upload-time = "2026-01-02T09:11:17.631Z" }, + { url = "https://files.pythonhosted.org/packages/11/8f/48d0b77ab2200374c66d344459b8958c86693be99526450e7aee714e03e4/pillow-12.1.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a6dfc2af5b082b635af6e08e0d1f9f1c4e04d17d4e2ca0ef96131e85eda6eb17", size = 7041518, upload-time = "2026-01-02T09:11:19.389Z" }, + { url = "https://files.pythonhosted.org/packages/1d/23/c281182eb986b5d31f0a76d2a2c8cd41722d6fb8ed07521e802f9bba52de/pillow-12.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:609e89d9f90b581c8d16358c9087df76024cf058fa693dd3e1e1620823f39670", size = 6462829, upload-time = "2026-01-02T09:11:21.28Z" }, + { url = "https://files.pythonhosted.org/packages/25/ef/7018273e0faac099d7b00982abdcc39142ae6f3bd9ceb06de09779c4a9d6/pillow-12.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:43b4899cfd091a9693a1278c4982f3e50f7fb7cff5153b05174b4afc9593b616", size = 7166756, upload-time = "2026-01-02T09:11:23.559Z" }, + { url = "https://files.pythonhosted.org/packages/8f/c8/993d4b7ab2e341fe02ceef9576afcf5830cdec640be2ac5bee1820d693d4/pillow-12.1.0-cp312-cp312-win32.whl", hash = "sha256:aa0c9cc0b82b14766a99fbe6084409972266e82f459821cd26997a488a7261a7", size = 6328770, upload-time = "2026-01-02T09:11:25.661Z" }, + { url = "https://files.pythonhosted.org/packages/a7/87/90b358775a3f02765d87655237229ba64a997b87efa8ccaca7dd3e36e7a7/pillow-12.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:d70534cea9e7966169ad29a903b99fc507e932069a881d0965a1a84bb57f6c6d", size = 7033406, upload-time = "2026-01-02T09:11:27.474Z" }, + { url = "https://files.pythonhosted.org/packages/5d/cf/881b457eccacac9e5b2ddd97d5071fb6d668307c57cbf4e3b5278e06e536/pillow-12.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:65b80c1ee7e14a87d6a068dd3b0aea268ffcabfe0498d38661b00c5b4b22e74c", size = 2452612, upload-time = "2026-01-02T09:11:29.309Z" }, + { url = 
"https://files.pythonhosted.org/packages/dd/c7/2530a4aa28248623e9d7f27316b42e27c32ec410f695929696f2e0e4a778/pillow-12.1.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:7b5dd7cbae20285cdb597b10eb5a2c13aa9de6cde9bb64a3c1317427b1db1ae1", size = 4062543, upload-time = "2026-01-02T09:11:31.566Z" }, + { url = "https://files.pythonhosted.org/packages/8f/1f/40b8eae823dc1519b87d53c30ed9ef085506b05281d313031755c1705f73/pillow-12.1.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:29a4cef9cb672363926f0470afc516dbf7305a14d8c54f7abbb5c199cd8f8179", size = 4138373, upload-time = "2026-01-02T09:11:33.367Z" }, + { url = "https://files.pythonhosted.org/packages/d4/77/6fa60634cf06e52139fd0e89e5bbf055e8166c691c42fb162818b7fda31d/pillow-12.1.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:681088909d7e8fa9e31b9799aaa59ba5234c58e5e4f1951b4c4d1082a2e980e0", size = 3601241, upload-time = "2026-01-02T09:11:35.011Z" }, + { url = "https://files.pythonhosted.org/packages/4f/bf/28ab865de622e14b747f0cd7877510848252d950e43002e224fb1c9ababf/pillow-12.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:983976c2ab753166dc66d36af6e8ec15bb511e4a25856e2227e5f7e00a160587", size = 5262410, upload-time = "2026-01-02T09:11:36.682Z" }, + { url = "https://files.pythonhosted.org/packages/1c/34/583420a1b55e715937a85bd48c5c0991598247a1fd2eb5423188e765ea02/pillow-12.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:db44d5c160a90df2d24a24760bbd37607d53da0b34fb546c4c232af7192298ac", size = 4657312, upload-time = "2026-01-02T09:11:38.535Z" }, + { url = "https://files.pythonhosted.org/packages/1d/fd/f5a0896839762885b3376ff04878f86ab2b097c2f9a9cdccf4eda8ba8dc0/pillow-12.1.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6b7a9d1db5dad90e2991645874f708e87d9a3c370c243c2d7684d28f7e133e6b", size = 6232605, upload-time = "2026-01-02T09:11:40.602Z" }, + { url = "https://files.pythonhosted.org/packages/98/aa/938a09d127ac1e70e6ed467bd03834350b33ef646b31edb7452d5de43792/pillow-12.1.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6258f3260986990ba2fa8a874f8b6e808cf5abb51a94015ca3dc3c68aa4f30ea", size = 8041617, upload-time = "2026-01-02T09:11:42.721Z" }, + { url = "https://files.pythonhosted.org/packages/17/e8/538b24cb426ac0186e03f80f78bc8dc7246c667f58b540bdd57c71c9f79d/pillow-12.1.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e115c15e3bc727b1ca3e641a909f77f8ca72a64fff150f666fcc85e57701c26c", size = 6346509, upload-time = "2026-01-02T09:11:44.955Z" }, + { url = "https://files.pythonhosted.org/packages/01/9a/632e58ec89a32738cabfd9ec418f0e9898a2b4719afc581f07c04a05e3c9/pillow-12.1.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6741e6f3074a35e47c77b23a4e4f2d90db3ed905cb1c5e6e0d49bff2045632bc", size = 7038117, upload-time = "2026-01-02T09:11:46.736Z" }, + { url = "https://files.pythonhosted.org/packages/c7/a2/d40308cf86eada842ca1f3ffa45d0ca0df7e4ab33c83f81e73f5eaed136d/pillow-12.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:935b9d1aed48fcfb3f838caac506f38e29621b44ccc4f8a64d575cb1b2a88644", size = 6460151, upload-time = "2026-01-02T09:11:48.625Z" }, + { url = "https://files.pythonhosted.org/packages/f1/88/f5b058ad6453a085c5266660a1417bdad590199da1b32fb4efcff9d33b05/pillow-12.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5fee4c04aad8932da9f8f710af2c1a15a83582cfb884152a9caa79d4efcdbf9c", size = 7164534, upload-time = "2026-01-02T09:11:50.445Z" }, + { 
url = "https://files.pythonhosted.org/packages/19/ce/c17334caea1db789163b5d855a5735e47995b0b5dc8745e9a3605d5f24c0/pillow-12.1.0-cp313-cp313-win32.whl", hash = "sha256:a786bf667724d84aa29b5db1c61b7bfdde380202aaca12c3461afd6b71743171", size = 6332551, upload-time = "2026-01-02T09:11:52.234Z" }, + { url = "https://files.pythonhosted.org/packages/e5/07/74a9d941fa45c90a0d9465098fe1ec85de3e2afbdc15cc4766622d516056/pillow-12.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:461f9dfdafa394c59cd6d818bdfdbab4028b83b02caadaff0ffd433faf4c9a7a", size = 7040087, upload-time = "2026-01-02T09:11:54.822Z" }, + { url = "https://files.pythonhosted.org/packages/88/09/c99950c075a0e9053d8e880595926302575bc742b1b47fe1bbcc8d388d50/pillow-12.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:9212d6b86917a2300669511ed094a9406888362e085f2431a7da985a6b124f45", size = 2452470, upload-time = "2026-01-02T09:11:56.522Z" }, + { url = "https://files.pythonhosted.org/packages/b5/ba/970b7d85ba01f348dee4d65412476321d40ee04dcb51cd3735b9dc94eb58/pillow-12.1.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:00162e9ca6d22b7c3ee8e61faa3c3253cd19b6a37f126cad04f2f88b306f557d", size = 5264816, upload-time = "2026-01-02T09:11:58.227Z" }, + { url = "https://files.pythonhosted.org/packages/10/60/650f2fb55fdba7a510d836202aa52f0baac633e50ab1cf18415d332188fb/pillow-12.1.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:7d6daa89a00b58c37cb1747ec9fb7ac3bc5ffd5949f5888657dfddde6d1312e0", size = 4660472, upload-time = "2026-01-02T09:12:00.798Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c0/5273a99478956a099d533c4f46cbaa19fd69d606624f4334b85e50987a08/pillow-12.1.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e2479c7f02f9d505682dc47df8c0ea1fc5e264c4d1629a5d63fe3e2334b89554", size = 6268974, upload-time = "2026-01-02T09:12:02.572Z" }, + { url = "https://files.pythonhosted.org/packages/b4/26/0bf714bc2e73d5267887d47931d53c4ceeceea6978148ed2ab2a4e6463c4/pillow-12.1.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f188d580bd870cda1e15183790d1cc2fa78f666e76077d103edf048eed9c356e", size = 8073070, upload-time = "2026-01-02T09:12:04.75Z" }, + { url = "https://files.pythonhosted.org/packages/43/cf/1ea826200de111a9d65724c54f927f3111dc5ae297f294b370a670c17786/pillow-12.1.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0fde7ec5538ab5095cc02df38ee99b0443ff0e1c847a045554cf5f9af1f4aa82", size = 6380176, upload-time = "2026-01-02T09:12:06.626Z" }, + { url = "https://files.pythonhosted.org/packages/03/e0/7938dd2b2013373fd85d96e0f38d62b7a5a262af21ac274250c7ca7847c9/pillow-12.1.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0ed07dca4a8464bada6139ab38f5382f83e5f111698caf3191cb8dbf27d908b4", size = 7067061, upload-time = "2026-01-02T09:12:08.624Z" }, + { url = "https://files.pythonhosted.org/packages/86/ad/a2aa97d37272a929a98437a8c0ac37b3cf012f4f8721e1bd5154699b2518/pillow-12.1.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:f45bd71d1fa5e5749587613037b172e0b3b23159d1c00ef2fc920da6f470e6f0", size = 6491824, upload-time = "2026-01-02T09:12:10.488Z" }, + { url = "https://files.pythonhosted.org/packages/a4/44/80e46611b288d51b115826f136fb3465653c28f491068a72d3da49b54cd4/pillow-12.1.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:277518bf4fe74aa91489e1b20577473b19ee70fb97c374aa50830b279f25841b", size = 7190911, upload-time = "2026-01-02T09:12:12.772Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/77/eacc62356b4cf81abe99ff9dbc7402750044aed02cfd6a503f7c6fc11f3e/pillow-12.1.0-cp313-cp313t-win32.whl", hash = "sha256:7315f9137087c4e0ee73a761b163fc9aa3b19f5f606a7fc08d83fd3e4379af65", size = 6336445, upload-time = "2026-01-02T09:12:14.775Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3c/57d81d0b74d218706dafccb87a87ea44262c43eef98eb3b164fd000e0491/pillow-12.1.0-cp313-cp313t-win_amd64.whl", hash = "sha256:0ddedfaa8b5f0b4ffbc2fa87b556dc59f6bb4ecb14a53b33f9189713ae8053c0", size = 7045354, upload-time = "2026-01-02T09:12:16.599Z" }, + { url = "https://files.pythonhosted.org/packages/ac/82/8b9b97bba2e3576a340f93b044a3a3a09841170ab4c1eb0d5c93469fd32f/pillow-12.1.0-cp313-cp313t-win_arm64.whl", hash = "sha256:80941e6d573197a0c28f394753de529bb436b1ca990ed6e765cf42426abc39f8", size = 2454547, upload-time = "2026-01-02T09:12:18.704Z" }, + { url = "https://files.pythonhosted.org/packages/8c/87/bdf971d8bbcf80a348cc3bacfcb239f5882100fe80534b0ce67a784181d8/pillow-12.1.0-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:5cb7bc1966d031aec37ddb9dcf15c2da5b2e9f7cc3ca7c54473a20a927e1eb91", size = 4062533, upload-time = "2026-01-02T09:12:20.791Z" }, + { url = "https://files.pythonhosted.org/packages/ff/4f/5eb37a681c68d605eb7034c004875c81f86ec9ef51f5be4a63eadd58859a/pillow-12.1.0-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:97e9993d5ed946aba26baf9c1e8cf18adbab584b99f452ee72f7ee8acb882796", size = 4138546, upload-time = "2026-01-02T09:12:23.664Z" }, + { url = "https://files.pythonhosted.org/packages/11/6d/19a95acb2edbace40dcd582d077b991646b7083c41b98da4ed7555b59733/pillow-12.1.0-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:414b9a78e14ffeb98128863314e62c3f24b8a86081066625700b7985b3f529bd", size = 3601163, upload-time = "2026-01-02T09:12:26.338Z" }, + { url = "https://files.pythonhosted.org/packages/fc/36/2b8138e51cb42e4cc39c3297713455548be855a50558c3ac2beebdc251dd/pillow-12.1.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:e6bdb408f7c9dd2a5ff2b14a3b0bb6d4deb29fb9961e6eb3ae2031ae9a5cec13", size = 5266086, upload-time = "2026-01-02T09:12:28.782Z" }, + { url = "https://files.pythonhosted.org/packages/53/4b/649056e4d22e1caa90816bf99cef0884aed607ed38075bd75f091a607a38/pillow-12.1.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:3413c2ae377550f5487991d444428f1a8ae92784aac79caa8b1e3b89b175f77e", size = 4657344, upload-time = "2026-01-02T09:12:31.117Z" }, + { url = "https://files.pythonhosted.org/packages/6c/6b/c5742cea0f1ade0cd61485dc3d81f05261fc2276f537fbdc00802de56779/pillow-12.1.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e5dcbe95016e88437ecf33544ba5db21ef1b8dd6e1b434a2cb2a3d605299e643", size = 6232114, upload-time = "2026-01-02T09:12:32.936Z" }, + { url = "https://files.pythonhosted.org/packages/bf/8f/9f521268ce22d63991601aafd3d48d5ff7280a246a1ef62d626d67b44064/pillow-12.1.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d0a7735df32ccbcc98b98a1ac785cc4b19b580be1bdf0aeb5c03223220ea09d5", size = 8042708, upload-time = "2026-01-02T09:12:34.78Z" }, + { url = "https://files.pythonhosted.org/packages/1a/eb/257f38542893f021502a1bbe0c2e883c90b5cff26cc33b1584a841a06d30/pillow-12.1.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0c27407a2d1b96774cbc4a7594129cc027339fd800cd081e44497722ea1179de", size = 6347762, upload-time = "2026-01-02T09:12:36.748Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/5a/8ba375025701c09b309e8d5163c5a4ce0102fa86bbf8800eb0d7ac87bc51/pillow-12.1.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15c794d74303828eaa957ff8070846d0efe8c630901a1c753fdc63850e19ecd9", size = 7039265, upload-time = "2026-01-02T09:12:39.082Z" }, + { url = "https://files.pythonhosted.org/packages/cf/dc/cf5e4cdb3db533f539e88a7bbf9f190c64ab8a08a9bc7a4ccf55067872e4/pillow-12.1.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c990547452ee2800d8506c4150280757f88532f3de2a58e3022e9b179107862a", size = 6462341, upload-time = "2026-01-02T09:12:40.946Z" }, + { url = "https://files.pythonhosted.org/packages/d0/47/0291a25ac9550677e22eda48510cfc4fa4b2ef0396448b7fbdc0a6946309/pillow-12.1.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b63e13dd27da389ed9475b3d28510f0f954bca0041e8e551b2a4eb1eab56a39a", size = 7165395, upload-time = "2026-01-02T09:12:42.706Z" }, + { url = "https://files.pythonhosted.org/packages/4f/4c/e005a59393ec4d9416be06e6b45820403bb946a778e39ecec62f5b2b991e/pillow-12.1.0-cp314-cp314-win32.whl", hash = "sha256:1a949604f73eb07a8adab38c4fe50791f9919344398bdc8ac6b307f755fc7030", size = 6431413, upload-time = "2026-01-02T09:12:44.944Z" }, + { url = "https://files.pythonhosted.org/packages/1c/af/f23697f587ac5f9095d67e31b81c95c0249cd461a9798a061ed6709b09b5/pillow-12.1.0-cp314-cp314-win_amd64.whl", hash = "sha256:4f9f6a650743f0ddee5593ac9e954ba1bdbc5e150bc066586d4f26127853ab94", size = 7176779, upload-time = "2026-01-02T09:12:46.727Z" }, + { url = "https://files.pythonhosted.org/packages/b3/36/6a51abf8599232f3e9afbd16d52829376a68909fe14efe29084445db4b73/pillow-12.1.0-cp314-cp314-win_arm64.whl", hash = "sha256:808b99604f7873c800c4840f55ff389936ef1948e4e87645eaf3fccbc8477ac4", size = 2543105, upload-time = "2026-01-02T09:12:49.243Z" }, + { url = "https://files.pythonhosted.org/packages/82/54/2e1dd20c8749ff225080d6ba465a0cab4387f5db0d1c5fb1439e2d99923f/pillow-12.1.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:bc11908616c8a283cf7d664f77411a5ed2a02009b0097ff8abbba5e79128ccf2", size = 5268571, upload-time = "2026-01-02T09:12:51.11Z" }, + { url = "https://files.pythonhosted.org/packages/57/61/571163a5ef86ec0cf30d265ac2a70ae6fc9e28413d1dc94fa37fae6bda89/pillow-12.1.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:896866d2d436563fa2a43a9d72f417874f16b5545955c54a64941e87c1376c61", size = 4660426, upload-time = "2026-01-02T09:12:52.865Z" }, + { url = "https://files.pythonhosted.org/packages/5e/e1/53ee5163f794aef1bf84243f755ee6897a92c708505350dd1923f4afec48/pillow-12.1.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8e178e3e99d3c0ea8fc64b88447f7cac8ccf058af422a6cedc690d0eadd98c51", size = 6269908, upload-time = "2026-01-02T09:12:54.884Z" }, + { url = "https://files.pythonhosted.org/packages/bc/0b/b4b4106ff0ee1afa1dc599fde6ab230417f800279745124f6c50bcffed8e/pillow-12.1.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:079af2fb0c599c2ec144ba2c02766d1b55498e373b3ac64687e43849fbbef5bc", size = 8074733, upload-time = "2026-01-02T09:12:56.802Z" }, + { url = "https://files.pythonhosted.org/packages/19/9f/80b411cbac4a732439e629a26ad3ef11907a8c7fc5377b7602f04f6fe4e7/pillow-12.1.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bdec5e43377761c5dbca620efb69a77f6855c5a379e32ac5b158f54c84212b14", size = 6381431, upload-time = "2026-01-02T09:12:58.823Z" }, + { url = 
"https://files.pythonhosted.org/packages/8f/b7/d65c45db463b66ecb6abc17c6ba6917a911202a07662247e1355ce1789e7/pillow-12.1.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:565c986f4b45c020f5421a4cea13ef294dde9509a8577f29b2fc5edc7587fff8", size = 7068529, upload-time = "2026-01-02T09:13:00.885Z" }, + { url = "https://files.pythonhosted.org/packages/50/96/dfd4cd726b4a45ae6e3c669fc9e49deb2241312605d33aba50499e9d9bd1/pillow-12.1.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:43aca0a55ce1eefc0aefa6253661cb54571857b1a7b2964bd8a1e3ef4b729924", size = 6492981, upload-time = "2026-01-02T09:13:03.314Z" }, + { url = "https://files.pythonhosted.org/packages/4d/1c/b5dc52cf713ae46033359c5ca920444f18a6359ce1020dd3e9c553ea5bc6/pillow-12.1.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0deedf2ea233722476b3a81e8cdfbad786f7adbed5d848469fa59fe52396e4ef", size = 7191878, upload-time = "2026-01-02T09:13:05.276Z" }, + { url = "https://files.pythonhosted.org/packages/53/26/c4188248bd5edaf543864fe4834aebe9c9cb4968b6f573ce014cc42d0720/pillow-12.1.0-cp314-cp314t-win32.whl", hash = "sha256:b17fbdbe01c196e7e159aacb889e091f28e61020a8abeac07b68079b6e626988", size = 6438703, upload-time = "2026-01-02T09:13:07.491Z" }, + { url = "https://files.pythonhosted.org/packages/b8/0e/69ed296de8ea05cb03ee139cee600f424ca166e632567b2d66727f08c7ed/pillow-12.1.0-cp314-cp314t-win_amd64.whl", hash = "sha256:27b9baecb428899db6c0de572d6d305cfaf38ca1596b5c0542a5182e3e74e8c6", size = 7182927, upload-time = "2026-01-02T09:13:09.841Z" }, + { url = "https://files.pythonhosted.org/packages/fc/f5/68334c015eed9b5cff77814258717dec591ded209ab5b6fb70e2ae873d1d/pillow-12.1.0-cp314-cp314t-win_arm64.whl", hash = "sha256:f61333d817698bdcdd0f9d7793e365ac3d2a21c1f1eb02b32ad6aefb8d8ea831", size = 2545104, upload-time = "2026-01-02T09:13:12.068Z" }, + { url = "https://files.pythonhosted.org/packages/8b/bc/224b1d98cffd7164b14707c91aac83c07b047fbd8f58eba4066a3e53746a/pillow-12.1.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ca94b6aac0d7af2a10ba08c0f888b3d5114439b6b3ef39968378723622fed377", size = 5228605, upload-time = "2026-01-02T09:13:14.084Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ca/49ca7769c4550107de049ed85208240ba0f330b3f2e316f24534795702ce/pillow-12.1.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:351889afef0f485b84078ea40fe33727a0492b9af3904661b0abbafee0355b72", size = 4622245, upload-time = "2026-01-02T09:13:15.964Z" }, + { url = "https://files.pythonhosted.org/packages/73/48/fac807ce82e5955bcc2718642b94b1bd22a82a6d452aea31cbb678cddf12/pillow-12.1.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bb0984b30e973f7e2884362b7d23d0a348c7143ee559f38ef3eaab640144204c", size = 5247593, upload-time = "2026-01-02T09:13:17.913Z" }, + { url = "https://files.pythonhosted.org/packages/d2/95/3e0742fe358c4664aed4fd05d5f5373dcdad0b27af52aa0972568541e3f4/pillow-12.1.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:84cabc7095dd535ca934d57e9ce2a72ffd216e435a84acb06b2277b1de2689bd", size = 6989008, upload-time = "2026-01-02T09:13:20.083Z" }, + { url = "https://files.pythonhosted.org/packages/5a/74/fe2ac378e4e202e56d50540d92e1ef4ff34ed687f3c60f6a121bcf99437e/pillow-12.1.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53d8b764726d3af1a138dd353116f774e3862ec7e3794e0c8781e30db0f35dfc", size = 5313824, upload-time = "2026-01-02T09:13:22.405Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/77/2a60dee1adee4e2655ac328dd05c02a955c1cd683b9f1b82ec3feb44727c/pillow-12.1.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5da841d81b1a05ef940a8567da92decaa15bc4d7dedb540a8c219ad83d91808a", size = 5963278, upload-time = "2026-01-02T09:13:24.706Z" }, + { url = "https://files.pythonhosted.org/packages/2d/71/64e9b1c7f04ae0027f788a248e6297d7fcc29571371fe7d45495a78172c0/pillow-12.1.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:75af0b4c229ac519b155028fa1be632d812a519abba9b46b20e50c6caa184f19", size = 7029809, upload-time = "2026-01-02T09:13:26.541Z" }, ] [[package]] @@ -1785,23 +1707,47 @@ wheels = [ name = "pydantic" version = "2.11.7" source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.14'", +] dependencies = [ - { name = "annotated-types" }, - { name = "pydantic-core" }, - { name = "typing-extensions" }, - { name = "typing-inspection" }, + { name = "annotated-types", marker = "python_full_version < '3.14'" }, + { name = "pydantic-core", version = "2.33.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.14'" }, + { name = "typing-extensions", marker = "python_full_version < '3.14'" }, + { name = "typing-inspection", version = "0.4.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.14'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350, upload-time = "2025-06-14T08:33:17.137Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" }, ] +[[package]] +name = "pydantic" +version = "2.12.5" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.14'", +] +dependencies = [ + { name = "annotated-types", marker = "python_full_version >= '3.14'" }, + { name = "pydantic-core", version = "2.41.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.14'" }, + { name = "typing-extensions", marker = "python_full_version >= '3.14'" }, + { name = "typing-inspection", version = "0.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.14'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, +] + [[package]] name = "pydantic-core" version = "2.33.2" source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.14'", +] dependencies = [ - { name = "typing-extensions" }, + { name = "typing-extensions", marker = "python_full_version < '3.14'" }, ] sdist = { url = 
"https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } wheels = [ @@ -1883,14 +1829,137 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, ] +[[package]] +name = "pydantic-core" +version = "2.41.5" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.14'", +] +dependencies = [ + { name = "typing-extensions", marker = "python_full_version >= '3.14'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/90/32c9941e728d564b411d574d8ee0cf09b12ec978cb22b294995bae5549a5/pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146", size = 2107298, upload-time = "2025-11-04T13:39:04.116Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a8/61c96a77fe28993d9a6fb0f4127e05430a267b235a124545d79fea46dd65/pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2", size = 1901475, upload-time = "2025-11-04T13:39:06.055Z" }, + { url = "https://files.pythonhosted.org/packages/5d/b6/338abf60225acc18cdc08b4faef592d0310923d19a87fba1faf05af5346e/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97", size = 1918815, upload-time = "2025-11-04T13:39:10.41Z" }, + { url = "https://files.pythonhosted.org/packages/d1/1c/2ed0433e682983d8e8cba9c8d8ef274d4791ec6a6f24c58935b90e780e0a/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9", size = 2065567, upload-time = "2025-11-04T13:39:12.244Z" }, + { url = "https://files.pythonhosted.org/packages/b3/24/cf84974ee7d6eae06b9e63289b7b8f6549d416b5c199ca2d7ce13bbcf619/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52", size = 2230442, upload-time = "2025-11-04T13:39:13.962Z" }, + { url = "https://files.pythonhosted.org/packages/fd/21/4e287865504b3edc0136c89c9c09431be326168b1eb7841911cbc877a995/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941", size = 2350956, upload-time = "2025-11-04T13:39:15.889Z" }, + { url = "https://files.pythonhosted.org/packages/a8/76/7727ef2ffa4b62fcab916686a68a0426b9b790139720e1934e8ba797e238/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a", size = 2068253, upload-time = 
"2025-11-04T13:39:17.403Z" }, + { url = "https://files.pythonhosted.org/packages/d5/8c/a4abfc79604bcb4c748e18975c44f94f756f08fb04218d5cb87eb0d3a63e/pydantic_core-2.41.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c", size = 2177050, upload-time = "2025-11-04T13:39:19.351Z" }, + { url = "https://files.pythonhosted.org/packages/67/b1/de2e9a9a79b480f9cb0b6e8b6ba4c50b18d4e89852426364c66aa82bb7b3/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2", size = 2147178, upload-time = "2025-11-04T13:39:21Z" }, + { url = "https://files.pythonhosted.org/packages/16/c1/dfb33f837a47b20417500efaa0378adc6635b3c79e8369ff7a03c494b4ac/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556", size = 2341833, upload-time = "2025-11-04T13:39:22.606Z" }, + { url = "https://files.pythonhosted.org/packages/47/36/00f398642a0f4b815a9a558c4f1dca1b4020a7d49562807d7bc9ff279a6c/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49", size = 2321156, upload-time = "2025-11-04T13:39:25.843Z" }, + { url = "https://files.pythonhosted.org/packages/7e/70/cad3acd89fde2010807354d978725ae111ddf6d0ea46d1ea1775b5c1bd0c/pydantic_core-2.41.5-cp310-cp310-win32.whl", hash = "sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba", size = 1989378, upload-time = "2025-11-04T13:39:27.92Z" }, + { url = "https://files.pythonhosted.org/packages/76/92/d338652464c6c367e5608e4488201702cd1cbb0f33f7b6a85a60fe5f3720/pydantic_core-2.41.5-cp310-cp310-win_amd64.whl", hash = "sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9", size = 2013622, upload-time = "2025-11-04T13:39:29.848Z" }, + { url = "https://files.pythonhosted.org/packages/e8/72/74a989dd9f2084b3d9530b0915fdda64ac48831c30dbf7c72a41a5232db8/pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6", size = 2105873, upload-time = "2025-11-04T13:39:31.373Z" }, + { url = "https://files.pythonhosted.org/packages/12/44/37e403fd9455708b3b942949e1d7febc02167662bf1a7da5b78ee1ea2842/pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b", size = 1899826, upload-time = "2025-11-04T13:39:32.897Z" }, + { url = "https://files.pythonhosted.org/packages/33/7f/1d5cab3ccf44c1935a359d51a8a2a9e1a654b744b5e7f80d41b88d501eec/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a", size = 1917869, upload-time = "2025-11-04T13:39:34.469Z" }, + { url = "https://files.pythonhosted.org/packages/6e/6a/30d94a9674a7fe4f4744052ed6c5e083424510be1e93da5bc47569d11810/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8", size = 2063890, upload-time = "2025-11-04T13:39:36.053Z" }, + { url = "https://files.pythonhosted.org/packages/50/be/76e5d46203fcb2750e542f32e6c371ffa9b8ad17364cf94bb0818dbfb50c/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e", size = 
2229740, upload-time = "2025-11-04T13:39:37.753Z" }, + { url = "https://files.pythonhosted.org/packages/d3/ee/fed784df0144793489f87db310a6bbf8118d7b630ed07aa180d6067e653a/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1", size = 2350021, upload-time = "2025-11-04T13:39:40.94Z" }, + { url = "https://files.pythonhosted.org/packages/c8/be/8fed28dd0a180dca19e72c233cbf58efa36df055e5b9d90d64fd1740b828/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b", size = 2066378, upload-time = "2025-11-04T13:39:42.523Z" }, + { url = "https://files.pythonhosted.org/packages/b0/3b/698cf8ae1d536a010e05121b4958b1257f0b5522085e335360e53a6b1c8b/pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b", size = 2175761, upload-time = "2025-11-04T13:39:44.553Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ba/15d537423939553116dea94ce02f9c31be0fa9d0b806d427e0308ec17145/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284", size = 2146303, upload-time = "2025-11-04T13:39:46.238Z" }, + { url = "https://files.pythonhosted.org/packages/58/7f/0de669bf37d206723795f9c90c82966726a2ab06c336deba4735b55af431/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594", size = 2340355, upload-time = "2025-11-04T13:39:48.002Z" }, + { url = "https://files.pythonhosted.org/packages/e5/de/e7482c435b83d7e3c3ee5ee4451f6e8973cff0eb6007d2872ce6383f6398/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e", size = 2319875, upload-time = "2025-11-04T13:39:49.705Z" }, + { url = "https://files.pythonhosted.org/packages/fe/e6/8c9e81bb6dd7560e33b9053351c29f30c8194b72f2d6932888581f503482/pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b", size = 1987549, upload-time = "2025-11-04T13:39:51.842Z" }, + { url = "https://files.pythonhosted.org/packages/11/66/f14d1d978ea94d1bc21fc98fcf570f9542fe55bfcc40269d4e1a21c19bf7/pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe", size = 2011305, upload-time = "2025-11-04T13:39:53.485Z" }, + { url = "https://files.pythonhosted.org/packages/56/d8/0e271434e8efd03186c5386671328154ee349ff0354d83c74f5caaf096ed/pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f", size = 1972902, upload-time = "2025-11-04T13:39:56.488Z" }, + { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, + { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, 
upload-time = "2025-11-04T13:39:59.956Z" }, + { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, + { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, + { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, + { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, + { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, + { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, + { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, + { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = 
"sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, + { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, + { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, + { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, + { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, + { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" }, + { url = 
"https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, + { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, + { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, + { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, + { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, + { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, + { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, + { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, + { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" 
}, + { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" }, + { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" }, + { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, + { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, + { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, + { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, + { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, + { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, + { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, + { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, + { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, + { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, + { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, + { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, + { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = 
"2025-11-04T13:42:01.186Z" }, + { url = "https://files.pythonhosted.org/packages/11/72/90fda5ee3b97e51c494938a4a44c3a35a9c96c19bba12372fb9c634d6f57/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034", size = 2115441, upload-time = "2025-11-04T13:42:39.557Z" }, + { url = "https://files.pythonhosted.org/packages/1f/53/8942f884fa33f50794f119012dc6a1a02ac43a56407adaac20463df8e98f/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c", size = 1930291, upload-time = "2025-11-04T13:42:42.169Z" }, + { url = "https://files.pythonhosted.org/packages/79/c8/ecb9ed9cd942bce09fc888ee960b52654fbdbede4ba6c2d6e0d3b1d8b49c/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2", size = 1948632, upload-time = "2025-11-04T13:42:44.564Z" }, + { url = "https://files.pythonhosted.org/packages/2e/1b/687711069de7efa6af934e74f601e2a4307365e8fdc404703afc453eab26/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad", size = 2138905, upload-time = "2025-11-04T13:42:47.156Z" }, + { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, + { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, + { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, + { url = "https://files.pythonhosted.org/packages/e6/b0/1a2aa41e3b5a4ba11420aba2d091b2d17959c8d1519ece3627c371951e73/pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8", size = 2103351, upload-time = "2025-11-04T13:43:02.058Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ee/31b1f0020baaf6d091c87900ae05c6aeae101fa4e188e1613c80e4f1ea31/pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a", size = 1925363, upload-time = "2025-11-04T13:43:05.159Z" }, + { url = 
"https://files.pythonhosted.org/packages/e1/89/ab8e86208467e467a80deaca4e434adac37b10a9d134cd2f99b28a01e483/pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b", size = 2135615, upload-time = "2025-11-04T13:43:08.116Z" }, + { url = "https://files.pythonhosted.org/packages/99/0a/99a53d06dd0348b2008f2f30884b34719c323f16c3be4e6cc1203b74a91d/pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2", size = 2175369, upload-time = "2025-11-04T13:43:12.49Z" }, + { url = "https://files.pythonhosted.org/packages/6d/94/30ca3b73c6d485b9bb0bc66e611cff4a7138ff9736b7e66bcf0852151636/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093", size = 2144218, upload-time = "2025-11-04T13:43:15.431Z" }, + { url = "https://files.pythonhosted.org/packages/87/57/31b4f8e12680b739a91f472b5671294236b82586889ef764b5fbc6669238/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a", size = 2329951, upload-time = "2025-11-04T13:43:18.062Z" }, + { url = "https://files.pythonhosted.org/packages/7d/73/3c2c8edef77b8f7310e6fb012dbc4b8551386ed575b9eb6fb2506e28a7eb/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963", size = 2318428, upload-time = "2025-11-04T13:43:20.679Z" }, + { url = "https://files.pythonhosted.org/packages/2f/02/8559b1f26ee0d502c74f9cca5c0d2fd97e967e083e006bbbb4e97f3a043a/pydantic_core-2.41.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a", size = 2147009, upload-time = "2025-11-04T13:43:23.286Z" }, + { url = "https://files.pythonhosted.org/packages/5f/9b/1b3f0e9f9305839d7e84912f9e8bfbd191ed1b1ef48083609f0dabde978c/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26", size = 2101980, upload-time = "2025-11-04T13:43:25.97Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ed/d71fefcb4263df0da6a85b5d8a7508360f2f2e9b3bf5814be9c8bccdccc1/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808", size = 1923865, upload-time = "2025-11-04T13:43:28.763Z" }, + { url = "https://files.pythonhosted.org/packages/ce/3a/626b38db460d675f873e4444b4bb030453bbe7b4ba55df821d026a0493c4/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc", size = 2134256, upload-time = "2025-11-04T13:43:31.71Z" }, + { url = "https://files.pythonhosted.org/packages/83/d9/8412d7f06f616bbc053d30cb4e5f76786af3221462ad5eee1f202021eb4e/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1", size = 2174762, upload-time = "2025-11-04T13:43:34.744Z" }, + { url = "https://files.pythonhosted.org/packages/55/4c/162d906b8e3ba3a99354e20faa1b49a85206c47de97a639510a0e673f5da/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84", size = 2143141, upload-time = "2025-11-04T13:43:37.701Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f2/f11dd73284122713f5f89fc940f370d035fa8e1e078d446b3313955157fe/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770", size = 2330317, upload-time = "2025-11-04T13:43:40.406Z" }, + { url = "https://files.pythonhosted.org/packages/88/9d/b06ca6acfe4abb296110fb1273a4d848a0bfb2ff65f3ee92127b3244e16b/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f", size = 2316992, upload-time = "2025-11-04T13:43:43.602Z" }, + { url = "https://files.pythonhosted.org/packages/36/c7/cfc8e811f061c841d7990b0201912c3556bfeb99cdcb7ed24adc8d6f8704/pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51", size = 2145302, upload-time = "2025-11-04T13:43:46.64Z" }, +] + [[package]] name = "pydantic-settings" version = "2.10.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "pydantic" }, + { name = "pydantic", version = "2.11.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.14'" }, + { name = "pydantic", version = "2.12.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.14'" }, { name = "python-dotenv" }, - { name = "typing-inspection" }, + { name = "typing-inspection", version = "0.4.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.14'" }, + { name = "typing-inspection", version = "0.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.14'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/68/85/1ea668bbab3c50071ca613c6ab30047fb36ab0da1b92fa8f17bbc38fd36c/pydantic_settings-2.10.1.tar.gz", hash = "sha256:06f0062169818d0f5524420a360d632d5857b83cffd4d42fe29597807a1614ee", size = 172583, upload-time = "2025-06-24T13:26:46.841Z" } wheels = [ @@ -2436,18 +2505,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/51/da/545b75d420bb23b5d494b0517757b351963e974e79933f01e05c929f20a6/starlette-0.49.1-py3-none-any.whl", hash = "sha256:d92ce9f07e4a3caa3ac13a79523bd18e3bc0042bb8ff2d759a8e7dd0e1859875", size = 74175, upload-time = "2025-10-28T17:34:09.13Z" }, ] -[[package]] -name = "tinycss2" -version = "1.4.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "webencodings" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/7a/fd/7a5ee21fd08ff70d3d33a5781c255cbe779659bd03278feb98b19ee550f4/tinycss2-1.4.0.tar.gz", hash = "sha256:10c0972f6fc0fbee87c3edb76549357415e94548c1ae10ebccdea16fb404a9b7", size = 87085, upload-time = "2024-10-24T14:58:29.895Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e6/34/ebdc18bae6aa14fbee1a08b63c015c72b64868ff7dae68808ab500c492e2/tinycss2-1.4.0-py3-none-any.whl", hash = "sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289", size = 26610, upload-time = "2024-10-24T14:58:28.029Z" }, -] - [[package]] name = "tomli" version = "2.2.1" @@ -2533,14 +2590,32 @@ wheels = [ name = "typing-inspection" version = "0.4.1" source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.14'", +] dependencies = [ - { name = 
"typing-extensions" }, + { name = "typing-extensions", marker = "python_full_version < '3.14'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" }, ] +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.14'", +] +dependencies = [ + { name = "typing-extensions", marker = "python_full_version >= '3.14'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] + [[package]] name = "urllib3" version = "2.5.0" @@ -2596,15 +2671,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/33/e8/e40370e6d74ddba47f002a32919d91310d6074130fe4e17dabcafc15cbf1/watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f", size = 79067, upload-time = "2024-11-01T14:07:11.845Z" }, ] -[[package]] -name = "webencodings" -version = "0.5.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923", size = 9721, upload-time = "2017-04-05T20:21:34.189Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", size = 11774, upload-time = "2017-04-05T20:21:32.581Z" }, -] - [[package]] name = "websockets" version = "15.0.1"