diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index dd84ea782..6867cf8d2 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -1,38 +1,38 @@ ---- -name: Bug report -about: Create a report to help us improve -title: '' -labels: '' -assignees: '' - ---- - -**Describe the bug** -A clear and concise description of what the bug is. - -**To Reproduce** -Steps to reproduce the behavior: -1. Go to '...' -2. Click on '....' -3. Scroll down to '....' -4. See error - -**Expected behavior** -A clear and concise description of what you expected to happen. - -**Screenshots** -If applicable, add screenshots to help explain your problem. - -**Desktop (please complete the following information):** - - OS: [e.g. iOS] - - Browser [e.g. chrome, safari] - - Version [e.g. 22] - -**Smartphone (please complete the following information):** - - Device: [e.g. iPhone6] - - OS: [e.g. iOS8.1] - - Browser [e.g. stock browser, safari] - - Version [e.g. 22] - -**Additional context** -Add any other context about the problem here. +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: '' +assignees: '' + +--- + +**Describe the bug** +A clear and concise description of what the bug is. + +**To Reproduce** +Steps to reproduce the behavior: +1. Go to '...' +2. Click on '....' +3. Scroll down to '....' +4. See error + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Screenshots** +If applicable, add screenshots to help explain your problem. + +**Desktop (please complete the following information):** + - OS: [e.g. iOS] + - Browser [e.g. chrome, safari] + - Version [e.g. 22] + +**Smartphone (please complete the following information):** + - Device: [e.g. iPhone6] + - OS: [e.g. iOS8.1] + - Browser [e.g. stock browser, safari] + - Version [e.g. 22] + +**Additional context** +Add any other context about the problem here. 
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index bbcbbe7d6..72718d5aa 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -1,20 +1,20 @@ ---- -name: Feature request -about: Suggest an idea for this project -title: '' -labels: '' -assignees: '' - ---- - -**Is your feature request related to a problem? Please describe.** -A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] - -**Describe the solution you'd like** -A clear and concise description of what you want to happen. - -**Describe alternatives you've considered** -A clear and concise description of any alternative solutions or features you've considered. - -**Additional context** -Add any other context or screenshots about the feature request here. +--- +name: Feature request +about: Suggest an idea for this project +title: '' +labels: '' +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context or screenshots about the feature request here. 
diff --git a/.github/workflows/check-lock.yml b/.github/workflows/check-lock.yml index 805b0f3cc..d64acf296 100644 --- a/.github/workflows/check-lock.yml +++ b/.github/workflows/check-lock.yml @@ -1,25 +1,25 @@ -name: Check uv.lock - -on: - pull_request: - paths: - - "pyproject.toml" - - "uv.lock" - push: - paths: - - "pyproject.toml" - - "uv.lock" - -jobs: - check-lock: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - name: Install uv - run: | - curl -LsSf https://astral.sh/uv/install.sh | sh - echo "$HOME/.cargo/bin" >> $GITHUB_PATH - - - name: Check uv.lock is up to date - run: uv lock --check +name: Check uv.lock + +on: + pull_request: + paths: + - "pyproject.toml" + - "uv.lock" + push: + paths: + - "pyproject.toml" + - "uv.lock" + +jobs: + check-lock: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Install uv + run: | + curl -LsSf https://astral.sh/uv/install.sh | sh + echo "$HOME/.cargo/bin" >> $GITHUB_PATH + + - name: Check uv.lock is up to date + run: uv lock --check diff --git a/.github/workflows/main-checks.yml b/.github/workflows/main-checks.yml index 6f38043cd..88654e5db 100644 --- a/.github/workflows/main-checks.yml +++ b/.github/workflows/main-checks.yml @@ -1,13 +1,18 @@ -name: Main branch checks - -on: - push: - branches: - - main - - "v*.*.*" - tags: - - "v*.*.*" - -jobs: - checks: - uses: ./.github/workflows/shared.yml +name: Main branch checks + +on: + push: + branches: + - main + - "v*.*.*" + tags: + - "v*.*.*" + +jobs: + checks: + uses: ./.github/workflows/shared.yml + + + + + diff --git a/.github/workflows/publish-docs-manually.yml b/.github/workflows/publish-docs-manually.yml index e1c3954b1..6f7c08d76 100644 --- a/.github/workflows/publish-docs-manually.yml +++ b/.github/workflows/publish-docs-manually.yml @@ -1,32 +1,32 @@ -name: Publish Docs manually - -on: - workflow_dispatch: - -jobs: - docs-publish: - runs-on: ubuntu-latest - permissions: - contents: write - steps: - - uses: 
actions/checkout@v4 - - name: Configure Git Credentials - run: | - git config user.name github-actions[bot] - git config user.email 41898282+github-actions[bot]@users.noreply.github.com - - - name: Install uv - uses: astral-sh/setup-uv@v3 - with: - enable-cache: true - - - run: echo "cache_id=$(date --utc '+%V')" >> $GITHUB_ENV - - uses: actions/cache@v4 - with: - key: mkdocs-material-${{ env.cache_id }} - path: .cache - restore-keys: | - mkdocs-material- - - - run: uv sync --frozen --group docs - - run: uv run --no-sync mkdocs gh-deploy --force +name: Publish Docs manually + +on: + workflow_dispatch: + +jobs: + docs-publish: + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - uses: actions/checkout@v4 + - name: Configure Git Credentials + run: | + git config user.name github-actions[bot] + git config user.email 41898282+github-actions[bot]@users.noreply.github.com + + - name: Install uv + uses: astral-sh/setup-uv@v3 + with: + enable-cache: true + + - run: echo "cache_id=$(date --utc '+%V')" >> $GITHUB_ENV + - uses: actions/cache@v4 + with: + key: mkdocs-material-${{ env.cache_id }} + path: .cache + restore-keys: | + mkdocs-material- + + - run: uv sync --frozen --group docs + - run: uv run --no-sync mkdocs gh-deploy --force diff --git a/.github/workflows/publish-pypi.yml b/.github/workflows/publish-pypi.yml index 17edd0f3c..6a13df56d 100644 --- a/.github/workflows/publish-pypi.yml +++ b/.github/workflows/publish-pypi.yml @@ -1,80 +1,80 @@ -name: Publishing - -on: - release: - types: [published] - -jobs: - release-build: - name: Build distribution - runs-on: ubuntu-latest - needs: [checks] - steps: - - uses: actions/checkout@v4 - - - name: Install uv - uses: astral-sh/setup-uv@v3 - with: - enable-cache: true - - - name: Set up Python 3.12 - run: uv python install 3.12 - - - name: Build - run: uv build - - - name: Upload artifacts - uses: actions/upload-artifact@v4 - with: - name: release-dists - path: dist/ - - checks: - uses: 
./.github/workflows/shared.yml - - pypi-publish: - name: Upload release to PyPI - runs-on: ubuntu-latest - environment: release - needs: - - release-build - permissions: - id-token: write # IMPORTANT: this permission is mandatory for trusted publishing - - steps: - - name: Retrieve release distributions - uses: actions/download-artifact@v4 - with: - name: release-dists - path: dist/ - - - name: Publish package distributions to PyPI - uses: pypa/gh-action-pypi-publish@release/v1 - - docs-publish: - runs-on: ubuntu-latest - needs: ["pypi-publish"] - permissions: - contents: write - steps: - - uses: actions/checkout@v4 - - name: Configure Git Credentials - run: | - git config user.name github-actions[bot] - git config user.email 41898282+github-actions[bot]@users.noreply.github.com - - - name: Install uv - uses: astral-sh/setup-uv@v3 - with: - enable-cache: true - - - run: echo "cache_id=$(date --utc '+%V')" >> $GITHUB_ENV - - uses: actions/cache@v4 - with: - key: mkdocs-material-${{ env.cache_id }} - path: .cache - restore-keys: | - mkdocs-material- - - - run: uv sync --frozen --group docs - - run: uv run --no-sync mkdocs gh-deploy --force +name: Publishing + +on: + release: + types: [published] + +jobs: + release-build: + name: Build distribution + runs-on: ubuntu-latest + needs: [checks] + steps: + - uses: actions/checkout@v4 + + - name: Install uv + uses: astral-sh/setup-uv@v3 + with: + enable-cache: true + + - name: Set up Python 3.12 + run: uv python install 3.12 + + - name: Build + run: uv build + + - name: Upload artifacts + uses: actions/upload-artifact@v4 + with: + name: release-dists + path: dist/ + + checks: + uses: ./.github/workflows/shared.yml + + pypi-publish: + name: Upload release to PyPI + runs-on: ubuntu-latest + environment: release + needs: + - release-build + permissions: + id-token: write # IMPORTANT: this permission is mandatory for trusted publishing + + steps: + - name: Retrieve release distributions + uses: actions/download-artifact@v4 + 
with: + name: release-dists + path: dist/ + + - name: Publish package distributions to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 + + docs-publish: + runs-on: ubuntu-latest + needs: ["pypi-publish"] + permissions: + contents: write + steps: + - uses: actions/checkout@v4 + - name: Configure Git Credentials + run: | + git config user.name github-actions[bot] + git config user.email 41898282+github-actions[bot]@users.noreply.github.com + + - name: Install uv + uses: astral-sh/setup-uv@v3 + with: + enable-cache: true + + - run: echo "cache_id=$(date --utc '+%V')" >> $GITHUB_ENV + - uses: actions/cache@v4 + with: + key: mkdocs-material-${{ env.cache_id }} + path: .cache + restore-keys: | + mkdocs-material- + + - run: uv sync --frozen --group docs + - run: uv run --no-sync mkdocs gh-deploy --force diff --git a/.github/workflows/pull-request-checks.yml b/.github/workflows/pull-request-checks.yml index a7e7a8bf1..37b11d157 100644 --- a/.github/workflows/pull-request-checks.yml +++ b/.github/workflows/pull-request-checks.yml @@ -1,8 +1,8 @@ -name: Pull request checks - -on: - pull_request: - -jobs: - checks: - uses: ./.github/workflows/shared.yml +name: Pull request checks + +on: + pull_request: + +jobs: + checks: + uses: ./.github/workflows/shared.yml diff --git a/.github/workflows/shared.yml b/.github/workflows/shared.yml index 4c9023ae9..501f434a6 100644 --- a/.github/workflows/shared.yml +++ b/.github/workflows/shared.yml @@ -4,56 +4,46 @@ on: workflow_call: jobs: - format: + test: runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.10", "3.11", "3.12", "3.13"] + steps: - uses: actions/checkout@v4 + - name: Clear UV Cache (if exists) + run: uv cache clear || true + - name: Install uv uses: astral-sh/setup-uv@v3 with: enable-cache: true - - name: Install the project - run: uv sync --frozen --all-extras --dev --python 3.12 + - name: Install the project with the correct Python version + run: uv sync --frozen --all-extras --dev --python ${{ 
matrix.python-version }} - - name: Run ruff format check - run: uv run --no-sync ruff check . + - name: Ensure pip is installed and upgraded + run: | + uv run --no-sync python -m ensurepip --upgrade + uv run --no-sync python -m pip install --upgrade pip - typecheck: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - name: Install uv - uses: astral-sh/setup-uv@v3 - with: - enable-cache: true + - name: Install jose dependencies + run: | + uv run --no-sync python -m pip install python-jose types-python-jose - - name: Install the project - run: uv sync --frozen --all-extras --dev --python 3.12 + - name: Verify installation + run: uv run --no-sync python -m pip list - - name: Run pyright - run: uv run --no-sync pyright + - name: Set Python Path for MCP + run: echo "PYTHONPATH=$(pwd)/src" >> $GITHUB_ENV - test: - runs-on: ${{ matrix.os }} - strategy: - matrix: - python-version: ["3.10", "3.11", "3.12", "3.13"] - os: [ubuntu-latest, windows-latest] + - name: Run pytest + env: + PYTHONPATH: ${{ env.PYTHONPATH }} + run: uv run --no-sync pytest - steps: - - uses: actions/checkout@v4 - - name: Install uv - uses: astral-sh/setup-uv@v3 - with: - enable-cache: true - - name: Install the project - run: uv sync --frozen --all-extras --dev --python ${{ matrix.python-version }} - - name: Run pytest - run: uv run --no-sync pytest - continue-on-error: true diff --git a/.gitignore b/.gitignore index e9fdca176..ec8e1b2b9 100644 --- a/.gitignore +++ b/.gitignore @@ -1,170 +1,170 @@ -.DS_Store -scratch/ - -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -wheels/ -share/python-wheels/ -*.egg-info/ -.installed.cfg -*.egg -MANIFEST - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into 
it. -*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.nox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*.cover -*.py,cover -.hypothesis/ -.pytest_cache/ -cover/ - -# Translations -*.mo -*.pot - -# Django stuff: -*.log -local_settings.py -db.sqlite3 -db.sqlite3-journal - -# Flask stuff: -instance/ -.webassets-cache - -# Scrapy stuff: -.scrapy - -# Sphinx documentation -docs/_build/ - -# PyBuilder -.pybuilder/ -target/ - -# Jupyter Notebook -.ipynb_checkpoints - -# IPython -profile_default/ -ipython_config.py - -# pyenv -# For a library or package, you might want to ignore these files since the code is -# intended to run in multiple environments; otherwise, check them in: -# .python-version - -# pipenv -# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. -# However, in case of collaboration, if having platform-specific dependencies or dependencies -# having no cross-platform support, pipenv may install dependencies that don't work, or not -# install all needed dependencies. -#Pipfile.lock - -# poetry -# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. -# This is especially recommended for binary packages to ensure reproducibility, and is more -# commonly ignored for libraries. -# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control -#poetry.lock - -# pdm -# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. -#pdm.lock -# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it -# in version control. -# https://pdm.fming.dev/latest/usage/project/#working-with-version-control -.pdm.toml -.pdm-python -.pdm-build/ - -# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm -__pypackages__/ - -# Celery stuff -celerybeat-schedule -celerybeat.pid - -# SageMath parsed files -*.sage.py - -# Environments -.env -.venv -env/ -venv/ -ENV/ -env.bak/ -venv.bak/ - -# Spyder project settings -.spyderproject -.spyproject - -# Rope project settings -.ropeproject - -# mkdocs documentation -/site - -# mypy -.mypy_cache/ -.dmypy.json -dmypy.json - -# Pyre type checker -.pyre/ - -# pytype static type analyzer -.pytype/ - -# Cython debug symbols -cython_debug/ - -# PyCharm -# JetBrains specific template is maintained in a separate JetBrains.gitignore that can -# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore -# and can be added to the global gitignore or merged into this file. For a more nuclear -# option (not recommended) you can uncomment the following to ignore the entire idea folder. -#.idea/ - -# vscode -.vscode/ -.windsurfrules -**/CLAUDE.local.md +.DS_Store +scratch/ + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/latest/usage/project/#working-with-version-control +.pdm.toml +.pdm-python +.pdm-build/ + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ + +# vscode +.vscode/ +.windsurfrules +**/CLAUDE.local.md diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 35e12261a..9698b9cd3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,38 +1,38 @@ -fail_fast: true - -repos: - - repo: https://github.com/pre-commit/mirrors-prettier - rev: v3.1.0 - hooks: - - id: prettier - types_or: [yaml, json5] - - - repo: local - hooks: - - id: ruff-format - name: Ruff Format - entry: uv run ruff - args: [format] - language: system - types: [python] - pass_filenames: false - - id: ruff - name: Ruff - entry: uv run ruff - args: ["check", "--fix", "--exit-non-zero-on-fix"] - types: [python] - language: system - pass_filenames: false - - id: pyright - name: pyright - entry: uv run pyright - args: [src] - language: system - types: [python] - pass_filenames: false - - id: uv-lock-check - name: Check uv.lock is up to date - entry: uv lock --check - language: system - files: ^(pyproject\.toml|uv\.lock)$ - pass_filenames: false +fail_fast: true + +repos: + - repo: 
https://github.com/pre-commit/mirrors-prettier + rev: v3.1.0 + hooks: + - id: prettier + types_or: [yaml, json5] + + - repo: local + hooks: + - id: ruff-format + name: Ruff Format + entry: uv run ruff + args: [format] + language: system + types: [python] + pass_filenames: false + - id: ruff + name: Ruff + entry: uv run ruff + args: ["check", "--fix", "--exit-non-zero-on-fix"] + types: [python] + language: system + pass_filenames: false + - id: pyright + name: pyright + entry: uv run pyright + args: [src] + language: system + types: [python] + pass_filenames: false + - id: uv-lock-check + name: Check uv.lock is up to date + entry: uv lock --check + language: system + files: ^(pyproject\.toml|uv\.lock)$ + pass_filenames: false diff --git a/.python-version b/.python-version new file mode 100644 index 000000000..4eba2a62e --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.13.0 diff --git a/CLAUDE.md b/CLAUDE.md index 619f3bb44..dfd5c52dc 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -1,118 +1,118 @@ -# Development Guidelines - -This document contains critical information about working with this codebase. Follow these guidelines precisely. - -## Core Development Rules - -1. Package Management - - ONLY use uv, NEVER pip - - Installation: `uv add package` - - Running tools: `uv run tool` - - Upgrading: `uv add --dev package --upgrade-package package` - - FORBIDDEN: `uv pip install`, `@latest` syntax - -2. Code Quality - - Type hints required for all code - - Public APIs must have docstrings - - Functions must be focused and small - - Follow existing patterns exactly - - Line length: 88 chars maximum - -3. 
Testing Requirements - - Framework: `uv run --frozen pytest` - - Async testing: use anyio, not asyncio - - Coverage: test edge cases and errors - - New features require tests - - Bug fixes require regression tests - -- For commits fixing bugs or adding features based on user reports add: - ```bash - git commit --trailer "Reported-by:" - ``` - Where `` is the name of the user. - -- For commits related to a Github issue, add - ```bash - git commit --trailer "Github-Issue:#" - ``` -- NEVER ever mention a `co-authored-by` or similar aspects. In particular, never - mention the tool used to create the commit message or PR. - -## Pull Requests - -- Create a detailed message of what changed. Focus on the high level description of - the problem it tries to solve, and how it is solved. Don't go into the specifics of the - code unless it adds clarity. - -- Always add `jerome3o-anthropic` and `jspahrsummers` as reviewer. - -- NEVER ever mention a `co-authored-by` or similar aspects. In particular, never - mention the tool used to create the commit message or PR. - -## Python Tools - -## Code Formatting - -1. Ruff - - Format: `uv run --frozen ruff format .` - - Check: `uv run --frozen ruff check .` - - Fix: `uv run --frozen ruff check . --fix` - - Critical issues: - - Line length (88 chars) - - Import sorting (I001) - - Unused imports - - Line wrapping: - - Strings: use parentheses - - Function calls: multi-line with proper indent - - Imports: split into multiple lines - -2. Type Checking - - Tool: `uv run --frozen pyright` - - Requirements: - - Explicit None checks for Optional - - Type narrowing for strings - - Version warnings can be ignored if checks pass - -3. Pre-commit - - Config: `.pre-commit-config.yaml` - - Runs: on git commit - - Tools: Prettier (YAML/JSON), Ruff (Python) - - Ruff updates: - - Check PyPI versions - - Update config rev - - Commit config first - -## Error Resolution - -1. CI Failures - - Fix order: - 1. Formatting - 2. Type errors - 3. 
Linting - - Type errors: - - Get full line context - - Check Optional types - - Add type narrowing - - Verify function signatures - -2. Common Issues - - Line length: - - Break strings with parentheses - - Multi-line function calls - - Split imports - - Types: - - Add None checks - - Narrow string types - - Match existing patterns - - Pytest: - - If the tests aren't finding the anyio pytest mark, try adding PYTEST_DISABLE_PLUGIN_AUTOLOAD="" - to the start of the pytest run command eg: - `PYTEST_DISABLE_PLUGIN_AUTOLOAD="" uv run --frozen pytest` - -3. Best Practices - - Check git status before commits - - Run formatters before type checks - - Keep changes minimal - - Follow existing patterns - - Document public APIs - - Test thoroughly +# Development Guidelines + +This document contains critical information about working with this codebase. Follow these guidelines precisely. + +## Core Development Rules + +1. Package Management + - ONLY use uv, NEVER pip + - Installation: `uv add package` + - Running tools: `uv run tool` + - Upgrading: `uv add --dev package --upgrade-package package` + - FORBIDDEN: `uv pip install`, `@latest` syntax + +2. Code Quality + - Type hints required for all code + - Public APIs must have docstrings + - Functions must be focused and small + - Follow existing patterns exactly + - Line length: 88 chars maximum + +3. Testing Requirements + - Framework: `uv run --frozen pytest` + - Async testing: use anyio, not asyncio + - Coverage: test edge cases and errors + - New features require tests + - Bug fixes require regression tests + +- For commits fixing bugs or adding features based on user reports add: + ```bash + git commit --trailer "Reported-by:" + ``` + Where `` is the name of the user. + +- For commits related to a Github issue, add + ```bash + git commit --trailer "Github-Issue:#" + ``` +- NEVER ever mention a `co-authored-by` or similar aspects. In particular, never + mention the tool used to create the commit message or PR. 
+ +## Pull Requests + +- Create a detailed message of what changed. Focus on the high level description of + the problem it tries to solve, and how it is solved. Don't go into the specifics of the + code unless it adds clarity. + +- Always add `jerome3o-anthropic` and `jspahrsummers` as reviewer. + +- NEVER ever mention a `co-authored-by` or similar aspects. In particular, never + mention the tool used to create the commit message or PR. + +## Python Tools + +## Code Formatting + +1. Ruff + - Format: `uv run --frozen ruff format .` + - Check: `uv run --frozen ruff check .` + - Fix: `uv run --frozen ruff check . --fix` + - Critical issues: + - Line length (88 chars) + - Import sorting (I001) + - Unused imports + - Line wrapping: + - Strings: use parentheses + - Function calls: multi-line with proper indent + - Imports: split into multiple lines + +2. Type Checking + - Tool: `uv run --frozen pyright` + - Requirements: + - Explicit None checks for Optional + - Type narrowing for strings + - Version warnings can be ignored if checks pass + +3. Pre-commit + - Config: `.pre-commit-config.yaml` + - Runs: on git commit + - Tools: Prettier (YAML/JSON), Ruff (Python) + - Ruff updates: + - Check PyPI versions + - Update config rev + - Commit config first + +## Error Resolution + +1. CI Failures + - Fix order: + 1. Formatting + 2. Type errors + 3. Linting + - Type errors: + - Get full line context + - Check Optional types + - Add type narrowing + - Verify function signatures + +2. Common Issues + - Line length: + - Break strings with parentheses + - Multi-line function calls + - Split imports + - Types: + - Add None checks + - Narrow string types + - Match existing patterns + - Pytest: + - If the tests aren't finding the anyio pytest mark, try adding PYTEST_DISABLE_PLUGIN_AUTOLOAD="" + to the start of the pytest run command eg: + `PYTEST_DISABLE_PLUGIN_AUTOLOAD="" uv run --frozen pytest` + +3. 
Best Practices + - Check git status before commits + - Run formatters before type checks + - Keep changes minimal + - Follow existing patterns + - Document public APIs + - Test thoroughly diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index 05c32c605..baa1e893d 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -1,128 +1,128 @@ -# Contributor Covenant Code of Conduct - -## Our Pledge - -We as members, contributors, and leaders pledge to make participation in our -community a harassment-free experience for everyone, regardless of age, body -size, visible or invisible disability, ethnicity, sex characteristics, gender -identity and expression, level of experience, education, socio-economic status, -nationality, personal appearance, race, religion, or sexual identity -and orientation. - -We pledge to act and interact in ways that contribute to an open, welcoming, -diverse, inclusive, and healthy community. - -## Our Standards - -Examples of behavior that contributes to a positive environment for our -community include: - -* Demonstrating empathy and kindness toward other people -* Being respectful of differing opinions, viewpoints, and experiences -* Giving and gracefully accepting constructive feedback -* Accepting responsibility and apologizing to those affected by our mistakes, - and learning from the experience -* Focusing on what is best not just for us as individuals, but for the - overall community - -Examples of unacceptable behavior include: - -* The use of sexualized language or imagery, and sexual attention or - advances of any kind -* Trolling, insulting or derogatory comments, and personal or political attacks -* Public or private harassment -* Publishing others' private information, such as a physical or email - address, without their explicit permission -* Other conduct which could reasonably be considered inappropriate in a - professional setting - -## Enforcement Responsibilities - -Community leaders are responsible for clarifying and 
enforcing our standards of -acceptable behavior and will take appropriate and fair corrective action in -response to any behavior that they deem inappropriate, threatening, offensive, -or harmful. - -Community leaders have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions that are -not aligned to this Code of Conduct, and will communicate reasons for moderation -decisions when appropriate. - -## Scope - -This Code of Conduct applies within all community spaces, and also applies when -an individual is officially representing the community in public spaces. -Examples of representing our community include using an official e-mail address, -posting via an official social media account, or acting as an appointed -representative at an online or offline event. - -## Enforcement - -Instances of abusive, harassing, or otherwise unacceptable behavior may be -reported to the community leaders responsible for enforcement at -mcp-coc@anthropic.com. -All complaints will be reviewed and investigated promptly and fairly. - -All community leaders are obligated to respect the privacy and security of the -reporter of any incident. - -## Enforcement Guidelines - -Community leaders will follow these Community Impact Guidelines in determining -the consequences for any action they deem in violation of this Code of Conduct: - -### 1. Correction - -**Community Impact**: Use of inappropriate language or other behavior deemed -unprofessional or unwelcome in the community. - -**Consequence**: A private, written warning from community leaders, providing -clarity around the nature of the violation and an explanation of why the -behavior was inappropriate. A public apology may be requested. - -### 2. Warning - -**Community Impact**: A violation through a single incident or series -of actions. - -**Consequence**: A warning with consequences for continued behavior. 
No -interaction with the people involved, including unsolicited interaction with -those enforcing the Code of Conduct, for a specified period of time. This -includes avoiding interactions in community spaces as well as external channels -like social media. Violating these terms may lead to a temporary or -permanent ban. - -### 3. Temporary Ban - -**Community Impact**: A serious violation of community standards, including -sustained inappropriate behavior. - -**Consequence**: A temporary ban from any sort of interaction or public -communication with the community for a specified period of time. No public or -private interaction with the people involved, including unsolicited interaction -with those enforcing the Code of Conduct, is allowed during this period. -Violating these terms may lead to a permanent ban. - -### 4. Permanent Ban - -**Community Impact**: Demonstrating a pattern of violation of community -standards, including sustained inappropriate behavior, harassment of an -individual, or aggression toward or disparagement of classes of individuals. - -**Consequence**: A permanent ban from any sort of public interaction within -the community. - -## Attribution - -This Code of Conduct is adapted from the [Contributor Covenant][homepage], -version 2.0, available at -https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. - -Community Impact Guidelines were inspired by [Mozilla's code of conduct -enforcement ladder](https://github.com/mozilla/diversity). - -[homepage]: https://www.contributor-covenant.org - -For answers to common questions about this code of conduct, see the FAQ at -https://www.contributor-covenant.org/faq. Translations are available at -https://www.contributor-covenant.org/translations. 
+# Contributor Covenant Code of Conduct + +## Our Pledge + +We as members, contributors, and leaders pledge to make participation in our +community a harassment-free experience for everyone, regardless of age, body +size, visible or invisible disability, ethnicity, sex characteristics, gender +identity and expression, level of experience, education, socio-economic status, +nationality, personal appearance, race, religion, or sexual identity +and orientation. + +We pledge to act and interact in ways that contribute to an open, welcoming, +diverse, inclusive, and healthy community. + +## Our Standards + +Examples of behavior that contributes to a positive environment for our +community include: + +* Demonstrating empathy and kindness toward other people +* Being respectful of differing opinions, viewpoints, and experiences +* Giving and gracefully accepting constructive feedback +* Accepting responsibility and apologizing to those affected by our mistakes, + and learning from the experience +* Focusing on what is best not just for us as individuals, but for the + overall community + +Examples of unacceptable behavior include: + +* The use of sexualized language or imagery, and sexual attention or + advances of any kind +* Trolling, insulting or derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or email + address, without their explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Enforcement Responsibilities + +Community leaders are responsible for clarifying and enforcing our standards of +acceptable behavior and will take appropriate and fair corrective action in +response to any behavior that they deem inappropriate, threatening, offensive, +or harmful. 
+ +Community leaders have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, and will communicate reasons for moderation +decisions when appropriate. + +## Scope + +This Code of Conduct applies within all community spaces, and also applies when +an individual is officially representing the community in public spaces. +Examples of representing our community include using an official e-mail address, +posting via an official social media account, or acting as an appointed +representative at an online or offline event. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported to the community leaders responsible for enforcement at +mcp-coc@anthropic.com. +All complaints will be reviewed and investigated promptly and fairly. + +All community leaders are obligated to respect the privacy and security of the +reporter of any incident. + +## Enforcement Guidelines + +Community leaders will follow these Community Impact Guidelines in determining +the consequences for any action they deem in violation of this Code of Conduct: + +### 1. Correction + +**Community Impact**: Use of inappropriate language or other behavior deemed +unprofessional or unwelcome in the community. + +**Consequence**: A private, written warning from community leaders, providing +clarity around the nature of the violation and an explanation of why the +behavior was inappropriate. A public apology may be requested. + +### 2. Warning + +**Community Impact**: A violation through a single incident or series +of actions. + +**Consequence**: A warning with consequences for continued behavior. No +interaction with the people involved, including unsolicited interaction with +those enforcing the Code of Conduct, for a specified period of time. 
This +includes avoiding interactions in community spaces as well as external channels +like social media. Violating these terms may lead to a temporary or +permanent ban. + +### 3. Temporary Ban + +**Community Impact**: A serious violation of community standards, including +sustained inappropriate behavior. + +**Consequence**: A temporary ban from any sort of interaction or public +communication with the community for a specified period of time. No public or +private interaction with the people involved, including unsolicited interaction +with those enforcing the Code of Conduct, is allowed during this period. +Violating these terms may lead to a permanent ban. + +### 4. Permanent Ban + +**Community Impact**: Demonstrating a pattern of violation of community +standards, including sustained inappropriate behavior, harassment of an +individual, or aggression toward or disparagement of classes of individuals. + +**Consequence**: A permanent ban from any sort of public interaction within +the community. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], +version 2.0, available at +https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. + +Community Impact Guidelines were inspired by [Mozilla's code of conduct +enforcement ladder](https://github.com/mozilla/diversity). + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see the FAQ at +https://www.contributor-covenant.org/faq. Translations are available at +https://www.contributor-covenant.org/translations. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 929e5f504..d44144c8c 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,66 +1,66 @@ -# Contributing - -Thank you for your interest in contributing to the MCP Python SDK! This document provides guidelines and instructions for contributing. - -## Development Setup - -1. Make sure you have Python 3.10+ installed -2. 
Install [uv](https://docs.astral.sh/uv/getting-started/installation/) -3. Fork the repository -4. Clone your fork: `git clone https://github.com/YOUR-USERNAME/python-sdk.git` -5. Install dependencies: -```bash -uv sync --frozen --all-extras --dev -``` - -## Development Workflow - -1. Choose the correct branch for your changes: - - For bug fixes to a released version: use the latest release branch (e.g. v1.1.x for 1.1.3) - - For new features: use the main branch (which will become the next minor/major version) - - If unsure, ask in an issue first - -2. Create a new branch from your chosen base branch - -3. Make your changes - -4. Ensure tests pass: -```bash -uv run pytest -``` - -5. Run type checking: -```bash -uv run pyright -``` - -6. Run linting: -```bash -uv run ruff check . -uv run ruff format . -``` - -7. Submit a pull request to the same branch you branched from - -## Code Style - -- We use `ruff` for linting and formatting -- Follow PEP 8 style guidelines -- Add type hints to all functions -- Include docstrings for public APIs - -## Pull Request Process - -1. Update documentation as needed -2. Add tests for new functionality -3. Ensure CI passes -4. Maintainers will review your code -5. Address review feedback - -## Code of Conduct - -Please note that this project is released with a [Code of Conduct](CODE_OF_CONDUCT.md). By participating in this project you agree to abide by its terms. - -## License - -By contributing, you agree that your contributions will be licensed under the MIT License. +# Contributing + +Thank you for your interest in contributing to the MCP Python SDK! This document provides guidelines and instructions for contributing. + +## Development Setup + +1. Make sure you have Python 3.10+ installed +2. Install [uv](https://docs.astral.sh/uv/getting-started/installation/) +3. Fork the repository +4. Clone your fork: `git clone https://github.com/YOUR-USERNAME/python-sdk.git` +5. 
Install dependencies: +```bash +uv sync --frozen --all-extras --dev +``` + +## Development Workflow + +1. Choose the correct branch for your changes: + - For bug fixes to a released version: use the latest release branch (e.g. v1.1.x for 1.1.3) + - For new features: use the main branch (which will become the next minor/major version) + - If unsure, ask in an issue first + +2. Create a new branch from your chosen base branch + +3. Make your changes + +4. Ensure tests pass: +```bash +uv run pytest +``` + +5. Run type checking: +```bash +uv run pyright +``` + +6. Run linting: +```bash +uv run ruff check . +uv run ruff format . +``` + +7. Submit a pull request to the same branch you branched from + +## Code Style + +- We use `ruff` for linting and formatting +- Follow PEP 8 style guidelines +- Add type hints to all functions +- Include docstrings for public APIs + +## Pull Request Process + +1. Update documentation as needed +2. Add tests for new functionality +3. Ensure CI passes +4. Maintainers will review your code +5. Address review feedback + +## Code of Conduct + +Please note that this project is released with a [Code of Conduct](CODE_OF_CONDUCT.md). By participating in this project you agree to abide by its terms. + +## License + +By contributing, you agree that your contributions will be licensed under the MIT License. 
diff --git a/LICENSE b/LICENSE index 3d4843545..2f352f619 100644 --- a/LICENSE +++ b/LICENSE @@ -1,21 +1,21 @@ -MIT License - -Copyright (c) 2024 Anthropic, PBC - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. +MIT License + +Copyright (c) 2024 Anthropic, PBC + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/RELEASE.md b/RELEASE.md index 6555a1c2d..3c4f415f3 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -1,13 +1,13 @@ -# Release Process - -## Bumping Dependencies - -1. Change dependency version in `pyproject.toml` -2. Upgrade lock with `uv lock --resolution lowest-direct` - -## Major or Minor Release - -Create a GitHub release via UI with the tag being `vX.Y.Z` where `X.Y.Z` is the version, -and the release title being the same. Then ask someone to review the release. - -The package version will be set automatically from the tag. +# Release Process + +## Bumping Dependencies + +1. Change dependency version in `pyproject.toml` +2. Upgrade lock with `uv lock --resolution lowest-direct` + +## Major or Minor Release + +Create a GitHub release via UI with the tag being `vX.Y.Z` where `X.Y.Z` is the version, +and the release title being the same. Then ask someone to review the release. + +The package version will be set automatically from the tag. diff --git a/SECURITY.md b/SECURITY.md index 8c09400cc..bbda2e191 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -1,14 +1,14 @@ -# Security Policy -Thank you for helping us keep the SDKs and systems they interact with secure. - -## Reporting Security Issues - -This SDK is maintained by [Anthropic](https://www.anthropic.com/) as part of the Model Context Protocol project. - -The security of our systems and user data is Anthropic’s top priority. 
We appreciate the work of security researchers acting in good faith in identifying and reporting potential vulnerabilities. - -Our security program is managed on HackerOne and we ask that any validated vulnerability in this functionality be reported through their [submission form](https://hackerone.com/anthropic-vdp/reports/new?type=team&report_type=vulnerability). - -## Vulnerability Disclosure Program - -Our Vulnerability Program Guidelines are defined on our [HackerOne program page](https://hackerone.com/anthropic-vdp). +# Security Policy +Thank you for helping us keep the SDKs and systems they interact with secure. + +## Reporting Security Issues + +This SDK is maintained by [Anthropic](https://www.anthropic.com/) as part of the Model Context Protocol project. + +The security of our systems and user data is Anthropic’s top priority. We appreciate the work of security researchers acting in good faith in identifying and reporting potential vulnerabilities. + +Our security program is managed on HackerOne and we ask that any validated vulnerability in this functionality be reported through their [submission form](https://hackerone.com/anthropic-vdp/reports/new?type=team&report_type=vulnerability). + +## Vulnerability Disclosure Program + +Our Vulnerability Program Guidelines are defined on our [HackerOne program page](https://hackerone.com/anthropic-vdp). diff --git a/docs/api.md b/docs/api.md index 3f696af54..a2538449e 100644 --- a/docs/api.md +++ b/docs/api.md @@ -1 +1 @@ -::: mcp +::: mcp diff --git a/docs/index.md b/docs/index.md index 42ad9ca0c..5b7a7104a 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,5 +1,5 @@ -# MCP Server - -This is the MCP Server implementation in Python. - -It only contains the [API Reference](api.md) for the time being. +# MCP Server + +This is the MCP Server implementation in Python. + +It only contains the [API Reference](api.md) for the time being. 
diff --git a/examples/README.md b/examples/README.md deleted file mode 100644 index 5ed4dd55f..000000000 --- a/examples/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# Python SDK Examples - -This folders aims to provide simple examples of using the Python SDK. Please refer to the -[servers repository](https://github.com/modelcontextprotocol/servers) -for real-world servers. diff --git a/examples/clients/simple-chatbot/.python-version b/examples/clients/simple-chatbot/.python-version index c8cfe3959..2951d9b02 100644 --- a/examples/clients/simple-chatbot/.python-version +++ b/examples/clients/simple-chatbot/.python-version @@ -1 +1 @@ -3.10 +3.10 diff --git a/examples/clients/simple-chatbot/mcp_simple_chatbot/.env.example b/examples/clients/simple-chatbot/mcp_simple_chatbot/.env.example index 39be363c2..dd198dfbb 100644 --- a/examples/clients/simple-chatbot/mcp_simple_chatbot/.env.example +++ b/examples/clients/simple-chatbot/mcp_simple_chatbot/.env.example @@ -1 +1 @@ -LLM_API_KEY=gsk_1234567890 +LLM_API_KEY=gsk_1234567890 diff --git a/examples/clients/simple-chatbot/mcp_simple_chatbot/main.py b/examples/clients/simple-chatbot/mcp_simple_chatbot/main.py index ef72d78f9..f8c6d9f73 100644 --- a/examples/clients/simple-chatbot/mcp_simple_chatbot/main.py +++ b/examples/clients/simple-chatbot/mcp_simple_chatbot/main.py @@ -1,430 +1,430 @@ -import asyncio -import json -import logging -import os -import shutil -from contextlib import AsyncExitStack -from typing import Any - -import httpx -from dotenv import load_dotenv -from mcp import ClientSession, StdioServerParameters -from mcp.client.stdio import stdio_client - -# Configure logging -logging.basicConfig( - level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s" -) - - -class Configuration: - """Manages configuration and environment variables for the MCP client.""" - - def __init__(self) -> None: - """Initialize configuration with environment variables.""" - self.load_env() - self.api_key = 
os.getenv("LLM_API_KEY") - - @staticmethod - def load_env() -> None: - """Load environment variables from .env file.""" - load_dotenv() - - @staticmethod - def load_config(file_path: str) -> dict[str, Any]: - """Load server configuration from JSON file. - - Args: - file_path: Path to the JSON configuration file. - - Returns: - Dict containing server configuration. - - Raises: - FileNotFoundError: If configuration file doesn't exist. - JSONDecodeError: If configuration file is invalid JSON. - """ - with open(file_path, "r") as f: - return json.load(f) - - @property - def llm_api_key(self) -> str: - """Get the LLM API key. - - Returns: - The API key as a string. - - Raises: - ValueError: If the API key is not found in environment variables. - """ - if not self.api_key: - raise ValueError("LLM_API_KEY not found in environment variables") - return self.api_key - - -class Server: - """Manages MCP server connections and tool execution.""" - - def __init__(self, name: str, config: dict[str, Any]) -> None: - self.name: str = name - self.config: dict[str, Any] = config - self.stdio_context: Any | None = None - self.session: ClientSession | None = None - self._cleanup_lock: asyncio.Lock = asyncio.Lock() - self.exit_stack: AsyncExitStack = AsyncExitStack() - - async def initialize(self) -> None: - """Initialize the server connection.""" - command = ( - shutil.which("npx") - if self.config["command"] == "npx" - else self.config["command"] - ) - if command is None: - raise ValueError("The command must be a valid string and cannot be None.") - - server_params = StdioServerParameters( - command=command, - args=self.config["args"], - env={**os.environ, **self.config["env"]} - if self.config.get("env") - else None, - ) - try: - stdio_transport = await self.exit_stack.enter_async_context( - stdio_client(server_params) - ) - read, write = stdio_transport - session = await self.exit_stack.enter_async_context( - ClientSession(read, write) - ) - await session.initialize() - self.session 
= session - except Exception as e: - logging.error(f"Error initializing server {self.name}: {e}") - await self.cleanup() - raise - - async def list_tools(self) -> list[Any]: - """List available tools from the server. - - Returns: - A list of available tools. - - Raises: - RuntimeError: If the server is not initialized. - """ - if not self.session: - raise RuntimeError(f"Server {self.name} not initialized") - - tools_response = await self.session.list_tools() - tools = [] - - for item in tools_response: - if isinstance(item, tuple) and item[0] == "tools": - tools.extend( - Tool(tool.name, tool.description, tool.inputSchema) - for tool in item[1] - ) - - return tools - - async def execute_tool( - self, - tool_name: str, - arguments: dict[str, Any], - retries: int = 2, - delay: float = 1.0, - ) -> Any: - """Execute a tool with retry mechanism. - - Args: - tool_name: Name of the tool to execute. - arguments: Tool arguments. - retries: Number of retry attempts. - delay: Delay between retries in seconds. - - Returns: - Tool execution result. - - Raises: - RuntimeError: If server is not initialized. - Exception: If tool execution fails after all retries. - """ - if not self.session: - raise RuntimeError(f"Server {self.name} not initialized") - - attempt = 0 - while attempt < retries: - try: - logging.info(f"Executing {tool_name}...") - result = await self.session.call_tool(tool_name, arguments) - - return result - - except Exception as e: - attempt += 1 - logging.warning( - f"Error executing tool: {e}. Attempt {attempt} of {retries}." - ) - if attempt < retries: - logging.info(f"Retrying in {delay} seconds...") - await asyncio.sleep(delay) - else: - logging.error("Max retries reached. 
Failing.") - raise - - async def cleanup(self) -> None: - """Clean up server resources.""" - async with self._cleanup_lock: - try: - await self.exit_stack.aclose() - self.session = None - self.stdio_context = None - except Exception as e: - logging.error(f"Error during cleanup of server {self.name}: {e}") - - -class Tool: - """Represents a tool with its properties and formatting.""" - - def __init__( - self, name: str, description: str, input_schema: dict[str, Any] - ) -> None: - self.name: str = name - self.description: str = description - self.input_schema: dict[str, Any] = input_schema - - def format_for_llm(self) -> str: - """Format tool information for LLM. - - Returns: - A formatted string describing the tool. - """ - args_desc = [] - if "properties" in self.input_schema: - for param_name, param_info in self.input_schema["properties"].items(): - arg_desc = ( - f"- {param_name}: {param_info.get('description', 'No description')}" - ) - if param_name in self.input_schema.get("required", []): - arg_desc += " (required)" - args_desc.append(arg_desc) - - return f""" -Tool: {self.name} -Description: {self.description} -Arguments: -{chr(10).join(args_desc)} -""" - - -class LLMClient: - """Manages communication with the LLM provider.""" - - def __init__(self, api_key: str) -> None: - self.api_key: str = api_key - - def get_response(self, messages: list[dict[str, str]]) -> str: - """Get a response from the LLM. - - Args: - messages: A list of message dictionaries. - - Returns: - The LLM's response as a string. - - Raises: - httpx.RequestError: If the request to the LLM fails. 
- """ - url = "https://api.groq.com/openai/v1/chat/completions" - - headers = { - "Content-Type": "application/json", - "Authorization": f"Bearer {self.api_key}", - } - payload = { - "messages": messages, - "model": "llama-3.2-90b-vision-preview", - "temperature": 0.7, - "max_tokens": 4096, - "top_p": 1, - "stream": False, - "stop": None, - } - - try: - with httpx.Client() as client: - response = client.post(url, headers=headers, json=payload) - response.raise_for_status() - data = response.json() - return data["choices"][0]["message"]["content"] - - except httpx.RequestError as e: - error_message = f"Error getting LLM response: {str(e)}" - logging.error(error_message) - - if isinstance(e, httpx.HTTPStatusError): - status_code = e.response.status_code - logging.error(f"Status code: {status_code}") - logging.error(f"Response details: {e.response.text}") - - return ( - f"I encountered an error: {error_message}. " - "Please try again or rephrase your request." - ) - - -class ChatSession: - """Orchestrates the interaction between user, LLM, and tools.""" - - def __init__(self, servers: list[Server], llm_client: LLMClient) -> None: - self.servers: list[Server] = servers - self.llm_client: LLMClient = llm_client - - async def cleanup_servers(self) -> None: - """Clean up all servers properly.""" - cleanup_tasks = [ - asyncio.create_task(server.cleanup()) for server in self.servers - ] - if cleanup_tasks: - try: - await asyncio.gather(*cleanup_tasks, return_exceptions=True) - except Exception as e: - logging.warning(f"Warning during final cleanup: {e}") - - async def process_llm_response(self, llm_response: str) -> str: - """Process the LLM response and execute tools if needed. - - Args: - llm_response: The response from the LLM. - - Returns: - The result of tool execution or the original response. 
- """ - import json - - try: - tool_call = json.loads(llm_response) - if "tool" in tool_call and "arguments" in tool_call: - logging.info(f"Executing tool: {tool_call['tool']}") - logging.info(f"With arguments: {tool_call['arguments']}") - - for server in self.servers: - tools = await server.list_tools() - if any(tool.name == tool_call["tool"] for tool in tools): - try: - result = await server.execute_tool( - tool_call["tool"], tool_call["arguments"] - ) - - if isinstance(result, dict) and "progress" in result: - progress = result["progress"] - total = result["total"] - percentage = (progress / total) * 100 - logging.info( - f"Progress: {progress}/{total} ({percentage:.1f}%)" - ) - - return f"Tool execution result: {result}" - except Exception as e: - error_msg = f"Error executing tool: {str(e)}" - logging.error(error_msg) - return error_msg - - return f"No server found with tool: {tool_call['tool']}" - return llm_response - except json.JSONDecodeError: - return llm_response - - async def start(self) -> None: - """Main chat session handler.""" - try: - for server in self.servers: - try: - await server.initialize() - except Exception as e: - logging.error(f"Failed to initialize server: {e}") - await self.cleanup_servers() - return - - all_tools = [] - for server in self.servers: - tools = await server.list_tools() - all_tools.extend(tools) - - tools_description = "\n".join([tool.format_for_llm() for tool in all_tools]) - - system_message = ( - "You are a helpful assistant with access to these tools:\n\n" - f"{tools_description}\n" - "Choose the appropriate tool based on the user's question. " - "If no tool is needed, reply directly.\n\n" - "IMPORTANT: When you need to use a tool, you must ONLY respond with " - "the exact JSON object format below, nothing else:\n" - "{\n" - ' "tool": "tool-name",\n' - ' "arguments": {\n' - ' "argument-name": "value"\n' - " }\n" - "}\n\n" - "After receiving a tool's response:\n" - "1. 
Transform the raw data into a natural, conversational response\n" - "2. Keep responses concise but informative\n" - "3. Focus on the most relevant information\n" - "4. Use appropriate context from the user's question\n" - "5. Avoid simply repeating the raw data\n\n" - "Please use only the tools that are explicitly defined above." - ) - - messages = [{"role": "system", "content": system_message}] - - while True: - try: - user_input = input("You: ").strip().lower() - if user_input in ["quit", "exit"]: - logging.info("\nExiting...") - break - - messages.append({"role": "user", "content": user_input}) - - llm_response = self.llm_client.get_response(messages) - logging.info("\nAssistant: %s", llm_response) - - result = await self.process_llm_response(llm_response) - - if result != llm_response: - messages.append({"role": "assistant", "content": llm_response}) - messages.append({"role": "system", "content": result}) - - final_response = self.llm_client.get_response(messages) - logging.info("\nFinal response: %s", final_response) - messages.append( - {"role": "assistant", "content": final_response} - ) - else: - messages.append({"role": "assistant", "content": llm_response}) - - except KeyboardInterrupt: - logging.info("\nExiting...") - break - - finally: - await self.cleanup_servers() - - -async def main() -> None: - """Initialize and run the chat session.""" - config = Configuration() - server_config = config.load_config("servers_config.json") - servers = [ - Server(name, srv_config) - for name, srv_config in server_config["mcpServers"].items() - ] - llm_client = LLMClient(config.llm_api_key) - chat_session = ChatSession(servers, llm_client) - await chat_session.start() - - -if __name__ == "__main__": - asyncio.run(main()) +import asyncio +import json +import logging +import os +import shutil +from contextlib import AsyncExitStack +from typing import Any + +import httpx +from dotenv import load_dotenv +from mcp import ClientSession, StdioServerParameters +from 
# Configure logging
logging.basicConfig(
    level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s"
)


class Configuration:
    """Manages configuration and environment variables for the MCP client."""

    def __init__(self) -> None:
        """Initialize configuration with environment variables."""
        self.load_env()
        self.api_key = os.getenv("LLM_API_KEY")

    @staticmethod
    def load_env() -> None:
        """Load environment variables from .env file."""
        load_dotenv()

    @staticmethod
    def load_config(file_path: str) -> dict[str, Any]:
        """Load server configuration from JSON file.

        Args:
            file_path: Path to the JSON configuration file.

        Returns:
            Dict containing server configuration.

        Raises:
            FileNotFoundError: If configuration file doesn't exist.
            JSONDecodeError: If configuration file is invalid JSON.
        """
        # Explicit encoding avoids platform-dependent default encodings.
        with open(file_path, "r", encoding="utf-8") as f:
            return json.load(f)

    @property
    def llm_api_key(self) -> str:
        """Get the LLM API key.

        Returns:
            The API key as a string.

        Raises:
            ValueError: If the API key is not found in environment variables.
        """
        if not self.api_key:
            raise ValueError("LLM_API_KEY not found in environment variables")
        return self.api_key


class Server:
    """Manages MCP server connections and tool execution."""

    def __init__(self, name: str, config: dict[str, Any]) -> None:
        """Store server name/config; no connection is opened until initialize()."""
        self.name: str = name
        self.config: dict[str, Any] = config
        self.stdio_context: Any | None = None
        self.session: ClientSession | None = None
        self._cleanup_lock: asyncio.Lock = asyncio.Lock()
        self.exit_stack: AsyncExitStack = AsyncExitStack()

    async def initialize(self) -> None:
        """Initialize the server connection."""
        # "npx" is resolved to an absolute path so the subprocess can be
        # spawned reliably; other commands are used as given.
        command = (
            shutil.which("npx")
            if self.config["command"] == "npx"
            else self.config["command"]
        )
        if command is None:
            raise ValueError("The command must be a valid string and cannot be None.")

        server_params = StdioServerParameters(
            command=command,
            args=self.config["args"],
            env={**os.environ, **self.config["env"]}
            if self.config.get("env")
            else None,
        )
        try:
            stdio_transport = await self.exit_stack.enter_async_context(
                stdio_client(server_params)
            )
            read, write = stdio_transport
            session = await self.exit_stack.enter_async_context(
                ClientSession(read, write)
            )
            await session.initialize()
            self.session = session
        except Exception as e:
            logging.error(f"Error initializing server {self.name}: {e}")
            # Release any partially-acquired resources before propagating.
            await self.cleanup()
            raise

    async def list_tools(self) -> list[Any]:
        """List available tools from the server.

        Returns:
            A list of available tools.

        Raises:
            RuntimeError: If the server is not initialized.
        """
        if not self.session:
            raise RuntimeError(f"Server {self.name} not initialized")

        tools_response = await self.session.list_tools()
        tools = []

        # The response iterates as (key, value) pairs; only the "tools"
        # entry carries the tool definitions.
        for item in tools_response:
            if isinstance(item, tuple) and item[0] == "tools":
                tools.extend(
                    Tool(tool.name, tool.description, tool.inputSchema)
                    for tool in item[1]
                )

        return tools

    async def execute_tool(
        self,
        tool_name: str,
        arguments: dict[str, Any],
        retries: int = 2,
        delay: float = 1.0,
    ) -> Any:
        """Execute a tool with retry mechanism.

        Args:
            tool_name: Name of the tool to execute.
            arguments: Tool arguments.
            retries: Number of retry attempts.
            delay: Delay between retries in seconds.

        Returns:
            Tool execution result.

        Raises:
            RuntimeError: If server is not initialized.
            Exception: If tool execution fails after all retries.
        """
        if not self.session:
            raise RuntimeError(f"Server {self.name} not initialized")

        attempt = 0
        while attempt < retries:
            try:
                logging.info(f"Executing {tool_name}...")
                result = await self.session.call_tool(tool_name, arguments)

                return result

            except Exception as e:
                attempt += 1
                logging.warning(
                    f"Error executing tool: {e}. Attempt {attempt} of {retries}."
                )
                if attempt < retries:
                    logging.info(f"Retrying in {delay} seconds...")
                    await asyncio.sleep(delay)
                else:
                    logging.error("Max retries reached. Failing.")
                    raise

    async def cleanup(self) -> None:
        """Clean up server resources."""
        # The lock serializes concurrent cleanup calls (initialize() failure
        # path and final shutdown may both invoke this).
        async with self._cleanup_lock:
            try:
                await self.exit_stack.aclose()
                self.session = None
                self.stdio_context = None
            except Exception as e:
                logging.error(f"Error during cleanup of server {self.name}: {e}")


class Tool:
    """Represents a tool with its properties and formatting."""

    def __init__(
        self, name: str, description: str, input_schema: dict[str, Any]
    ) -> None:
        """Store the tool's name, description, and JSON input schema."""
        self.name: str = name
        self.description: str = description
        self.input_schema: dict[str, Any] = input_schema

    def format_for_llm(self) -> str:
        """Format tool information for LLM.

        Returns:
            A formatted string describing the tool.
        """
        args_desc = []
        if "properties" in self.input_schema:
            for param_name, param_info in self.input_schema["properties"].items():
                arg_desc = (
                    f"- {param_name}: {param_info.get('description', 'No description')}"
                )
                if param_name in self.input_schema.get("required", []):
                    arg_desc += " (required)"
                args_desc.append(arg_desc)

        return f"""
Tool: {self.name}
Description: {self.description}
Arguments:
{chr(10).join(args_desc)}
"""


class LLMClient:
    """Manages communication with the LLM provider."""

    def __init__(self, api_key: str) -> None:
        """Store the API key used for the Groq chat-completions endpoint."""
        self.api_key: str = api_key

    def get_response(self, messages: list[dict[str, str]]) -> str:
        """Get a response from the LLM.

        Args:
            messages: A list of message dictionaries.

        Returns:
            The LLM's response as a string.

        Raises:
            httpx.RequestError: If the request to the LLM fails.
        """
        url = "https://api.groq.com/openai/v1/chat/completions"

        headers = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {self.api_key}",
        }
        payload = {
            "messages": messages,
            "model": "llama-3.2-90b-vision-preview",
            "temperature": 0.7,
            "max_tokens": 4096,
            "top_p": 1,
            "stream": False,
            "stop": None,
        }

        try:
            with httpx.Client() as client:
                response = client.post(url, headers=headers, json=payload)
                response.raise_for_status()
                data = response.json()
                return data["choices"][0]["message"]["content"]

        except httpx.RequestError as e:
            error_message = f"Error getting LLM response: {str(e)}"
            logging.error(error_message)

            if isinstance(e, httpx.HTTPStatusError):
                status_code = e.response.status_code
                logging.error(f"Status code: {status_code}")
                logging.error(f"Response details: {e.response.text}")

            # Return an error string rather than raising so the chat loop
            # can surface the failure to the user and continue.
            return (
                f"I encountered an error: {error_message}. "
                "Please try again or rephrase your request."
            )


class ChatSession:
    """Orchestrates the interaction between user, LLM, and tools."""

    def __init__(self, servers: list[Server], llm_client: LLMClient) -> None:
        """Store the MCP servers and the LLM client used for the session."""
        self.servers: list[Server] = servers
        self.llm_client: LLMClient = llm_client

    async def cleanup_servers(self) -> None:
        """Clean up all servers properly."""
        cleanup_tasks = [
            asyncio.create_task(server.cleanup()) for server in self.servers
        ]
        if cleanup_tasks:
            try:
                # return_exceptions=True: one failed cleanup must not abort
                # the others during shutdown.
                await asyncio.gather(*cleanup_tasks, return_exceptions=True)
            except Exception as e:
                logging.warning(f"Warning during final cleanup: {e}")

    async def process_llm_response(self, llm_response: str) -> str:
        """Process the LLM response and execute tools if needed.

        Args:
            llm_response: The response from the LLM.

        Returns:
            The result of tool execution or the original response.
        """
        # Uses the module-level json import (a redundant local
        # "import json" previously shadowed it here).
        try:
            tool_call = json.loads(llm_response)
            if "tool" in tool_call and "arguments" in tool_call:
                logging.info(f"Executing tool: {tool_call['tool']}")
                logging.info(f"With arguments: {tool_call['arguments']}")

                for server in self.servers:
                    tools = await server.list_tools()
                    if any(tool.name == tool_call["tool"] for tool in tools):
                        try:
                            result = await server.execute_tool(
                                tool_call["tool"], tool_call["arguments"]
                            )

                            if isinstance(result, dict) and "progress" in result:
                                progress = result["progress"]
                                total = result["total"]
                                percentage = (progress / total) * 100
                                logging.info(
                                    f"Progress: {progress}/{total} ({percentage:.1f}%)"
                                )

                            return f"Tool execution result: {result}"
                        except Exception as e:
                            error_msg = f"Error executing tool: {str(e)}"
                            logging.error(error_msg)
                            return error_msg

                return f"No server found with tool: {tool_call['tool']}"
            return llm_response
        except json.JSONDecodeError:
            # Not a tool call; treat the text as a plain assistant reply.
            return llm_response

    async def start(self) -> None:
        """Main chat session handler."""
        try:
            for server in self.servers:
                try:
                    await server.initialize()
                except Exception as e:
                    logging.error(f"Failed to initialize server: {e}")
                    await self.cleanup_servers()
                    return

            all_tools = []
            for server in self.servers:
                tools = await server.list_tools()
                all_tools.extend(tools)

            tools_description = "\n".join([tool.format_for_llm() for tool in all_tools])

            system_message = (
                "You are a helpful assistant with access to these tools:\n\n"
                f"{tools_description}\n"
                "Choose the appropriate tool based on the user's question. "
                "If no tool is needed, reply directly.\n\n"
                "IMPORTANT: When you need to use a tool, you must ONLY respond with "
                "the exact JSON object format below, nothing else:\n"
                "{\n"
                '    "tool": "tool-name",\n'
                '    "arguments": {\n'
                '        "argument-name": "value"\n'
                "    }\n"
                "}\n\n"
                "After receiving a tool's response:\n"
                "1. Transform the raw data into a natural, conversational response\n"
                "2. Keep responses concise but informative\n"
                "3. Focus on the most relevant information\n"
                "4. Use appropriate context from the user's question\n"
                "5. Avoid simply repeating the raw data\n\n"
                "Please use only the tools that are explicitly defined above."
            )

            messages = [{"role": "system", "content": system_message}]

            while True:
                try:
                    # BUG FIX: previously the whole input was lowercased
                    # (.strip().lower()), destroying case information in the
                    # message sent to the LLM. Only the quit-command check
                    # should be case-insensitive.
                    user_input = input("You: ").strip()
                    if user_input.lower() in ["quit", "exit"]:
                        logging.info("\nExiting...")
                        break

                    messages.append({"role": "user", "content": user_input})

                    llm_response = self.llm_client.get_response(messages)
                    logging.info("\nAssistant: %s", llm_response)

                    result = await self.process_llm_response(llm_response)

                    if result != llm_response:
                        # A tool ran: feed its result back and ask the LLM to
                        # phrase a final answer for the user.
                        messages.append({"role": "assistant", "content": llm_response})
                        messages.append({"role": "system", "content": result})

                        final_response = self.llm_client.get_response(messages)
                        logging.info("\nFinal response: %s", final_response)
                        messages.append(
                            {"role": "assistant", "content": final_response}
                        )
                    else:
                        messages.append({"role": "assistant", "content": llm_response})

                except KeyboardInterrupt:
                    logging.info("\nExiting...")
                    break

        finally:
            await self.cleanup_servers()


async def main() -> None:
    """Initialize and run the chat session."""
    config = Configuration()
    server_config = config.load_config("servers_config.json")
    servers = [
        Server(name, srv_config)
        for name, srv_config in server_config["mcpServers"].items()
    ]
    llm_client = LLMClient(config.llm_api_key)
    chat_session = ChatSession(servers, llm_client)
    await chat_session.start()


if __name__ == "__main__":
    asyncio.run(main())
a/examples/clients/simple-chatbot/mcp_simple_chatbot/requirements.txt +++ b/examples/clients/simple-chatbot/mcp_simple_chatbot/requirements.txt @@ -1,4 +1,4 @@ -python-dotenv>=1.0.0 -requests>=2.31.0 -mcp>=1.0.0 +python-dotenv>=1.0.0 +requests>=2.31.0 +mcp>=1.0.0 uvicorn>=0.32.1 \ No newline at end of file diff --git a/examples/clients/simple-chatbot/mcp_simple_chatbot/servers_config.json b/examples/clients/simple-chatbot/mcp_simple_chatbot/servers_config.json index 98f8e1fd5..af79210ec 100644 --- a/examples/clients/simple-chatbot/mcp_simple_chatbot/servers_config.json +++ b/examples/clients/simple-chatbot/mcp_simple_chatbot/servers_config.json @@ -1,12 +1,12 @@ -{ - "mcpServers": { - "sqlite": { - "command": "uvx", - "args": ["mcp-server-sqlite", "--db-path", "./test.db"] - }, - "puppeteer": { - "command": "npx", - "args": ["-y", "@modelcontextprotocol/server-puppeteer"] - } - } +{ + "mcpServers": { + "sqlite": { + "command": "uvx", + "args": ["mcp-server-sqlite", "--db-path", "./test.db"] + }, + "puppeteer": { + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-puppeteer"] + } + } } \ No newline at end of file diff --git a/examples/clients/simple-chatbot/pyproject.toml b/examples/clients/simple-chatbot/pyproject.toml index d88b8f6d2..49bec80af 100644 --- a/examples/clients/simple-chatbot/pyproject.toml +++ b/examples/clients/simple-chatbot/pyproject.toml @@ -1,48 +1,48 @@ -[project] -name = "mcp-simple-chatbot" -version = "0.1.0" -description = "A simple CLI chatbot using the Model Context Protocol (MCP)" -readme = "README.md" -requires-python = ">=3.10" -authors = [{ name = "Edoardo Cilia" }] -keywords = ["mcp", "llm", "chatbot", "cli"] -license = { text = "MIT" } -classifiers = [ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.10", -] -dependencies = [ - "python-dotenv>=1.0.0", - "requests>=2.31.0", 
- "mcp>=1.0.0", - "uvicorn>=0.32.1" -] - -[project.scripts] -mcp-simple-chatbot = "mcp_simple_chatbot.client:main" - -[build-system] -requires = ["hatchling"] -build-backend = "hatchling.build" - -[tool.hatch.build.targets.wheel] -packages = ["mcp_simple_chatbot"] - -[tool.pyright] -include = ["mcp_simple_chatbot"] -venvPath = "." -venv = ".venv" - -[tool.ruff.lint] -select = ["E", "F", "I"] -ignore = [] - -[tool.ruff] -line-length = 88 -target-version = "py310" - -[tool.uv] -dev-dependencies = ["pyright>=1.1.379", "pytest>=8.3.3", "ruff>=0.6.9"] +[project] +name = "mcp-simple-chatbot" +version = "0.1.0" +description = "A simple CLI chatbot using the Model Context Protocol (MCP)" +readme = "README.md" +requires-python = ">=3.10" +authors = [{ name = "Edoardo Cilia" }] +keywords = ["mcp", "llm", "chatbot", "cli"] +license = { text = "MIT" } +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", +] +dependencies = [ + "python-dotenv>=1.0.0", + "requests>=2.31.0", + "mcp>=1.0.0", + "uvicorn>=0.32.1" +] + +[project.scripts] +mcp-simple-chatbot = "mcp_simple_chatbot.client:main" + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["mcp_simple_chatbot"] + +[tool.pyright] +include = ["mcp_simple_chatbot"] +venvPath = "." 
+venv = ".venv" + +[tool.ruff.lint] +select = ["E", "F", "I"] +ignore = [] + +[tool.ruff] +line-length = 88 +target-version = "py310" + +[tool.uv] +dev-dependencies = ["pyright>=1.1.379", "pytest>=8.3.3", "ruff>=0.6.9"] diff --git a/examples/clients/simple-chatbot/uv.lock b/examples/clients/simple-chatbot/uv.lock index ee7cb2fab..4b5374e22 100644 --- a/examples/clients/simple-chatbot/uv.lock +++ b/examples/clients/simple-chatbot/uv.lock @@ -1,555 +1,555 @@ -version = 1 -requires-python = ">=3.10" - -[[package]] -name = "annotated-types" -version = "0.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, -] - -[[package]] -name = "anyio" -version = "4.8.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, - { name = "idna" }, - { name = "sniffio" }, - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a3/73/199a98fc2dae33535d6b8e8e6ec01f8c1d76c9adb096c6b7d64823038cde/anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a", size = 181126 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/46/eb/e7f063ad1fec6b3178a3cd82d1a3c4de82cccf283fc42746168188e1cdd5/anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a", size = 96041 }, -] - -[[package]] -name = "certifi" -version 
= "2024.12.14" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0f/bd/1d41ee578ce09523c81a15426705dd20969f5abf006d1afe8aeff0dd776a/certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db", size = 166010 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a5/32/8f6669fc4798494966bf446c8c4a162e0b5d893dff088afddf76414f70e1/certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56", size = 164927 }, -] - -[[package]] -name = "charset-normalizer" -version = "3.4.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/16/b0/572805e227f01586461c80e0fd25d65a2115599cc9dad142fee4b747c357/charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3", size = 123188 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0d/58/5580c1716040bc89206c77d8f74418caf82ce519aae06450393ca73475d1/charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de", size = 198013 }, - { url = "https://files.pythonhosted.org/packages/d0/11/00341177ae71c6f5159a08168bcb98c6e6d196d372c94511f9f6c9afe0c6/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176", size = 141285 }, - { url = "https://files.pythonhosted.org/packages/01/09/11d684ea5819e5a8f5100fb0b38cf8d02b514746607934134d31233e02c8/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037", size = 151449 }, - { url = 
"https://files.pythonhosted.org/packages/08/06/9f5a12939db324d905dc1f70591ae7d7898d030d7662f0d426e2286f68c9/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f", size = 143892 }, - { url = "https://files.pythonhosted.org/packages/93/62/5e89cdfe04584cb7f4d36003ffa2936681b03ecc0754f8e969c2becb7e24/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a", size = 146123 }, - { url = "https://files.pythonhosted.org/packages/a9/ac/ab729a15c516da2ab70a05f8722ecfccc3f04ed7a18e45c75bbbaa347d61/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a", size = 147943 }, - { url = "https://files.pythonhosted.org/packages/03/d2/3f392f23f042615689456e9a274640c1d2e5dd1d52de36ab8f7955f8f050/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247", size = 142063 }, - { url = "https://files.pythonhosted.org/packages/f2/e3/e20aae5e1039a2cd9b08d9205f52142329f887f8cf70da3650326670bddf/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408", size = 150578 }, - { url = "https://files.pythonhosted.org/packages/8d/af/779ad72a4da0aed925e1139d458adc486e61076d7ecdcc09e610ea8678db/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb", size = 153629 }, - { url = "https://files.pythonhosted.org/packages/c2/b6/7aa450b278e7aa92cf7732140bfd8be21f5f29d5bf334ae987c945276639/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = 
"sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d", size = 150778 }, - { url = "https://files.pythonhosted.org/packages/39/f4/d9f4f712d0951dcbfd42920d3db81b00dd23b6ab520419626f4023334056/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807", size = 146453 }, - { url = "https://files.pythonhosted.org/packages/49/2b/999d0314e4ee0cff3cb83e6bc9aeddd397eeed693edb4facb901eb8fbb69/charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f", size = 95479 }, - { url = "https://files.pythonhosted.org/packages/2d/ce/3cbed41cff67e455a386fb5e5dd8906cdda2ed92fbc6297921f2e4419309/charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f", size = 102790 }, - { url = "https://files.pythonhosted.org/packages/72/80/41ef5d5a7935d2d3a773e3eaebf0a9350542f2cab4eac59a7a4741fbbbbe/charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125", size = 194995 }, - { url = "https://files.pythonhosted.org/packages/7a/28/0b9fefa7b8b080ec492110af6d88aa3dea91c464b17d53474b6e9ba5d2c5/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1", size = 139471 }, - { url = "https://files.pythonhosted.org/packages/71/64/d24ab1a997efb06402e3fc07317e94da358e2585165930d9d59ad45fcae2/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3", size = 149831 }, - { url = 
"https://files.pythonhosted.org/packages/37/ed/be39e5258e198655240db5e19e0b11379163ad7070962d6b0c87ed2c4d39/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd", size = 142335 }, - { url = "https://files.pythonhosted.org/packages/88/83/489e9504711fa05d8dde1574996408026bdbdbd938f23be67deebb5eca92/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00", size = 143862 }, - { url = "https://files.pythonhosted.org/packages/c6/c7/32da20821cf387b759ad24627a9aca289d2822de929b8a41b6241767b461/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12", size = 145673 }, - { url = "https://files.pythonhosted.org/packages/68/85/f4288e96039abdd5aeb5c546fa20a37b50da71b5cf01e75e87f16cd43304/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77", size = 140211 }, - { url = "https://files.pythonhosted.org/packages/28/a3/a42e70d03cbdabc18997baf4f0227c73591a08041c149e710045c281f97b/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146", size = 148039 }, - { url = "https://files.pythonhosted.org/packages/85/e4/65699e8ab3014ecbe6f5c71d1a55d810fb716bbfd74f6283d5c2aa87febf/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd", size = 151939 }, - { url = "https://files.pythonhosted.org/packages/b1/82/8e9fe624cc5374193de6860aba3ea8070f584c8565ee77c168ec13274bd2/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6", size = 149075 }, - { url = "https://files.pythonhosted.org/packages/3d/7b/82865ba54c765560c8433f65e8acb9217cb839a9e32b42af4aa8e945870f/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8", size = 144340 }, - { url = "https://files.pythonhosted.org/packages/b5/b6/9674a4b7d4d99a0d2df9b215da766ee682718f88055751e1e5e753c82db0/charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b", size = 95205 }, - { url = "https://files.pythonhosted.org/packages/1e/ab/45b180e175de4402dcf7547e4fb617283bae54ce35c27930a6f35b6bef15/charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76", size = 102441 }, - { url = "https://files.pythonhosted.org/packages/0a/9a/dd1e1cdceb841925b7798369a09279bd1cf183cef0f9ddf15a3a6502ee45/charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545", size = 196105 }, - { url = "https://files.pythonhosted.org/packages/d3/8c/90bfabf8c4809ecb648f39794cf2a84ff2e7d2a6cf159fe68d9a26160467/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7", size = 140404 }, - { url = "https://files.pythonhosted.org/packages/ad/8f/e410d57c721945ea3b4f1a04b74f70ce8fa800d393d72899f0a40526401f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757", size = 150423 }, - { url = 
"https://files.pythonhosted.org/packages/f0/b8/e6825e25deb691ff98cf5c9072ee0605dc2acfca98af70c2d1b1bc75190d/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa", size = 143184 }, - { url = "https://files.pythonhosted.org/packages/3e/a2/513f6cbe752421f16d969e32f3583762bfd583848b763913ddab8d9bfd4f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d", size = 145268 }, - { url = "https://files.pythonhosted.org/packages/74/94/8a5277664f27c3c438546f3eb53b33f5b19568eb7424736bdc440a88a31f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616", size = 147601 }, - { url = "https://files.pythonhosted.org/packages/7c/5f/6d352c51ee763623a98e31194823518e09bfa48be2a7e8383cf691bbb3d0/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b", size = 141098 }, - { url = "https://files.pythonhosted.org/packages/78/d4/f5704cb629ba5ab16d1d3d741396aec6dc3ca2b67757c45b0599bb010478/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d", size = 149520 }, - { url = "https://files.pythonhosted.org/packages/c5/96/64120b1d02b81785f222b976c0fb79a35875457fa9bb40827678e54d1bc8/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a", size = 152852 }, - { url = "https://files.pythonhosted.org/packages/84/c9/98e3732278a99f47d487fd3468bc60b882920cef29d1fa6ca460a1fdf4e6/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9", size = 150488 }, - { url = "https://files.pythonhosted.org/packages/13/0e/9c8d4cb99c98c1007cc11eda969ebfe837bbbd0acdb4736d228ccaabcd22/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1", size = 146192 }, - { url = "https://files.pythonhosted.org/packages/b2/21/2b6b5b860781a0b49427309cb8670785aa543fb2178de875b87b9cc97746/charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35", size = 95550 }, - { url = "https://files.pythonhosted.org/packages/21/5b/1b390b03b1d16c7e382b561c5329f83cc06623916aab983e8ab9239c7d5c/charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f", size = 102785 }, - { url = "https://files.pythonhosted.org/packages/38/94/ce8e6f63d18049672c76d07d119304e1e2d7c6098f0841b51c666e9f44a0/charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda", size = 195698 }, - { url = "https://files.pythonhosted.org/packages/24/2e/dfdd9770664aae179a96561cc6952ff08f9a8cd09a908f259a9dfa063568/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313", size = 140162 }, - { url = "https://files.pythonhosted.org/packages/24/4e/f646b9093cff8fc86f2d60af2de4dc17c759de9d554f130b140ea4738ca6/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9", size = 150263 }, - { url = 
"https://files.pythonhosted.org/packages/5e/67/2937f8d548c3ef6e2f9aab0f6e21001056f692d43282b165e7c56023e6dd/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b", size = 142966 }, - { url = "https://files.pythonhosted.org/packages/52/ed/b7f4f07de100bdb95c1756d3a4d17b90c1a3c53715c1a476f8738058e0fa/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11", size = 144992 }, - { url = "https://files.pythonhosted.org/packages/96/2c/d49710a6dbcd3776265f4c923bb73ebe83933dfbaa841c5da850fe0fd20b/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f", size = 147162 }, - { url = "https://files.pythonhosted.org/packages/b4/41/35ff1f9a6bd380303dea55e44c4933b4cc3c4850988927d4082ada230273/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd", size = 140972 }, - { url = "https://files.pythonhosted.org/packages/fb/43/c6a0b685fe6910d08ba971f62cd9c3e862a85770395ba5d9cad4fede33ab/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2", size = 149095 }, - { url = "https://files.pythonhosted.org/packages/4c/ff/a9a504662452e2d2878512115638966e75633519ec11f25fca3d2049a94a/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886", size = 152668 }, - { url = "https://files.pythonhosted.org/packages/6c/71/189996b6d9a4b932564701628af5cee6716733e9165af1d5e1b285c530ed/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = 
"sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601", size = 150073 }, - { url = "https://files.pythonhosted.org/packages/e4/93/946a86ce20790e11312c87c75ba68d5f6ad2208cfb52b2d6a2c32840d922/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd", size = 145732 }, - { url = "https://files.pythonhosted.org/packages/cd/e5/131d2fb1b0dddafc37be4f3a2fa79aa4c037368be9423061dccadfd90091/charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407", size = 95391 }, - { url = "https://files.pythonhosted.org/packages/27/f2/4f9a69cc7712b9b5ad8fdb87039fd89abba997ad5cbe690d1835d40405b0/charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971", size = 102702 }, - { url = "https://files.pythonhosted.org/packages/0e/f6/65ecc6878a89bb1c23a086ea335ad4bf21a588990c3f535a227b9eea9108/charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", size = 49767 }, -] - -[[package]] -name = "click" -version = "8.1.8" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "platform_system == 'Windows'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188 }, -] - -[[package]] -name = "colorama" -version = "0.4.6" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, -] - -[[package]] -name = "exceptiongroup" -version = "1.2.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/09/35/2495c4ac46b980e4ca1f6ad6db102322ef3ad2410b79fdde159a4b0f3b92/exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc", size = 28883 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", size = 16453 }, -] - -[[package]] -name = "h11" -version = "0.14.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f5/38/3af3d3633a34a3316095b39c8e8fb4853a28a536e55d347bd8d8e9a14b03/h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", size = 100418 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/95/04/ff642e65ad6b90db43e668d70ffb6736436c7ce41fcc549f4e9472234127/h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761", size = 58259 }, -] - -[[package]] -name = "httpcore" -version = "1.0.7" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "h11" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/6a/41/d7d0a89eb493922c37d343b607bc1b5da7f5be7e383740b4753ad8943e90/httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c", size = 85196 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/87/f5/72347bc88306acb359581ac4d52f23c0ef445b57157adedb9aee0cd689d2/httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd", size = 78551 }, -] - -[[package]] -name = "httpx" -version = "0.28.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, - { name = "certifi" }, - { name = "httpcore" }, - { name = "idna" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517 }, -] - -[[package]] -name = "httpx-sse" -version = "0.4.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4c/60/8f4281fa9bbf3c8034fd54c0e7412e66edbab6bc74c4996bd616f8d0406e/httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721", size = 12624 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e1/9b/a181f281f65d776426002f330c31849b86b31fc9d848db62e16f03ff739f/httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f", size = 7819 }, -] - -[[package]] -name = "idna" -version = "3.10" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, -] - -[[package]] -name = "iniconfig" -version = "2.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892 }, -] - -[[package]] -name = "mcp" -version = "1.2.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, - { name = "httpx" }, - { name = "httpx-sse" }, - { name = "pydantic" }, - { name = "pydantic-settings" }, - { name = "sse-starlette" }, - { name = "starlette" }, - { name = "uvicorn" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ab/a5/b08dc846ebedae9f17ced878e6975826e90e448cd4592f532f6a88a925a7/mcp-1.2.0.tar.gz", hash = "sha256:2b06c7ece98d6ea9e6379caa38d74b432385c338fb530cb82e2c70ea7add94f5", size = 102973 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/af/84/fca78f19ac8ce6c53ba416247c71baa53a9e791e98d3c81edbc20a77d6d1/mcp-1.2.0-py3-none-any.whl", hash = "sha256:1d0e77d8c14955a5aea1f5aa1f444c8e531c09355c829b20e42f7a142bc0755f", size = 66468 }, -] - -[[package]] -name = "mcp-simple-chatbot" -version = "0.1.0" 
-source = { editable = "." } -dependencies = [ - { name = "mcp" }, - { name = "python-dotenv" }, - { name = "requests" }, - { name = "uvicorn" }, -] - -[package.dev-dependencies] -dev = [ - { name = "pyright" }, - { name = "pytest" }, - { name = "ruff" }, -] - -[package.metadata] -requires-dist = [ - { name = "mcp", specifier = ">=1.0.0" }, - { name = "python-dotenv", specifier = ">=1.0.0" }, - { name = "requests", specifier = ">=2.31.0" }, - { name = "uvicorn", specifier = ">=0.32.1" }, -] - -[package.metadata.requires-dev] -dev = [ - { name = "pyright", specifier = ">=1.1.379" }, - { name = "pytest", specifier = ">=8.3.3" }, - { name = "ruff", specifier = ">=0.6.9" }, -] - -[[package]] -name = "nodeenv" -version = "1.9.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314 }, -] - -[[package]] -name = "packaging" -version = "24.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", size = 163950 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451 }, -] - -[[package]] -name = "pluggy" -version = "1.5.0" -source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 }, -] - -[[package]] -name = "pydantic" -version = "2.10.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "annotated-types" }, - { name = "pydantic-core" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/6a/c7/ca334c2ef6f2e046b1144fe4bb2a5da8a4c574e7f2ebf7e16b34a6a2fa92/pydantic-2.10.5.tar.gz", hash = "sha256:278b38dbbaec562011d659ee05f63346951b3a248a6f3642e1bc68894ea2b4ff", size = 761287 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/58/26/82663c79010b28eddf29dcdd0ea723439535fa917fce5905885c0e9ba562/pydantic-2.10.5-py3-none-any.whl", hash = "sha256:4dd4e322dbe55472cb7ca7e73f4b63574eecccf2835ffa2af9021ce113c83c53", size = 431426 }, -] - -[[package]] -name = "pydantic-core" -version = "2.27.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/fc/01/f3e5ac5e7c25833db5eb555f7b7ab24cd6f8c322d3a3ad2d67a952dc0abc/pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39", size = 413443 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3a/bc/fed5f74b5d802cf9a03e83f60f18864e90e3aed7223adaca5ffb7a8d8d64/pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa", size = 1895938 }, - { url = 
"https://files.pythonhosted.org/packages/71/2a/185aff24ce844e39abb8dd680f4e959f0006944f4a8a0ea372d9f9ae2e53/pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c", size = 1815684 }, - { url = "https://files.pythonhosted.org/packages/c3/43/fafabd3d94d159d4f1ed62e383e264f146a17dd4d48453319fd782e7979e/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a", size = 1829169 }, - { url = "https://files.pythonhosted.org/packages/a2/d1/f2dfe1a2a637ce6800b799aa086d079998959f6f1215eb4497966efd2274/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5", size = 1867227 }, - { url = "https://files.pythonhosted.org/packages/7d/39/e06fcbcc1c785daa3160ccf6c1c38fea31f5754b756e34b65f74e99780b5/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c", size = 2037695 }, - { url = "https://files.pythonhosted.org/packages/7a/67/61291ee98e07f0650eb756d44998214231f50751ba7e13f4f325d95249ab/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7", size = 2741662 }, - { url = "https://files.pythonhosted.org/packages/32/90/3b15e31b88ca39e9e626630b4c4a1f5a0dfd09076366f4219429e6786076/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a", size = 1993370 }, - { url = "https://files.pythonhosted.org/packages/ff/83/c06d333ee3a67e2e13e07794995c1535565132940715931c1c43bfc85b11/pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236", size = 1996813 }, - { url = "https://files.pythonhosted.org/packages/7c/f7/89be1c8deb6e22618a74f0ca0d933fdcb8baa254753b26b25ad3acff8f74/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962", size = 2005287 }, - { url = "https://files.pythonhosted.org/packages/b7/7d/8eb3e23206c00ef7feee17b83a4ffa0a623eb1a9d382e56e4aa46fd15ff2/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9", size = 2128414 }, - { url = "https://files.pythonhosted.org/packages/4e/99/fe80f3ff8dd71a3ea15763878d464476e6cb0a2db95ff1c5c554133b6b83/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af", size = 2155301 }, - { url = "https://files.pythonhosted.org/packages/2b/a3/e50460b9a5789ca1451b70d4f52546fa9e2b420ba3bfa6100105c0559238/pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4", size = 1816685 }, - { url = "https://files.pythonhosted.org/packages/57/4c/a8838731cb0f2c2a39d3535376466de6049034d7b239c0202a64aaa05533/pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31", size = 1982876 }, - { url = "https://files.pythonhosted.org/packages/c2/89/f3450af9d09d44eea1f2c369f49e8f181d742f28220f88cc4dfaae91ea6e/pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc", size = 1893421 }, - { url = "https://files.pythonhosted.org/packages/9e/e3/71fe85af2021f3f386da42d291412e5baf6ce7716bd7101ea49c810eda90/pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7", size = 
1814998 }, - { url = "https://files.pythonhosted.org/packages/a6/3c/724039e0d848fd69dbf5806894e26479577316c6f0f112bacaf67aa889ac/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15", size = 1826167 }, - { url = "https://files.pythonhosted.org/packages/2b/5b/1b29e8c1fb5f3199a9a57c1452004ff39f494bbe9bdbe9a81e18172e40d3/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306", size = 1865071 }, - { url = "https://files.pythonhosted.org/packages/89/6c/3985203863d76bb7d7266e36970d7e3b6385148c18a68cc8915fd8c84d57/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99", size = 2036244 }, - { url = "https://files.pythonhosted.org/packages/0e/41/f15316858a246b5d723f7d7f599f79e37493b2e84bfc789e58d88c209f8a/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459", size = 2737470 }, - { url = "https://files.pythonhosted.org/packages/a8/7c/b860618c25678bbd6d1d99dbdfdf0510ccb50790099b963ff78a124b754f/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048", size = 1992291 }, - { url = "https://files.pythonhosted.org/packages/bf/73/42c3742a391eccbeab39f15213ecda3104ae8682ba3c0c28069fbcb8c10d/pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d", size = 1994613 }, - { url = "https://files.pythonhosted.org/packages/94/7a/941e89096d1175d56f59340f3a8ebaf20762fef222c298ea96d36a6328c5/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b", size = 2002355 }, - { url = "https://files.pythonhosted.org/packages/6e/95/2359937a73d49e336a5a19848713555605d4d8d6940c3ec6c6c0ca4dcf25/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474", size = 2126661 }, - { url = "https://files.pythonhosted.org/packages/2b/4c/ca02b7bdb6012a1adef21a50625b14f43ed4d11f1fc237f9d7490aa5078c/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6", size = 2153261 }, - { url = "https://files.pythonhosted.org/packages/72/9d/a241db83f973049a1092a079272ffe2e3e82e98561ef6214ab53fe53b1c7/pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c", size = 1812361 }, - { url = "https://files.pythonhosted.org/packages/e8/ef/013f07248041b74abd48a385e2110aa3a9bbfef0fbd97d4e6d07d2f5b89a/pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc", size = 1982484 }, - { url = "https://files.pythonhosted.org/packages/10/1c/16b3a3e3398fd29dca77cea0a1d998d6bde3902fa2706985191e2313cc76/pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4", size = 1867102 }, - { url = "https://files.pythonhosted.org/packages/d6/74/51c8a5482ca447871c93e142d9d4a92ead74de6c8dc5e66733e22c9bba89/pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0", size = 1893127 }, - { url = "https://files.pythonhosted.org/packages/d3/f3/c97e80721735868313c58b89d2de85fa80fe8dfeeed84dc51598b92a135e/pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef", size = 1811340 }, - 
{ url = "https://files.pythonhosted.org/packages/9e/91/840ec1375e686dbae1bd80a9e46c26a1e0083e1186abc610efa3d9a36180/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7", size = 1822900 }, - { url = "https://files.pythonhosted.org/packages/f6/31/4240bc96025035500c18adc149aa6ffdf1a0062a4b525c932065ceb4d868/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934", size = 1869177 }, - { url = "https://files.pythonhosted.org/packages/fa/20/02fbaadb7808be578317015c462655c317a77a7c8f0ef274bc016a784c54/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6", size = 2038046 }, - { url = "https://files.pythonhosted.org/packages/06/86/7f306b904e6c9eccf0668248b3f272090e49c275bc488a7b88b0823444a4/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c", size = 2685386 }, - { url = "https://files.pythonhosted.org/packages/8d/f0/49129b27c43396581a635d8710dae54a791b17dfc50c70164866bbf865e3/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2", size = 1997060 }, - { url = "https://files.pythonhosted.org/packages/0d/0f/943b4af7cd416c477fd40b187036c4f89b416a33d3cc0ab7b82708a667aa/pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4", size = 2004870 }, - { url = "https://files.pythonhosted.org/packages/35/40/aea70b5b1a63911c53a4c8117c0a828d6790483f858041f47bab0b779f44/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3", size = 1999822 }, - { url = "https://files.pythonhosted.org/packages/f2/b3/807b94fd337d58effc5498fd1a7a4d9d59af4133e83e32ae39a96fddec9d/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4", size = 2130364 }, - { url = "https://files.pythonhosted.org/packages/fc/df/791c827cd4ee6efd59248dca9369fb35e80a9484462c33c6649a8d02b565/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57", size = 2158303 }, - { url = "https://files.pythonhosted.org/packages/9b/67/4e197c300976af185b7cef4c02203e175fb127e414125916bf1128b639a9/pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc", size = 1834064 }, - { url = "https://files.pythonhosted.org/packages/1f/ea/cd7209a889163b8dcca139fe32b9687dd05249161a3edda62860430457a5/pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9", size = 1989046 }, - { url = "https://files.pythonhosted.org/packages/bc/49/c54baab2f4658c26ac633d798dab66b4c3a9bbf47cff5284e9c182f4137a/pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b", size = 1885092 }, - { url = "https://files.pythonhosted.org/packages/41/b1/9bc383f48f8002f99104e3acff6cba1231b29ef76cfa45d1506a5cad1f84/pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b", size = 1892709 }, - { url = "https://files.pythonhosted.org/packages/10/6c/e62b8657b834f3eb2961b49ec8e301eb99946245e70bf42c8817350cbefc/pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154", size = 1811273 }, - 
{ url = "https://files.pythonhosted.org/packages/ba/15/52cfe49c8c986e081b863b102d6b859d9defc63446b642ccbbb3742bf371/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9", size = 1823027 }, - { url = "https://files.pythonhosted.org/packages/b1/1c/b6f402cfc18ec0024120602bdbcebc7bdd5b856528c013bd4d13865ca473/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9", size = 1868888 }, - { url = "https://files.pythonhosted.org/packages/bd/7b/8cb75b66ac37bc2975a3b7de99f3c6f355fcc4d89820b61dffa8f1e81677/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1", size = 2037738 }, - { url = "https://files.pythonhosted.org/packages/c8/f1/786d8fe78970a06f61df22cba58e365ce304bf9b9f46cc71c8c424e0c334/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a", size = 2685138 }, - { url = "https://files.pythonhosted.org/packages/a6/74/d12b2cd841d8724dc8ffb13fc5cef86566a53ed358103150209ecd5d1999/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e", size = 1997025 }, - { url = "https://files.pythonhosted.org/packages/a0/6e/940bcd631bc4d9a06c9539b51f070b66e8f370ed0933f392db6ff350d873/pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4", size = 2004633 }, - { url = "https://files.pythonhosted.org/packages/50/cc/a46b34f1708d82498c227d5d80ce615b2dd502ddcfd8376fc14a36655af1/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = 
"sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27", size = 1999404 }, - { url = "https://files.pythonhosted.org/packages/ca/2d/c365cfa930ed23bc58c41463bae347d1005537dc8db79e998af8ba28d35e/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee", size = 2130130 }, - { url = "https://files.pythonhosted.org/packages/f4/d7/eb64d015c350b7cdb371145b54d96c919d4db516817f31cd1c650cae3b21/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1", size = 2157946 }, - { url = "https://files.pythonhosted.org/packages/a4/99/bddde3ddde76c03b65dfd5a66ab436c4e58ffc42927d4ff1198ffbf96f5f/pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130", size = 1834387 }, - { url = "https://files.pythonhosted.org/packages/71/47/82b5e846e01b26ac6f1893d3c5f9f3a2eb6ba79be26eef0b759b4fe72946/pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee", size = 1990453 }, - { url = "https://files.pythonhosted.org/packages/51/b2/b2b50d5ecf21acf870190ae5d093602d95f66c9c31f9d5de6062eb329ad1/pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b", size = 1885186 }, - { url = "https://files.pythonhosted.org/packages/46/72/af70981a341500419e67d5cb45abe552a7c74b66326ac8877588488da1ac/pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e", size = 1891159 }, - { url = "https://files.pythonhosted.org/packages/ad/3d/c5913cccdef93e0a6a95c2d057d2c2cba347815c845cda79ddd3c0f5e17d/pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8", size 
= 1768331 }, - { url = "https://files.pythonhosted.org/packages/f6/f0/a3ae8fbee269e4934f14e2e0e00928f9346c5943174f2811193113e58252/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3", size = 1822467 }, - { url = "https://files.pythonhosted.org/packages/d7/7a/7bbf241a04e9f9ea24cd5874354a83526d639b02674648af3f350554276c/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f", size = 1979797 }, - { url = "https://files.pythonhosted.org/packages/4f/5f/4784c6107731f89e0005a92ecb8a2efeafdb55eb992b8e9d0a2be5199335/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133", size = 1987839 }, - { url = "https://files.pythonhosted.org/packages/6d/a7/61246562b651dff00de86a5f01b6e4befb518df314c54dec187a78d81c84/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc", size = 1998861 }, - { url = "https://files.pythonhosted.org/packages/86/aa/837821ecf0c022bbb74ca132e117c358321e72e7f9702d1b6a03758545e2/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50", size = 2116582 }, - { url = "https://files.pythonhosted.org/packages/81/b0/5e74656e95623cbaa0a6278d16cf15e10a51f6002e3ec126541e95c29ea3/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9", size = 2151985 }, - { url = "https://files.pythonhosted.org/packages/63/37/3e32eeb2a451fddaa3898e2163746b0cffbbdbb4740d38372db0490d67f3/pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = 
"sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151", size = 2004715 }, -] - -[[package]] -name = "pydantic-settings" -version = "2.7.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pydantic" }, - { name = "python-dotenv" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/73/7b/c58a586cd7d9ac66d2ee4ba60ca2d241fa837c02bca9bea80a9a8c3d22a9/pydantic_settings-2.7.1.tar.gz", hash = "sha256:10c9caad35e64bfb3c2fbf70a078c0e25cc92499782e5200747f942a065dec93", size = 79920 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b4/46/93416fdae86d40879714f72956ac14df9c7b76f7d41a4d68aa9f71a0028b/pydantic_settings-2.7.1-py3-none-any.whl", hash = "sha256:590be9e6e24d06db33a4262829edef682500ef008565a969c73d39d5f8bfb3fd", size = 29718 }, -] - -[[package]] -name = "pyright" -version = "1.1.392.post0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "nodeenv" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/66/df/3c6f6b08fba7ccf49b114dfc4bb33e25c299883fd763f93fad47ef8bc58d/pyright-1.1.392.post0.tar.gz", hash = "sha256:3b7f88de74a28dcfa90c7d90c782b6569a48c2be5f9d4add38472bdaac247ebd", size = 3789911 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/b1/a18de17f40e4f61ca58856b9ef9b0febf74ff88978c3f7776f910071f567/pyright-1.1.392.post0-py3-none-any.whl", hash = "sha256:252f84458a46fa2f0fd4e2f91fc74f50b9ca52c757062e93f6c250c0d8329eb2", size = 5595487 }, -] - -[[package]] -name = "pytest" -version = "8.3.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, - { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, - { name = "iniconfig" }, - { name = "packaging" }, - { name = "pluggy" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/05/35/30e0d83068951d90a01852cb1cef56e5d8a09d20c7f511634cc2f7e0372a/pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761", size = 1445919 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/11/92/76a1c94d3afee238333bc0a42b82935dd8f9cf8ce9e336ff87ee14d9e1cf/pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6", size = 343083 }, -] - -[[package]] -name = "python-dotenv" -version = "1.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bc/57/e84d88dfe0aec03b7a2d4327012c1627ab5f03652216c63d49846d7a6c58/python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", size = 39115 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863 }, -] - -[[package]] -name = "requests" -version = "2.32.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "charset-normalizer" }, - { name = "idna" }, - { name = "urllib3" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 }, -] - -[[package]] -name = "ruff" -version = "0.9.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/80/63/77ecca9d21177600f551d1c58ab0e5a0b260940ea7312195bd2a4798f8a8/ruff-0.9.2.tar.gz", hash = "sha256:b5eceb334d55fae5f316f783437392642ae18e16dcf4f1858d55d3c2a0f8f5d0", size = 3553799 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/af/b9/0e168e4e7fb3af851f739e8f07889b91d1a33a30fca8c29fa3149d6b03ec/ruff-0.9.2-py3-none-linux_armv6l.whl", hash = "sha256:80605a039ba1454d002b32139e4970becf84b5fee3a3c3bf1c2af6f61a784347", size = 11652408 }, - { url = "https://files.pythonhosted.org/packages/2c/22/08ede5db17cf701372a461d1cb8fdde037da1d4fa622b69ac21960e6237e/ruff-0.9.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b9aab82bb20afd5f596527045c01e6ae25a718ff1784cb92947bff1f83068b00", size = 11587553 }, - { url = "https://files.pythonhosted.org/packages/42/05/dedfc70f0bf010230229e33dec6e7b2235b2a1b8cbb2a991c710743e343f/ruff-0.9.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fbd337bac1cfa96be615f6efcd4bc4d077edbc127ef30e2b8ba2a27e18c054d4", size = 11020755 }, - { url = "https://files.pythonhosted.org/packages/df/9b/65d87ad9b2e3def67342830bd1af98803af731243da1255537ddb8f22209/ruff-0.9.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82b35259b0cbf8daa22a498018e300b9bb0174c2bbb7bcba593935158a78054d", size = 11826502 }, - { url = "https://files.pythonhosted.org/packages/93/02/f2239f56786479e1a89c3da9bc9391120057fc6f4a8266a5b091314e72ce/ruff-0.9.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b6a9701d1e371bf41dca22015c3f89769da7576884d2add7317ec1ec8cb9c3c", size = 11390562 }, - { url = "https://files.pythonhosted.org/packages/c9/37/d3a854dba9931f8cb1b2a19509bfe59e00875f48ade632e95aefcb7a0aee/ruff-0.9.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9cc53e68b3c5ae41e8faf83a3b89f4a5d7b2cb666dff4b366bb86ed2a85b481f", size = 12548968 }, - { url = 
"https://files.pythonhosted.org/packages/fa/c3/c7b812bb256c7a1d5553433e95980934ffa85396d332401f6b391d3c4569/ruff-0.9.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:8efd9da7a1ee314b910da155ca7e8953094a7c10d0c0a39bfde3fcfd2a015684", size = 13187155 }, - { url = "https://files.pythonhosted.org/packages/bd/5a/3c7f9696a7875522b66aa9bba9e326e4e5894b4366bd1dc32aa6791cb1ff/ruff-0.9.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3292c5a22ea9a5f9a185e2d131dc7f98f8534a32fb6d2ee7b9944569239c648d", size = 12704674 }, - { url = "https://files.pythonhosted.org/packages/be/d6/d908762257a96ce5912187ae9ae86792e677ca4f3dc973b71e7508ff6282/ruff-0.9.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a605fdcf6e8b2d39f9436d343d1f0ff70c365a1e681546de0104bef81ce88df", size = 14529328 }, - { url = "https://files.pythonhosted.org/packages/2d/c2/049f1e6755d12d9cd8823242fa105968f34ee4c669d04cac8cea51a50407/ruff-0.9.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c547f7f256aa366834829a08375c297fa63386cbe5f1459efaf174086b564247", size = 12385955 }, - { url = "https://files.pythonhosted.org/packages/91/5a/a9bdb50e39810bd9627074e42743b00e6dc4009d42ae9f9351bc3dbc28e7/ruff-0.9.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d18bba3d3353ed916e882521bc3e0af403949dbada344c20c16ea78f47af965e", size = 11810149 }, - { url = "https://files.pythonhosted.org/packages/e5/fd/57df1a0543182f79a1236e82a79c68ce210efb00e97c30657d5bdb12b478/ruff-0.9.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b338edc4610142355ccf6b87bd356729b62bf1bc152a2fad5b0c7dc04af77bfe", size = 11479141 }, - { url = "https://files.pythonhosted.org/packages/dc/16/bc3fd1d38974f6775fc152a0554f8c210ff80f2764b43777163c3c45d61b/ruff-0.9.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:492a5e44ad9b22a0ea98cf72e40305cbdaf27fac0d927f8bc9e1df316dcc96eb", size = 12014073 }, - { url = 
"https://files.pythonhosted.org/packages/47/6b/e4ca048a8f2047eb652e1e8c755f384d1b7944f69ed69066a37acd4118b0/ruff-0.9.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:af1e9e9fe7b1f767264d26b1075ac4ad831c7db976911fa362d09b2d0356426a", size = 12435758 }, - { url = "https://files.pythonhosted.org/packages/c2/40/4d3d6c979c67ba24cf183d29f706051a53c36d78358036a9cd21421582ab/ruff-0.9.2-py3-none-win32.whl", hash = "sha256:71cbe22e178c5da20e1514e1e01029c73dc09288a8028a5d3446e6bba87a5145", size = 9796916 }, - { url = "https://files.pythonhosted.org/packages/c3/ef/7f548752bdb6867e6939489c87fe4da489ab36191525fadc5cede2a6e8e2/ruff-0.9.2-py3-none-win_amd64.whl", hash = "sha256:c5e1d6abc798419cf46eed03f54f2e0c3adb1ad4b801119dedf23fcaf69b55b5", size = 10773080 }, - { url = "https://files.pythonhosted.org/packages/0e/4e/33df635528292bd2d18404e4daabcd74ca8a9853b2e1df85ed3d32d24362/ruff-0.9.2-py3-none-win_arm64.whl", hash = "sha256:a1b63fa24149918f8b37cef2ee6fff81f24f0d74b6f0bdc37bc3e1f2143e41c6", size = 10001738 }, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, -] - -[[package]] -name = "sse-starlette" -version = "2.2.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, - { name = "starlette" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/71/a4/80d2a11af59fe75b48230846989e93979c892d3a20016b42bb44edb9e398/sse_starlette-2.2.1.tar.gz", hash = 
"sha256:54470d5f19274aeed6b2d473430b08b4b379ea851d953b11d7f1c4a2c118b419", size = 17376 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d9/e0/5b8bd393f27f4a62461c5cf2479c75a2cc2ffa330976f9f00f5f6e4f50eb/sse_starlette-2.2.1-py3-none-any.whl", hash = "sha256:6410a3d3ba0c89e7675d4c273a301d64649c03a5ef1ca101f10b47f895fd0e99", size = 10120 }, -] - -[[package]] -name = "starlette" -version = "0.45.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/90/4f/e1c9f4ec3dae67a94c9285ed275355d5f7cf0f3a5c34538c8ae5412af550/starlette-0.45.2.tar.gz", hash = "sha256:bba1831d15ae5212b22feab2f218bab6ed3cd0fc2dc1d4442443bb1ee52260e0", size = 2574026 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/aa/ab/fe4f57c83620b39dfc9e7687ebad59129ff05170b99422105019d9a65eec/starlette-0.45.2-py3-none-any.whl", hash = "sha256:4daec3356fb0cb1e723a5235e5beaf375d2259af27532958e2d79df549dad9da", size = 71505 }, -] - -[[package]] -name = "tomli" -version = "2.2.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077 }, - { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429 }, - { url = 
"https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067 }, - { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030 }, - { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898 }, - { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894 }, - { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319 }, - { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273 }, - { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310 }, - { url = 
"https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309 }, - { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762 }, - { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453 }, - { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486 }, - { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349 }, - { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159 }, - { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243 }, - { url = 
"https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645 }, - { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584 }, - { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875 }, - { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418 }, - { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708 }, - { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582 }, - { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543 }, - { url = 
"https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691 }, - { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170 }, - { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530 }, - { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666 }, - { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954 }, - { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724 }, - { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383 }, - { url = 
"https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257 }, -] - -[[package]] -name = "typing-extensions" -version = "4.12.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 }, -] - -[[package]] -name = "urllib3" -version = "2.3.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/aa/63/e53da845320b757bf29ef6a9062f5c669fe997973f966045cb019c3f4b66/urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d", size = 307268 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/19/4ec628951a74043532ca2cf5d97b7b14863931476d117c471e8e2b1eb39f/urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df", size = 128369 }, -] - -[[package]] -name = "uvicorn" -version = "0.34.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click" }, - { name = "h11" }, - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/4b/4d/938bd85e5bf2edeec766267a5015ad969730bb91e31b44021dfe8b22df6c/uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9", size = 76568 } -wheels = [ 
- { url = "https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4", size = 62315 }, -] +version = 1 +requires-python = ">=3.10" + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, +] + +[[package]] +name = "anyio" +version = "4.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "idna" }, + { name = "sniffio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a3/73/199a98fc2dae33535d6b8e8e6ec01f8c1d76c9adb096c6b7d64823038cde/anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a", size = 181126 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/46/eb/e7f063ad1fec6b3178a3cd82d1a3c4de82cccf283fc42746168188e1cdd5/anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a", size = 96041 }, +] + +[[package]] +name = "certifi" +version = "2024.12.14" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/0f/bd/1d41ee578ce09523c81a15426705dd20969f5abf006d1afe8aeff0dd776a/certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db", size = 166010 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/32/8f6669fc4798494966bf446c8c4a162e0b5d893dff088afddf76414f70e1/certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56", size = 164927 }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/16/b0/572805e227f01586461c80e0fd25d65a2115599cc9dad142fee4b747c357/charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3", size = 123188 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0d/58/5580c1716040bc89206c77d8f74418caf82ce519aae06450393ca73475d1/charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de", size = 198013 }, + { url = "https://files.pythonhosted.org/packages/d0/11/00341177ae71c6f5159a08168bcb98c6e6d196d372c94511f9f6c9afe0c6/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176", size = 141285 }, + { url = "https://files.pythonhosted.org/packages/01/09/11d684ea5819e5a8f5100fb0b38cf8d02b514746607934134d31233e02c8/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037", size = 151449 }, + { url = "https://files.pythonhosted.org/packages/08/06/9f5a12939db324d905dc1f70591ae7d7898d030d7662f0d426e2286f68c9/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f", size = 143892 }, + { url = "https://files.pythonhosted.org/packages/93/62/5e89cdfe04584cb7f4d36003ffa2936681b03ecc0754f8e969c2becb7e24/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a", size = 146123 }, + { url = "https://files.pythonhosted.org/packages/a9/ac/ab729a15c516da2ab70a05f8722ecfccc3f04ed7a18e45c75bbbaa347d61/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a", size = 147943 }, + { url = "https://files.pythonhosted.org/packages/03/d2/3f392f23f042615689456e9a274640c1d2e5dd1d52de36ab8f7955f8f050/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247", size = 142063 }, + { url = "https://files.pythonhosted.org/packages/f2/e3/e20aae5e1039a2cd9b08d9205f52142329f887f8cf70da3650326670bddf/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408", size = 150578 }, + { url = "https://files.pythonhosted.org/packages/8d/af/779ad72a4da0aed925e1139d458adc486e61076d7ecdcc09e610ea8678db/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb", size = 153629 }, + { url = "https://files.pythonhosted.org/packages/c2/b6/7aa450b278e7aa92cf7732140bfd8be21f5f29d5bf334ae987c945276639/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d", size = 150778 }, + { url = 
"https://files.pythonhosted.org/packages/39/f4/d9f4f712d0951dcbfd42920d3db81b00dd23b6ab520419626f4023334056/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807", size = 146453 }, + { url = "https://files.pythonhosted.org/packages/49/2b/999d0314e4ee0cff3cb83e6bc9aeddd397eeed693edb4facb901eb8fbb69/charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f", size = 95479 }, + { url = "https://files.pythonhosted.org/packages/2d/ce/3cbed41cff67e455a386fb5e5dd8906cdda2ed92fbc6297921f2e4419309/charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f", size = 102790 }, + { url = "https://files.pythonhosted.org/packages/72/80/41ef5d5a7935d2d3a773e3eaebf0a9350542f2cab4eac59a7a4741fbbbbe/charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125", size = 194995 }, + { url = "https://files.pythonhosted.org/packages/7a/28/0b9fefa7b8b080ec492110af6d88aa3dea91c464b17d53474b6e9ba5d2c5/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1", size = 139471 }, + { url = "https://files.pythonhosted.org/packages/71/64/d24ab1a997efb06402e3fc07317e94da358e2585165930d9d59ad45fcae2/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3", size = 149831 }, + { url = "https://files.pythonhosted.org/packages/37/ed/be39e5258e198655240db5e19e0b11379163ad7070962d6b0c87ed2c4d39/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd", size = 142335 }, 
+ { url = "https://files.pythonhosted.org/packages/88/83/489e9504711fa05d8dde1574996408026bdbdbd938f23be67deebb5eca92/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00", size = 143862 }, + { url = "https://files.pythonhosted.org/packages/c6/c7/32da20821cf387b759ad24627a9aca289d2822de929b8a41b6241767b461/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12", size = 145673 }, + { url = "https://files.pythonhosted.org/packages/68/85/f4288e96039abdd5aeb5c546fa20a37b50da71b5cf01e75e87f16cd43304/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77", size = 140211 }, + { url = "https://files.pythonhosted.org/packages/28/a3/a42e70d03cbdabc18997baf4f0227c73591a08041c149e710045c281f97b/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146", size = 148039 }, + { url = "https://files.pythonhosted.org/packages/85/e4/65699e8ab3014ecbe6f5c71d1a55d810fb716bbfd74f6283d5c2aa87febf/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd", size = 151939 }, + { url = "https://files.pythonhosted.org/packages/b1/82/8e9fe624cc5374193de6860aba3ea8070f584c8565ee77c168ec13274bd2/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6", size = 149075 }, + { url = "https://files.pythonhosted.org/packages/3d/7b/82865ba54c765560c8433f65e8acb9217cb839a9e32b42af4aa8e945870f/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8", size = 144340 }, + { url = "https://files.pythonhosted.org/packages/b5/b6/9674a4b7d4d99a0d2df9b215da766ee682718f88055751e1e5e753c82db0/charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b", size = 95205 }, + { url = "https://files.pythonhosted.org/packages/1e/ab/45b180e175de4402dcf7547e4fb617283bae54ce35c27930a6f35b6bef15/charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76", size = 102441 }, + { url = "https://files.pythonhosted.org/packages/0a/9a/dd1e1cdceb841925b7798369a09279bd1cf183cef0f9ddf15a3a6502ee45/charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545", size = 196105 }, + { url = "https://files.pythonhosted.org/packages/d3/8c/90bfabf8c4809ecb648f39794cf2a84ff2e7d2a6cf159fe68d9a26160467/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7", size = 140404 }, + { url = "https://files.pythonhosted.org/packages/ad/8f/e410d57c721945ea3b4f1a04b74f70ce8fa800d393d72899f0a40526401f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757", size = 150423 }, + { url = "https://files.pythonhosted.org/packages/f0/b8/e6825e25deb691ff98cf5c9072ee0605dc2acfca98af70c2d1b1bc75190d/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa", size = 143184 }, + { url = 
"https://files.pythonhosted.org/packages/3e/a2/513f6cbe752421f16d969e32f3583762bfd583848b763913ddab8d9bfd4f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d", size = 145268 }, + { url = "https://files.pythonhosted.org/packages/74/94/8a5277664f27c3c438546f3eb53b33f5b19568eb7424736bdc440a88a31f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616", size = 147601 }, + { url = "https://files.pythonhosted.org/packages/7c/5f/6d352c51ee763623a98e31194823518e09bfa48be2a7e8383cf691bbb3d0/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b", size = 141098 }, + { url = "https://files.pythonhosted.org/packages/78/d4/f5704cb629ba5ab16d1d3d741396aec6dc3ca2b67757c45b0599bb010478/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d", size = 149520 }, + { url = "https://files.pythonhosted.org/packages/c5/96/64120b1d02b81785f222b976c0fb79a35875457fa9bb40827678e54d1bc8/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a", size = 152852 }, + { url = "https://files.pythonhosted.org/packages/84/c9/98e3732278a99f47d487fd3468bc60b882920cef29d1fa6ca460a1fdf4e6/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9", size = 150488 }, + { url = "https://files.pythonhosted.org/packages/13/0e/9c8d4cb99c98c1007cc11eda969ebfe837bbbd0acdb4736d228ccaabcd22/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1", size = 146192 }, + { url = "https://files.pythonhosted.org/packages/b2/21/2b6b5b860781a0b49427309cb8670785aa543fb2178de875b87b9cc97746/charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35", size = 95550 }, + { url = "https://files.pythonhosted.org/packages/21/5b/1b390b03b1d16c7e382b561c5329f83cc06623916aab983e8ab9239c7d5c/charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f", size = 102785 }, + { url = "https://files.pythonhosted.org/packages/38/94/ce8e6f63d18049672c76d07d119304e1e2d7c6098f0841b51c666e9f44a0/charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda", size = 195698 }, + { url = "https://files.pythonhosted.org/packages/24/2e/dfdd9770664aae179a96561cc6952ff08f9a8cd09a908f259a9dfa063568/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313", size = 140162 }, + { url = "https://files.pythonhosted.org/packages/24/4e/f646b9093cff8fc86f2d60af2de4dc17c759de9d554f130b140ea4738ca6/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9", size = 150263 }, + { url = "https://files.pythonhosted.org/packages/5e/67/2937f8d548c3ef6e2f9aab0f6e21001056f692d43282b165e7c56023e6dd/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b", size = 142966 }, + { url = 
"https://files.pythonhosted.org/packages/52/ed/b7f4f07de100bdb95c1756d3a4d17b90c1a3c53715c1a476f8738058e0fa/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11", size = 144992 }, + { url = "https://files.pythonhosted.org/packages/96/2c/d49710a6dbcd3776265f4c923bb73ebe83933dfbaa841c5da850fe0fd20b/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f", size = 147162 }, + { url = "https://files.pythonhosted.org/packages/b4/41/35ff1f9a6bd380303dea55e44c4933b4cc3c4850988927d4082ada230273/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd", size = 140972 }, + { url = "https://files.pythonhosted.org/packages/fb/43/c6a0b685fe6910d08ba971f62cd9c3e862a85770395ba5d9cad4fede33ab/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2", size = 149095 }, + { url = "https://files.pythonhosted.org/packages/4c/ff/a9a504662452e2d2878512115638966e75633519ec11f25fca3d2049a94a/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886", size = 152668 }, + { url = "https://files.pythonhosted.org/packages/6c/71/189996b6d9a4b932564701628af5cee6716733e9165af1d5e1b285c530ed/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601", size = 150073 }, + { url = "https://files.pythonhosted.org/packages/e4/93/946a86ce20790e11312c87c75ba68d5f6ad2208cfb52b2d6a2c32840d922/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd", size = 145732 }, + { url = "https://files.pythonhosted.org/packages/cd/e5/131d2fb1b0dddafc37be4f3a2fa79aa4c037368be9423061dccadfd90091/charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407", size = 95391 }, + { url = "https://files.pythonhosted.org/packages/27/f2/4f9a69cc7712b9b5ad8fdb87039fd89abba997ad5cbe690d1835d40405b0/charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971", size = 102702 }, + { url = "https://files.pythonhosted.org/packages/0e/f6/65ecc6878a89bb1c23a086ea335ad4bf21a588990c3f535a227b9eea9108/charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", size = 49767 }, +] + +[[package]] +name = "click" +version = "8.1.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "platform_system == 'Windows'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188 }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/09/35/2495c4ac46b980e4ca1f6ad6db102322ef3ad2410b79fdde159a4b0f3b92/exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc", size = 28883 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", size = 16453 }, +] + +[[package]] +name = "h11" +version = "0.14.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f5/38/3af3d3633a34a3316095b39c8e8fb4853a28a536e55d347bd8d8e9a14b03/h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", size = 100418 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/04/ff642e65ad6b90db43e668d70ffb6736436c7ce41fcc549f4e9472234127/h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761", size = 58259 }, +] + +[[package]] +name = "httpcore" +version = "1.0.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6a/41/d7d0a89eb493922c37d343b607bc1b5da7f5be7e383740b4753ad8943e90/httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c", size = 85196 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/87/f5/72347bc88306acb359581ac4d52f23c0ef445b57157adedb9aee0cd689d2/httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd", size = 78551 }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517 }, +] + +[[package]] +name = "httpx-sse" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/60/8f4281fa9bbf3c8034fd54c0e7412e66edbab6bc74c4996bd616f8d0406e/httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721", size = 12624 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e1/9b/a181f281f65d776426002f330c31849b86b31fc9d848db62e16f03ff739f/httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f", size = 7819 }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892 }, +] + +[[package]] +name = "mcp" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "httpx" }, + { name = "httpx-sse" }, + { name = "pydantic" }, + { name = "pydantic-settings" }, + { name = "sse-starlette" }, + { name = "starlette" }, + { name = "uvicorn" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ab/a5/b08dc846ebedae9f17ced878e6975826e90e448cd4592f532f6a88a925a7/mcp-1.2.0.tar.gz", hash = "sha256:2b06c7ece98d6ea9e6379caa38d74b432385c338fb530cb82e2c70ea7add94f5", size = 102973 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/84/fca78f19ac8ce6c53ba416247c71baa53a9e791e98d3c81edbc20a77d6d1/mcp-1.2.0-py3-none-any.whl", hash = "sha256:1d0e77d8c14955a5aea1f5aa1f444c8e531c09355c829b20e42f7a142bc0755f", size = 66468 }, +] + +[[package]] +name = "mcp-simple-chatbot" +version = "0.1.0" +source = { editable = "." 
} +dependencies = [ + { name = "mcp" }, + { name = "python-dotenv" }, + { name = "requests" }, + { name = "uvicorn" }, +] + +[package.dev-dependencies] +dev = [ + { name = "pyright" }, + { name = "pytest" }, + { name = "ruff" }, +] + +[package.metadata] +requires-dist = [ + { name = "mcp", specifier = ">=1.0.0" }, + { name = "python-dotenv", specifier = ">=1.0.0" }, + { name = "requests", specifier = ">=2.31.0" }, + { name = "uvicorn", specifier = ">=0.32.1" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "pyright", specifier = ">=1.1.379" }, + { name = "pytest", specifier = ">=8.3.3" }, + { name = "ruff", specifier = ">=0.6.9" }, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314 }, +] + +[[package]] +name = "packaging" +version = "24.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", size = 163950 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451 }, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { 
url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 }, +] + +[[package]] +name = "pydantic" +version = "2.10.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6a/c7/ca334c2ef6f2e046b1144fe4bb2a5da8a4c574e7f2ebf7e16b34a6a2fa92/pydantic-2.10.5.tar.gz", hash = "sha256:278b38dbbaec562011d659ee05f63346951b3a248a6f3642e1bc68894ea2b4ff", size = 761287 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/58/26/82663c79010b28eddf29dcdd0ea723439535fa917fce5905885c0e9ba562/pydantic-2.10.5-py3-none-any.whl", hash = "sha256:4dd4e322dbe55472cb7ca7e73f4b63574eecccf2835ffa2af9021ce113c83c53", size = 431426 }, +] + +[[package]] +name = "pydantic-core" +version = "2.27.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/01/f3e5ac5e7c25833db5eb555f7b7ab24cd6f8c322d3a3ad2d67a952dc0abc/pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39", size = 413443 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/bc/fed5f74b5d802cf9a03e83f60f18864e90e3aed7223adaca5ffb7a8d8d64/pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa", size = 1895938 }, + { url = 
"https://files.pythonhosted.org/packages/71/2a/185aff24ce844e39abb8dd680f4e959f0006944f4a8a0ea372d9f9ae2e53/pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c", size = 1815684 }, + { url = "https://files.pythonhosted.org/packages/c3/43/fafabd3d94d159d4f1ed62e383e264f146a17dd4d48453319fd782e7979e/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a", size = 1829169 }, + { url = "https://files.pythonhosted.org/packages/a2/d1/f2dfe1a2a637ce6800b799aa086d079998959f6f1215eb4497966efd2274/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5", size = 1867227 }, + { url = "https://files.pythonhosted.org/packages/7d/39/e06fcbcc1c785daa3160ccf6c1c38fea31f5754b756e34b65f74e99780b5/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c", size = 2037695 }, + { url = "https://files.pythonhosted.org/packages/7a/67/61291ee98e07f0650eb756d44998214231f50751ba7e13f4f325d95249ab/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7", size = 2741662 }, + { url = "https://files.pythonhosted.org/packages/32/90/3b15e31b88ca39e9e626630b4c4a1f5a0dfd09076366f4219429e6786076/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a", size = 1993370 }, + { url = "https://files.pythonhosted.org/packages/ff/83/c06d333ee3a67e2e13e07794995c1535565132940715931c1c43bfc85b11/pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236", size = 1996813 }, + { url = "https://files.pythonhosted.org/packages/7c/f7/89be1c8deb6e22618a74f0ca0d933fdcb8baa254753b26b25ad3acff8f74/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962", size = 2005287 }, + { url = "https://files.pythonhosted.org/packages/b7/7d/8eb3e23206c00ef7feee17b83a4ffa0a623eb1a9d382e56e4aa46fd15ff2/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9", size = 2128414 }, + { url = "https://files.pythonhosted.org/packages/4e/99/fe80f3ff8dd71a3ea15763878d464476e6cb0a2db95ff1c5c554133b6b83/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af", size = 2155301 }, + { url = "https://files.pythonhosted.org/packages/2b/a3/e50460b9a5789ca1451b70d4f52546fa9e2b420ba3bfa6100105c0559238/pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4", size = 1816685 }, + { url = "https://files.pythonhosted.org/packages/57/4c/a8838731cb0f2c2a39d3535376466de6049034d7b239c0202a64aaa05533/pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31", size = 1982876 }, + { url = "https://files.pythonhosted.org/packages/c2/89/f3450af9d09d44eea1f2c369f49e8f181d742f28220f88cc4dfaae91ea6e/pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc", size = 1893421 }, + { url = "https://files.pythonhosted.org/packages/9e/e3/71fe85af2021f3f386da42d291412e5baf6ce7716bd7101ea49c810eda90/pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7", size = 
1814998 }, + { url = "https://files.pythonhosted.org/packages/a6/3c/724039e0d848fd69dbf5806894e26479577316c6f0f112bacaf67aa889ac/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15", size = 1826167 }, + { url = "https://files.pythonhosted.org/packages/2b/5b/1b29e8c1fb5f3199a9a57c1452004ff39f494bbe9bdbe9a81e18172e40d3/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306", size = 1865071 }, + { url = "https://files.pythonhosted.org/packages/89/6c/3985203863d76bb7d7266e36970d7e3b6385148c18a68cc8915fd8c84d57/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99", size = 2036244 }, + { url = "https://files.pythonhosted.org/packages/0e/41/f15316858a246b5d723f7d7f599f79e37493b2e84bfc789e58d88c209f8a/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459", size = 2737470 }, + { url = "https://files.pythonhosted.org/packages/a8/7c/b860618c25678bbd6d1d99dbdfdf0510ccb50790099b963ff78a124b754f/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048", size = 1992291 }, + { url = "https://files.pythonhosted.org/packages/bf/73/42c3742a391eccbeab39f15213ecda3104ae8682ba3c0c28069fbcb8c10d/pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d", size = 1994613 }, + { url = "https://files.pythonhosted.org/packages/94/7a/941e89096d1175d56f59340f3a8ebaf20762fef222c298ea96d36a6328c5/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b", size = 2002355 }, + { url = "https://files.pythonhosted.org/packages/6e/95/2359937a73d49e336a5a19848713555605d4d8d6940c3ec6c6c0ca4dcf25/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474", size = 2126661 }, + { url = "https://files.pythonhosted.org/packages/2b/4c/ca02b7bdb6012a1adef21a50625b14f43ed4d11f1fc237f9d7490aa5078c/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6", size = 2153261 }, + { url = "https://files.pythonhosted.org/packages/72/9d/a241db83f973049a1092a079272ffe2e3e82e98561ef6214ab53fe53b1c7/pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c", size = 1812361 }, + { url = "https://files.pythonhosted.org/packages/e8/ef/013f07248041b74abd48a385e2110aa3a9bbfef0fbd97d4e6d07d2f5b89a/pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc", size = 1982484 }, + { url = "https://files.pythonhosted.org/packages/10/1c/16b3a3e3398fd29dca77cea0a1d998d6bde3902fa2706985191e2313cc76/pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4", size = 1867102 }, + { url = "https://files.pythonhosted.org/packages/d6/74/51c8a5482ca447871c93e142d9d4a92ead74de6c8dc5e66733e22c9bba89/pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0", size = 1893127 }, + { url = "https://files.pythonhosted.org/packages/d3/f3/c97e80721735868313c58b89d2de85fa80fe8dfeeed84dc51598b92a135e/pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef", size = 1811340 }, + 
{ url = "https://files.pythonhosted.org/packages/9e/91/840ec1375e686dbae1bd80a9e46c26a1e0083e1186abc610efa3d9a36180/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7", size = 1822900 }, + { url = "https://files.pythonhosted.org/packages/f6/31/4240bc96025035500c18adc149aa6ffdf1a0062a4b525c932065ceb4d868/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934", size = 1869177 }, + { url = "https://files.pythonhosted.org/packages/fa/20/02fbaadb7808be578317015c462655c317a77a7c8f0ef274bc016a784c54/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6", size = 2038046 }, + { url = "https://files.pythonhosted.org/packages/06/86/7f306b904e6c9eccf0668248b3f272090e49c275bc488a7b88b0823444a4/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c", size = 2685386 }, + { url = "https://files.pythonhosted.org/packages/8d/f0/49129b27c43396581a635d8710dae54a791b17dfc50c70164866bbf865e3/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2", size = 1997060 }, + { url = "https://files.pythonhosted.org/packages/0d/0f/943b4af7cd416c477fd40b187036c4f89b416a33d3cc0ab7b82708a667aa/pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4", size = 2004870 }, + { url = "https://files.pythonhosted.org/packages/35/40/aea70b5b1a63911c53a4c8117c0a828d6790483f858041f47bab0b779f44/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3", size = 1999822 }, + { url = "https://files.pythonhosted.org/packages/f2/b3/807b94fd337d58effc5498fd1a7a4d9d59af4133e83e32ae39a96fddec9d/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4", size = 2130364 }, + { url = "https://files.pythonhosted.org/packages/fc/df/791c827cd4ee6efd59248dca9369fb35e80a9484462c33c6649a8d02b565/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57", size = 2158303 }, + { url = "https://files.pythonhosted.org/packages/9b/67/4e197c300976af185b7cef4c02203e175fb127e414125916bf1128b639a9/pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc", size = 1834064 }, + { url = "https://files.pythonhosted.org/packages/1f/ea/cd7209a889163b8dcca139fe32b9687dd05249161a3edda62860430457a5/pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9", size = 1989046 }, + { url = "https://files.pythonhosted.org/packages/bc/49/c54baab2f4658c26ac633d798dab66b4c3a9bbf47cff5284e9c182f4137a/pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b", size = 1885092 }, + { url = "https://files.pythonhosted.org/packages/41/b1/9bc383f48f8002f99104e3acff6cba1231b29ef76cfa45d1506a5cad1f84/pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b", size = 1892709 }, + { url = "https://files.pythonhosted.org/packages/10/6c/e62b8657b834f3eb2961b49ec8e301eb99946245e70bf42c8817350cbefc/pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154", size = 1811273 }, + 
{ url = "https://files.pythonhosted.org/packages/ba/15/52cfe49c8c986e081b863b102d6b859d9defc63446b642ccbbb3742bf371/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9", size = 1823027 }, + { url = "https://files.pythonhosted.org/packages/b1/1c/b6f402cfc18ec0024120602bdbcebc7bdd5b856528c013bd4d13865ca473/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9", size = 1868888 }, + { url = "https://files.pythonhosted.org/packages/bd/7b/8cb75b66ac37bc2975a3b7de99f3c6f355fcc4d89820b61dffa8f1e81677/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1", size = 2037738 }, + { url = "https://files.pythonhosted.org/packages/c8/f1/786d8fe78970a06f61df22cba58e365ce304bf9b9f46cc71c8c424e0c334/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a", size = 2685138 }, + { url = "https://files.pythonhosted.org/packages/a6/74/d12b2cd841d8724dc8ffb13fc5cef86566a53ed358103150209ecd5d1999/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e", size = 1997025 }, + { url = "https://files.pythonhosted.org/packages/a0/6e/940bcd631bc4d9a06c9539b51f070b66e8f370ed0933f392db6ff350d873/pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4", size = 2004633 }, + { url = "https://files.pythonhosted.org/packages/50/cc/a46b34f1708d82498c227d5d80ce615b2dd502ddcfd8376fc14a36655af1/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = 
"sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27", size = 1999404 }, + { url = "https://files.pythonhosted.org/packages/ca/2d/c365cfa930ed23bc58c41463bae347d1005537dc8db79e998af8ba28d35e/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee", size = 2130130 }, + { url = "https://files.pythonhosted.org/packages/f4/d7/eb64d015c350b7cdb371145b54d96c919d4db516817f31cd1c650cae3b21/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1", size = 2157946 }, + { url = "https://files.pythonhosted.org/packages/a4/99/bddde3ddde76c03b65dfd5a66ab436c4e58ffc42927d4ff1198ffbf96f5f/pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130", size = 1834387 }, + { url = "https://files.pythonhosted.org/packages/71/47/82b5e846e01b26ac6f1893d3c5f9f3a2eb6ba79be26eef0b759b4fe72946/pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee", size = 1990453 }, + { url = "https://files.pythonhosted.org/packages/51/b2/b2b50d5ecf21acf870190ae5d093602d95f66c9c31f9d5de6062eb329ad1/pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b", size = 1885186 }, + { url = "https://files.pythonhosted.org/packages/46/72/af70981a341500419e67d5cb45abe552a7c74b66326ac8877588488da1ac/pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e", size = 1891159 }, + { url = "https://files.pythonhosted.org/packages/ad/3d/c5913cccdef93e0a6a95c2d057d2c2cba347815c845cda79ddd3c0f5e17d/pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8", size 
= 1768331 }, + { url = "https://files.pythonhosted.org/packages/f6/f0/a3ae8fbee269e4934f14e2e0e00928f9346c5943174f2811193113e58252/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3", size = 1822467 }, + { url = "https://files.pythonhosted.org/packages/d7/7a/7bbf241a04e9f9ea24cd5874354a83526d639b02674648af3f350554276c/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f", size = 1979797 }, + { url = "https://files.pythonhosted.org/packages/4f/5f/4784c6107731f89e0005a92ecb8a2efeafdb55eb992b8e9d0a2be5199335/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133", size = 1987839 }, + { url = "https://files.pythonhosted.org/packages/6d/a7/61246562b651dff00de86a5f01b6e4befb518df314c54dec187a78d81c84/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc", size = 1998861 }, + { url = "https://files.pythonhosted.org/packages/86/aa/837821ecf0c022bbb74ca132e117c358321e72e7f9702d1b6a03758545e2/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50", size = 2116582 }, + { url = "https://files.pythonhosted.org/packages/81/b0/5e74656e95623cbaa0a6278d16cf15e10a51f6002e3ec126541e95c29ea3/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9", size = 2151985 }, + { url = "https://files.pythonhosted.org/packages/63/37/3e32eeb2a451fddaa3898e2163746b0cffbbdbb4740d38372db0490d67f3/pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = 
"sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151", size = 2004715 }, +] + +[[package]] +name = "pydantic-settings" +version = "2.7.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "python-dotenv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/73/7b/c58a586cd7d9ac66d2ee4ba60ca2d241fa837c02bca9bea80a9a8c3d22a9/pydantic_settings-2.7.1.tar.gz", hash = "sha256:10c9caad35e64bfb3c2fbf70a078c0e25cc92499782e5200747f942a065dec93", size = 79920 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b4/46/93416fdae86d40879714f72956ac14df9c7b76f7d41a4d68aa9f71a0028b/pydantic_settings-2.7.1-py3-none-any.whl", hash = "sha256:590be9e6e24d06db33a4262829edef682500ef008565a969c73d39d5f8bfb3fd", size = 29718 }, +] + +[[package]] +name = "pyright" +version = "1.1.392.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nodeenv" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/df/3c6f6b08fba7ccf49b114dfc4bb33e25c299883fd763f93fad47ef8bc58d/pyright-1.1.392.post0.tar.gz", hash = "sha256:3b7f88de74a28dcfa90c7d90c782b6569a48c2be5f9d4add38472bdaac247ebd", size = 3789911 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/b1/a18de17f40e4f61ca58856b9ef9b0febf74ff88978c3f7776f910071f567/pyright-1.1.392.post0-py3-none-any.whl", hash = "sha256:252f84458a46fa2f0fd4e2f91fc74f50b9ca52c757062e93f6c250c0d8329eb2", size = 5595487 }, +] + +[[package]] +name = "pytest" +version = "8.3.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/05/35/30e0d83068951d90a01852cb1cef56e5d8a09d20c7f511634cc2f7e0372a/pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761", size = 1445919 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/92/76a1c94d3afee238333bc0a42b82935dd8f9cf8ce9e336ff87ee14d9e1cf/pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6", size = 343083 }, +] + +[[package]] +name = "python-dotenv" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bc/57/e84d88dfe0aec03b7a2d4327012c1627ab5f03652216c63d49846d7a6c58/python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", size = 39115 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863 }, +] + +[[package]] +name = "requests" +version = "2.32.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 }, +] + +[[package]] +name = "ruff" +version = "0.9.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/80/63/77ecca9d21177600f551d1c58ab0e5a0b260940ea7312195bd2a4798f8a8/ruff-0.9.2.tar.gz", hash = "sha256:b5eceb334d55fae5f316f783437392642ae18e16dcf4f1858d55d3c2a0f8f5d0", size = 3553799 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/b9/0e168e4e7fb3af851f739e8f07889b91d1a33a30fca8c29fa3149d6b03ec/ruff-0.9.2-py3-none-linux_armv6l.whl", hash = "sha256:80605a039ba1454d002b32139e4970becf84b5fee3a3c3bf1c2af6f61a784347", size = 11652408 }, + { url = "https://files.pythonhosted.org/packages/2c/22/08ede5db17cf701372a461d1cb8fdde037da1d4fa622b69ac21960e6237e/ruff-0.9.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b9aab82bb20afd5f596527045c01e6ae25a718ff1784cb92947bff1f83068b00", size = 11587553 }, + { url = "https://files.pythonhosted.org/packages/42/05/dedfc70f0bf010230229e33dec6e7b2235b2a1b8cbb2a991c710743e343f/ruff-0.9.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fbd337bac1cfa96be615f6efcd4bc4d077edbc127ef30e2b8ba2a27e18c054d4", size = 11020755 }, + { url = "https://files.pythonhosted.org/packages/df/9b/65d87ad9b2e3def67342830bd1af98803af731243da1255537ddb8f22209/ruff-0.9.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82b35259b0cbf8daa22a498018e300b9bb0174c2bbb7bcba593935158a78054d", size = 11826502 }, + { url = "https://files.pythonhosted.org/packages/93/02/f2239f56786479e1a89c3da9bc9391120057fc6f4a8266a5b091314e72ce/ruff-0.9.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b6a9701d1e371bf41dca22015c3f89769da7576884d2add7317ec1ec8cb9c3c", size = 11390562 }, + { url = "https://files.pythonhosted.org/packages/c9/37/d3a854dba9931f8cb1b2a19509bfe59e00875f48ade632e95aefcb7a0aee/ruff-0.9.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9cc53e68b3c5ae41e8faf83a3b89f4a5d7b2cb666dff4b366bb86ed2a85b481f", size = 12548968 }, + { url = 
"https://files.pythonhosted.org/packages/fa/c3/c7b812bb256c7a1d5553433e95980934ffa85396d332401f6b391d3c4569/ruff-0.9.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:8efd9da7a1ee314b910da155ca7e8953094a7c10d0c0a39bfde3fcfd2a015684", size = 13187155 }, + { url = "https://files.pythonhosted.org/packages/bd/5a/3c7f9696a7875522b66aa9bba9e326e4e5894b4366bd1dc32aa6791cb1ff/ruff-0.9.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3292c5a22ea9a5f9a185e2d131dc7f98f8534a32fb6d2ee7b9944569239c648d", size = 12704674 }, + { url = "https://files.pythonhosted.org/packages/be/d6/d908762257a96ce5912187ae9ae86792e677ca4f3dc973b71e7508ff6282/ruff-0.9.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a605fdcf6e8b2d39f9436d343d1f0ff70c365a1e681546de0104bef81ce88df", size = 14529328 }, + { url = "https://files.pythonhosted.org/packages/2d/c2/049f1e6755d12d9cd8823242fa105968f34ee4c669d04cac8cea51a50407/ruff-0.9.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c547f7f256aa366834829a08375c297fa63386cbe5f1459efaf174086b564247", size = 12385955 }, + { url = "https://files.pythonhosted.org/packages/91/5a/a9bdb50e39810bd9627074e42743b00e6dc4009d42ae9f9351bc3dbc28e7/ruff-0.9.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d18bba3d3353ed916e882521bc3e0af403949dbada344c20c16ea78f47af965e", size = 11810149 }, + { url = "https://files.pythonhosted.org/packages/e5/fd/57df1a0543182f79a1236e82a79c68ce210efb00e97c30657d5bdb12b478/ruff-0.9.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b338edc4610142355ccf6b87bd356729b62bf1bc152a2fad5b0c7dc04af77bfe", size = 11479141 }, + { url = "https://files.pythonhosted.org/packages/dc/16/bc3fd1d38974f6775fc152a0554f8c210ff80f2764b43777163c3c45d61b/ruff-0.9.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:492a5e44ad9b22a0ea98cf72e40305cbdaf27fac0d927f8bc9e1df316dcc96eb", size = 12014073 }, + { url = 
"https://files.pythonhosted.org/packages/47/6b/e4ca048a8f2047eb652e1e8c755f384d1b7944f69ed69066a37acd4118b0/ruff-0.9.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:af1e9e9fe7b1f767264d26b1075ac4ad831c7db976911fa362d09b2d0356426a", size = 12435758 }, + { url = "https://files.pythonhosted.org/packages/c2/40/4d3d6c979c67ba24cf183d29f706051a53c36d78358036a9cd21421582ab/ruff-0.9.2-py3-none-win32.whl", hash = "sha256:71cbe22e178c5da20e1514e1e01029c73dc09288a8028a5d3446e6bba87a5145", size = 9796916 }, + { url = "https://files.pythonhosted.org/packages/c3/ef/7f548752bdb6867e6939489c87fe4da489ab36191525fadc5cede2a6e8e2/ruff-0.9.2-py3-none-win_amd64.whl", hash = "sha256:c5e1d6abc798419cf46eed03f54f2e0c3adb1ad4b801119dedf23fcaf69b55b5", size = 10773080 }, + { url = "https://files.pythonhosted.org/packages/0e/4e/33df635528292bd2d18404e4daabcd74ca8a9853b2e1df85ed3d32d24362/ruff-0.9.2-py3-none-win_arm64.whl", hash = "sha256:a1b63fa24149918f8b37cef2ee6fff81f24f0d74b6f0bdc37bc3e1f2143e41c6", size = 10001738 }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, +] + +[[package]] +name = "sse-starlette" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "starlette" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/a4/80d2a11af59fe75b48230846989e93979c892d3a20016b42bb44edb9e398/sse_starlette-2.2.1.tar.gz", hash = 
"sha256:54470d5f19274aeed6b2d473430b08b4b379ea851d953b11d7f1c4a2c118b419", size = 17376 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/e0/5b8bd393f27f4a62461c5cf2479c75a2cc2ffa330976f9f00f5f6e4f50eb/sse_starlette-2.2.1-py3-none-any.whl", hash = "sha256:6410a3d3ba0c89e7675d4c273a301d64649c03a5ef1ca101f10b47f895fd0e99", size = 10120 }, +] + +[[package]] +name = "starlette" +version = "0.45.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/90/4f/e1c9f4ec3dae67a94c9285ed275355d5f7cf0f3a5c34538c8ae5412af550/starlette-0.45.2.tar.gz", hash = "sha256:bba1831d15ae5212b22feab2f218bab6ed3cd0fc2dc1d4442443bb1ee52260e0", size = 2574026 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/aa/ab/fe4f57c83620b39dfc9e7687ebad59129ff05170b99422105019d9a65eec/starlette-0.45.2-py3-none-any.whl", hash = "sha256:4daec3356fb0cb1e723a5235e5beaf375d2259af27532958e2d79df549dad9da", size = 71505 }, +] + +[[package]] +name = "tomli" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077 }, + { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429 }, + { url = 
"https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067 }, + { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030 }, + { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898 }, + { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894 }, + { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319 }, + { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273 }, + { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310 }, + { url = 
"https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309 }, + { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762 }, + { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453 }, + { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486 }, + { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349 }, + { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159 }, + { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243 }, + { url = 
"https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645 }, + { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584 }, + { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875 }, + { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418 }, + { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708 }, + { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582 }, + { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543 }, + { url = 
"https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691 }, + { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170 }, + { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530 }, + { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666 }, + { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954 }, + { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724 }, + { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383 }, + { url = 
"https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257 }, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 }, +] + +[[package]] +name = "urllib3" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/63/e53da845320b757bf29ef6a9062f5c669fe997973f966045cb019c3f4b66/urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d", size = 307268 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/19/4ec628951a74043532ca2cf5d97b7b14863931476d117c471e8e2b1eb39f/urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df", size = 128369 }, +] + +[[package]] +name = "uvicorn" +version = "0.34.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "h11" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4b/4d/938bd85e5bf2edeec766267a5015ad969730bb91e31b44021dfe8b22df6c/uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9", size = 76568 } +wheels = [ 
+ { url = "https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4", size = 62315 }, +] diff --git a/examples/fastmcp/complex_inputs.py b/examples/fastmcp/complex_inputs.py index e859165a9..3d1b1f479 100644 --- a/examples/fastmcp/complex_inputs.py +++ b/examples/fastmcp/complex_inputs.py @@ -1,30 +1,30 @@ -""" -FastMCP Complex inputs Example - -Demonstrates validation via pydantic with complex models. -""" - -from typing import Annotated - -from pydantic import BaseModel, Field - -from mcp.server.fastmcp import FastMCP - -mcp = FastMCP("Shrimp Tank") - - -class ShrimpTank(BaseModel): - class Shrimp(BaseModel): - name: Annotated[str, Field(max_length=10)] - - shrimp: list[Shrimp] - - -@mcp.tool() -def name_shrimp( - tank: ShrimpTank, - # You can use pydantic Field in function signatures for validation. - extra_names: Annotated[list[str], Field(max_length=10)], -) -> list[str]: - """List all shrimp names in the tank""" - return [shrimp.name for shrimp in tank.shrimp] + extra_names +""" +FastMCP Complex inputs Example + +Demonstrates validation via pydantic with complex models. +""" + +from typing import Annotated + +from pydantic import BaseModel, Field + +from mcp.server.fastmcp import FastMCP + +mcp = FastMCP("Shrimp Tank") + + +class ShrimpTank(BaseModel): + class Shrimp(BaseModel): + name: Annotated[str, Field(max_length=10)] + + shrimp: list[Shrimp] + + +@mcp.tool() +def name_shrimp( + tank: ShrimpTank, + # You can use pydantic Field in function signatures for validation. 
+ extra_names: Annotated[list[str], Field(max_length=10)], +) -> list[str]: + """List all shrimp names in the tank""" + return [shrimp.name for shrimp in tank.shrimp] + extra_names diff --git a/examples/fastmcp/desktop.py b/examples/fastmcp/desktop.py index 8fd71b263..ffc15c64a 100644 --- a/examples/fastmcp/desktop.py +++ b/examples/fastmcp/desktop.py @@ -1,25 +1,25 @@ -""" -FastMCP Desktop Example - -A simple example that exposes the desktop directory as a resource. -""" - -from pathlib import Path - -from mcp.server.fastmcp import FastMCP - -# Create server -mcp = FastMCP("Demo") - - -@mcp.resource("dir://desktop") -def desktop() -> list[str]: - """List the files in the user's desktop""" - desktop = Path.home() / "Desktop" - return [str(f) for f in desktop.iterdir()] - - -@mcp.tool() -def add(a: int, b: int) -> int: - """Add two numbers""" - return a + b +""" +FastMCP Desktop Example + +A simple example that exposes the desktop directory as a resource. +""" + +from pathlib import Path + +from mcp.server.fastmcp import FastMCP + +# Create server +mcp = FastMCP("Demo") + + +@mcp.resource("dir://desktop") +def desktop() -> list[str]: + """List the files in the user's desktop""" + desktop = Path.home() / "Desktop" + return [str(f) for f in desktop.iterdir()] + + +@mcp.tool() +def add(a: int, b: int) -> int: + """Add two numbers""" + return a + b diff --git a/examples/fastmcp/echo.py b/examples/fastmcp/echo.py index 7bdbcdce6..48833a2a3 100644 --- a/examples/fastmcp/echo.py +++ b/examples/fastmcp/echo.py @@ -1,30 +1,30 @@ -""" -FastMCP Echo Server -""" - -from mcp.server.fastmcp import FastMCP - -# Create server -mcp = FastMCP("Echo Server") - - -@mcp.tool() -def echo_tool(text: str) -> str: - """Echo the input text""" - return text - - -@mcp.resource("echo://static") -def echo_resource() -> str: - return "Echo!" 
- - -@mcp.resource("echo://{text}") -def echo_template(text: str) -> str: - """Echo the input text""" - return f"Echo: {text}" - - -@mcp.prompt("echo") -def echo_prompt(text: str) -> str: - return text +""" +FastMCP Echo Server +""" + +from mcp.server.fastmcp import FastMCP + +# Create server +mcp = FastMCP("Echo Server") + + +@mcp.tool() +def echo_tool(text: str) -> str: + """Echo the input text""" + return text + + +@mcp.resource("echo://static") +def echo_resource() -> str: + return "Echo!" + + +@mcp.resource("echo://{text}") +def echo_template(text: str) -> str: + """Echo the input text""" + return f"Echo: {text}" + + +@mcp.prompt("echo") +def echo_prompt(text: str) -> str: + return text diff --git a/examples/fastmcp/memory.py b/examples/fastmcp/memory.py index dbc890815..16ad524ba 100644 --- a/examples/fastmcp/memory.py +++ b/examples/fastmcp/memory.py @@ -1,349 +1,349 @@ -# /// script -# dependencies = ["pydantic-ai-slim[openai]", "asyncpg", "numpy", "pgvector"] -# /// - -# uv pip install 'pydantic-ai-slim[openai]' asyncpg numpy pgvector - -""" -Recursive memory system inspired by the human brain's clustering of memories. -Uses OpenAI's 'text-embedding-3-small' model and pgvector for efficient -similarity search. 
-""" - -import asyncio -import math -import os -from dataclasses import dataclass -from datetime import datetime, timezone -from pathlib import Path -from typing import Annotated, Self - -import asyncpg -import numpy as np -from openai import AsyncOpenAI -from pgvector.asyncpg import register_vector # Import register_vector -from pydantic import BaseModel, Field -from pydantic_ai import Agent - -from mcp.server.fastmcp import FastMCP - -MAX_DEPTH = 5 -SIMILARITY_THRESHOLD = 0.7 -DECAY_FACTOR = 0.99 -REINFORCEMENT_FACTOR = 1.1 - -DEFAULT_LLM_MODEL = "openai:gpt-4o" -DEFAULT_EMBEDDING_MODEL = "text-embedding-3-small" - -mcp = FastMCP( - "memory", - dependencies=[ - "pydantic-ai-slim[openai]", - "asyncpg", - "numpy", - "pgvector", - ], -) - -DB_DSN = "postgresql://postgres:postgres@localhost:54320/memory_db" -# reset memory with rm ~/.fastmcp/{USER}/memory/* -PROFILE_DIR = ( - Path.home() / ".fastmcp" / os.environ.get("USER", "anon") / "memory" -).resolve() -PROFILE_DIR.mkdir(parents=True, exist_ok=True) - - -def cosine_similarity(a: list[float], b: list[float]) -> float: - a_array = np.array(a, dtype=np.float64) - b_array = np.array(b, dtype=np.float64) - return np.dot(a_array, b_array) / ( - np.linalg.norm(a_array) * np.linalg.norm(b_array) - ) - - -async def do_ai[T]( - user_prompt: str, - system_prompt: str, - result_type: type[T] | Annotated, - deps=None, -) -> T: - agent = Agent( - DEFAULT_LLM_MODEL, - system_prompt=system_prompt, - result_type=result_type, - ) - result = await agent.run(user_prompt, deps=deps) - return result.data - - -@dataclass -class Deps: - openai: AsyncOpenAI - pool: asyncpg.Pool - - -async def get_db_pool() -> asyncpg.Pool: - async def init(conn): - await conn.execute("CREATE EXTENSION IF NOT EXISTS vector;") - await register_vector(conn) - - pool = await asyncpg.create_pool(DB_DSN, init=init) - return pool - - -class MemoryNode(BaseModel): - id: int | None = None - content: str - summary: str = "" - importance: float = 1.0 - 
access_count: int = 0 - timestamp: float = Field( - default_factory=lambda: datetime.now(timezone.utc).timestamp() - ) - embedding: list[float] - - @classmethod - async def from_content(cls, content: str, deps: Deps): - embedding = await get_embedding(content, deps) - return cls(content=content, embedding=embedding) - - async def save(self, deps: Deps): - async with deps.pool.acquire() as conn: - if self.id is None: - result = await conn.fetchrow( - """ - INSERT INTO memories (content, summary, importance, access_count, - timestamp, embedding) - VALUES ($1, $2, $3, $4, $5, $6) - RETURNING id - """, - self.content, - self.summary, - self.importance, - self.access_count, - self.timestamp, - self.embedding, - ) - self.id = result["id"] - else: - await conn.execute( - """ - UPDATE memories - SET content = $1, summary = $2, importance = $3, - access_count = $4, timestamp = $5, embedding = $6 - WHERE id = $7 - """, - self.content, - self.summary, - self.importance, - self.access_count, - self.timestamp, - self.embedding, - self.id, - ) - - async def merge_with(self, other: Self, deps: Deps): - self.content = await do_ai( - f"{self.content}\n\n{other.content}", - "Combine the following two texts into a single, coherent text.", - str, - deps, - ) - self.importance += other.importance - self.access_count += other.access_count - self.embedding = [(a + b) / 2 for a, b in zip(self.embedding, other.embedding)] - self.summary = await do_ai( - self.content, "Summarize the following text concisely.", str, deps - ) - await self.save(deps) - # Delete the merged node from the database - if other.id is not None: - await delete_memory(other.id, deps) - - def get_effective_importance(self): - return self.importance * (1 + math.log(self.access_count + 1)) - - -async def get_embedding(text: str, deps: Deps) -> list[float]: - embedding_response = await deps.openai.embeddings.create( - input=text, - model=DEFAULT_EMBEDDING_MODEL, - ) - return embedding_response.data[0].embedding - - -async 
def delete_memory(memory_id: int, deps: Deps): - async with deps.pool.acquire() as conn: - await conn.execute("DELETE FROM memories WHERE id = $1", memory_id) - - -async def add_memory(content: str, deps: Deps): - new_memory = await MemoryNode.from_content(content, deps) - await new_memory.save(deps) - - similar_memories = await find_similar_memories(new_memory.embedding, deps) - for memory in similar_memories: - if memory.id != new_memory.id: - await new_memory.merge_with(memory, deps) - - await update_importance(new_memory.embedding, deps) - - await prune_memories(deps) - - return f"Remembered: {content}" - - -async def find_similar_memories(embedding: list[float], deps: Deps) -> list[MemoryNode]: - async with deps.pool.acquire() as conn: - rows = await conn.fetch( - """ - SELECT id, content, summary, importance, access_count, timestamp, embedding - FROM memories - ORDER BY embedding <-> $1 - LIMIT 5 - """, - embedding, - ) - memories = [ - MemoryNode( - id=row["id"], - content=row["content"], - summary=row["summary"], - importance=row["importance"], - access_count=row["access_count"], - timestamp=row["timestamp"], - embedding=row["embedding"], - ) - for row in rows - ] - return memories - - -async def update_importance(user_embedding: list[float], deps: Deps): - async with deps.pool.acquire() as conn: - rows = await conn.fetch( - "SELECT id, importance, access_count, embedding FROM memories" - ) - for row in rows: - memory_embedding = row["embedding"] - similarity = cosine_similarity(user_embedding, memory_embedding) - if similarity > SIMILARITY_THRESHOLD: - new_importance = row["importance"] * REINFORCEMENT_FACTOR - new_access_count = row["access_count"] + 1 - else: - new_importance = row["importance"] * DECAY_FACTOR - new_access_count = row["access_count"] - await conn.execute( - """ - UPDATE memories - SET importance = $1, access_count = $2 - WHERE id = $3 - """, - new_importance, - new_access_count, - row["id"], - ) - - -async def prune_memories(deps: Deps): 
- async with deps.pool.acquire() as conn: - rows = await conn.fetch( - """ - SELECT id, importance, access_count - FROM memories - ORDER BY importance DESC - OFFSET $1 - """, - MAX_DEPTH, - ) - for row in rows: - await conn.execute("DELETE FROM memories WHERE id = $1", row["id"]) - - -async def display_memory_tree(deps: Deps) -> str: - async with deps.pool.acquire() as conn: - rows = await conn.fetch( - """ - SELECT content, summary, importance, access_count - FROM memories - ORDER BY importance DESC - LIMIT $1 - """, - MAX_DEPTH, - ) - result = "" - for row in rows: - effective_importance = row["importance"] * ( - 1 + math.log(row["access_count"] + 1) - ) - summary = row["summary"] or row["content"] - result += f"- {summary} (Importance: {effective_importance:.2f})\n" - return result - - -@mcp.tool() -async def remember( - contents: list[str] = Field( - description="List of observations or memories to store" - ), -): - deps = Deps(openai=AsyncOpenAI(), pool=await get_db_pool()) - try: - return "\n".join( - await asyncio.gather(*[add_memory(content, deps) for content in contents]) - ) - finally: - await deps.pool.close() - - -@mcp.tool() -async def read_profile() -> str: - deps = Deps(openai=AsyncOpenAI(), pool=await get_db_pool()) - profile = await display_memory_tree(deps) - await deps.pool.close() - return profile - - -async def initialize_database(): - pool = await asyncpg.create_pool( - "postgresql://postgres:postgres@localhost:54320/postgres" - ) - try: - async with pool.acquire() as conn: - await conn.execute(""" - SELECT pg_terminate_backend(pg_stat_activity.pid) - FROM pg_stat_activity - WHERE pg_stat_activity.datname = 'memory_db' - AND pid <> pg_backend_pid(); - """) - await conn.execute("DROP DATABASE IF EXISTS memory_db;") - await conn.execute("CREATE DATABASE memory_db;") - finally: - await pool.close() - - pool = await asyncpg.create_pool(DB_DSN) - try: - async with pool.acquire() as conn: - await conn.execute("CREATE EXTENSION IF NOT EXISTS 
vector;") - - await register_vector(conn) - - await conn.execute(""" - CREATE TABLE IF NOT EXISTS memories ( - id SERIAL PRIMARY KEY, - content TEXT NOT NULL, - summary TEXT, - importance REAL NOT NULL, - access_count INT NOT NULL, - timestamp DOUBLE PRECISION NOT NULL, - embedding vector(1536) NOT NULL - ); - CREATE INDEX IF NOT EXISTS idx_memories_embedding ON memories - USING hnsw (embedding vector_l2_ops); - """) - finally: - await pool.close() - - -if __name__ == "__main__": - asyncio.run(initialize_database()) +# /// script +# dependencies = ["pydantic-ai-slim[openai]", "asyncpg", "numpy", "pgvector"] +# /// + +# uv pip install 'pydantic-ai-slim[openai]' asyncpg numpy pgvector + +""" +Recursive memory system inspired by the human brain's clustering of memories. +Uses OpenAI's 'text-embedding-3-small' model and pgvector for efficient +similarity search. +""" + +import asyncio +import math +import os +from dataclasses import dataclass +from datetime import datetime, timezone +from pathlib import Path +from typing import Annotated, Self + +import asyncpg +import numpy as np +from openai import AsyncOpenAI +from pgvector.asyncpg import register_vector # Import register_vector +from pydantic import BaseModel, Field +from pydantic_ai import Agent + +from mcp.server.fastmcp import FastMCP + +MAX_DEPTH = 5 +SIMILARITY_THRESHOLD = 0.7 +DECAY_FACTOR = 0.99 +REINFORCEMENT_FACTOR = 1.1 + +DEFAULT_LLM_MODEL = "openai:gpt-4o" +DEFAULT_EMBEDDING_MODEL = "text-embedding-3-small" + +mcp = FastMCP( + "memory", + dependencies=[ + "pydantic-ai-slim[openai]", + "asyncpg", + "numpy", + "pgvector", + ], +) + +DB_DSN = "postgresql://postgres:postgres@localhost:54320/memory_db" +# reset memory with rm ~/.fastmcp/{USER}/memory/* +PROFILE_DIR = ( + Path.home() / ".fastmcp" / os.environ.get("USER", "anon") / "memory" +).resolve() +PROFILE_DIR.mkdir(parents=True, exist_ok=True) + + +def cosine_similarity(a: list[float], b: list[float]) -> float: + a_array = np.array(a, dtype=np.float64) 
+ b_array = np.array(b, dtype=np.float64) + return np.dot(a_array, b_array) / ( + np.linalg.norm(a_array) * np.linalg.norm(b_array) + ) + + +async def do_ai[T]( + user_prompt: str, + system_prompt: str, + result_type: type[T] | Annotated, + deps=None, +) -> T: + agent = Agent( + DEFAULT_LLM_MODEL, + system_prompt=system_prompt, + result_type=result_type, + ) + result = await agent.run(user_prompt, deps=deps) + return result.data + + +@dataclass +class Deps: + openai: AsyncOpenAI + pool: asyncpg.Pool + + +async def get_db_pool() -> asyncpg.Pool: + async def init(conn): + await conn.execute("CREATE EXTENSION IF NOT EXISTS vector;") + await register_vector(conn) + + pool = await asyncpg.create_pool(DB_DSN, init=init) + return pool + + +class MemoryNode(BaseModel): + id: int | None = None + content: str + summary: str = "" + importance: float = 1.0 + access_count: int = 0 + timestamp: float = Field( + default_factory=lambda: datetime.now(timezone.utc).timestamp() + ) + embedding: list[float] + + @classmethod + async def from_content(cls, content: str, deps: Deps): + embedding = await get_embedding(content, deps) + return cls(content=content, embedding=embedding) + + async def save(self, deps: Deps): + async with deps.pool.acquire() as conn: + if self.id is None: + result = await conn.fetchrow( + """ + INSERT INTO memories (content, summary, importance, access_count, + timestamp, embedding) + VALUES ($1, $2, $3, $4, $5, $6) + RETURNING id + """, + self.content, + self.summary, + self.importance, + self.access_count, + self.timestamp, + self.embedding, + ) + self.id = result["id"] + else: + await conn.execute( + """ + UPDATE memories + SET content = $1, summary = $2, importance = $3, + access_count = $4, timestamp = $5, embedding = $6 + WHERE id = $7 + """, + self.content, + self.summary, + self.importance, + self.access_count, + self.timestamp, + self.embedding, + self.id, + ) + + async def merge_with(self, other: Self, deps: Deps): + self.content = await do_ai( + 
f"{self.content}\n\n{other.content}", + "Combine the following two texts into a single, coherent text.", + str, + deps, + ) + self.importance += other.importance + self.access_count += other.access_count + self.embedding = [(a + b) / 2 for a, b in zip(self.embedding, other.embedding)] + self.summary = await do_ai( + self.content, "Summarize the following text concisely.", str, deps + ) + await self.save(deps) + # Delete the merged node from the database + if other.id is not None: + await delete_memory(other.id, deps) + + def get_effective_importance(self): + return self.importance * (1 + math.log(self.access_count + 1)) + + +async def get_embedding(text: str, deps: Deps) -> list[float]: + embedding_response = await deps.openai.embeddings.create( + input=text, + model=DEFAULT_EMBEDDING_MODEL, + ) + return embedding_response.data[0].embedding + + +async def delete_memory(memory_id: int, deps: Deps): + async with deps.pool.acquire() as conn: + await conn.execute("DELETE FROM memories WHERE id = $1", memory_id) + + +async def add_memory(content: str, deps: Deps): + new_memory = await MemoryNode.from_content(content, deps) + await new_memory.save(deps) + + similar_memories = await find_similar_memories(new_memory.embedding, deps) + for memory in similar_memories: + if memory.id != new_memory.id: + await new_memory.merge_with(memory, deps) + + await update_importance(new_memory.embedding, deps) + + await prune_memories(deps) + + return f"Remembered: {content}" + + +async def find_similar_memories(embedding: list[float], deps: Deps) -> list[MemoryNode]: + async with deps.pool.acquire() as conn: + rows = await conn.fetch( + """ + SELECT id, content, summary, importance, access_count, timestamp, embedding + FROM memories + ORDER BY embedding <-> $1 + LIMIT 5 + """, + embedding, + ) + memories = [ + MemoryNode( + id=row["id"], + content=row["content"], + summary=row["summary"], + importance=row["importance"], + access_count=row["access_count"], + timestamp=row["timestamp"], 
+ embedding=row["embedding"], + ) + for row in rows + ] + return memories + + +async def update_importance(user_embedding: list[float], deps: Deps): + async with deps.pool.acquire() as conn: + rows = await conn.fetch( + "SELECT id, importance, access_count, embedding FROM memories" + ) + for row in rows: + memory_embedding = row["embedding"] + similarity = cosine_similarity(user_embedding, memory_embedding) + if similarity > SIMILARITY_THRESHOLD: + new_importance = row["importance"] * REINFORCEMENT_FACTOR + new_access_count = row["access_count"] + 1 + else: + new_importance = row["importance"] * DECAY_FACTOR + new_access_count = row["access_count"] + await conn.execute( + """ + UPDATE memories + SET importance = $1, access_count = $2 + WHERE id = $3 + """, + new_importance, + new_access_count, + row["id"], + ) + + +async def prune_memories(deps: Deps): + async with deps.pool.acquire() as conn: + rows = await conn.fetch( + """ + SELECT id, importance, access_count + FROM memories + ORDER BY importance DESC + OFFSET $1 + """, + MAX_DEPTH, + ) + for row in rows: + await conn.execute("DELETE FROM memories WHERE id = $1", row["id"]) + + +async def display_memory_tree(deps: Deps) -> str: + async with deps.pool.acquire() as conn: + rows = await conn.fetch( + """ + SELECT content, summary, importance, access_count + FROM memories + ORDER BY importance DESC + LIMIT $1 + """, + MAX_DEPTH, + ) + result = "" + for row in rows: + effective_importance = row["importance"] * ( + 1 + math.log(row["access_count"] + 1) + ) + summary = row["summary"] or row["content"] + result += f"- {summary} (Importance: {effective_importance:.2f})\n" + return result + + +@mcp.tool() +async def remember( + contents: list[str] = Field( + description="List of observations or memories to store" + ), +): + deps = Deps(openai=AsyncOpenAI(), pool=await get_db_pool()) + try: + return "\n".join( + await asyncio.gather(*[add_memory(content, deps) for content in contents]) + ) + finally: + await 
deps.pool.close() + + +@mcp.tool() +async def read_profile() -> str: + deps = Deps(openai=AsyncOpenAI(), pool=await get_db_pool()) + profile = await display_memory_tree(deps) + await deps.pool.close() + return profile + + +async def initialize_database(): + pool = await asyncpg.create_pool( + "postgresql://postgres:postgres@localhost:54320/postgres" + ) + try: + async with pool.acquire() as conn: + await conn.execute(""" + SELECT pg_terminate_backend(pg_stat_activity.pid) + FROM pg_stat_activity + WHERE pg_stat_activity.datname = 'memory_db' + AND pid <> pg_backend_pid(); + """) + await conn.execute("DROP DATABASE IF EXISTS memory_db;") + await conn.execute("CREATE DATABASE memory_db;") + finally: + await pool.close() + + pool = await asyncpg.create_pool(DB_DSN) + try: + async with pool.acquire() as conn: + await conn.execute("CREATE EXTENSION IF NOT EXISTS vector;") + + await register_vector(conn) + + await conn.execute(""" + CREATE TABLE IF NOT EXISTS memories ( + id SERIAL PRIMARY KEY, + content TEXT NOT NULL, + summary TEXT, + importance REAL NOT NULL, + access_count INT NOT NULL, + timestamp DOUBLE PRECISION NOT NULL, + embedding vector(1536) NOT NULL + ); + CREATE INDEX IF NOT EXISTS idx_memories_embedding ON memories + USING hnsw (embedding vector_l2_ops); + """) + finally: + await pool.close() + + +if __name__ == "__main__": + asyncio.run(initialize_database()) diff --git a/examples/fastmcp/parameter_descriptions.py b/examples/fastmcp/parameter_descriptions.py index dc56e9182..111156073 100644 --- a/examples/fastmcp/parameter_descriptions.py +++ b/examples/fastmcp/parameter_descriptions.py @@ -1,21 +1,21 @@ -""" -FastMCP Example showing parameter descriptions -""" - -from pydantic import Field - -from mcp.server.fastmcp import FastMCP - -# Create server -mcp = FastMCP("Parameter Descriptions Server") - - -@mcp.tool() -def greet_user( - name: str = Field(description="The name of the person to greet"), - title: str = Field(description="Optional title like 
Mr/Ms/Dr", default=""), - times: int = Field(description="Number of times to repeat the greeting", default=1), -) -> str: - """Greet a user with optional title and repetition""" - greeting = f"Hello {title + ' ' if title else ''}{name}!" - return "\n".join([greeting] * times) +""" +FastMCP Example showing parameter descriptions +""" + +from pydantic import Field + +from mcp.server.fastmcp import FastMCP + +# Create server +mcp = FastMCP("Parameter Descriptions Server") + + +@mcp.tool() +def greet_user( + name: str = Field(description="The name of the person to greet"), + title: str = Field(description="Optional title like Mr/Ms/Dr", default=""), + times: int = Field(description="Number of times to repeat the greeting", default=1), +) -> str: + """Greet a user with optional title and repetition""" + greeting = f"Hello {title + ' ' if title else ''}{name}!" + return "\n".join([greeting] * times) diff --git a/examples/fastmcp/readme-quickstart.py b/examples/fastmcp/readme-quickstart.py index d1c522a81..252224ad8 100644 --- a/examples/fastmcp/readme-quickstart.py +++ b/examples/fastmcp/readme-quickstart.py @@ -1,18 +1,18 @@ -from mcp.server.fastmcp import FastMCP - -# Create an MCP server -mcp = FastMCP("Demo") - - -# Add an addition tool -@mcp.tool() -def add(a: int, b: int) -> int: - """Add two numbers""" - return a + b - - -# Add a dynamic greeting resource -@mcp.resource("greeting://{name}") -def get_greeting(name: str) -> str: - """Get a personalized greeting""" - return f"Hello, {name}!" +from mcp.server.fastmcp import FastMCP + +# Create an MCP server +mcp = FastMCP("Demo") + + +# Add an addition tool +@mcp.tool() +def add(a: int, b: int) -> int: + """Add two numbers""" + return a + b + + +# Add a dynamic greeting resource +@mcp.resource("greeting://{name}") +def get_greeting(name: str) -> str: + """Get a personalized greeting""" + return f"Hello, {name}!" 
diff --git a/examples/fastmcp/screenshot.py b/examples/fastmcp/screenshot.py index 694b49f2f..06c7bb123 100644 --- a/examples/fastmcp/screenshot.py +++ b/examples/fastmcp/screenshot.py @@ -1,29 +1,29 @@ -""" -FastMCP Screenshot Example - -Give Claude a tool to capture and view screenshots. -""" - -import io - -from mcp.server.fastmcp import FastMCP -from mcp.server.fastmcp.utilities.types import Image - -# Create server -mcp = FastMCP("Screenshot Demo", dependencies=["pyautogui", "Pillow"]) - - -@mcp.tool() -def take_screenshot() -> Image: - """ - Take a screenshot of the user's screen and return it as an image. Use - this tool anytime the user wants you to look at something they're doing. - """ - import pyautogui - - buffer = io.BytesIO() - - # if the file exceeds ~1MB, it will be rejected by Claude - screenshot = pyautogui.screenshot() - screenshot.convert("RGB").save(buffer, format="JPEG", quality=60, optimize=True) - return Image(data=buffer.getvalue(), format="jpeg") +""" +FastMCP Screenshot Example + +Give Claude a tool to capture and view screenshots. +""" + +import io + +from mcp.server.fastmcp import FastMCP +from mcp.server.fastmcp.utilities.types import Image + +# Create server +mcp = FastMCP("Screenshot Demo", dependencies=["pyautogui", "Pillow"]) + + +@mcp.tool() +def take_screenshot() -> Image: + """ + Take a screenshot of the user's screen and return it as an image. Use + this tool anytime the user wants you to look at something they're doing. 
+ """ + import pyautogui + + buffer = io.BytesIO() + + # if the file exceeds ~1MB, it will be rejected by Claude + screenshot = pyautogui.screenshot() + screenshot.convert("RGB").save(buffer, format="JPEG", quality=60, optimize=True) + return Image(data=buffer.getvalue(), format="jpeg") diff --git a/examples/fastmcp/simple_echo.py b/examples/fastmcp/simple_echo.py index c26152646..92015efa8 100644 --- a/examples/fastmcp/simple_echo.py +++ b/examples/fastmcp/simple_echo.py @@ -1,14 +1,14 @@ -""" -FastMCP Echo Server -""" - -from mcp.server.fastmcp import FastMCP - -# Create server -mcp = FastMCP("Echo Server") - - -@mcp.tool() -def echo(text: str) -> str: - """Echo the input text""" - return text +""" +FastMCP Echo Server +""" + +from mcp.server.fastmcp import FastMCP + +# Create server +mcp = FastMCP("Echo Server") + + +@mcp.tool() +def echo(text: str) -> str: + """Echo the input text""" + return text diff --git a/examples/fastmcp/text_me.py b/examples/fastmcp/text_me.py index 8053c6cc5..8d61762ab 100644 --- a/examples/fastmcp/text_me.py +++ b/examples/fastmcp/text_me.py @@ -1,72 +1,72 @@ -# /// script -# dependencies = [] -# /// - -""" -FastMCP Text Me Server --------------------------------- -This defines a simple FastMCP server that sends a text message to a phone number via https://surgemsg.com/. - -To run this example, create a `.env` file with the following values: - -SURGE_API_KEY=... -SURGE_ACCOUNT_ID=... -SURGE_MY_PHONE_NUMBER=... -SURGE_MY_FIRST_NAME=... -SURGE_MY_LAST_NAME=... - -Visit https://surgemsg.com/ and click "Get Started" to obtain these values. 
-""" - -from typing import Annotated - -import httpx -from pydantic import BeforeValidator -from pydantic_settings import BaseSettings, SettingsConfigDict - -from mcp.server.fastmcp import FastMCP - - -class SurgeSettings(BaseSettings): - model_config: SettingsConfigDict = SettingsConfigDict( - env_prefix="SURGE_", env_file=".env" - ) - - api_key: str - account_id: str - my_phone_number: Annotated[ - str, BeforeValidator(lambda v: "+" + v if not v.startswith("+") else v) - ] - my_first_name: str - my_last_name: str - - -# Create server -mcp = FastMCP("Text me") -surge_settings = SurgeSettings() # type: ignore - - -@mcp.tool(name="textme", description="Send a text message to me") -def text_me(text_content: str) -> str: - """Send a text message to a phone number via https://surgemsg.com/""" - with httpx.Client() as client: - response = client.post( - "https://api.surgemsg.com/messages", - headers={ - "Authorization": f"Bearer {surge_settings.api_key}", - "Surge-Account": surge_settings.account_id, - "Content-Type": "application/json", - }, - json={ - "body": text_content, - "conversation": { - "contact": { - "first_name": surge_settings.my_first_name, - "last_name": surge_settings.my_last_name, - "phone_number": surge_settings.my_phone_number, - } - }, - }, - ) - response.raise_for_status() - return f"Message sent: {text_content}" +# /// script +# dependencies = [] +# /// + +""" +FastMCP Text Me Server +-------------------------------- +This defines a simple FastMCP server that sends a text message to a phone number via https://surgemsg.com/. + +To run this example, create a `.env` file with the following values: + +SURGE_API_KEY=... +SURGE_ACCOUNT_ID=... +SURGE_MY_PHONE_NUMBER=... +SURGE_MY_FIRST_NAME=... +SURGE_MY_LAST_NAME=... + +Visit https://surgemsg.com/ and click "Get Started" to obtain these values. 
+""" + +from typing import Annotated + +import httpx +from pydantic import BeforeValidator +from pydantic_settings import BaseSettings, SettingsConfigDict + +from mcp.server.fastmcp import FastMCP + + +class SurgeSettings(BaseSettings): + model_config: SettingsConfigDict = SettingsConfigDict( + env_prefix="SURGE_", env_file=".env" + ) + + api_key: str + account_id: str + my_phone_number: Annotated[ + str, BeforeValidator(lambda v: "+" + v if not v.startswith("+") else v) + ] + my_first_name: str + my_last_name: str + + +# Create server +mcp = FastMCP("Text me") +surge_settings = SurgeSettings() # type: ignore + + +@mcp.tool(name="textme", description="Send a text message to me") +def text_me(text_content: str) -> str: + """Send a text message to a phone number via https://surgemsg.com/""" + with httpx.Client() as client: + response = client.post( + "https://api.surgemsg.com/messages", + headers={ + "Authorization": f"Bearer {surge_settings.api_key}", + "Surge-Account": surge_settings.account_id, + "Content-Type": "application/json", + }, + json={ + "body": text_content, + "conversation": { + "contact": { + "first_name": surge_settings.my_first_name, + "last_name": surge_settings.my_last_name, + "phone_number": surge_settings.my_phone_number, + } + }, + }, + ) + response.raise_for_status() + return f"Message sent: {text_content}" diff --git a/examples/fastmcp/unicode_example.py b/examples/fastmcp/unicode_example.py index a69f586a5..48f8bd447 100644 --- a/examples/fastmcp/unicode_example.py +++ b/examples/fastmcp/unicode_example.py @@ -1,64 +1,64 @@ -""" -Example FastMCP server that uses Unicode characters in various places to help test -Unicode handling in tools and inspectors. 
-""" - -from mcp.server.fastmcp import FastMCP - -mcp = FastMCP() - - -@mcp.tool( - description="🌟 A tool that uses various Unicode characters in its description: " - "á é í ó ú ñ 漢字 🎉" -) -def hello_unicode(name: str = "世界", greeting: str = "¡Hola") -> str: - """ - A simple tool that demonstrates Unicode handling in: - - Tool description (emojis, accents, CJK characters) - - Parameter defaults (CJK characters) - - Return values (Spanish punctuation, emojis) - """ - return f"{greeting}, {name}! 👋" - - -@mcp.tool(description="🎨 Tool that returns a list of emoji categories") -def list_emoji_categories() -> list[str]: - """Returns a list of emoji categories with emoji examples.""" - return [ - "😀 Smileys & Emotion", - "👋 People & Body", - "🐶 Animals & Nature", - "🍎 Food & Drink", - "⚽ Activities", - "🌍 Travel & Places", - "💡 Objects", - "❤️ Symbols", - "🚩 Flags", - ] - - -@mcp.tool(description="🔤 Tool that returns text in different scripts") -def multilingual_hello() -> str: - """Returns hello in different scripts and writing systems.""" - return "\n".join( - [ - "English: Hello!", - "Spanish: ¡Hola!", - "French: Bonjour!", - "German: Grüß Gott!", - "Russian: Привет!", - "Greek: Γεια σας!", - "Hebrew: !שָׁלוֹם", - "Arabic: !مرحبا", - "Hindi: नमस्ते!", - "Chinese: 你好!", - "Japanese: こんにちは!", - "Korean: 안녕하세요!", - "Thai: สวัสดี!", - ] - ) - - -if __name__ == "__main__": - mcp.run() +""" +Example FastMCP server that uses Unicode characters in various places to help test +Unicode handling in tools and inspectors. 
+""" + +from mcp.server.fastmcp import FastMCP + +mcp = FastMCP() + + +@mcp.tool( + description="🌟 A tool that uses various Unicode characters in its description: " + "á é í ó ú ñ 漢字 🎉" +) +def hello_unicode(name: str = "世界", greeting: str = "¡Hola") -> str: + """ + A simple tool that demonstrates Unicode handling in: + - Tool description (emojis, accents, CJK characters) + - Parameter defaults (CJK characters) + - Return values (Spanish punctuation, emojis) + """ + return f"{greeting}, {name}! 👋" + + +@mcp.tool(description="🎨 Tool that returns a list of emoji categories") +def list_emoji_categories() -> list[str]: + """Returns a list of emoji categories with emoji examples.""" + return [ + "😀 Smileys & Emotion", + "👋 People & Body", + "🐶 Animals & Nature", + "🍎 Food & Drink", + "⚽ Activities", + "🌍 Travel & Places", + "💡 Objects", + "❤️ Symbols", + "🚩 Flags", + ] + + +@mcp.tool(description="🔤 Tool that returns text in different scripts") +def multilingual_hello() -> str: + """Returns hello in different scripts and writing systems.""" + return "\n".join( + [ + "English: Hello!", + "Spanish: ¡Hola!", + "French: Bonjour!", + "German: Grüß Gott!", + "Russian: Привет!", + "Greek: Γεια σας!", + "Hebrew: !שָׁלוֹם", + "Arabic: !مرحبا", + "Hindi: नमस्ते!", + "Chinese: 你好!", + "Japanese: こんにちは!", + "Korean: 안녕하세요!", + "Thai: สวัสดี!", + ] + ) + + +if __name__ == "__main__": + mcp.run() diff --git a/examples/servers/simple-prompt/.python-version b/examples/servers/simple-prompt/.python-version index c8cfe3959..2951d9b02 100644 --- a/examples/servers/simple-prompt/.python-version +++ b/examples/servers/simple-prompt/.python-version @@ -1 +1 @@ -3.10 +3.10 diff --git a/examples/servers/simple-prompt/README.md b/examples/servers/simple-prompt/README.md index 48e796e19..0b948d5d5 100644 --- a/examples/servers/simple-prompt/README.md +++ b/examples/servers/simple-prompt/README.md @@ -1,55 +1,55 @@ -# MCP Simple Prompt - -A simple MCP server that exposes a customizable prompt 
template with optional context and topic parameters. - -## Usage - -Start the server using either stdio (default) or SSE transport: - -```bash -# Using stdio transport (default) -uv run mcp-simple-prompt - -# Using SSE transport on custom port -uv run mcp-simple-prompt --transport sse --port 8000 -``` - -The server exposes a prompt named "simple" that accepts two optional arguments: - -- `context`: Additional context to consider -- `topic`: Specific topic to focus on - -## Example - -Using the MCP client, you can retrieve the prompt like this using the STDIO transport: - -```python -import asyncio -from mcp.client.session import ClientSession -from mcp.client.stdio import StdioServerParameters, stdio_client - - -async def main(): - async with stdio_client( - StdioServerParameters(command="uv", args=["run", "mcp-simple-prompt"]) - ) as (read, write): - async with ClientSession(read, write) as session: - await session.initialize() - - # List available prompts - prompts = await session.list_prompts() - print(prompts) - - # Get the prompt with arguments - prompt = await session.get_prompt( - "simple", - { - "context": "User is a software developer", - "topic": "Python async programming", - }, - ) - print(prompt) - - -asyncio.run(main()) -``` +# MCP Simple Prompt + +A simple MCP server that exposes a customizable prompt template with optional context and topic parameters. 
+ +## Usage + +Start the server using either stdio (default) or SSE transport: + +```bash +# Using stdio transport (default) +uv run mcp-simple-prompt + +# Using SSE transport on custom port +uv run mcp-simple-prompt --transport sse --port 8000 +``` + +The server exposes a prompt named "simple" that accepts two optional arguments: + +- `context`: Additional context to consider +- `topic`: Specific topic to focus on + +## Example + +Using the MCP client, you can retrieve the prompt like this using the STDIO transport: + +```python +import asyncio +from mcp.client.session import ClientSession +from mcp.client.stdio import StdioServerParameters, stdio_client + + +async def main(): + async with stdio_client( + StdioServerParameters(command="uv", args=["run", "mcp-simple-prompt"]) + ) as (read, write): + async with ClientSession(read, write) as session: + await session.initialize() + + # List available prompts + prompts = await session.list_prompts() + print(prompts) + + # Get the prompt with arguments + prompt = await session.get_prompt( + "simple", + { + "context": "User is a software developer", + "topic": "Python async programming", + }, + ) + print(prompt) + + +asyncio.run(main()) +``` diff --git a/examples/servers/simple-prompt/mcp_simple_prompt/__init__.py b/examples/servers/simple-prompt/mcp_simple_prompt/__init__.py index 8b1378917..d3f5a12fa 100644 --- a/examples/servers/simple-prompt/mcp_simple_prompt/__init__.py +++ b/examples/servers/simple-prompt/mcp_simple_prompt/__init__.py @@ -1 +1 @@ - + diff --git a/examples/servers/simple-prompt/mcp_simple_prompt/server.py b/examples/servers/simple-prompt/mcp_simple_prompt/server.py index bc14b7cd0..d26060c19 100644 --- a/examples/servers/simple-prompt/mcp_simple_prompt/server.py +++ b/examples/servers/simple-prompt/mcp_simple_prompt/server.py @@ -1,129 +1,129 @@ -import anyio -import click -import mcp.types as types -from mcp.server.lowlevel import Server - - -def create_messages( - context: str | None = None, 
topic: str | None = None -) -> list[types.PromptMessage]: - """Create the messages for the prompt.""" - messages = [] - - # Add context if provided - if context: - messages.append( - types.PromptMessage( - role="user", - content=types.TextContent( - type="text", text=f"Here is some relevant context: {context}" - ), - ) - ) - - # Add the main prompt - prompt = "Please help me with " - if topic: - prompt += f"the following topic: {topic}" - else: - prompt += "whatever questions I may have." - - messages.append( - types.PromptMessage( - role="user", content=types.TextContent(type="text", text=prompt) - ) - ) - - return messages - - -@click.command() -@click.option("--port", default=8000, help="Port to listen on for SSE") -@click.option( - "--transport", - type=click.Choice(["stdio", "sse"]), - default="stdio", - help="Transport type", -) -def main(port: int, transport: str) -> int: - app = Server("mcp-simple-prompt") - - @app.list_prompts() - async def list_prompts() -> list[types.Prompt]: - return [ - types.Prompt( - name="simple", - description="A simple prompt that can take optional context and topic " - "arguments", - arguments=[ - types.PromptArgument( - name="context", - description="Additional context to consider", - required=False, - ), - types.PromptArgument( - name="topic", - description="Specific topic to focus on", - required=False, - ), - ], - ) - ] - - @app.get_prompt() - async def get_prompt( - name: str, arguments: dict[str, str] | None = None - ) -> types.GetPromptResult: - if name != "simple": - raise ValueError(f"Unknown prompt: {name}") - - if arguments is None: - arguments = {} - - return types.GetPromptResult( - messages=create_messages( - context=arguments.get("context"), topic=arguments.get("topic") - ), - description="A simple prompt with optional context and topic arguments", - ) - - if transport == "sse": - from mcp.server.sse import SseServerTransport - from starlette.applications import Starlette - from starlette.responses import Response 
- from starlette.routing import Mount, Route - - sse = SseServerTransport("/messages/") - - async def handle_sse(request): - async with sse.connect_sse( - request.scope, request.receive, request._send - ) as streams: - await app.run( - streams[0], streams[1], app.create_initialization_options() - ) - return Response() - - starlette_app = Starlette( - debug=True, - routes=[ - Route("/sse", endpoint=handle_sse), - Mount("/messages/", app=sse.handle_post_message), - ], - ) - - import uvicorn - - uvicorn.run(starlette_app, host="0.0.0.0", port=port) - else: - from mcp.server.stdio import stdio_server - - async def arun(): - async with stdio_server() as streams: - await app.run( - streams[0], streams[1], app.create_initialization_options() - ) - - anyio.run(arun) - - return 0 +import anyio +import click +import mcp.types as types +from mcp.server.lowlevel import Server + + +def create_messages( + context: str | None = None, topic: str | None = None +) -> list[types.PromptMessage]: + """Create the messages for the prompt.""" + messages = [] + + # Add context if provided + if context: + messages.append( + types.PromptMessage( + role="user", + content=types.TextContent( + type="text", text=f"Here is some relevant context: {context}" + ), + ) + ) + + # Add the main prompt + prompt = "Please help me with " + if topic: + prompt += f"the following topic: {topic}" + else: + prompt += "whatever questions I may have." 
+ + messages.append( + types.PromptMessage( + role="user", content=types.TextContent(type="text", text=prompt) + ) + ) + + return messages + + +@click.command() +@click.option("--port", default=8000, help="Port to listen on for SSE") +@click.option( + "--transport", + type=click.Choice(["stdio", "sse"]), + default="stdio", + help="Transport type", +) +def main(port: int, transport: str) -> int: + app = Server("mcp-simple-prompt") + + @app.list_prompts() + async def list_prompts() -> list[types.Prompt]: + return [ + types.Prompt( + name="simple", + description="A simple prompt that can take optional context and topic " + "arguments", + arguments=[ + types.PromptArgument( + name="context", + description="Additional context to consider", + required=False, + ), + types.PromptArgument( + name="topic", + description="Specific topic to focus on", + required=False, + ), + ], + ) + ] + + @app.get_prompt() + async def get_prompt( + name: str, arguments: dict[str, str] | None = None + ) -> types.GetPromptResult: + if name != "simple": + raise ValueError(f"Unknown prompt: {name}") + + if arguments is None: + arguments = {} + + return types.GetPromptResult( + messages=create_messages( + context=arguments.get("context"), topic=arguments.get("topic") + ), + description="A simple prompt with optional context and topic arguments", + ) + + if transport == "sse": + from mcp.server.sse import SseServerTransport + from starlette.applications import Starlette + from starlette.responses import Response + from starlette.routing import Mount, Route + + sse = SseServerTransport("/messages/") + + async def handle_sse(request): + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + return Response() + + starlette_app = Starlette( + debug=True, + routes=[ + Route("/sse", endpoint=handle_sse), + Mount("/messages/", app=sse.handle_post_message), + ], + ) + + import uvicorn 
+ + uvicorn.run(starlette_app, host="0.0.0.0", port=port) + else: + from mcp.server.stdio import stdio_server + + async def arun(): + async with stdio_server() as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + + anyio.run(arun) + + return 0 diff --git a/examples/servers/simple-prompt/pyproject.toml b/examples/servers/simple-prompt/pyproject.toml index 1ef968d40..5000de38a 100644 --- a/examples/servers/simple-prompt/pyproject.toml +++ b/examples/servers/simple-prompt/pyproject.toml @@ -1,47 +1,47 @@ -[project] -name = "mcp-simple-prompt" -version = "0.1.0" -description = "A simple MCP server exposing a customizable prompt" -readme = "README.md" -requires-python = ">=3.10" -authors = [{ name = "Anthropic, PBC." }] -maintainers = [ - { name = "David Soria Parra", email = "davidsp@anthropic.com" }, - { name = "Justin Spahr-Summers", email = "justin@anthropic.com" }, -] -keywords = ["mcp", "llm", "automation", "web", "fetch"] -license = { text = "MIT" } -classifiers = [ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.10", -] -dependencies = ["anyio>=4.5", "click>=8.1.0", "httpx>=0.27", "mcp"] - -[project.scripts] -mcp-simple-prompt = "mcp_simple_prompt.server:main" - -[build-system] -requires = ["hatchling"] -build-backend = "hatchling.build" - -[tool.hatch.build.targets.wheel] -packages = ["mcp_simple_prompt"] - -[tool.pyright] -include = ["mcp_simple_prompt"] -venvPath = "." 
-venv = ".venv" - -[tool.ruff.lint] -select = ["E", "F", "I"] -ignore = [] - -[tool.ruff] -line-length = 88 -target-version = "py310" - -[tool.uv] -dev-dependencies = ["pyright>=1.1.378", "pytest>=8.3.3", "ruff>=0.6.9"] +[project] +name = "mcp-simple-prompt" +version = "0.1.0" +description = "A simple MCP server exposing a customizable prompt" +readme = "README.md" +requires-python = ">=3.10" +authors = [{ name = "Anthropic, PBC." }] +maintainers = [ + { name = "David Soria Parra", email = "davidsp@anthropic.com" }, + { name = "Justin Spahr-Summers", email = "justin@anthropic.com" }, +] +keywords = ["mcp", "llm", "automation", "web", "fetch"] +license = { text = "MIT" } +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", +] +dependencies = ["anyio>=4.5", "click>=8.1.0", "httpx>=0.27", "mcp"] + +[project.scripts] +mcp-simple-prompt = "mcp_simple_prompt.server:main" + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["mcp_simple_prompt"] + +[tool.pyright] +include = ["mcp_simple_prompt"] +venvPath = "." 
+venv = ".venv" + +[tool.ruff.lint] +select = ["E", "F", "I"] +ignore = [] + +[tool.ruff] +line-length = 88 +target-version = "py310" + +[tool.uv] +dev-dependencies = ["pyright>=1.1.378", "pytest>=8.3.3", "ruff>=0.6.9"] diff --git a/examples/servers/simple-resource/.python-version b/examples/servers/simple-resource/.python-version index c8cfe3959..2951d9b02 100644 --- a/examples/servers/simple-resource/.python-version +++ b/examples/servers/simple-resource/.python-version @@ -1 +1 @@ -3.10 +3.10 diff --git a/examples/servers/simple-resource/README.md b/examples/servers/simple-resource/README.md index df674e91e..8fe9eaa78 100644 --- a/examples/servers/simple-resource/README.md +++ b/examples/servers/simple-resource/README.md @@ -1,48 +1,48 @@ -# MCP Simple Resource - -A simple MCP server that exposes sample text files as resources. - -## Usage - -Start the server using either stdio (default) or SSE transport: - -```bash -# Using stdio transport (default) -uv run mcp-simple-resource - -# Using SSE transport on custom port -uv run mcp-simple-resource --transport sse --port 8000 -``` - -The server exposes some basic text file resources that can be read by clients. 
- -## Example - -Using the MCP client, you can retrieve resources like this using the STDIO transport: - -```python -import asyncio -from mcp.types import AnyUrl -from mcp.client.session import ClientSession -from mcp.client.stdio import StdioServerParameters, stdio_client - - -async def main(): - async with stdio_client( - StdioServerParameters(command="uv", args=["run", "mcp-simple-resource"]) - ) as (read, write): - async with ClientSession(read, write) as session: - await session.initialize() - - # List available resources - resources = await session.list_resources() - print(resources) - - # Get a specific resource - resource = await session.read_resource(AnyUrl("file:///greeting.txt")) - print(resource) - - -asyncio.run(main()) - -``` +# MCP Simple Resource + +A simple MCP server that exposes sample text files as resources. + +## Usage + +Start the server using either stdio (default) or SSE transport: + +```bash +# Using stdio transport (default) +uv run mcp-simple-resource + +# Using SSE transport on custom port +uv run mcp-simple-resource --transport sse --port 8000 +``` + +The server exposes some basic text file resources that can be read by clients. 
+ +## Example + +Using the MCP client, you can retrieve resources like this using the STDIO transport: + +```python +import asyncio +from mcp.types import AnyUrl +from mcp.client.session import ClientSession +from mcp.client.stdio import StdioServerParameters, stdio_client + + +async def main(): + async with stdio_client( + StdioServerParameters(command="uv", args=["run", "mcp-simple-resource"]) + ) as (read, write): + async with ClientSession(read, write) as session: + await session.initialize() + + # List available resources + resources = await session.list_resources() + print(resources) + + # Get a specific resource + resource = await session.read_resource(AnyUrl("file:///greeting.txt")) + print(resource) + + +asyncio.run(main()) + +``` diff --git a/examples/servers/simple-resource/mcp_simple_resource/__init__.py b/examples/servers/simple-resource/mcp_simple_resource/__init__.py index 8b1378917..d3f5a12fa 100644 --- a/examples/servers/simple-resource/mcp_simple_resource/__init__.py +++ b/examples/servers/simple-resource/mcp_simple_resource/__init__.py @@ -1 +1 @@ - + diff --git a/examples/servers/simple-resource/pyproject.toml b/examples/servers/simple-resource/pyproject.toml index cbab1ca47..07bf83fbf 100644 --- a/examples/servers/simple-resource/pyproject.toml +++ b/examples/servers/simple-resource/pyproject.toml @@ -1,47 +1,47 @@ -[project] -name = "mcp-simple-resource" -version = "0.1.0" -description = "A simple MCP server exposing sample text resources" -readme = "README.md" -requires-python = ">=3.10" -authors = [{ name = "Anthropic, PBC." 
}] -maintainers = [ - { name = "David Soria Parra", email = "davidsp@anthropic.com" }, - { name = "Justin Spahr-Summers", email = "justin@anthropic.com" }, -] -keywords = ["mcp", "llm", "automation", "web", "fetch"] -license = { text = "MIT" } -classifiers = [ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.10", -] -dependencies = ["anyio>=4.5", "click>=8.1.0", "httpx>=0.27", "mcp"] - -[project.scripts] -mcp-simple-resource = "mcp_simple_resource.server:main" - -[build-system] -requires = ["hatchling"] -build-backend = "hatchling.build" - -[tool.hatch.build.targets.wheel] -packages = ["mcp_simple_resource"] - -[tool.pyright] -include = ["mcp_simple_resource"] -venvPath = "." -venv = ".venv" - -[tool.ruff.lint] -select = ["E", "F", "I"] -ignore = [] - -[tool.ruff] -line-length = 88 -target-version = "py310" - -[tool.uv] -dev-dependencies = ["pyright>=1.1.378", "pytest>=8.3.3", "ruff>=0.6.9"] +[project] +name = "mcp-simple-resource" +version = "0.1.0" +description = "A simple MCP server exposing sample text resources" +readme = "README.md" +requires-python = ">=3.10" +authors = [{ name = "Anthropic, PBC." 
}] +maintainers = [ + { name = "David Soria Parra", email = "davidsp@anthropic.com" }, + { name = "Justin Spahr-Summers", email = "justin@anthropic.com" }, +] +keywords = ["mcp", "llm", "automation", "web", "fetch"] +license = { text = "MIT" } +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", +] +dependencies = ["anyio>=4.5", "click>=8.1.0", "httpx>=0.27", "mcp"] + +[project.scripts] +mcp-simple-resource = "mcp_simple_resource.server:main" + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["mcp_simple_resource"] + +[tool.pyright] +include = ["mcp_simple_resource"] +venvPath = "." +venv = ".venv" + +[tool.ruff.lint] +select = ["E", "F", "I"] +ignore = [] + +[tool.ruff] +line-length = 88 +target-version = "py310" + +[tool.uv] +dev-dependencies = ["pyright>=1.1.378", "pytest>=8.3.3", "ruff>=0.6.9"] diff --git a/examples/servers/simple-streamablehttp-stateless/README.md b/examples/servers/simple-streamablehttp-stateless/README.md index 2abb60614..7459a846a 100644 --- a/examples/servers/simple-streamablehttp-stateless/README.md +++ b/examples/servers/simple-streamablehttp-stateless/README.md @@ -1,41 +1,41 @@ -# MCP Simple StreamableHttp Stateless Server Example - -A stateless MCP server example demonstrating the StreamableHttp transport without maintaining session state. This example is ideal for understanding how to deploy MCP servers in multi-node environments where requests can be routed to any instance. 
- -## Features - -- Uses the StreamableHTTP transport in stateless mode (mcp_session_id=None) -- Each request creates a new ephemeral connection -- No session state maintained between requests -- Task lifecycle scoped to individual requests -- Suitable for deployment in multi-node environments - - -## Usage - -Start the server: - -```bash -# Using default port 3000 -uv run mcp-simple-streamablehttp-stateless - -# Using custom port -uv run mcp-simple-streamablehttp-stateless --port 3000 - -# Custom logging level -uv run mcp-simple-streamablehttp-stateless --log-level DEBUG - -# Enable JSON responses instead of SSE streams -uv run mcp-simple-streamablehttp-stateless --json-response -``` - -The server exposes a tool named "start-notification-stream" that accepts three arguments: - -- `interval`: Time between notifications in seconds (e.g., 1.0) -- `count`: Number of notifications to send (e.g., 5) -- `caller`: Identifier string for the caller - - -## Client - +# MCP Simple StreamableHttp Stateless Server Example + +A stateless MCP server example demonstrating the StreamableHttp transport without maintaining session state. This example is ideal for understanding how to deploy MCP servers in multi-node environments where requests can be routed to any instance. 
+ +## Features + +- Uses the StreamableHTTP transport in stateless mode (mcp_session_id=None) +- Each request creates a new ephemeral connection +- No session state maintained between requests +- Task lifecycle scoped to individual requests +- Suitable for deployment in multi-node environments + + +## Usage + +Start the server: + +```bash +# Using default port 3000 +uv run mcp-simple-streamablehttp-stateless + +# Using custom port +uv run mcp-simple-streamablehttp-stateless --port 3000 + +# Custom logging level +uv run mcp-simple-streamablehttp-stateless --log-level DEBUG + +# Enable JSON responses instead of SSE streams +uv run mcp-simple-streamablehttp-stateless --json-response +``` + +The server exposes a tool named "start-notification-stream" that accepts three arguments: + +- `interval`: Time between notifications in seconds (e.g., 1.0) +- `count`: Number of notifications to send (e.g., 5) +- `caller`: Identifier string for the caller + + +## Client + You can connect to this server using an HTTP client. For now, only the TypeScript SDK has streamable HTTP client examples, or you can use [Inspector](https://github.com/modelcontextprotocol/inspector) for testing. \ No newline at end of file diff --git a/examples/servers/simple-streamablehttp-stateless/pyproject.toml b/examples/servers/simple-streamablehttp-stateless/pyproject.toml index d2b089451..39568691b 100644 --- a/examples/servers/simple-streamablehttp-stateless/pyproject.toml +++ b/examples/servers/simple-streamablehttp-stateless/pyproject.toml @@ -1,36 +1,36 @@ -[project] -name = "mcp-simple-streamablehttp-stateless" -version = "0.1.0" -description = "A simple MCP server exposing a StreamableHttp transport in stateless mode" -readme = "README.md" -requires-python = ">=3.10" -authors = [{ name = "Anthropic, PBC." 
}] -keywords = ["mcp", "llm", "automation", "web", "fetch", "http", "streamable", "stateless"] -license = { text = "MIT" } -dependencies = ["anyio>=4.5", "click>=8.1.0", "httpx>=0.27", "mcp", "starlette", "uvicorn"] - -[project.scripts] -mcp-simple-streamablehttp-stateless = "mcp_simple_streamablehttp_stateless.server:main" - -[build-system] -requires = ["hatchling"] -build-backend = "hatchling.build" - -[tool.hatch.build.targets.wheel] -packages = ["mcp_simple_streamablehttp_stateless"] - -[tool.pyright] -include = ["mcp_simple_streamablehttp_stateless"] -venvPath = "." -venv = ".venv" - -[tool.ruff.lint] -select = ["E", "F", "I"] -ignore = [] - -[tool.ruff] -line-length = 88 -target-version = "py310" - -[tool.uv] +[project] +name = "mcp-simple-streamablehttp-stateless" +version = "0.1.0" +description = "A simple MCP server exposing a StreamableHttp transport in stateless mode" +readme = "README.md" +requires-python = ">=3.10" +authors = [{ name = "Anthropic, PBC." }] +keywords = ["mcp", "llm", "automation", "web", "fetch", "http", "streamable", "stateless"] +license = { text = "MIT" } +dependencies = ["anyio>=4.5", "click>=8.1.0", "httpx>=0.27", "mcp", "starlette", "uvicorn"] + +[project.scripts] +mcp-simple-streamablehttp-stateless = "mcp_simple_streamablehttp_stateless.server:main" + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["mcp_simple_streamablehttp_stateless"] + +[tool.pyright] +include = ["mcp_simple_streamablehttp_stateless"] +venvPath = "." 
+venv = ".venv" + +[tool.ruff.lint] +select = ["E", "F", "I"] +ignore = [] + +[tool.ruff] +line-length = 88 +target-version = "py310" + +[tool.uv] dev-dependencies = ["pyright>=1.1.378", "pytest>=8.3.3", "ruff>=0.6.9"] \ No newline at end of file diff --git a/examples/servers/simple-streamablehttp/README.md b/examples/servers/simple-streamablehttp/README.md index f850b7286..71ded4fba 100644 --- a/examples/servers/simple-streamablehttp/README.md +++ b/examples/servers/simple-streamablehttp/README.md @@ -1,55 +1,55 @@ -# MCP Simple StreamableHttp Server Example - -A simple MCP server example demonstrating the StreamableHttp transport, which enables HTTP-based communication with MCP servers using streaming. - -## Features - -- Uses the StreamableHTTP transport for server-client communication -- Supports REST API operations (POST, GET, DELETE) for `/mcp` endpoint -- Task management with anyio task groups -- Ability to send multiple notifications over time to the client -- Proper resource cleanup and lifespan management -- Resumability support via InMemoryEventStore - -## Usage - -Start the server on the default or custom port: - -```bash - -# Using custom port -uv run mcp-simple-streamablehttp --port 3000 - -# Custom logging level -uv run mcp-simple-streamablehttp --log-level DEBUG - -# Enable JSON responses instead of SSE streams -uv run mcp-simple-streamablehttp --json-response -``` - -The server exposes a tool named "start-notification-stream" that accepts three arguments: - -- `interval`: Time between notifications in seconds (e.g., 1.0) -- `count`: Number of notifications to send (e.g., 5) -- `caller`: Identifier string for the caller - -## Resumability Support - -This server includes resumability support through the InMemoryEventStore. 
This enables clients to: - -- Reconnect to the server after a disconnection -- Resume event streaming from where they left off using the Last-Event-ID header - - -The server will: -- Generate unique event IDs for each SSE message -- Store events in memory for later replay -- Replay missed events when a client reconnects with a Last-Event-ID header - -Note: The InMemoryEventStore is designed for demonstration purposes only. For production use, consider implementing a persistent storage solution. - - - -## Client - +# MCP Simple StreamableHttp Server Example + +A simple MCP server example demonstrating the StreamableHttp transport, which enables HTTP-based communication with MCP servers using streaming. + +## Features + +- Uses the StreamableHTTP transport for server-client communication +- Supports REST API operations (POST, GET, DELETE) for `/mcp` endpoint +- Task management with anyio task groups +- Ability to send multiple notifications over time to the client +- Proper resource cleanup and lifespan management +- Resumability support via InMemoryEventStore + +## Usage + +Start the server on the default or custom port: + +```bash + +# Using custom port +uv run mcp-simple-streamablehttp --port 3000 + +# Custom logging level +uv run mcp-simple-streamablehttp --log-level DEBUG + +# Enable JSON responses instead of SSE streams +uv run mcp-simple-streamablehttp --json-response +``` + +The server exposes a tool named "start-notification-stream" that accepts three arguments: + +- `interval`: Time between notifications in seconds (e.g., 1.0) +- `count`: Number of notifications to send (e.g., 5) +- `caller`: Identifier string for the caller + +## Resumability Support + +This server includes resumability support through the InMemoryEventStore. 
This enables clients to: + +- Reconnect to the server after a disconnection +- Resume event streaming from where they left off using the Last-Event-ID header + + +The server will: +- Generate unique event IDs for each SSE message +- Store events in memory for later replay +- Replay missed events when a client reconnects with a Last-Event-ID header + +Note: The InMemoryEventStore is designed for demonstration purposes only. For production use, consider implementing a persistent storage solution. + + + +## Client + You can connect to this server using an HTTP client, for now only Typescript SDK has streamable HTTP client examples or you can use [Inspector](https://github.com/modelcontextprotocol/inspector) \ No newline at end of file diff --git a/examples/servers/simple-streamablehttp/mcp_simple_streamablehttp/event_store.py b/examples/servers/simple-streamablehttp/mcp_simple_streamablehttp/event_store.py index 28c58149f..625400487 100644 --- a/examples/servers/simple-streamablehttp/mcp_simple_streamablehttp/event_store.py +++ b/examples/servers/simple-streamablehttp/mcp_simple_streamablehttp/event_store.py @@ -1,105 +1,105 @@ -""" -In-memory event store for demonstrating resumability functionality. - -This is a simple implementation intended for examples and testing, -not for production use where a persistent storage solution would be more appropriate. -""" - -import logging -from collections import deque -from dataclasses import dataclass -from uuid import uuid4 - -from mcp.server.streamable_http import ( - EventCallback, - EventId, - EventMessage, - EventStore, - StreamId, -) -from mcp.types import JSONRPCMessage - -logger = logging.getLogger(__name__) - - -@dataclass -class EventEntry: - """ - Represents an event entry in the event store. - """ - - event_id: EventId - stream_id: StreamId - message: JSONRPCMessage - - -class InMemoryEventStore(EventStore): - """ - Simple in-memory implementation of the EventStore interface for resumability. 
- This is primarily intended for examples and testing, not for production use - where a persistent storage solution would be more appropriate. - - This implementation keeps only the last N events per stream for memory efficiency. - """ - - def __init__(self, max_events_per_stream: int = 100): - """Initialize the event store. - - Args: - max_events_per_stream: Maximum number of events to keep per stream - """ - self.max_events_per_stream = max_events_per_stream - # for maintaining last N events per stream - self.streams: dict[StreamId, deque[EventEntry]] = {} - # event_id -> EventEntry for quick lookup - self.event_index: dict[EventId, EventEntry] = {} - - async def store_event( - self, stream_id: StreamId, message: JSONRPCMessage - ) -> EventId: - """Stores an event with a generated event ID.""" - event_id = str(uuid4()) - event_entry = EventEntry( - event_id=event_id, stream_id=stream_id, message=message - ) - - # Get or create deque for this stream - if stream_id not in self.streams: - self.streams[stream_id] = deque(maxlen=self.max_events_per_stream) - - # If deque is full, the oldest event will be automatically removed - # We need to remove it from the event_index as well - if len(self.streams[stream_id]) == self.max_events_per_stream: - oldest_event = self.streams[stream_id][0] - self.event_index.pop(oldest_event.event_id, None) - - # Add new event - self.streams[stream_id].append(event_entry) - self.event_index[event_id] = event_entry - - return event_id - - async def replay_events_after( - self, - last_event_id: EventId, - send_callback: EventCallback, - ) -> StreamId | None: - """Replays events that occurred after the specified event ID.""" - if last_event_id not in self.event_index: - logger.warning(f"Event ID {last_event_id} not found in store") - return None - - # Get the stream and find events after the last one - last_event = self.event_index[last_event_id] - stream_id = last_event.stream_id - stream_events = self.streams.get(last_event.stream_id, 
deque()) - - # Events in deque are already in chronological order - found_last = False - for event in stream_events: - if found_last: - await send_callback(EventMessage(event.message, event.event_id)) - elif event.event_id == last_event_id: - found_last = True - - return stream_id +""" +In-memory event store for demonstrating resumability functionality. + +This is a simple implementation intended for examples and testing, +not for production use where a persistent storage solution would be more appropriate. +""" + +import logging +from collections import deque +from dataclasses import dataclass +from uuid import uuid4 + +from mcp.server.streamable_http import ( + EventCallback, + EventId, + EventMessage, + EventStore, + StreamId, +) +from mcp.types import JSONRPCMessage + +logger = logging.getLogger(__name__) + + +@dataclass +class EventEntry: + """ + Represents an event entry in the event store. + """ + + event_id: EventId + stream_id: StreamId + message: JSONRPCMessage + + +class InMemoryEventStore(EventStore): + """ + Simple in-memory implementation of the EventStore interface for resumability. + This is primarily intended for examples and testing, not for production use + where a persistent storage solution would be more appropriate. + + This implementation keeps only the last N events per stream for memory efficiency. + """ + + def __init__(self, max_events_per_stream: int = 100): + """Initialize the event store. 
+ + Args: + max_events_per_stream: Maximum number of events to keep per stream + """ + self.max_events_per_stream = max_events_per_stream + # for maintaining last N events per stream + self.streams: dict[StreamId, deque[EventEntry]] = {} + # event_id -> EventEntry for quick lookup + self.event_index: dict[EventId, EventEntry] = {} + + async def store_event( + self, stream_id: StreamId, message: JSONRPCMessage + ) -> EventId: + """Stores an event with a generated event ID.""" + event_id = str(uuid4()) + event_entry = EventEntry( + event_id=event_id, stream_id=stream_id, message=message + ) + + # Get or create deque for this stream + if stream_id not in self.streams: + self.streams[stream_id] = deque(maxlen=self.max_events_per_stream) + + # If deque is full, the oldest event will be automatically removed + # We need to remove it from the event_index as well + if len(self.streams[stream_id]) == self.max_events_per_stream: + oldest_event = self.streams[stream_id][0] + self.event_index.pop(oldest_event.event_id, None) + + # Add new event + self.streams[stream_id].append(event_entry) + self.event_index[event_id] = event_entry + + return event_id + + async def replay_events_after( + self, + last_event_id: EventId, + send_callback: EventCallback, + ) -> StreamId | None: + """Replays events that occurred after the specified event ID.""" + if last_event_id not in self.event_index: + logger.warning(f"Event ID {last_event_id} not found in store") + return None + + # Get the stream and find events after the last one + last_event = self.event_index[last_event_id] + stream_id = last_event.stream_id + stream_events = self.streams.get(last_event.stream_id, deque()) + + # Events in deque are already in chronological order + found_last = False + for event in stream_events: + if found_last: + await send_callback(EventMessage(event.message, event.event_id)) + elif event.event_id == last_event_id: + found_last = True + + return stream_id diff --git 
a/examples/servers/simple-streamablehttp/pyproject.toml b/examples/servers/simple-streamablehttp/pyproject.toml index c35887d1f..8ef843ddf 100644 --- a/examples/servers/simple-streamablehttp/pyproject.toml +++ b/examples/servers/simple-streamablehttp/pyproject.toml @@ -1,36 +1,36 @@ -[project] -name = "mcp-simple-streamablehttp" -version = "0.1.0" -description = "A simple MCP server exposing a StreamableHttp transport for testing" -readme = "README.md" -requires-python = ">=3.10" -authors = [{ name = "Anthropic, PBC." }] -keywords = ["mcp", "llm", "automation", "web", "fetch", "http", "streamable"] -license = { text = "MIT" } -dependencies = ["anyio>=4.5", "click>=8.1.0", "httpx>=0.27", "mcp", "starlette", "uvicorn"] - -[project.scripts] -mcp-simple-streamablehttp = "mcp_simple_streamablehttp.server:main" - -[build-system] -requires = ["hatchling"] -build-backend = "hatchling.build" - -[tool.hatch.build.targets.wheel] -packages = ["mcp_simple_streamablehttp"] - -[tool.pyright] -include = ["mcp_simple_streamablehttp"] -venvPath = "." -venv = ".venv" - -[tool.ruff.lint] -select = ["E", "F", "I"] -ignore = [] - -[tool.ruff] -line-length = 88 -target-version = "py310" - -[tool.uv] +[project] +name = "mcp-simple-streamablehttp" +version = "0.1.0" +description = "A simple MCP server exposing a StreamableHttp transport for testing" +readme = "README.md" +requires-python = ">=3.10" +authors = [{ name = "Anthropic, PBC." }] +keywords = ["mcp", "llm", "automation", "web", "fetch", "http", "streamable"] +license = { text = "MIT" } +dependencies = ["anyio>=4.5", "click>=8.1.0", "httpx>=0.27", "mcp", "starlette", "uvicorn"] + +[project.scripts] +mcp-simple-streamablehttp = "mcp_simple_streamablehttp.server:main" + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["mcp_simple_streamablehttp"] + +[tool.pyright] +include = ["mcp_simple_streamablehttp"] +venvPath = "." 
+venv = ".venv" + +[tool.ruff.lint] +select = ["E", "F", "I"] +ignore = [] + +[tool.ruff] +line-length = 88 +target-version = "py310" + +[tool.uv] dev-dependencies = ["pyright>=1.1.378", "pytest>=8.3.3", "ruff>=0.6.9"] \ No newline at end of file diff --git a/examples/servers/simple-tool/.python-version b/examples/servers/simple-tool/.python-version index c8cfe3959..2951d9b02 100644 --- a/examples/servers/simple-tool/.python-version +++ b/examples/servers/simple-tool/.python-version @@ -1 +1 @@ -3.10 +3.10 diff --git a/examples/servers/simple-tool/README.md b/examples/servers/simple-tool/README.md index 06020b4b0..4880e92be 100644 --- a/examples/servers/simple-tool/README.md +++ b/examples/servers/simple-tool/README.md @@ -1,48 +1,48 @@ - -A simple MCP server that exposes a website fetching tool. - -## Usage - -Start the server using either stdio (default) or SSE transport: - -```bash -# Using stdio transport (default) -uv run mcp-simple-tool - -# Using SSE transport on custom port -uv run mcp-simple-tool --transport sse --port 8000 -``` - -The server exposes a tool named "fetch" that accepts one required argument: - -- `url`: The URL of the website to fetch - -## Example - -Using the MCP client, you can use the tool like this using the STDIO transport: - -```python -import asyncio -from mcp.client.session import ClientSession -from mcp.client.stdio import StdioServerParameters, stdio_client - - -async def main(): - async with stdio_client( - StdioServerParameters(command="uv", args=["run", "mcp-simple-tool"]) - ) as (read, write): - async with ClientSession(read, write) as session: - await session.initialize() - - # List available tools - tools = await session.list_tools() - print(tools) - - # Call the fetch tool - result = await session.call_tool("fetch", {"url": "https://example.com"}) - print(result) - - -asyncio.run(main()) - -``` + +A simple MCP server that exposes a website fetching tool. 
+ +## Usage + +Start the server using either stdio (default) or SSE transport: + +```bash +# Using stdio transport (default) +uv run mcp-simple-tool + +# Using SSE transport on custom port +uv run mcp-simple-tool --transport sse --port 8000 +``` + +The server exposes a tool named "fetch" that accepts one required argument: + +- `url`: The URL of the website to fetch + +## Example + +Using the MCP client, you can use the tool like this using the STDIO transport: + +```python +import asyncio +from mcp.client.session import ClientSession +from mcp.client.stdio import StdioServerParameters, stdio_client + + +async def main(): + async with stdio_client( + StdioServerParameters(command="uv", args=["run", "mcp-simple-tool"]) + ) as (read, write): + async with ClientSession(read, write) as session: + await session.initialize() + + # List available tools + tools = await session.list_tools() + print(tools) + + # Call the fetch tool + result = await session.call_tool("fetch", {"url": "https://example.com"}) + print(result) + + +asyncio.run(main()) + +``` diff --git a/examples/servers/simple-tool/mcp_simple_tool/__init__.py b/examples/servers/simple-tool/mcp_simple_tool/__init__.py index 8b1378917..d3f5a12fa 100644 --- a/examples/servers/simple-tool/mcp_simple_tool/__init__.py +++ b/examples/servers/simple-tool/mcp_simple_tool/__init__.py @@ -1 +1 @@ - + diff --git a/examples/servers/simple-tool/pyproject.toml b/examples/servers/simple-tool/pyproject.toml index c690aad97..cb08267e5 100644 --- a/examples/servers/simple-tool/pyproject.toml +++ b/examples/servers/simple-tool/pyproject.toml @@ -1,47 +1,47 @@ -[project] -name = "mcp-simple-tool" -version = "0.1.0" -description = "A simple MCP server exposing a website fetching tool" -readme = "README.md" -requires-python = ">=3.10" -authors = [{ name = "Anthropic, PBC." 
}] -maintainers = [ - { name = "David Soria Parra", email = "davidsp@anthropic.com" }, - { name = "Justin Spahr-Summers", email = "justin@anthropic.com" }, -] -keywords = ["mcp", "llm", "automation", "web", "fetch"] -license = { text = "MIT" } -classifiers = [ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.10", -] -dependencies = ["anyio>=4.5", "click>=8.1.0", "httpx>=0.27", "mcp"] - -[project.scripts] -mcp-simple-tool = "mcp_simple_tool.server:main" - -[build-system] -requires = ["hatchling"] -build-backend = "hatchling.build" - -[tool.hatch.build.targets.wheel] -packages = ["mcp_simple_tool"] - -[tool.pyright] -include = ["mcp_simple_tool"] -venvPath = "." -venv = ".venv" - -[tool.ruff.lint] -select = ["E", "F", "I"] -ignore = [] - -[tool.ruff] -line-length = 88 -target-version = "py310" - -[tool.uv] -dev-dependencies = ["pyright>=1.1.378", "pytest>=8.3.3", "ruff>=0.6.9"] +[project] +name = "mcp-simple-tool" +version = "0.1.0" +description = "A simple MCP server exposing a website fetching tool" +readme = "README.md" +requires-python = ">=3.10" +authors = [{ name = "Anthropic, PBC." 
}] +maintainers = [ + { name = "David Soria Parra", email = "davidsp@anthropic.com" }, + { name = "Justin Spahr-Summers", email = "justin@anthropic.com" }, +] +keywords = ["mcp", "llm", "automation", "web", "fetch"] +license = { text = "MIT" } +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", +] +dependencies = ["anyio>=4.5", "click>=8.1.0", "httpx>=0.27", "mcp"] + +[project.scripts] +mcp-simple-tool = "mcp_simple_tool.server:main" + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["mcp_simple_tool"] + +[tool.pyright] +include = ["mcp_simple_tool"] +venvPath = "." +venv = ".venv" + +[tool.ruff.lint] +select = ["E", "F", "I"] +ignore = [] + +[tool.ruff] +line-length = 88 +target-version = "py310" + +[tool.uv] +dev-dependencies = ["pyright>=1.1.378", "pytest>=8.3.3", "ruff>=0.6.9"] diff --git a/mkdocs.yml b/mkdocs.yml index b907cb873..2ed1ba699 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -1,120 +1,120 @@ -site_name: MCP Server -site_description: MCP Server -strict: true - -repo_name: modelcontextprotocol/python-sdk -repo_url: https://github.com/modelcontextprotocol/python-sdk -edit_uri: edit/main/docs/ -site_url: https://modelcontextprotocol.github.io/python-sdk - -# TODO(Marcelo): Add Anthropic copyright? 
-# copyright: © Model Context Protocol 2025 to present - -nav: - - Home: index.md - - API Reference: api.md - -theme: - name: "material" - palette: - - media: "(prefers-color-scheme)" - scheme: default - primary: black - accent: black - toggle: - icon: material/lightbulb - name: "Switch to light mode" - - media: "(prefers-color-scheme: light)" - scheme: default - primary: black - accent: black - toggle: - icon: material/lightbulb-outline - name: "Switch to dark mode" - - media: "(prefers-color-scheme: dark)" - scheme: slate - primary: white - accent: white - toggle: - icon: material/lightbulb-auto-outline - name: "Switch to system preference" - features: - - search.suggest - - search.highlight - - content.tabs.link - - content.code.annotate - - content.code.copy - - content.code.select - - navigation.path - - navigation.indexes - - navigation.sections - - navigation.tracking - - toc.follow - # logo: "img/logo-white.svg" - # TODO(Marcelo): Add a favicon. - # favicon: "favicon.ico" - -# https://www.mkdocs.org/user-guide/configuration/#validation -validation: - omitted_files: warn - absolute_links: warn - unrecognized_links: warn - anchors: warn - -markdown_extensions: - - tables - - admonition - - attr_list - - md_in_html - - pymdownx.details - - pymdownx.caret - - pymdownx.critic - - pymdownx.mark - - pymdownx.superfences - - pymdownx.snippets - - pymdownx.tilde - - pymdownx.inlinehilite - - pymdownx.highlight: - pygments_lang_class: true - - pymdownx.extra: - pymdownx.superfences: - custom_fences: - - name: mermaid - class: mermaid - format: !!python/name:pymdownx.superfences.fence_code_format - - pymdownx.emoji: - emoji_index: !!python/name:material.extensions.emoji.twemoji - emoji_generator: !!python/name:material.extensions.emoji.to_svg - options: - custom_icons: - - docs/.overrides/.icons - - pymdownx.tabbed: - alternate_style: true - - pymdownx.tasklist: - custom_checkbox: true - - sane_lists # this means you can start a list from any number - -watch: - - 
src/mcp - -plugins: - - search - - social - - glightbox - - mkdocstrings: - handlers: - python: - paths: [src/mcp] - options: - relative_crossrefs: true - members_order: source - separate_signature: true - show_signature_annotations: true - signature_crossrefs: true - group_by_category: false - # 3 because docs are in pages with an H2 just above them - heading_level: 3 - import: - - url: https://docs.python.org/3/objects.inv - - url: https://docs.pydantic.dev/latest/objects.inv - - url: https://typing-extensions.readthedocs.io/en/latest/objects.inv +site_name: MCP Server +site_description: MCP Server +strict: true + +repo_name: modelcontextprotocol/python-sdk +repo_url: https://github.com/modelcontextprotocol/python-sdk +edit_uri: edit/main/docs/ +site_url: https://modelcontextprotocol.github.io/python-sdk + +# TODO(Marcelo): Add Anthropic copyright? +# copyright: © Model Context Protocol 2025 to present + +nav: + - Home: index.md + - API Reference: api.md + +theme: + name: "material" + palette: + - media: "(prefers-color-scheme)" + scheme: default + primary: black + accent: black + toggle: + icon: material/lightbulb + name: "Switch to light mode" + - media: "(prefers-color-scheme: light)" + scheme: default + primary: black + accent: black + toggle: + icon: material/lightbulb-outline + name: "Switch to dark mode" + - media: "(prefers-color-scheme: dark)" + scheme: slate + primary: white + accent: white + toggle: + icon: material/lightbulb-auto-outline + name: "Switch to system preference" + features: + - search.suggest + - search.highlight + - content.tabs.link + - content.code.annotate + - content.code.copy + - content.code.select + - navigation.path + - navigation.indexes + - navigation.sections + - navigation.tracking + - toc.follow + # logo: "img/logo-white.svg" + # TODO(Marcelo): Add a favicon. 
+ # favicon: "favicon.ico" + +# https://www.mkdocs.org/user-guide/configuration/#validation +validation: + omitted_files: warn + absolute_links: warn + unrecognized_links: warn + anchors: warn + +markdown_extensions: + - tables + - admonition + - attr_list + - md_in_html + - pymdownx.details + - pymdownx.caret + - pymdownx.critic + - pymdownx.mark + - pymdownx.superfences + - pymdownx.snippets + - pymdownx.tilde + - pymdownx.inlinehilite + - pymdownx.highlight: + pygments_lang_class: true + - pymdownx.extra: + pymdownx.superfences: + custom_fences: + - name: mermaid + class: mermaid + format: !!python/name:pymdownx.superfences.fence_code_format + - pymdownx.emoji: + emoji_index: !!python/name:material.extensions.emoji.twemoji + emoji_generator: !!python/name:material.extensions.emoji.to_svg + options: + custom_icons: + - docs/.overrides/.icons + - pymdownx.tabbed: + alternate_style: true + - pymdownx.tasklist: + custom_checkbox: true + - sane_lists # this means you can start a list from any number + +watch: + - src/mcp + +plugins: + - search + - social + - glightbox + - mkdocstrings: + handlers: + python: + paths: [src/mcp] + options: + relative_crossrefs: true + members_order: source + separate_signature: true + show_signature_annotations: true + signature_crossrefs: true + group_by_category: false + # 3 because docs are in pages with an H2 just above them + heading_level: 3 + import: + - url: https://docs.python.org/3/objects.inv + - url: https://docs.pydantic.dev/latest/objects.inv + - url: https://typing-extensions.readthedocs.io/en/latest/objects.inv diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 000000000..a4a166629 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,40 @@ +annotated-types==0.7.0 +anyio==4.9.0 +Authlib==1.5.2 +certifi==2025.4.26 +cffi==1.17.1 +click==8.2.0 +cryptography==44.0.3 +ecdsa==0.19.1 +h11==0.16.0 +httpcore==1.0.9 +httpx==0.28.1 +httpx-sse==0.4.0 +idna==3.10 +iniconfig==2.1.0 +-e 
git+https://github.com/Vinisha-Projects/python-sdk.git@23efae98ed37528a2dffd34988664b1b2585307b#egg=mcp +mypy==1.15.0 +mypy_extensions==1.1.0 +packaging==25.0 +pluggy==1.6.0 +pyasn1==0.4.8 +pycparser==2.22 +pydantic==2.11.4 +pydantic-settings==2.9.1 +pydantic_core==2.33.2 +pytest==8.3.5 +python-dotenv==1.1.0 +python-jose==3.4.0 +types-python-jose==3.4.0.20250516 +python-multipart==0.0.20 +rsa==4.9.1 +ruff==0.11.10 +six==1.17.0 +sniffio==1.3.1 +sse-starlette==2.3.5 +starlette==0.46.2 +types-pyasn1==0.6.0.20250516 +typing-inspection==0.4.0 +typing_extensions==4.13.2 +uvicorn==0.34.2 +uvloop==0.21.0 diff --git a/src/mcp/cli/__init__.py b/src/mcp/cli/__init__.py index 3ef56d806..015e27389 100644 --- a/src/mcp/cli/__init__.py +++ b/src/mcp/cli/__init__.py @@ -1,6 +1,6 @@ -"""FastMCP CLI package.""" - -from .cli import app - -if __name__ == "__main__": - app() +"""FastMCP CLI package.""" + +from .cli import app + +if __name__ == "__main__": + app() diff --git a/src/mcp/client/__main__.py b/src/mcp/client/__main__.py index 2ec68e56c..3fc0f16f5 100644 --- a/src/mcp/client/__main__.py +++ b/src/mcp/client/__main__.py @@ -1,89 +1,89 @@ -import argparse -import logging -import sys -from functools import partial -from urllib.parse import urlparse - -import anyio -from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream - -import mcp.types as types -from mcp.client.session import ClientSession -from mcp.client.sse import sse_client -from mcp.client.stdio import StdioServerParameters, stdio_client -from mcp.shared.message import SessionMessage -from mcp.shared.session import RequestResponder - -if not sys.warnoptions: - import warnings - - warnings.simplefilter("ignore") - -logging.basicConfig(level=logging.INFO) -logger = logging.getLogger("client") - - -async def message_handler( - message: RequestResponder[types.ServerRequest, types.ClientResult] - | types.ServerNotification - | Exception, -) -> None: - if isinstance(message, Exception): - 
logger.error("Error: %s", message) - return - - logger.info("Received message from server: %s", message) - - -async def run_session( - read_stream: MemoryObjectReceiveStream[SessionMessage | Exception], - write_stream: MemoryObjectSendStream[SessionMessage], - client_info: types.Implementation | None = None, -): - async with ClientSession( - read_stream, - write_stream, - message_handler=message_handler, - client_info=client_info, - ) as session: - logger.info("Initializing session") - await session.initialize() - logger.info("Initialized") - - -async def main(command_or_url: str, args: list[str], env: list[tuple[str, str]]): - env_dict = dict(env) - - if urlparse(command_or_url).scheme in ("http", "https"): - # Use SSE client for HTTP(S) URLs - async with sse_client(command_or_url) as streams: - await run_session(*streams) - else: - # Use stdio client for commands - server_parameters = StdioServerParameters( - command=command_or_url, args=args, env=env_dict - ) - async with stdio_client(server_parameters) as streams: - await run_session(*streams) - - -def cli(): - parser = argparse.ArgumentParser() - parser.add_argument("command_or_url", help="Command or URL to connect to") - parser.add_argument("args", nargs="*", help="Additional arguments") - parser.add_argument( - "-e", - "--env", - nargs=2, - action="append", - metavar=("KEY", "VALUE"), - help="Environment variables to set. 
Can be used multiple times.", - default=[], - ) - - args = parser.parse_args() - anyio.run(partial(main, args.command_or_url, args.args, args.env), backend="trio") - - -if __name__ == "__main__": - cli() +import argparse +import logging +import sys +from functools import partial +from urllib.parse import urlparse + +import anyio +from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream + +import mcp.types as types +from mcp.client.session import ClientSession +from mcp.client.sse import sse_client +from mcp.client.stdio import StdioServerParameters, stdio_client +from mcp.shared.message import SessionMessage +from mcp.shared.session import RequestResponder + +if not sys.warnoptions: + import warnings + + warnings.simplefilter("ignore") + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger("client") + + +async def message_handler( + message: RequestResponder[types.ServerRequest, types.ClientResult] + | types.ServerNotification + | Exception, +) -> None: + if isinstance(message, Exception): + logger.error("Error: %s", message) + return + + logger.info("Received message from server: %s", message) + + +async def run_session( + read_stream: MemoryObjectReceiveStream[SessionMessage | Exception], + write_stream: MemoryObjectSendStream[SessionMessage], + client_info: types.Implementation | None = None, +): + async with ClientSession( + read_stream, + write_stream, + message_handler=message_handler, + client_info=client_info, + ) as session: + logger.info("Initializing session") + await session.initialize() + logger.info("Initialized") + + +async def main(command_or_url: str, args: list[str], env: list[tuple[str, str]]): + env_dict = dict(env) + + if urlparse(command_or_url).scheme in ("http", "https"): + # Use SSE client for HTTP(S) URLs + async with sse_client(command_or_url) as streams: + await run_session(*streams) + else: + # Use stdio client for commands + server_parameters = StdioServerParameters( + command=command_or_url, 
args=args, env=env_dict + ) + async with stdio_client(server_parameters) as streams: + await run_session(*streams) + + +def cli(): + parser = argparse.ArgumentParser() + parser.add_argument("command_or_url", help="Command or URL to connect to") + parser.add_argument("args", nargs="*", help="Additional arguments") + parser.add_argument( + "-e", + "--env", + nargs=2, + action="append", + metavar=("KEY", "VALUE"), + help="Environment variables to set. Can be used multiple times.", + default=[], + ) + + args = parser.parse_args() + anyio.run(partial(main, args.command_or_url, args.args, args.env), backend="trio") + + +if __name__ == "__main__": + cli() diff --git a/src/mcp/client/session.py b/src/mcp/client/session.py index c714c44bb..199e5ba53 100644 --- a/src/mcp/client/session.py +++ b/src/mcp/client/session.py @@ -403,4 +403,4 @@ async def _received_notification( case types.LoggingMessageNotification(params=params): await self._logging_callback(params) case _: - pass + pass \ No newline at end of file diff --git a/src/mcp/client/stdio/win32.py b/src/mcp/client/stdio/win32.py index 825a0477d..27ab74cb5 100644 --- a/src/mcp/client/stdio/win32.py +++ b/src/mcp/client/stdio/win32.py @@ -1,109 +1,109 @@ -""" -Windows-specific functionality for stdio client operations. -""" - -import shutil -import subprocess -import sys -from pathlib import Path -from typing import TextIO - -import anyio -from anyio.abc import Process - - -def get_windows_executable_command(command: str) -> str: - """ - Get the correct executable command normalized for Windows. - - On Windows, commands might exist with specific extensions (.exe, .cmd, etc.) - that need to be located for proper execution. 
- - Args: - command: Base command (e.g., 'uvx', 'npx') - - Returns: - str: Windows-appropriate command path - """ - try: - # First check if command exists in PATH as-is - if command_path := shutil.which(command): - return command_path - - # Check for Windows-specific extensions - for ext in [".cmd", ".bat", ".exe", ".ps1"]: - ext_version = f"{command}{ext}" - if ext_path := shutil.which(ext_version): - return ext_path - - # For regular commands or if we couldn't find special versions - return command - except OSError: - # Handle file system errors during path resolution - # (permissions, broken symlinks, etc.) - return command - - -async def create_windows_process( - command: str, - args: list[str], - env: dict[str, str] | None = None, - errlog: TextIO = sys.stderr, - cwd: Path | str | None = None, -): - """ - Creates a subprocess in a Windows-compatible way. - - Windows processes need special handling for console windows and - process creation flags. - - Args: - command: The command to execute - args: Command line arguments - env: Environment variables - errlog: Where to send stderr output - cwd: Working directory for the process - - Returns: - A process handle - """ - try: - # Try with Windows-specific flags to hide console window - process = await anyio.open_process( - [command, *args], - env=env, - # Ensure we don't create console windows for each process - creationflags=subprocess.CREATE_NO_WINDOW # type: ignore - if hasattr(subprocess, "CREATE_NO_WINDOW") - else 0, - stderr=errlog, - cwd=cwd, - ) - return process - except Exception: - # Don't raise, let's try to create the process without creation flags - process = await anyio.open_process( - [command, *args], env=env, stderr=errlog, cwd=cwd - ) - return process - - -async def terminate_windows_process(process: Process): - """ - Terminate a Windows process. - - Note: On Windows, terminating a process with process.terminate() doesn't - always guarantee immediate process termination. 
- So we give it 2s to exit, or we call process.kill() - which sends a SIGKILL equivalent signal. - - Args: - process: The process to terminate - """ - try: - process.terminate() - with anyio.fail_after(2.0): - await process.wait() - except TimeoutError: - # Force kill if it doesn't terminate - process.kill() +""" +Windows-specific functionality for stdio client operations. +""" + +import shutil +import subprocess +import sys +from pathlib import Path +from typing import TextIO + +import anyio +from anyio.abc import Process + + +def get_windows_executable_command(command: str) -> str: + """ + Get the correct executable command normalized for Windows. + + On Windows, commands might exist with specific extensions (.exe, .cmd, etc.) + that need to be located for proper execution. + + Args: + command: Base command (e.g., 'uvx', 'npx') + + Returns: + str: Windows-appropriate command path + """ + try: + # First check if command exists in PATH as-is + if command_path := shutil.which(command): + return command_path + + # Check for Windows-specific extensions + for ext in [".cmd", ".bat", ".exe", ".ps1"]: + ext_version = f"{command}{ext}" + if ext_path := shutil.which(ext_version): + return ext_path + + # For regular commands or if we couldn't find special versions + return command + except OSError: + # Handle file system errors during path resolution + # (permissions, broken symlinks, etc.) + return command + + +async def create_windows_process( + command: str, + args: list[str], + env: dict[str, str] | None = None, + errlog: TextIO = sys.stderr, + cwd: Path | str | None = None, +): + """ + Creates a subprocess in a Windows-compatible way. + + Windows processes need special handling for console windows and + process creation flags. 
+ + Args: + command: The command to execute + args: Command line arguments + env: Environment variables + errlog: Where to send stderr output + cwd: Working directory for the process + + Returns: + A process handle + """ + try: + # Try with Windows-specific flags to hide console window + process = await anyio.open_process( + [command, *args], + env=env, + # Ensure we don't create console windows for each process + creationflags=subprocess.CREATE_NO_WINDOW # type: ignore + if hasattr(subprocess, "CREATE_NO_WINDOW") + else 0, + stderr=errlog, + cwd=cwd, + ) + return process + except Exception: + # Don't raise, let's try to create the process without creation flags + process = await anyio.open_process( + [command, *args], env=env, stderr=errlog, cwd=cwd + ) + return process + + +async def terminate_windows_process(process: Process): + """ + Terminate a Windows process. + + Note: On Windows, terminating a process with process.terminate() doesn't + always guarantee immediate process termination. + So we give it 2s to exit, or we call process.kill() + which sends a SIGKILL equivalent signal. 
+ + Args: + process: The process to terminate + """ + try: + process.terminate() + with anyio.fail_after(2.0): + await process.wait() + except TimeoutError: + # Force kill if it doesn't terminate + process.kill() diff --git a/src/mcp/client/websocket.py b/src/mcp/client/websocket.py index ac542fb3f..0f7e0b62a 100644 --- a/src/mcp/client/websocket.py +++ b/src/mcp/client/websocket.py @@ -1,91 +1,91 @@ -import json -import logging -from collections.abc import AsyncGenerator -from contextlib import asynccontextmanager - -import anyio -from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream -from pydantic import ValidationError -from websockets.asyncio.client import connect as ws_connect -from websockets.typing import Subprotocol - -import mcp.types as types -from mcp.shared.message import SessionMessage - -logger = logging.getLogger(__name__) - - -@asynccontextmanager -async def websocket_client( - url: str, -) -> AsyncGenerator[ - tuple[ - MemoryObjectReceiveStream[SessionMessage | Exception], - MemoryObjectSendStream[SessionMessage], - ], - None, -]: - """ - WebSocket client transport for MCP, symmetrical to the server version. - - Connects to 'url' using the 'mcp' subprotocol, then yields: - (read_stream, write_stream) - - - read_stream: As you read from this stream, you'll receive either valid - JSONRPCMessage objects or Exception objects (when validation fails). - - write_stream: Write JSONRPCMessage objects to this stream to send them - over the WebSocket to the server. 
- """ - - # Create two in-memory streams: - # - One for incoming messages (read_stream, written by ws_reader) - # - One for outgoing messages (write_stream, read by ws_writer) - read_stream: MemoryObjectReceiveStream[SessionMessage | Exception] - read_stream_writer: MemoryObjectSendStream[SessionMessage | Exception] - write_stream: MemoryObjectSendStream[SessionMessage] - write_stream_reader: MemoryObjectReceiveStream[SessionMessage] - - read_stream_writer, read_stream = anyio.create_memory_object_stream(0) - write_stream, write_stream_reader = anyio.create_memory_object_stream(0) - - # Connect using websockets, requesting the "mcp" subprotocol - async with ws_connect(url, subprotocols=[Subprotocol("mcp")]) as ws: - - async def ws_reader(): - """ - Reads text messages from the WebSocket, parses them as JSON-RPC messages, - and sends them into read_stream_writer. - """ - async with read_stream_writer: - async for raw_text in ws: - try: - message = types.JSONRPCMessage.model_validate_json(raw_text) - session_message = SessionMessage(message) - await read_stream_writer.send(session_message) - except ValidationError as exc: - # If JSON parse or model validation fails, send the exception - await read_stream_writer.send(exc) - - async def ws_writer(): - """ - Reads JSON-RPC messages from write_stream_reader and - sends them to the server. 
- """ - async with write_stream_reader: - async for session_message in write_stream_reader: - # Convert to a dict, then to JSON - msg_dict = session_message.message.model_dump( - by_alias=True, mode="json", exclude_none=True - ) - await ws.send(json.dumps(msg_dict)) - - async with anyio.create_task_group() as tg: - # Start reader and writer tasks - tg.start_soon(ws_reader) - tg.start_soon(ws_writer) - - # Yield the receive/send streams - yield (read_stream, write_stream) - - # Once the caller's 'async with' block exits, we shut down - tg.cancel_scope.cancel() +import json +import logging +from collections.abc import AsyncGenerator +from contextlib import asynccontextmanager + +import anyio +from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream +from pydantic import ValidationError +from websockets.asyncio.client import connect as ws_connect +from websockets.typing import Subprotocol + +import mcp.types as types +from mcp.shared.message import SessionMessage + +logger = logging.getLogger(__name__) + + +@asynccontextmanager +async def websocket_client( + url: str, +) -> AsyncGenerator[ + tuple[ + MemoryObjectReceiveStream[SessionMessage | Exception], + MemoryObjectSendStream[SessionMessage], + ], + None, +]: + """ + WebSocket client transport for MCP, symmetrical to the server version. + + Connects to 'url' using the 'mcp' subprotocol, then yields: + (read_stream, write_stream) + + - read_stream: As you read from this stream, you'll receive either valid + JSONRPCMessage objects or Exception objects (when validation fails). + - write_stream: Write JSONRPCMessage objects to this stream to send them + over the WebSocket to the server. 
+ """ + + # Create two in-memory streams: + # - One for incoming messages (read_stream, written by ws_reader) + # - One for outgoing messages (write_stream, read by ws_writer) + read_stream: MemoryObjectReceiveStream[SessionMessage | Exception] + read_stream_writer: MemoryObjectSendStream[SessionMessage | Exception] + write_stream: MemoryObjectSendStream[SessionMessage] + write_stream_reader: MemoryObjectReceiveStream[SessionMessage] + + read_stream_writer, read_stream = anyio.create_memory_object_stream(0) + write_stream, write_stream_reader = anyio.create_memory_object_stream(0) + + # Connect using websockets, requesting the "mcp" subprotocol + async with ws_connect(url, subprotocols=[Subprotocol("mcp")]) as ws: + + async def ws_reader(): + """ + Reads text messages from the WebSocket, parses them as JSON-RPC messages, + and sends them into read_stream_writer. + """ + async with read_stream_writer: + async for raw_text in ws: + try: + message = types.JSONRPCMessage.model_validate_json(raw_text) + session_message = SessionMessage(message) + await read_stream_writer.send(session_message) + except ValidationError as exc: + # If JSON parse or model validation fails, send the exception + await read_stream_writer.send(exc) + + async def ws_writer(): + """ + Reads JSON-RPC messages from write_stream_reader and + sends them to the server. 
+ """ + async with write_stream_reader: + async for session_message in write_stream_reader: + # Convert to a dict, then to JSON + msg_dict = session_message.message.model_dump( + by_alias=True, mode="json", exclude_none=True + ) + await ws.send(json.dumps(msg_dict)) + + async with anyio.create_task_group() as tg: + # Start reader and writer tasks + tg.start_soon(ws_reader) + tg.start_soon(ws_writer) + + # Yield the receive/send streams + yield (read_stream, write_stream) + + # Once the caller's 'async with' block exits, we shut down + tg.cancel_scope.cancel() diff --git a/src/mcp/server/__init__.py b/src/mcp/server/__init__.py index 0feed368e..a16fc335f 100644 --- a/src/mcp/server/__init__.py +++ b/src/mcp/server/__init__.py @@ -1,5 +1,5 @@ -from .fastmcp import FastMCP -from .lowlevel import NotificationOptions, Server -from .models import InitializationOptions - -__all__ = ["Server", "FastMCP", "NotificationOptions", "InitializationOptions"] +from .fastmcp import FastMCP +from .lowlevel import NotificationOptions, Server +from .models import InitializationOptions + +__all__ = ["Server", "FastMCP", "NotificationOptions", "InitializationOptions"] diff --git a/src/mcp/server/__main__.py b/src/mcp/server/__main__.py index 1970eca7d..d0e9b7869 100644 --- a/src/mcp/server/__main__.py +++ b/src/mcp/server/__main__.py @@ -1,50 +1,50 @@ -import importlib.metadata -import logging -import sys - -import anyio - -from mcp.server.models import InitializationOptions -from mcp.server.session import ServerSession -from mcp.server.stdio import stdio_server -from mcp.types import ServerCapabilities - -if not sys.warnoptions: - import warnings - - warnings.simplefilter("ignore") - -logging.basicConfig(level=logging.INFO) -logger = logging.getLogger("server") - - -async def receive_loop(session: ServerSession): - logger.info("Starting receive loop") - async for message in session.incoming_messages: - if isinstance(message, Exception): - logger.error("Error: %s", message) - continue - 
- logger.info("Received message from client: %s", message) - - -async def main(): - version = importlib.metadata.version("mcp") - async with stdio_server() as (read_stream, write_stream): - async with ( - ServerSession( - read_stream, - write_stream, - InitializationOptions( - server_name="mcp", - server_version=version, - capabilities=ServerCapabilities(), - ), - ) as session, - write_stream, - ): - await receive_loop(session) - - -if __name__ == "__main__": - anyio.run(main, backend="trio") +import importlib.metadata +import logging +import sys + +import anyio + +from mcp.server.models import InitializationOptions +from mcp.server.session import ServerSession +from mcp.server.stdio import stdio_server +from mcp.types import ServerCapabilities + +if not sys.warnoptions: + import warnings + + warnings.simplefilter("ignore") + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger("server") + + +async def receive_loop(session: ServerSession): + logger.info("Starting receive loop") + async for message in session.incoming_messages: + if isinstance(message, Exception): + logger.error("Error: %s", message) + continue + + logger.info("Received message from client: %s", message) + + +async def main(): + version = importlib.metadata.version("mcp") + async with stdio_server() as (read_stream, write_stream): + async with ( + ServerSession( + read_stream, + write_stream, + InitializationOptions( + server_name="mcp", + server_version=version, + capabilities=ServerCapabilities(), + ), + ) as session, + write_stream, + ): + await receive_loop(session) + + +if __name__ == "__main__": + anyio.run(main, backend="trio") diff --git a/src/mcp/server/auth/__init__.py b/src/mcp/server/auth/__init__.py index 6888ffe8d..10fbd1228 100644 --- a/src/mcp/server/auth/__init__.py +++ b/src/mcp/server/auth/__init__.py @@ -1,3 +1,3 @@ -""" -MCP OAuth server authorization components. -""" +""" +MCP OAuth server authorization components. 
+""" diff --git a/src/mcp/server/auth/errors.py b/src/mcp/server/auth/errors.py index 053c2fd2e..05041d2f8 100644 --- a/src/mcp/server/auth/errors.py +++ b/src/mcp/server/auth/errors.py @@ -1,8 +1,8 @@ -from pydantic import ValidationError - - -def stringify_pydantic_error(validation_error: ValidationError) -> str: - return "\n".join( - f"{'.'.join(str(loc) for loc in e['loc'])}: {e['msg']}" - for e in validation_error.errors() - ) +from pydantic import ValidationError + + +def stringify_pydantic_error(validation_error: ValidationError) -> str: + return "\n".join( + f"{'.'.join(str(loc) for loc in e['loc'])}: {e['msg']}" + for e in validation_error.errors() + ) diff --git a/src/mcp/server/auth/handlers/__init__.py b/src/mcp/server/auth/handlers/__init__.py index e99a62de1..9df7d1bfd 100644 --- a/src/mcp/server/auth/handlers/__init__.py +++ b/src/mcp/server/auth/handlers/__init__.py @@ -1,3 +1,3 @@ -""" -Request handlers for MCP authorization endpoints. -""" +""" +Request handlers for MCP authorization endpoints. 
+""" diff --git a/src/mcp/server/auth/handlers/authorize.py b/src/mcp/server/auth/handlers/authorize.py index 8f3768908..111383da5 100644 --- a/src/mcp/server/auth/handlers/authorize.py +++ b/src/mcp/server/auth/handlers/authorize.py @@ -1,244 +1,244 @@ -import logging -from dataclasses import dataclass -from typing import Any, Literal - -from pydantic import AnyHttpUrl, AnyUrl, BaseModel, Field, RootModel, ValidationError -from starlette.datastructures import FormData, QueryParams -from starlette.requests import Request -from starlette.responses import RedirectResponse, Response - -from mcp.server.auth.errors import ( - stringify_pydantic_error, -) -from mcp.server.auth.json_response import PydanticJSONResponse -from mcp.server.auth.provider import ( - AuthorizationErrorCode, - AuthorizationParams, - AuthorizeError, - OAuthAuthorizationServerProvider, - construct_redirect_uri, -) -from mcp.shared.auth import ( - InvalidRedirectUriError, - InvalidScopeError, -) - -logger = logging.getLogger(__name__) - - -class AuthorizationRequest(BaseModel): - # See https://datatracker.ietf.org/doc/html/rfc6749#section-4.1.1 - client_id: str = Field(..., description="The client ID") - redirect_uri: AnyHttpUrl | None = Field( - None, description="URL to redirect to after authorization" - ) - - # see OAuthClientMetadata; we only support `code` - response_type: Literal["code"] = Field( - ..., description="Must be 'code' for authorization code flow" - ) - code_challenge: str = Field(..., description="PKCE code challenge") - code_challenge_method: Literal["S256"] = Field( - "S256", description="PKCE code challenge method, must be S256" - ) - state: str | None = Field(None, description="Optional state parameter") - scope: str | None = Field( - None, - description="Optional scope; if specified, should be " - "a space-separated list of scope strings", - ) - - -class AuthorizationErrorResponse(BaseModel): - error: AuthorizationErrorCode - error_description: str | None - error_uri: AnyUrl 
| None = None - # must be set if provided in the request - state: str | None = None - - -def best_effort_extract_string( - key: str, params: None | FormData | QueryParams -) -> str | None: - if params is None: - return None - value = params.get(key) - if isinstance(value, str): - return value - return None - - -class AnyHttpUrlModel(RootModel[AnyHttpUrl]): - root: AnyHttpUrl - - -@dataclass -class AuthorizationHandler: - provider: OAuthAuthorizationServerProvider[Any, Any, Any] - - async def handle(self, request: Request) -> Response: - # implements authorization requests for grant_type=code; - # see https://datatracker.ietf.org/doc/html/rfc6749#section-4.1.1 - - state = None - redirect_uri = None - client = None - params = None - - async def error_response( - error: AuthorizationErrorCode, - error_description: str | None, - attempt_load_client: bool = True, - ): - # Error responses take two different formats: - # 1. The request has a valid client ID & redirect_uri: we issue a redirect - # back to the redirect_uri with the error response fields as query - # parameters. This allows the client to be notified of the error. - # 2. Otherwise, we return an error response directly to the end user; - # we choose to do so in JSON, but this is left undefined in the - # specification. - # See https://datatracker.ietf.org/doc/html/rfc6749#section-4.1.2.1 - # - # This logic is a bit awkward to handle, because the error might be thrown - # very early in request validation, before we've done the usual Pydantic - # validation, loaded the client, etc. To handle this, error_response() - # contains fallback logic which attempts to load the parameters directly - # from the request. 
- - nonlocal client, redirect_uri, state - if client is None and attempt_load_client: - # make last-ditch attempt to load the client - client_id = best_effort_extract_string("client_id", params) - client = client_id and await self.provider.get_client(client_id) - if redirect_uri is None and client: - # make last-ditch effort to load the redirect uri - try: - if params is not None and "redirect_uri" not in params: - raw_redirect_uri = None - else: - raw_redirect_uri = AnyHttpUrlModel.model_validate( - best_effort_extract_string("redirect_uri", params) - ).root - redirect_uri = client.validate_redirect_uri(raw_redirect_uri) - except (ValidationError, InvalidRedirectUriError): - # if the redirect URI is invalid, ignore it & just return the - # initial error - pass - - # the error response MUST contain the state specified by the client, if any - if state is None: - # make last-ditch effort to load state - state = best_effort_extract_string("state", params) - - error_resp = AuthorizationErrorResponse( - error=error, - error_description=error_description, - state=state, - ) - - if redirect_uri and client: - return RedirectResponse( - url=construct_redirect_uri( - str(redirect_uri), **error_resp.model_dump(exclude_none=True) - ), - status_code=302, - headers={"Cache-Control": "no-store"}, - ) - else: - return PydanticJSONResponse( - status_code=400, - content=error_resp, - headers={"Cache-Control": "no-store"}, - ) - - try: - # Parse request parameters - if request.method == "GET": - # Convert query_params to dict for pydantic validation - params = request.query_params - else: - # Parse form data for POST requests - params = await request.form() - - # Save state if it exists, even before validation - state = best_effort_extract_string("state", params) - - try: - auth_request = AuthorizationRequest.model_validate(params) - state = auth_request.state # Update with validated state - except ValidationError as validation_error: - error: AuthorizationErrorCode = 
"invalid_request" - for e in validation_error.errors(): - if e["loc"] == ("response_type",) and e["type"] == "literal_error": - error = "unsupported_response_type" - break - return await error_response( - error, stringify_pydantic_error(validation_error) - ) - - # Get client information - client = await self.provider.get_client( - auth_request.client_id, - ) - if not client: - # For client_id validation errors, return direct error (no redirect) - return await error_response( - error="invalid_request", - error_description=f"Client ID '{auth_request.client_id}' not found", - attempt_load_client=False, - ) - - # Validate redirect_uri against client's registered URIs - try: - redirect_uri = client.validate_redirect_uri(auth_request.redirect_uri) - except InvalidRedirectUriError as validation_error: - # For redirect_uri validation errors, return direct error (no redirect) - return await error_response( - error="invalid_request", - error_description=validation_error.message, - ) - - # Validate scope - for scope errors, we can redirect - try: - scopes = client.validate_scope(auth_request.scope) - except InvalidScopeError as validation_error: - # For scope errors, redirect with error parameters - return await error_response( - error="invalid_scope", - error_description=validation_error.message, - ) - - # Setup authorization parameters - auth_params = AuthorizationParams( - state=state, - scopes=scopes, - code_challenge=auth_request.code_challenge, - redirect_uri=redirect_uri, - redirect_uri_provided_explicitly=auth_request.redirect_uri is not None, - ) - - try: - # Let the provider pick the next URI to redirect to - return RedirectResponse( - url=await self.provider.authorize( - client, - auth_params, - ), - status_code=302, - headers={"Cache-Control": "no-store"}, - ) - except AuthorizeError as e: - # Handle authorization errors as defined in RFC 6749 Section 4.1.2.1 - return await error_response( - error=e.error, - error_description=e.error_description, - ) - - except 
Exception as validation_error: - # Catch-all for unexpected errors - logger.exception( - "Unexpected error in authorization_handler", exc_info=validation_error - ) - return await error_response( - error="server_error", error_description="An unexpected error occurred" - ) +import logging +from dataclasses import dataclass +from typing import Any, Literal + +from pydantic import AnyHttpUrl, AnyUrl, BaseModel, Field, RootModel, ValidationError +from starlette.datastructures import FormData, QueryParams +from starlette.requests import Request +from starlette.responses import RedirectResponse, Response + +from mcp.server.auth.errors import ( + stringify_pydantic_error, +) +from mcp.server.auth.json_response import PydanticJSONResponse +from mcp.server.auth.provider import ( + AuthorizationErrorCode, + AuthorizationParams, + AuthorizeError, + OAuthAuthorizationServerProvider, + construct_redirect_uri, +) +from mcp.shared.auth import ( + InvalidRedirectUriError, + InvalidScopeError, +) + +logger = logging.getLogger(__name__) + + +class AuthorizationRequest(BaseModel): + # See https://datatracker.ietf.org/doc/html/rfc6749#section-4.1.1 + client_id: str = Field(..., description="The client ID") + redirect_uri: AnyHttpUrl | None = Field( + None, description="URL to redirect to after authorization" + ) + + # see OAuthClientMetadata; we only support `code` + response_type: Literal["code"] = Field( + ..., description="Must be 'code' for authorization code flow" + ) + code_challenge: str = Field(..., description="PKCE code challenge") + code_challenge_method: Literal["S256"] = Field( + "S256", description="PKCE code challenge method, must be S256" + ) + state: str | None = Field(None, description="Optional state parameter") + scope: str | None = Field( + None, + description="Optional scope; if specified, should be " + "a space-separated list of scope strings", + ) + + +class AuthorizationErrorResponse(BaseModel): + error: AuthorizationErrorCode + error_description: str | None + 
error_uri: AnyUrl | None = None + # must be set if provided in the request + state: str | None = None + + +def best_effort_extract_string( + key: str, params: None | FormData | QueryParams +) -> str | None: + if params is None: + return None + value = params.get(key) + if isinstance(value, str): + return value + return None + + +class AnyHttpUrlModel(RootModel[AnyHttpUrl]): + root: AnyHttpUrl + + +@dataclass +class AuthorizationHandler: + provider: OAuthAuthorizationServerProvider[Any, Any, Any] + + async def handle(self, request: Request) -> Response: + # implements authorization requests for grant_type=code; + # see https://datatracker.ietf.org/doc/html/rfc6749#section-4.1.1 + + state = None + redirect_uri = None + client = None + params = None + + async def error_response( + error: AuthorizationErrorCode, + error_description: str | None, + attempt_load_client: bool = True, + ): + # Error responses take two different formats: + # 1. The request has a valid client ID & redirect_uri: we issue a redirect + # back to the redirect_uri with the error response fields as query + # parameters. This allows the client to be notified of the error. + # 2. Otherwise, we return an error response directly to the end user; + # we choose to do so in JSON, but this is left undefined in the + # specification. + # See https://datatracker.ietf.org/doc/html/rfc6749#section-4.1.2.1 + # + # This logic is a bit awkward to handle, because the error might be thrown + # very early in request validation, before we've done the usual Pydantic + # validation, loaded the client, etc. To handle this, error_response() + # contains fallback logic which attempts to load the parameters directly + # from the request. 
+ + nonlocal client, redirect_uri, state + if client is None and attempt_load_client: + # make last-ditch attempt to load the client + client_id = best_effort_extract_string("client_id", params) + client = client_id and await self.provider.get_client(client_id) + if redirect_uri is None and client: + # make last-ditch effort to load the redirect uri + try: + if params is not None and "redirect_uri" not in params: + raw_redirect_uri = None + else: + raw_redirect_uri = AnyHttpUrlModel.model_validate( + best_effort_extract_string("redirect_uri", params) + ).root + redirect_uri = client.validate_redirect_uri(raw_redirect_uri) + except (ValidationError, InvalidRedirectUriError): + # if the redirect URI is invalid, ignore it & just return the + # initial error + pass + + # the error response MUST contain the state specified by the client, if any + if state is None: + # make last-ditch effort to load state + state = best_effort_extract_string("state", params) + + error_resp = AuthorizationErrorResponse( + error=error, + error_description=error_description, + state=state, + ) + + if redirect_uri and client: + return RedirectResponse( + url=construct_redirect_uri( + str(redirect_uri), **error_resp.model_dump(exclude_none=True) + ), + status_code=302, + headers={"Cache-Control": "no-store"}, + ) + else: + return PydanticJSONResponse( + status_code=400, + content=error_resp, + headers={"Cache-Control": "no-store"}, + ) + + try: + # Parse request parameters + if request.method == "GET": + # Convert query_params to dict for pydantic validation + params = request.query_params + else: + # Parse form data for POST requests + params = await request.form() + + # Save state if it exists, even before validation + state = best_effort_extract_string("state", params) + + try: + auth_request = AuthorizationRequest.model_validate(params) + state = auth_request.state # Update with validated state + except ValidationError as validation_error: + error: AuthorizationErrorCode = 
"invalid_request" + for e in validation_error.errors(): + if e["loc"] == ("response_type",) and e["type"] == "literal_error": + error = "unsupported_response_type" + break + return await error_response( + error, stringify_pydantic_error(validation_error) + ) + + # Get client information + client = await self.provider.get_client( + auth_request.client_id, + ) + if not client: + # For client_id validation errors, return direct error (no redirect) + return await error_response( + error="invalid_request", + error_description=f"Client ID '{auth_request.client_id}' not found", + attempt_load_client=False, + ) + + # Validate redirect_uri against client's registered URIs + try: + redirect_uri = client.validate_redirect_uri(auth_request.redirect_uri) + except InvalidRedirectUriError as validation_error: + # For redirect_uri validation errors, return direct error (no redirect) + return await error_response( + error="invalid_request", + error_description=validation_error.message, + ) + + # Validate scope - for scope errors, we can redirect + try: + scopes = client.validate_scope(auth_request.scope) + except InvalidScopeError as validation_error: + # For scope errors, redirect with error parameters + return await error_response( + error="invalid_scope", + error_description=validation_error.message, + ) + + # Setup authorization parameters + auth_params = AuthorizationParams( + state=state, + scopes=scopes, + code_challenge=auth_request.code_challenge, + redirect_uri=redirect_uri, + redirect_uri_provided_explicitly=auth_request.redirect_uri is not None, + ) + + try: + # Let the provider pick the next URI to redirect to + return RedirectResponse( + url=await self.provider.authorize( + client, + auth_params, + ), + status_code=302, + headers={"Cache-Control": "no-store"}, + ) + except AuthorizeError as e: + # Handle authorization errors as defined in RFC 6749 Section 4.1.2.1 + return await error_response( + error=e.error, + error_description=e.error_description, + ) + + except 
Exception as validation_error: + # Catch-all for unexpected errors + logger.exception( + "Unexpected error in authorization_handler", exc_info=validation_error + ) + return await error_response( + error="server_error", error_description="An unexpected error occurred" + ) diff --git a/src/mcp/server/auth/handlers/metadata.py b/src/mcp/server/auth/handlers/metadata.py index e37e5d311..37ccf0715 100644 --- a/src/mcp/server/auth/handlers/metadata.py +++ b/src/mcp/server/auth/handlers/metadata.py @@ -1,18 +1,18 @@ -from dataclasses import dataclass - -from starlette.requests import Request -from starlette.responses import Response - -from mcp.server.auth.json_response import PydanticJSONResponse -from mcp.shared.auth import OAuthMetadata - - -@dataclass -class MetadataHandler: - metadata: OAuthMetadata - - async def handle(self, request: Request) -> Response: - return PydanticJSONResponse( - content=self.metadata, - headers={"Cache-Control": "public, max-age=3600"}, # Cache for 1 hour - ) +from dataclasses import dataclass + +from starlette.requests import Request +from starlette.responses import Response + +from mcp.server.auth.json_response import PydanticJSONResponse +from mcp.shared.auth import OAuthMetadata + + +@dataclass +class MetadataHandler: + metadata: OAuthMetadata + + async def handle(self, request: Request) -> Response: + return PydanticJSONResponse( + content=self.metadata, + headers={"Cache-Control": "public, max-age=3600"}, # Cache for 1 hour + ) diff --git a/src/mcp/server/auth/handlers/register.py b/src/mcp/server/auth/handlers/register.py index 2e25c779a..1c3d5e337 100644 --- a/src/mcp/server/auth/handlers/register.py +++ b/src/mcp/server/auth/handlers/register.py @@ -1,129 +1,129 @@ -import secrets -import time -from dataclasses import dataclass -from typing import Any -from uuid import uuid4 - -from pydantic import BaseModel, RootModel, ValidationError -from starlette.requests import Request -from starlette.responses import Response - -from 
mcp.server.auth.errors import stringify_pydantic_error -from mcp.server.auth.json_response import PydanticJSONResponse -from mcp.server.auth.provider import ( - OAuthAuthorizationServerProvider, - RegistrationError, - RegistrationErrorCode, -) -from mcp.server.auth.settings import ClientRegistrationOptions -from mcp.shared.auth import OAuthClientInformationFull, OAuthClientMetadata - - -class RegistrationRequest(RootModel[OAuthClientMetadata]): - # this wrapper is a no-op; it's just to separate out the types exposed to the - # provider from what we use in the HTTP handler - root: OAuthClientMetadata - - -class RegistrationErrorResponse(BaseModel): - error: RegistrationErrorCode - error_description: str | None - - -@dataclass -class RegistrationHandler: - provider: OAuthAuthorizationServerProvider[Any, Any, Any] - options: ClientRegistrationOptions - - async def handle(self, request: Request) -> Response: - # Implements dynamic client registration as defined in https://datatracker.ietf.org/doc/html/rfc7591#section-3.1 - try: - # Parse request body as JSON - body = await request.json() - client_metadata = OAuthClientMetadata.model_validate(body) - - # Scope validation is handled below - except ValidationError as validation_error: - return PydanticJSONResponse( - content=RegistrationErrorResponse( - error="invalid_client_metadata", - error_description=stringify_pydantic_error(validation_error), - ), - status_code=400, - ) - - client_id = str(uuid4()) - client_secret = None - if client_metadata.token_endpoint_auth_method != "none": - # cryptographically secure random 32-byte hex string - client_secret = secrets.token_hex(32) - - if client_metadata.scope is None and self.options.default_scopes is not None: - client_metadata.scope = " ".join(self.options.default_scopes) - elif ( - client_metadata.scope is not None and self.options.valid_scopes is not None - ): - requested_scopes = set(client_metadata.scope.split()) - valid_scopes = set(self.options.valid_scopes) - if not 
requested_scopes.issubset(valid_scopes): - return PydanticJSONResponse( - content=RegistrationErrorResponse( - error="invalid_client_metadata", - error_description="Requested scopes are not valid: " - f"{', '.join(requested_scopes - valid_scopes)}", - ), - status_code=400, - ) - if set(client_metadata.grant_types) != {"authorization_code", "refresh_token"}: - return PydanticJSONResponse( - content=RegistrationErrorResponse( - error="invalid_client_metadata", - error_description="grant_types must be authorization_code " - "and refresh_token", - ), - status_code=400, - ) - - client_id_issued_at = int(time.time()) - client_secret_expires_at = ( - client_id_issued_at + self.options.client_secret_expiry_seconds - if self.options.client_secret_expiry_seconds is not None - else None - ) - - client_info = OAuthClientInformationFull( - client_id=client_id, - client_id_issued_at=client_id_issued_at, - client_secret=client_secret, - client_secret_expires_at=client_secret_expires_at, - # passthrough information from the client request - redirect_uris=client_metadata.redirect_uris, - token_endpoint_auth_method=client_metadata.token_endpoint_auth_method, - grant_types=client_metadata.grant_types, - response_types=client_metadata.response_types, - client_name=client_metadata.client_name, - client_uri=client_metadata.client_uri, - logo_uri=client_metadata.logo_uri, - scope=client_metadata.scope, - contacts=client_metadata.contacts, - tos_uri=client_metadata.tos_uri, - policy_uri=client_metadata.policy_uri, - jwks_uri=client_metadata.jwks_uri, - jwks=client_metadata.jwks, - software_id=client_metadata.software_id, - software_version=client_metadata.software_version, - ) - try: - # Register client - await self.provider.register_client(client_info) - - # Return client information - return PydanticJSONResponse(content=client_info, status_code=201) - except RegistrationError as e: - # Handle registration errors as defined in RFC 7591 Section 3.2.2 - return PydanticJSONResponse( - 
content=RegistrationErrorResponse( - error=e.error, error_description=e.error_description - ), - status_code=400, - ) +import secrets +import time +from dataclasses import dataclass +from typing import Any +from uuid import uuid4 + +from pydantic import BaseModel, RootModel, ValidationError +from starlette.requests import Request +from starlette.responses import Response + +from mcp.server.auth.errors import stringify_pydantic_error +from mcp.server.auth.json_response import PydanticJSONResponse +from mcp.server.auth.provider import ( + OAuthAuthorizationServerProvider, + RegistrationError, + RegistrationErrorCode, +) +from mcp.server.auth.settings import ClientRegistrationOptions +from mcp.shared.auth import OAuthClientInformationFull, OAuthClientMetadata + + +class RegistrationRequest(RootModel[OAuthClientMetadata]): + # this wrapper is a no-op; it's just to separate out the types exposed to the + # provider from what we use in the HTTP handler + root: OAuthClientMetadata + + +class RegistrationErrorResponse(BaseModel): + error: RegistrationErrorCode + error_description: str | None + + +@dataclass +class RegistrationHandler: + provider: OAuthAuthorizationServerProvider[Any, Any, Any] + options: ClientRegistrationOptions + + async def handle(self, request: Request) -> Response: + # Implements dynamic client registration as defined in https://datatracker.ietf.org/doc/html/rfc7591#section-3.1 + try: + # Parse request body as JSON + body = await request.json() + client_metadata = OAuthClientMetadata.model_validate(body) + + # Scope validation is handled below + except ValidationError as validation_error: + return PydanticJSONResponse( + content=RegistrationErrorResponse( + error="invalid_client_metadata", + error_description=stringify_pydantic_error(validation_error), + ), + status_code=400, + ) + + client_id = str(uuid4()) + client_secret = None + if client_metadata.token_endpoint_auth_method != "none": + # cryptographically secure random 32-byte hex string + 
client_secret = secrets.token_hex(32) + + if client_metadata.scope is None and self.options.default_scopes is not None: + client_metadata.scope = " ".join(self.options.default_scopes) + elif ( + client_metadata.scope is not None and self.options.valid_scopes is not None + ): + requested_scopes = set(client_metadata.scope.split()) + valid_scopes = set(self.options.valid_scopes) + if not requested_scopes.issubset(valid_scopes): + return PydanticJSONResponse( + content=RegistrationErrorResponse( + error="invalid_client_metadata", + error_description="Requested scopes are not valid: " + f"{', '.join(requested_scopes - valid_scopes)}", + ), + status_code=400, + ) + if set(client_metadata.grant_types) != {"authorization_code", "refresh_token"}: + return PydanticJSONResponse( + content=RegistrationErrorResponse( + error="invalid_client_metadata", + error_description="grant_types must be authorization_code " + "and refresh_token", + ), + status_code=400, + ) + + client_id_issued_at = int(time.time()) + client_secret_expires_at = ( + client_id_issued_at + self.options.client_secret_expiry_seconds + if self.options.client_secret_expiry_seconds is not None + else None + ) + + client_info = OAuthClientInformationFull( + client_id=client_id, + client_id_issued_at=client_id_issued_at, + client_secret=client_secret, + client_secret_expires_at=client_secret_expires_at, + # passthrough information from the client request + redirect_uris=client_metadata.redirect_uris, + token_endpoint_auth_method=client_metadata.token_endpoint_auth_method, + grant_types=client_metadata.grant_types, + response_types=client_metadata.response_types, + client_name=client_metadata.client_name, + client_uri=client_metadata.client_uri, + logo_uri=client_metadata.logo_uri, + scope=client_metadata.scope, + contacts=client_metadata.contacts, + tos_uri=client_metadata.tos_uri, + policy_uri=client_metadata.policy_uri, + jwks_uri=client_metadata.jwks_uri, + jwks=client_metadata.jwks, + 
software_id=client_metadata.software_id, + software_version=client_metadata.software_version, + ) + try: + # Register client + await self.provider.register_client(client_info) + + # Return client information + return PydanticJSONResponse(content=client_info, status_code=201) + except RegistrationError as e: + # Handle registration errors as defined in RFC 7591 Section 3.2.2 + return PydanticJSONResponse( + content=RegistrationErrorResponse( + error=e.error, error_description=e.error_description + ), + status_code=400, + ) diff --git a/src/mcp/server/auth/handlers/revoke.py b/src/mcp/server/auth/handlers/revoke.py index 43b4dded9..7e3461dbe 100644 --- a/src/mcp/server/auth/handlers/revoke.py +++ b/src/mcp/server/auth/handlers/revoke.py @@ -1,101 +1,101 @@ -from dataclasses import dataclass -from functools import partial -from typing import Any, Literal - -from pydantic import BaseModel, ValidationError -from starlette.requests import Request -from starlette.responses import Response - -from mcp.server.auth.errors import ( - stringify_pydantic_error, -) -from mcp.server.auth.json_response import PydanticJSONResponse -from mcp.server.auth.middleware.client_auth import ( - AuthenticationError, - ClientAuthenticator, -) -from mcp.server.auth.provider import ( - AccessToken, - OAuthAuthorizationServerProvider, - RefreshToken, -) - - -class RevocationRequest(BaseModel): - """ - # See https://datatracker.ietf.org/doc/html/rfc7009#section-2.1 - """ - - token: str - token_type_hint: Literal["access_token", "refresh_token"] | None = None - client_id: str - client_secret: str | None - - -class RevocationErrorResponse(BaseModel): - error: Literal["invalid_request", "unauthorized_client"] - error_description: str | None = None - - -@dataclass -class RevocationHandler: - provider: OAuthAuthorizationServerProvider[Any, Any, Any] - client_authenticator: ClientAuthenticator - - async def handle(self, request: Request) -> Response: - """ - Handler for the OAuth 2.0 Token Revocation 
endpoint. - """ - try: - form_data = await request.form() - revocation_request = RevocationRequest.model_validate(dict(form_data)) - except ValidationError as e: - return PydanticJSONResponse( - status_code=400, - content=RevocationErrorResponse( - error="invalid_request", - error_description=stringify_pydantic_error(e), - ), - ) - - # Authenticate client - try: - client = await self.client_authenticator.authenticate( - revocation_request.client_id, revocation_request.client_secret - ) - except AuthenticationError as e: - return PydanticJSONResponse( - status_code=401, - content=RevocationErrorResponse( - error="unauthorized_client", - error_description=e.message, - ), - ) - - loaders = [ - self.provider.load_access_token, - partial(self.provider.load_refresh_token, client), - ] - if revocation_request.token_type_hint == "refresh_token": - loaders = reversed(loaders) - - token: None | AccessToken | RefreshToken = None - for loader in loaders: - token = await loader(revocation_request.token) - if token is not None: - break - - # if token is not found, just return HTTP 200 per the RFC - if token and token.client_id == client.client_id: - # Revoke token; provider is not meant to be able to do validation - # at this point that would result in an error - await self.provider.revoke_token(token) - - # Return successful empty response - return Response( - status_code=200, - headers={ - "Cache-Control": "no-store", - "Pragma": "no-cache", - }, - ) +from dataclasses import dataclass +from functools import partial +from typing import Any, Literal + +from pydantic import BaseModel, ValidationError +from starlette.requests import Request +from starlette.responses import Response + +from mcp.server.auth.errors import ( + stringify_pydantic_error, +) +from mcp.server.auth.json_response import PydanticJSONResponse +from mcp.server.auth.middleware.client_auth import ( + AuthenticationError, + ClientAuthenticator, +) +from mcp.server.auth.provider import ( + AccessToken, + 
OAuthAuthorizationServerProvider, + RefreshToken, +) + + +class RevocationRequest(BaseModel): + """ + # See https://datatracker.ietf.org/doc/html/rfc7009#section-2.1 + """ + + token: str + token_type_hint: Literal["access_token", "refresh_token"] | None = None + client_id: str + client_secret: str | None + + +class RevocationErrorResponse(BaseModel): + error: Literal["invalid_request", "unauthorized_client"] + error_description: str | None = None + + +@dataclass +class RevocationHandler: + provider: OAuthAuthorizationServerProvider[Any, Any, Any] + client_authenticator: ClientAuthenticator + + async def handle(self, request: Request) -> Response: + """ + Handler for the OAuth 2.0 Token Revocation endpoint. + """ + try: + form_data = await request.form() + revocation_request = RevocationRequest.model_validate(dict(form_data)) + except ValidationError as e: + return PydanticJSONResponse( + status_code=400, + content=RevocationErrorResponse( + error="invalid_request", + error_description=stringify_pydantic_error(e), + ), + ) + + # Authenticate client + try: + client = await self.client_authenticator.authenticate( + revocation_request.client_id, revocation_request.client_secret + ) + except AuthenticationError as e: + return PydanticJSONResponse( + status_code=401, + content=RevocationErrorResponse( + error="unauthorized_client", + error_description=e.message, + ), + ) + + loaders = [ + self.provider.load_access_token, + partial(self.provider.load_refresh_token, client), + ] + if revocation_request.token_type_hint == "refresh_token": + loaders = reversed(loaders) + + token: None | AccessToken | RefreshToken = None + for loader in loaders: + token = await loader(revocation_request.token) + if token is not None: + break + + # if token is not found, just return HTTP 200 per the RFC + if token and token.client_id == client.client_id: + # Revoke token; provider is not meant to be able to do validation + # at this point that would result in an error + await 
self.provider.revoke_token(token) + + # Return successful empty response + return Response( + status_code=200, + headers={ + "Cache-Control": "no-store", + "Pragma": "no-cache", + }, + ) diff --git a/src/mcp/server/auth/handlers/token.py b/src/mcp/server/auth/handlers/token.py index 94a5c4de3..5d33589d4 100644 --- a/src/mcp/server/auth/handlers/token.py +++ b/src/mcp/server/auth/handlers/token.py @@ -1,264 +1,264 @@ -import base64 -import hashlib -import time -from dataclasses import dataclass -from typing import Annotated, Any, Literal - -from pydantic import AnyHttpUrl, BaseModel, Field, RootModel, ValidationError -from starlette.requests import Request - -from mcp.server.auth.errors import ( - stringify_pydantic_error, -) -from mcp.server.auth.json_response import PydanticJSONResponse -from mcp.server.auth.middleware.client_auth import ( - AuthenticationError, - ClientAuthenticator, -) -from mcp.server.auth.provider import ( - OAuthAuthorizationServerProvider, - TokenError, - TokenErrorCode, -) -from mcp.shared.auth import OAuthToken - - -class AuthorizationCodeRequest(BaseModel): - # See https://datatracker.ietf.org/doc/html/rfc6749#section-4.1.3 - grant_type: Literal["authorization_code"] - code: str = Field(..., description="The authorization code") - redirect_uri: AnyHttpUrl | None = Field( - None, description="Must be the same as redirect URI provided in /authorize" - ) - client_id: str - # we use the client_secret param, per https://datatracker.ietf.org/doc/html/rfc6749#section-2.3.1 - client_secret: str | None = None - # See https://datatracker.ietf.org/doc/html/rfc7636#section-4.5 - code_verifier: str = Field(..., description="PKCE code verifier") - - -class RefreshTokenRequest(BaseModel): - # See https://datatracker.ietf.org/doc/html/rfc6749#section-6 - grant_type: Literal["refresh_token"] - refresh_token: str = Field(..., description="The refresh token") - scope: str | None = Field(None, description="Optional scope parameter") - client_id: str - # we 
use the client_secret param, per https://datatracker.ietf.org/doc/html/rfc6749#section-2.3.1 - client_secret: str | None = None - - -class TokenRequest( - RootModel[ - Annotated[ - AuthorizationCodeRequest | RefreshTokenRequest, - Field(discriminator="grant_type"), - ] - ] -): - root: Annotated[ - AuthorizationCodeRequest | RefreshTokenRequest, - Field(discriminator="grant_type"), - ] - - -class TokenErrorResponse(BaseModel): - """ - See https://datatracker.ietf.org/doc/html/rfc6749#section-5.2 - """ - - error: TokenErrorCode - error_description: str | None = None - error_uri: AnyHttpUrl | None = None - - -class TokenSuccessResponse(RootModel[OAuthToken]): - # this is just a wrapper over OAuthToken; the only reason we do this - # is to have some separation between the HTTP response type, and the - # type returned by the provider - root: OAuthToken - - -@dataclass -class TokenHandler: - provider: OAuthAuthorizationServerProvider[Any, Any, Any] - client_authenticator: ClientAuthenticator - - def response(self, obj: TokenSuccessResponse | TokenErrorResponse): - status_code = 200 - if isinstance(obj, TokenErrorResponse): - status_code = 400 - - return PydanticJSONResponse( - content=obj, - status_code=status_code, - headers={ - "Cache-Control": "no-store", - "Pragma": "no-cache", - }, - ) - - async def handle(self, request: Request): - try: - form_data = await request.form() - token_request = TokenRequest.model_validate(dict(form_data)).root - except ValidationError as validation_error: - return self.response( - TokenErrorResponse( - error="invalid_request", - error_description=stringify_pydantic_error(validation_error), - ) - ) - - try: - client_info = await self.client_authenticator.authenticate( - client_id=token_request.client_id, - client_secret=token_request.client_secret, - ) - except AuthenticationError as e: - return self.response( - TokenErrorResponse( - error="unauthorized_client", - error_description=e.message, - ) - ) - - if token_request.grant_type not in 
client_info.grant_types: - return self.response( - TokenErrorResponse( - error="unsupported_grant_type", - error_description=( - f"Unsupported grant type (supported grant types are " - f"{client_info.grant_types})" - ), - ) - ) - - tokens: OAuthToken - - match token_request: - case AuthorizationCodeRequest(): - auth_code = await self.provider.load_authorization_code( - client_info, token_request.code - ) - if auth_code is None or auth_code.client_id != token_request.client_id: - # if code belongs to different client, pretend it doesn't exist - return self.response( - TokenErrorResponse( - error="invalid_grant", - error_description="authorization code does not exist", - ) - ) - - # make auth codes expire after a deadline - # see https://datatracker.ietf.org/doc/html/rfc6749#section-10.5 - if auth_code.expires_at < time.time(): - return self.response( - TokenErrorResponse( - error="invalid_grant", - error_description="authorization code has expired", - ) - ) - - # verify redirect_uri doesn't change between /authorize and /tokens - # see https://datatracker.ietf.org/doc/html/rfc6749#section-10.6 - if auth_code.redirect_uri_provided_explicitly: - authorize_request_redirect_uri = auth_code.redirect_uri - else: - authorize_request_redirect_uri = None - if token_request.redirect_uri != authorize_request_redirect_uri: - return self.response( - TokenErrorResponse( - error="invalid_request", - error_description=( - "redirect_uri did not match the one " - "used when creating auth code" - ), - ) - ) - - # Verify PKCE code verifier - sha256 = hashlib.sha256(token_request.code_verifier.encode()).digest() - hashed_code_verifier = ( - base64.urlsafe_b64encode(sha256).decode().rstrip("=") - ) - - if hashed_code_verifier != auth_code.code_challenge: - # see https://datatracker.ietf.org/doc/html/rfc7636#section-4.6 - return self.response( - TokenErrorResponse( - error="invalid_grant", - error_description="incorrect code_verifier", - ) - ) - - try: - # Exchange authorization code for 
tokens - tokens = await self.provider.exchange_authorization_code( - client_info, auth_code - ) - except TokenError as e: - return self.response( - TokenErrorResponse( - error=e.error, - error_description=e.error_description, - ) - ) - - case RefreshTokenRequest(): - refresh_token = await self.provider.load_refresh_token( - client_info, token_request.refresh_token - ) - if ( - refresh_token is None - or refresh_token.client_id != token_request.client_id - ): - # if token belongs to different client, pretend it doesn't exist - return self.response( - TokenErrorResponse( - error="invalid_grant", - error_description="refresh token does not exist", - ) - ) - - if refresh_token.expires_at and refresh_token.expires_at < time.time(): - # if the refresh token has expired, pretend it doesn't exist - return self.response( - TokenErrorResponse( - error="invalid_grant", - error_description="refresh token has expired", - ) - ) - - # Parse scopes if provided - scopes = ( - token_request.scope.split(" ") - if token_request.scope - else refresh_token.scopes - ) - - for scope in scopes: - if scope not in refresh_token.scopes: - return self.response( - TokenErrorResponse( - error="invalid_scope", - error_description=( - f"cannot request scope `{scope}` " - "not provided by refresh token" - ), - ) - ) - - try: - # Exchange refresh token for new tokens - tokens = await self.provider.exchange_refresh_token( - client_info, refresh_token, scopes - ) - except TokenError as e: - return self.response( - TokenErrorResponse( - error=e.error, - error_description=e.error_description, - ) - ) - - return self.response(TokenSuccessResponse(root=tokens)) +import base64 +import hashlib +import time +from dataclasses import dataclass +from typing import Annotated, Any, Literal + +from pydantic import AnyHttpUrl, BaseModel, Field, RootModel, ValidationError +from starlette.requests import Request + +from mcp.server.auth.errors import ( + stringify_pydantic_error, +) +from mcp.server.auth.json_response 
import PydanticJSONResponse +from mcp.server.auth.middleware.client_auth import ( + AuthenticationError, + ClientAuthenticator, +) +from mcp.server.auth.provider import ( + OAuthAuthorizationServerProvider, + TokenError, + TokenErrorCode, +) +from mcp.shared.auth import OAuthToken + + +class AuthorizationCodeRequest(BaseModel): + # See https://datatracker.ietf.org/doc/html/rfc6749#section-4.1.3 + grant_type: Literal["authorization_code"] + code: str = Field(..., description="The authorization code") + redirect_uri: AnyHttpUrl | None = Field( + None, description="Must be the same as redirect URI provided in /authorize" + ) + client_id: str + # we use the client_secret param, per https://datatracker.ietf.org/doc/html/rfc6749#section-2.3.1 + client_secret: str | None = None + # See https://datatracker.ietf.org/doc/html/rfc7636#section-4.5 + code_verifier: str = Field(..., description="PKCE code verifier") + + +class RefreshTokenRequest(BaseModel): + # See https://datatracker.ietf.org/doc/html/rfc6749#section-6 + grant_type: Literal["refresh_token"] + refresh_token: str = Field(..., description="The refresh token") + scope: str | None = Field(None, description="Optional scope parameter") + client_id: str + # we use the client_secret param, per https://datatracker.ietf.org/doc/html/rfc6749#section-2.3.1 + client_secret: str | None = None + + +class TokenRequest( + RootModel[ + Annotated[ + AuthorizationCodeRequest | RefreshTokenRequest, + Field(discriminator="grant_type"), + ] + ] +): + root: Annotated[ + AuthorizationCodeRequest | RefreshTokenRequest, + Field(discriminator="grant_type"), + ] + + +class TokenErrorResponse(BaseModel): + """ + See https://datatracker.ietf.org/doc/html/rfc6749#section-5.2 + """ + + error: TokenErrorCode + error_description: str | None = None + error_uri: AnyHttpUrl | None = None + + +class TokenSuccessResponse(RootModel[OAuthToken]): + # this is just a wrapper over OAuthToken; the only reason we do this + # is to have some separation 
between the HTTP response type, and the + # type returned by the provider + root: OAuthToken + + +@dataclass +class TokenHandler: + provider: OAuthAuthorizationServerProvider[Any, Any, Any] + client_authenticator: ClientAuthenticator + + def response(self, obj: TokenSuccessResponse | TokenErrorResponse): + status_code = 200 + if isinstance(obj, TokenErrorResponse): + status_code = 400 + + return PydanticJSONResponse( + content=obj, + status_code=status_code, + headers={ + "Cache-Control": "no-store", + "Pragma": "no-cache", + }, + ) + + async def handle(self, request: Request): + try: + form_data = await request.form() + token_request = TokenRequest.model_validate(dict(form_data)).root + except ValidationError as validation_error: + return self.response( + TokenErrorResponse( + error="invalid_request", + error_description=stringify_pydantic_error(validation_error), + ) + ) + + try: + client_info = await self.client_authenticator.authenticate( + client_id=token_request.client_id, + client_secret=token_request.client_secret, + ) + except AuthenticationError as e: + return self.response( + TokenErrorResponse( + error="unauthorized_client", + error_description=e.message, + ) + ) + + if token_request.grant_type not in client_info.grant_types: + return self.response( + TokenErrorResponse( + error="unsupported_grant_type", + error_description=( + f"Unsupported grant type (supported grant types are " + f"{client_info.grant_types})" + ), + ) + ) + + tokens: OAuthToken + + match token_request: + case AuthorizationCodeRequest(): + auth_code = await self.provider.load_authorization_code( + client_info, token_request.code + ) + if auth_code is None or auth_code.client_id != token_request.client_id: + # if code belongs to different client, pretend it doesn't exist + return self.response( + TokenErrorResponse( + error="invalid_grant", + error_description="authorization code does not exist", + ) + ) + + # make auth codes expire after a deadline + # see 
https://datatracker.ietf.org/doc/html/rfc6749#section-10.5 + if auth_code.expires_at < time.time(): + return self.response( + TokenErrorResponse( + error="invalid_grant", + error_description="authorization code has expired", + ) + ) + + # verify redirect_uri doesn't change between /authorize and /tokens + # see https://datatracker.ietf.org/doc/html/rfc6749#section-10.6 + if auth_code.redirect_uri_provided_explicitly: + authorize_request_redirect_uri = auth_code.redirect_uri + else: + authorize_request_redirect_uri = None + if token_request.redirect_uri != authorize_request_redirect_uri: + return self.response( + TokenErrorResponse( + error="invalid_request", + error_description=( + "redirect_uri did not match the one " + "used when creating auth code" + ), + ) + ) + + # Verify PKCE code verifier + sha256 = hashlib.sha256(token_request.code_verifier.encode()).digest() + hashed_code_verifier = ( + base64.urlsafe_b64encode(sha256).decode().rstrip("=") + ) + + if hashed_code_verifier != auth_code.code_challenge: + # see https://datatracker.ietf.org/doc/html/rfc7636#section-4.6 + return self.response( + TokenErrorResponse( + error="invalid_grant", + error_description="incorrect code_verifier", + ) + ) + + try: + # Exchange authorization code for tokens + tokens = await self.provider.exchange_authorization_code( + client_info, auth_code + ) + except TokenError as e: + return self.response( + TokenErrorResponse( + error=e.error, + error_description=e.error_description, + ) + ) + + case RefreshTokenRequest(): + refresh_token = await self.provider.load_refresh_token( + client_info, token_request.refresh_token + ) + if ( + refresh_token is None + or refresh_token.client_id != token_request.client_id + ): + # if token belongs to different client, pretend it doesn't exist + return self.response( + TokenErrorResponse( + error="invalid_grant", + error_description="refresh token does not exist", + ) + ) + + if refresh_token.expires_at and refresh_token.expires_at < time.time(): + 
# if the refresh token has expired, pretend it doesn't exist + return self.response( + TokenErrorResponse( + error="invalid_grant", + error_description="refresh token has expired", + ) + ) + + # Parse scopes if provided + scopes = ( + token_request.scope.split(" ") + if token_request.scope + else refresh_token.scopes + ) + + for scope in scopes: + if scope not in refresh_token.scopes: + return self.response( + TokenErrorResponse( + error="invalid_scope", + error_description=( + f"cannot request scope `{scope}` " + "not provided by refresh token" + ), + ) + ) + + try: + # Exchange refresh token for new tokens + tokens = await self.provider.exchange_refresh_token( + client_info, refresh_token, scopes + ) + except TokenError as e: + return self.response( + TokenErrorResponse( + error=e.error, + error_description=e.error_description, + ) + ) + + return self.response(TokenSuccessResponse(root=tokens)) diff --git a/src/mcp/server/auth/json_response.py b/src/mcp/server/auth/json_response.py index bd95bd693..955698c7f 100644 --- a/src/mcp/server/auth/json_response.py +++ b/src/mcp/server/auth/json_response.py @@ -1,10 +1,10 @@ -from typing import Any - -from starlette.responses import JSONResponse - - -class PydanticJSONResponse(JSONResponse): - # use pydantic json serialization instead of the stock `json.dumps`, - # so that we can handle serializing pydantic models like AnyHttpUrl - def render(self, content: Any) -> bytes: - return content.model_dump_json(exclude_none=True).encode("utf-8") +from typing import Any + +from starlette.responses import JSONResponse + + +class PydanticJSONResponse(JSONResponse): + # use pydantic json serialization instead of the stock `json.dumps`, + # so that we can handle serializing pydantic models like AnyHttpUrl + def render(self, content: Any) -> bytes: + return content.model_dump_json(exclude_none=True).encode("utf-8") diff --git a/src/mcp/server/auth/manager.py b/src/mcp/server/auth/manager.py new file mode 100644 index 
000000000..56b20a628 --- /dev/null +++ b/src/mcp/server/auth/manager.py @@ -0,0 +1,77 @@ +from datetime import datetime, timedelta, timezone +from typing import Any + +from jose import jwt +from jose.exceptions import ExpiredSignatureError, JWTClaimsError, JWTError + + +class AuthorizationManager: + """ + Manages token generation, validation, and error handling for authorization + in the MCP Python SDK. + + """ + + def __init__(self, secret_key: str, issuer: str, audience: str) -> None: + """ + Initializes the AuthorizationManager with the required configurations. + """ + self.secret_key = secret_key + self.issuer = issuer + self.audience = audience + + def generate_token(self, payload: dict[str, Any], expires_in: int = 3600) -> str: + """ + Generates a JWT token with the given payload. + """ + expiration = datetime.now(timezone.utc) + timedelta(seconds=expires_in) + payload.update({ + "exp": expiration, + "iss": self.issuer, + "aud": self.audience # Use audience as a single string consistently + }) + + try: + token = jwt.encode(payload, self.secret_key, algorithm="HS256") + return token + except JWTError as e: + raise ValueError(f"Token generation failed: {e}") + + def validate_token(self, token: str) -> dict[str, Any] | None: + """ + Validates the given JWT token and returns the claims if valid. 
+ """ + try: + # Decode with strict audience and issuer checks + decoded_token = jwt.decode( + token, + self.secret_key, + algorithms=["HS256"], + audience=self.audience, + issuer=self.issuer, + options={"verify_signature": True} + ) + if decoded_token.get("aud") != self.audience: + print("Invalid audience.") + return None + if decoded_token.get("iss") != self.issuer: + print("Invalid issuer.") + return None + return decoded_token + except ExpiredSignatureError: + print("Token has expired.") + return None + except JWTClaimsError as e: + print(f"Invalid claims: {e}") + return None + except JWTError as e: + print(f"Invalid token: {e}") + return None + + def get_claim(self, token: str, claim_key: str) -> Any: + """ + Extracts a specific claim from the validated token. + """ + claims = self.validate_token(token) + return claims.get(claim_key) if claims else None + diff --git a/src/mcp/server/auth/middleware/__init__.py b/src/mcp/server/auth/middleware/__init__.py index ba3ff63c3..1fb0ee521 100644 --- a/src/mcp/server/auth/middleware/__init__.py +++ b/src/mcp/server/auth/middleware/__init__.py @@ -1,3 +1,3 @@ -""" -Middleware for MCP authorization. -""" +""" +Middleware for MCP authorization. 
+""" diff --git a/src/mcp/server/auth/middleware/auth_context.py b/src/mcp/server/auth/middleware/auth_context.py index 1073c07ad..a562f7ec4 100644 --- a/src/mcp/server/auth/middleware/auth_context.py +++ b/src/mcp/server/auth/middleware/auth_context.py @@ -1,50 +1,50 @@ -import contextvars - -from starlette.types import ASGIApp, Receive, Scope, Send - -from mcp.server.auth.middleware.bearer_auth import AuthenticatedUser -from mcp.server.auth.provider import AccessToken - -# Create a contextvar to store the authenticated user -# The default is None, indicating no authenticated user is present -auth_context_var = contextvars.ContextVar[AuthenticatedUser | None]( - "auth_context", default=None -) - - -def get_access_token() -> AccessToken | None: - """ - Get the access token from the current context. - - Returns: - The access token if an authenticated user is available, None otherwise. - """ - auth_user = auth_context_var.get() - return auth_user.access_token if auth_user else None - - -class AuthContextMiddleware: - """ - Middleware that extracts the authenticated user from the request - and sets it in a contextvar for easy access throughout the request lifecycle. - - This middleware should be added after the AuthenticationMiddleware in the - middleware stack to ensure that the user is properly authenticated before - being stored in the context. 
- """ - - def __init__(self, app: ASGIApp): - self.app = app - - async def __call__(self, scope: Scope, receive: Receive, send: Send): - user = scope.get("user") - if isinstance(user, AuthenticatedUser): - # Set the authenticated user in the contextvar - token = auth_context_var.set(user) - try: - await self.app(scope, receive, send) - finally: - auth_context_var.reset(token) - else: - # No authenticated user, just process the request - await self.app(scope, receive, send) +import contextvars + +from starlette.types import ASGIApp, Receive, Scope, Send + +from mcp.server.auth.middleware.bearer_auth import AuthenticatedUser +from mcp.server.auth.provider import AccessToken + +# Create a contextvar to store the authenticated user +# The default is None, indicating no authenticated user is present +auth_context_var = contextvars.ContextVar[AuthenticatedUser | None]( + "auth_context", default=None +) + + +def get_access_token() -> AccessToken | None: + """ + Get the access token from the current context. + + Returns: + The access token if an authenticated user is available, None otherwise. + """ + auth_user = auth_context_var.get() + return auth_user.access_token if auth_user else None + + +class AuthContextMiddleware: + """ + Middleware that extracts the authenticated user from the request + and sets it in a contextvar for easy access throughout the request lifecycle. + + This middleware should be added after the AuthenticationMiddleware in the + middleware stack to ensure that the user is properly authenticated before + being stored in the context. 
+ """ + + def __init__(self, app: ASGIApp): + self.app = app + + async def __call__(self, scope: Scope, receive: Receive, send: Send): + user = scope.get("user") + if isinstance(user, AuthenticatedUser): + # Set the authenticated user in the contextvar + token = auth_context_var.set(user) + try: + await self.app(scope, receive, send) + finally: + auth_context_var.reset(token) + else: + # No authenticated user, just process the request + await self.app(scope, receive, send) diff --git a/src/mcp/server/auth/middleware/client_auth.py b/src/mcp/server/auth/middleware/client_auth.py index 37f7f5066..4c1139809 100644 --- a/src/mcp/server/auth/middleware/client_auth.py +++ b/src/mcp/server/auth/middleware/client_auth.py @@ -1,56 +1,56 @@ -import time -from typing import Any - -from mcp.server.auth.provider import OAuthAuthorizationServerProvider -from mcp.shared.auth import OAuthClientInformationFull - - -class AuthenticationError(Exception): - def __init__(self, message: str): - self.message = message - - -class ClientAuthenticator: - """ - ClientAuthenticator is a callable which validates requests from a client - application, used to verify /token calls. - If, during registration, the client requested to be issued a secret, the - authenticator asserts that /token calls must be authenticated with - that same token. - NOTE: clients can opt for no authentication during registration, in which case this - logic is skipped. - """ - - def __init__(self, provider: OAuthAuthorizationServerProvider[Any, Any, Any]): - """ - Initialize the dependency. 
- - Args: - provider: Provider to look up client information - """ - self.provider = provider - - async def authenticate( - self, client_id: str, client_secret: str | None - ) -> OAuthClientInformationFull: - # Look up client information - client = await self.provider.get_client(client_id) - if not client: - raise AuthenticationError("Invalid client_id") - - # If client from the store expects a secret, validate that the request provides - # that secret - if client.client_secret: - if not client_secret: - raise AuthenticationError("Client secret is required") - - if client.client_secret != client_secret: - raise AuthenticationError("Invalid client_secret") - - if ( - client.client_secret_expires_at - and client.client_secret_expires_at < int(time.time()) - ): - raise AuthenticationError("Client secret has expired") - - return client +import time +from typing import Any + +from mcp.server.auth.provider import OAuthAuthorizationServerProvider +from mcp.shared.auth import OAuthClientInformationFull + + +class AuthenticationError(Exception): + def __init__(self, message: str): + self.message = message + + +class ClientAuthenticator: + """ + ClientAuthenticator is a callable which validates requests from a client + application, used to verify /token calls. + If, during registration, the client requested to be issued a secret, the + authenticator asserts that /token calls must be authenticated with + that same token. + NOTE: clients can opt for no authentication during registration, in which case this + logic is skipped. + """ + + def __init__(self, provider: OAuthAuthorizationServerProvider[Any, Any, Any]): + """ + Initialize the dependency. 
+ + Args: + provider: Provider to look up client information + """ + self.provider = provider + + async def authenticate( + self, client_id: str, client_secret: str | None + ) -> OAuthClientInformationFull: + # Look up client information + client = await self.provider.get_client(client_id) + if not client: + raise AuthenticationError("Invalid client_id") + + # If client from the store expects a secret, validate that the request provides + # that secret + if client.client_secret: + if not client_secret: + raise AuthenticationError("Client secret is required") + + if client.client_secret != client_secret: + raise AuthenticationError("Invalid client_secret") + + if ( + client.client_secret_expires_at + and client.client_secret_expires_at < int(time.time()) + ): + raise AuthenticationError("Client secret has expired") + + return client diff --git a/src/mcp/server/auth/provider.py b/src/mcp/server/auth/provider.py index be1ac1dbc..357fa789a 100644 --- a/src/mcp/server/auth/provider.py +++ b/src/mcp/server/auth/provider.py @@ -1,289 +1,289 @@ -from dataclasses import dataclass -from typing import Generic, Literal, Protocol, TypeVar -from urllib.parse import parse_qs, urlencode, urlparse, urlunparse - -from pydantic import AnyHttpUrl, BaseModel - -from mcp.shared.auth import ( - OAuthClientInformationFull, - OAuthToken, -) - - -class AuthorizationParams(BaseModel): - state: str | None - scopes: list[str] | None - code_challenge: str - redirect_uri: AnyHttpUrl - redirect_uri_provided_explicitly: bool - - -class AuthorizationCode(BaseModel): - code: str - scopes: list[str] - expires_at: float - client_id: str - code_challenge: str - redirect_uri: AnyHttpUrl - redirect_uri_provided_explicitly: bool - - -class RefreshToken(BaseModel): - token: str - client_id: str - scopes: list[str] - expires_at: int | None = None - - -class AccessToken(BaseModel): - token: str - client_id: str - scopes: list[str] - expires_at: int | None = None - - -RegistrationErrorCode = Literal[ - 
"invalid_redirect_uri", - "invalid_client_metadata", - "invalid_software_statement", - "unapproved_software_statement", -] - - -@dataclass(frozen=True) -class RegistrationError(Exception): - error: RegistrationErrorCode - error_description: str | None = None - - -AuthorizationErrorCode = Literal[ - "invalid_request", - "unauthorized_client", - "access_denied", - "unsupported_response_type", - "invalid_scope", - "server_error", - "temporarily_unavailable", -] - - -@dataclass(frozen=True) -class AuthorizeError(Exception): - error: AuthorizationErrorCode - error_description: str | None = None - - -TokenErrorCode = Literal[ - "invalid_request", - "invalid_client", - "invalid_grant", - "unauthorized_client", - "unsupported_grant_type", - "invalid_scope", -] - - -@dataclass(frozen=True) -class TokenError(Exception): - error: TokenErrorCode - error_description: str | None = None - - -# NOTE: FastMCP doesn't render any of these types in the user response, so it's -# OK to add fields to subclasses which should not be exposed externally. -AuthorizationCodeT = TypeVar("AuthorizationCodeT", bound=AuthorizationCode) -RefreshTokenT = TypeVar("RefreshTokenT", bound=RefreshToken) -AccessTokenT = TypeVar("AccessTokenT", bound=AccessToken) - - -class OAuthAuthorizationServerProvider( - Protocol, Generic[AuthorizationCodeT, RefreshTokenT, AccessTokenT] -): - async def get_client(self, client_id: str) -> OAuthClientInformationFull | None: - """ - Retrieves client information by client ID. - - Implementors MAY raise NotImplementedError if dynamic client registration is - disabled in ClientRegistrationOptions. - - Args: - client_id: The ID of the client to retrieve. - - Returns: - The client information, or None if the client does not exist. - """ - ... - - async def register_client(self, client_info: OAuthClientInformationFull) -> None: - """ - Saves client information as part of registering it. 
- - Implementors MAY raise NotImplementedError if dynamic client registration is - disabled in ClientRegistrationOptions. - - Args: - client_info: The client metadata to register. - - Raises: - RegistrationError: If the client metadata is invalid. - """ - ... - - async def authorize( - self, client: OAuthClientInformationFull, params: AuthorizationParams - ) -> str: - """ - Called as part of the /authorize endpoint, and returns a URL that the client - will be redirected to. - Many MCP implementations will redirect to a third-party provider to perform - a second OAuth exchange with that provider. In this sort of setup, the client - has an OAuth connection with the MCP server, and the MCP server has an OAuth - connection with the 3rd-party provider. At the end of this flow, the client - should be redirected to the redirect_uri from params.redirect_uri. - - +--------+ +------------+ +-------------------+ - | | | | | | - | Client | --> | MCP Server | --> | 3rd Party OAuth | - | | | | | Server | - +--------+ +------------+ +-------------------+ - | ^ | - +------------+ | | | - | | | | Redirect | - |redirect_uri|<-----+ +------------------+ - | | - +------------+ - - Implementations will need to define another handler on the MCP server return - flow to perform the second redirect, and generate and store an authorization - code as part of completing the OAuth authorization step. - - Implementations SHOULD generate an authorization code with at least 160 bits of - entropy, - and MUST generate an authorization code with at least 128 bits of entropy. - See https://datatracker.ietf.org/doc/html/rfc6749#section-10.10. - - Args: - client: The client requesting authorization. - params: The parameters of the authorization request. - - Returns: - A URL to redirect the client to for authorization. - - Raises: - AuthorizeError: If the authorization request is invalid. - """ - ... 
- - async def load_authorization_code( - self, client: OAuthClientInformationFull, authorization_code: str - ) -> AuthorizationCodeT | None: - """ - Loads an AuthorizationCode by its code. - - Args: - client: The client that requested the authorization code. - authorization_code: The authorization code to get the challenge for. - - Returns: - The AuthorizationCode, or None if not found - """ - ... - - async def exchange_authorization_code( - self, client: OAuthClientInformationFull, authorization_code: AuthorizationCodeT - ) -> OAuthToken: - """ - Exchanges an authorization code for an access token and refresh token. - - Args: - client: The client exchanging the authorization code. - authorization_code: The authorization code to exchange. - - Returns: - The OAuth token, containing access and refresh tokens. - - Raises: - TokenError: If the request is invalid - """ - ... - - async def load_refresh_token( - self, client: OAuthClientInformationFull, refresh_token: str - ) -> RefreshTokenT | None: - """ - Loads a RefreshToken by its token string. - - Args: - client: The client that is requesting to load the refresh token. - refresh_token: The refresh token string to load. - - Returns: - The RefreshToken object if found, or None if not found. - """ - - ... - - async def exchange_refresh_token( - self, - client: OAuthClientInformationFull, - refresh_token: RefreshTokenT, - scopes: list[str], - ) -> OAuthToken: - """ - Exchanges a refresh token for an access token and refresh token. - - Implementations SHOULD rotate both the access token and refresh token. - - Args: - client: The client exchanging the refresh token. - refresh_token: The refresh token to exchange. - scopes: Optional scopes to request with the new access token. - - Returns: - The OAuth token, containing access and refresh tokens. - - Raises: - TokenError: If the request is invalid - """ - ... 
- - async def load_access_token(self, token: str) -> AccessTokenT | None: - """ - Loads an access token by its token. - - Args: - token: The access token to verify. - - Returns: - The AuthInfo, or None if the token is invalid. - """ - ... - - async def revoke_token( - self, - token: AccessTokenT | RefreshTokenT, - ) -> None: - """ - Revokes an access or refresh token. - - If the given token is invalid or already revoked, this method should do nothing. - - Implementations SHOULD revoke both the access token and its corresponding - refresh token, regardless of which of the access token or refresh token is - provided. - - Args: - token: the token to revoke - """ - ... - - -def construct_redirect_uri(redirect_uri_base: str, **params: str | None) -> str: - parsed_uri = urlparse(redirect_uri_base) - query_params = [(k, v) for k, vs in parse_qs(parsed_uri.query) for v in vs] - for k, v in params.items(): - if v is not None: - query_params.append((k, v)) - - redirect_uri = urlunparse(parsed_uri._replace(query=urlencode(query_params))) - return redirect_uri +from dataclasses import dataclass +from typing import Generic, Literal, Protocol, TypeVar +from urllib.parse import parse_qs, urlencode, urlparse, urlunparse + +from pydantic import AnyHttpUrl, BaseModel + +from mcp.shared.auth import ( + OAuthClientInformationFull, + OAuthToken, +) + + +class AuthorizationParams(BaseModel): + state: str | None + scopes: list[str] | None + code_challenge: str + redirect_uri: AnyHttpUrl + redirect_uri_provided_explicitly: bool + + +class AuthorizationCode(BaseModel): + code: str + scopes: list[str] + expires_at: float + client_id: str + code_challenge: str + redirect_uri: AnyHttpUrl + redirect_uri_provided_explicitly: bool + + +class RefreshToken(BaseModel): + token: str + client_id: str + scopes: list[str] + expires_at: int | None = None + + +class AccessToken(BaseModel): + token: str + client_id: str + scopes: list[str] + expires_at: int | None = None + + +RegistrationErrorCode = 
Literal[ + "invalid_redirect_uri", + "invalid_client_metadata", + "invalid_software_statement", + "unapproved_software_statement", +] + + +@dataclass(frozen=True) +class RegistrationError(Exception): + error: RegistrationErrorCode + error_description: str | None = None + + +AuthorizationErrorCode = Literal[ + "invalid_request", + "unauthorized_client", + "access_denied", + "unsupported_response_type", + "invalid_scope", + "server_error", + "temporarily_unavailable", +] + + +@dataclass(frozen=True) +class AuthorizeError(Exception): + error: AuthorizationErrorCode + error_description: str | None = None + + +TokenErrorCode = Literal[ + "invalid_request", + "invalid_client", + "invalid_grant", + "unauthorized_client", + "unsupported_grant_type", + "invalid_scope", +] + + +@dataclass(frozen=True) +class TokenError(Exception): + error: TokenErrorCode + error_description: str | None = None + + +# NOTE: FastMCP doesn't render any of these types in the user response, so it's +# OK to add fields to subclasses which should not be exposed externally. +AuthorizationCodeT = TypeVar("AuthorizationCodeT", bound=AuthorizationCode) +RefreshTokenT = TypeVar("RefreshTokenT", bound=RefreshToken) +AccessTokenT = TypeVar("AccessTokenT", bound=AccessToken) + + +class OAuthAuthorizationServerProvider( + Protocol, Generic[AuthorizationCodeT, RefreshTokenT, AccessTokenT] +): + async def get_client(self, client_id: str) -> OAuthClientInformationFull | None: + """ + Retrieves client information by client ID. + + Implementors MAY raise NotImplementedError if dynamic client registration is + disabled in ClientRegistrationOptions. + + Args: + client_id: The ID of the client to retrieve. + + Returns: + The client information, or None if the client does not exist. + """ + ... + + async def register_client(self, client_info: OAuthClientInformationFull) -> None: + """ + Saves client information as part of registering it. 
+ + Implementors MAY raise NotImplementedError if dynamic client registration is + disabled in ClientRegistrationOptions. + + Args: + client_info: The client metadata to register. + + Raises: + RegistrationError: If the client metadata is invalid. + """ + ... + + async def authorize( + self, client: OAuthClientInformationFull, params: AuthorizationParams + ) -> str: + """ + Called as part of the /authorize endpoint, and returns a URL that the client + will be redirected to. + Many MCP implementations will redirect to a third-party provider to perform + a second OAuth exchange with that provider. In this sort of setup, the client + has an OAuth connection with the MCP server, and the MCP server has an OAuth + connection with the 3rd-party provider. At the end of this flow, the client + should be redirected to the redirect_uri from params.redirect_uri. + + +--------+ +------------+ +-------------------+ + | | | | | | + | Client | --> | MCP Server | --> | 3rd Party OAuth | + | | | | | Server | + +--------+ +------------+ +-------------------+ + | ^ | + +------------+ | | | + | | | | Redirect | + |redirect_uri|<-----+ +------------------+ + | | + +------------+ + + Implementations will need to define another handler on the MCP server return + flow to perform the second redirect, and generate and store an authorization + code as part of completing the OAuth authorization step. + + Implementations SHOULD generate an authorization code with at least 160 bits of + entropy, + and MUST generate an authorization code with at least 128 bits of entropy. + See https://datatracker.ietf.org/doc/html/rfc6749#section-10.10. + + Args: + client: The client requesting authorization. + params: The parameters of the authorization request. + + Returns: + A URL to redirect the client to for authorization. + + Raises: + AuthorizeError: If the authorization request is invalid. + """ + ... 
+ + async def load_authorization_code( + self, client: OAuthClientInformationFull, authorization_code: str + ) -> AuthorizationCodeT | None: + """ + Loads an AuthorizationCode by its code. + + Args: + client: The client that requested the authorization code. + authorization_code: The authorization code to get the challenge for. + + Returns: + The AuthorizationCode, or None if not found + """ + ... + + async def exchange_authorization_code( + self, client: OAuthClientInformationFull, authorization_code: AuthorizationCodeT + ) -> OAuthToken: + """ + Exchanges an authorization code for an access token and refresh token. + + Args: + client: The client exchanging the authorization code. + authorization_code: The authorization code to exchange. + + Returns: + The OAuth token, containing access and refresh tokens. + + Raises: + TokenError: If the request is invalid + """ + ... + + async def load_refresh_token( + self, client: OAuthClientInformationFull, refresh_token: str + ) -> RefreshTokenT | None: + """ + Loads a RefreshToken by its token string. + + Args: + client: The client that is requesting to load the refresh token. + refresh_token: The refresh token string to load. + + Returns: + The RefreshToken object if found, or None if not found. + """ + + ... + + async def exchange_refresh_token( + self, + client: OAuthClientInformationFull, + refresh_token: RefreshTokenT, + scopes: list[str], + ) -> OAuthToken: + """ + Exchanges a refresh token for an access token and refresh token. + + Implementations SHOULD rotate both the access token and refresh token. + + Args: + client: The client exchanging the refresh token. + refresh_token: The refresh token to exchange. + scopes: Optional scopes to request with the new access token. + + Returns: + The OAuth token, containing access and refresh tokens. + + Raises: + TokenError: If the request is invalid + """ + ... 
+ + async def load_access_token(self, token: str) -> AccessTokenT | None: + """ + Loads an access token by its token. + + Args: + token: The access token to verify. + + Returns: + The AuthInfo, or None if the token is invalid. + """ + ... + + async def revoke_token( + self, + token: AccessTokenT | RefreshTokenT, + ) -> None: + """ + Revokes an access or refresh token. + + If the given token is invalid or already revoked, this method should do nothing. + + Implementations SHOULD revoke both the access token and its corresponding + refresh token, regardless of which of the access token or refresh token is + provided. + + Args: + token: the token to revoke + """ + ... + + +def construct_redirect_uri(redirect_uri_base: str, **params: str | None) -> str: + parsed_uri = urlparse(redirect_uri_base) + query_params = [(k, v) for k, vs in parse_qs(parsed_uri.query) for v in vs] + for k, v in params.items(): + if v is not None: + query_params.append((k, v)) + + redirect_uri = urlunparse(parsed_uri._replace(query=urlencode(query_params))) + return redirect_uri diff --git a/src/mcp/server/auth/settings.py b/src/mcp/server/auth/settings.py index 1086bb77e..6b275f67c 100644 --- a/src/mcp/server/auth/settings.py +++ b/src/mcp/server/auth/settings.py @@ -1,24 +1,24 @@ -from pydantic import AnyHttpUrl, BaseModel, Field - - -class ClientRegistrationOptions(BaseModel): - enabled: bool = False - client_secret_expiry_seconds: int | None = None - valid_scopes: list[str] | None = None - default_scopes: list[str] | None = None - - -class RevocationOptions(BaseModel): - enabled: bool = False - - -class AuthSettings(BaseModel): - issuer_url: AnyHttpUrl = Field( - ..., - description="URL advertised as OAuth issuer; this should be the URL the server " - "is reachable at", - ) - service_documentation_url: AnyHttpUrl | None = None - client_registration_options: ClientRegistrationOptions | None = None - revocation_options: RevocationOptions | None = None - required_scopes: list[str] | None = None 
+from pydantic import AnyHttpUrl, BaseModel, Field + + +class ClientRegistrationOptions(BaseModel): + enabled: bool = False + client_secret_expiry_seconds: int | None = None + valid_scopes: list[str] | None = None + default_scopes: list[str] | None = None + + +class RevocationOptions(BaseModel): + enabled: bool = False + + +class AuthSettings(BaseModel): + issuer_url: AnyHttpUrl = Field( + ..., + description="URL advertised as OAuth issuer; this should be the URL the server " + "is reachable at", + ) + service_documentation_url: AnyHttpUrl | None = None + client_registration_options: ClientRegistrationOptions | None = None + revocation_options: RevocationOptions | None = None + required_scopes: list[str] | None = None diff --git a/src/mcp/server/fastmcp/__init__.py b/src/mcp/server/fastmcp/__init__.py index 84b052078..f8de56888 100644 --- a/src/mcp/server/fastmcp/__init__.py +++ b/src/mcp/server/fastmcp/__init__.py @@ -1,9 +1,9 @@ -"""FastMCP - A more ergonomic interface for MCP servers.""" - -from importlib.metadata import version - -from .server import Context, FastMCP -from .utilities.types import Image - -__version__ = version("mcp") -__all__ = ["FastMCP", "Context", "Image"] +"""FastMCP - A more ergonomic interface for MCP servers.""" + +from importlib.metadata import version + +from .server import Context, FastMCP +from .utilities.types import Image + +__version__ = version("mcp") +__all__ = ["FastMCP", "Context", "Image"] diff --git a/src/mcp/server/fastmcp/exceptions.py b/src/mcp/server/fastmcp/exceptions.py index fb5bda106..c4ea73518 100644 --- a/src/mcp/server/fastmcp/exceptions.py +++ b/src/mcp/server/fastmcp/exceptions.py @@ -1,21 +1,21 @@ -"""Custom exceptions for FastMCP.""" - - -class FastMCPError(Exception): - """Base error for FastMCP.""" - - -class ValidationError(FastMCPError): - """Error in validating parameters or return values.""" - - -class ResourceError(FastMCPError): - """Error in resource operations.""" - - -class ToolError(FastMCPError): 
- """Error in tool operations.""" - - -class InvalidSignature(Exception): - """Invalid signature for use with FastMCP.""" +"""Custom exceptions for FastMCP.""" + + +class FastMCPError(Exception): + """Base error for FastMCP.""" + + +class ValidationError(FastMCPError): + """Error in validating parameters or return values.""" + + +class ResourceError(FastMCPError): + """Error in resource operations.""" + + +class ToolError(FastMCPError): + """Error in tool operations.""" + + +class InvalidSignature(Exception): + """Invalid signature for use with FastMCP.""" diff --git a/src/mcp/server/fastmcp/prompts/__init__.py b/src/mcp/server/fastmcp/prompts/__init__.py index 763726964..5fcca4d90 100644 --- a/src/mcp/server/fastmcp/prompts/__init__.py +++ b/src/mcp/server/fastmcp/prompts/__init__.py @@ -1,4 +1,4 @@ -from .base import Prompt -from .manager import PromptManager - -__all__ = ["Prompt", "PromptManager"] +from .base import Prompt +from .manager import PromptManager + +__all__ = ["Prompt", "PromptManager"] diff --git a/src/mcp/server/fastmcp/prompts/base.py b/src/mcp/server/fastmcp/prompts/base.py index aa3d1eac9..70d4edc67 100644 --- a/src/mcp/server/fastmcp/prompts/base.py +++ b/src/mcp/server/fastmcp/prompts/base.py @@ -1,168 +1,168 @@ -"""Base classes for FastMCP prompts.""" - -import inspect -from collections.abc import Awaitable, Callable, Sequence -from typing import Any, Literal - -import pydantic_core -from pydantic import BaseModel, Field, TypeAdapter, validate_call - -from mcp.types import EmbeddedResource, ImageContent, TextContent - -CONTENT_TYPES = TextContent | ImageContent | EmbeddedResource - - -class Message(BaseModel): - """Base class for all prompt messages.""" - - role: Literal["user", "assistant"] - content: CONTENT_TYPES - - def __init__(self, content: str | CONTENT_TYPES, **kwargs: Any): - if isinstance(content, str): - content = TextContent(type="text", text=content) - super().__init__(content=content, **kwargs) - - -class UserMessage(Message): 
- """A message from the user.""" - - role: Literal["user", "assistant"] = "user" - - def __init__(self, content: str | CONTENT_TYPES, **kwargs: Any): - super().__init__(content=content, **kwargs) - - -class AssistantMessage(Message): - """A message from the assistant.""" - - role: Literal["user", "assistant"] = "assistant" - - def __init__(self, content: str | CONTENT_TYPES, **kwargs: Any): - super().__init__(content=content, **kwargs) - - -message_validator = TypeAdapter[UserMessage | AssistantMessage]( - UserMessage | AssistantMessage -) - -SyncPromptResult = ( - str | Message | dict[str, Any] | Sequence[str | Message | dict[str, Any]] -) -PromptResult = SyncPromptResult | Awaitable[SyncPromptResult] - - -class PromptArgument(BaseModel): - """An argument that can be passed to a prompt.""" - - name: str = Field(description="Name of the argument") - description: str | None = Field( - None, description="Description of what the argument does" - ) - required: bool = Field( - default=False, description="Whether the argument is required" - ) - - -class Prompt(BaseModel): - """A prompt template that can be rendered with parameters.""" - - name: str = Field(description="Name of the prompt") - description: str | None = Field( - None, description="Description of what the prompt does" - ) - arguments: list[PromptArgument] | None = Field( - None, description="Arguments that can be passed to the prompt" - ) - fn: Callable[..., PromptResult | Awaitable[PromptResult]] = Field(exclude=True) - - @classmethod - def from_function( - cls, - fn: Callable[..., PromptResult | Awaitable[PromptResult]], - name: str | None = None, - description: str | None = None, - ) -> "Prompt": - """Create a Prompt from a function. 
- - The function can return: - - A string (converted to a message) - - A Message object - - A dict (converted to a message) - - A sequence of any of the above - """ - func_name = name or fn.__name__ - - if func_name == "": - raise ValueError("You must provide a name for lambda functions") - - # Get schema from TypeAdapter - will fail if function isn't properly typed - parameters = TypeAdapter(fn).json_schema() - - # Convert parameters to PromptArguments - arguments: list[PromptArgument] = [] - if "properties" in parameters: - for param_name, param in parameters["properties"].items(): - required = param_name in parameters.get("required", []) - arguments.append( - PromptArgument( - name=param_name, - description=param.get("description"), - required=required, - ) - ) - - # ensure the arguments are properly cast - fn = validate_call(fn) - - return cls( - name=func_name, - description=description or fn.__doc__ or "", - arguments=arguments, - fn=fn, - ) - - async def render(self, arguments: dict[str, Any] | None = None) -> list[Message]: - """Render the prompt with arguments.""" - # Validate required arguments - if self.arguments: - required = {arg.name for arg in self.arguments if arg.required} - provided = set(arguments or {}) - missing = required - provided - if missing: - raise ValueError(f"Missing required arguments: {missing}") - - try: - # Call function and check if result is a coroutine - result = self.fn(**(arguments or {})) - if inspect.iscoroutine(result): - result = await result - - # Validate messages - if not isinstance(result, list | tuple): - result = [result] - - # Convert result to messages - messages: list[Message] = [] - for msg in result: # type: ignore[reportUnknownVariableType] - try: - if isinstance(msg, Message): - messages.append(msg) - elif isinstance(msg, dict): - messages.append(message_validator.validate_python(msg)) - elif isinstance(msg, str): - content = TextContent(type="text", text=msg) - messages.append(UserMessage(content=content)) - 
else: - content = pydantic_core.to_json( - msg, fallback=str, indent=2 - ).decode() - messages.append(Message(role="user", content=content)) - except Exception: - raise ValueError( - f"Could not convert prompt result to message: {msg}" - ) - - return messages - except Exception as e: - raise ValueError(f"Error rendering prompt {self.name}: {e}") +"""Base classes for FastMCP prompts.""" + +import inspect +from collections.abc import Awaitable, Callable, Sequence +from typing import Any, Literal + +import pydantic_core +from pydantic import BaseModel, Field, TypeAdapter, validate_call + +from mcp.types import EmbeddedResource, ImageContent, TextContent + +CONTENT_TYPES = TextContent | ImageContent | EmbeddedResource + + +class Message(BaseModel): + """Base class for all prompt messages.""" + + role: Literal["user", "assistant"] + content: CONTENT_TYPES + + def __init__(self, content: str | CONTENT_TYPES, **kwargs: Any): + if isinstance(content, str): + content = TextContent(type="text", text=content) + super().__init__(content=content, **kwargs) + + +class UserMessage(Message): + """A message from the user.""" + + role: Literal["user", "assistant"] = "user" + + def __init__(self, content: str | CONTENT_TYPES, **kwargs: Any): + super().__init__(content=content, **kwargs) + + +class AssistantMessage(Message): + """A message from the assistant.""" + + role: Literal["user", "assistant"] = "assistant" + + def __init__(self, content: str | CONTENT_TYPES, **kwargs: Any): + super().__init__(content=content, **kwargs) + + +message_validator = TypeAdapter[UserMessage | AssistantMessage]( + UserMessage | AssistantMessage +) + +SyncPromptResult = ( + str | Message | dict[str, Any] | Sequence[str | Message | dict[str, Any]] +) +PromptResult = SyncPromptResult | Awaitable[SyncPromptResult] + + +class PromptArgument(BaseModel): + """An argument that can be passed to a prompt.""" + + name: str = Field(description="Name of the argument") + description: str | None = Field( + None, 
description="Description of what the argument does" + ) + required: bool = Field( + default=False, description="Whether the argument is required" + ) + + +class Prompt(BaseModel): + """A prompt template that can be rendered with parameters.""" + + name: str = Field(description="Name of the prompt") + description: str | None = Field( + None, description="Description of what the prompt does" + ) + arguments: list[PromptArgument] | None = Field( + None, description="Arguments that can be passed to the prompt" + ) + fn: Callable[..., PromptResult | Awaitable[PromptResult]] = Field(exclude=True) + + @classmethod + def from_function( + cls, + fn: Callable[..., PromptResult | Awaitable[PromptResult]], + name: str | None = None, + description: str | None = None, + ) -> "Prompt": + """Create a Prompt from a function. + + The function can return: + - A string (converted to a message) + - A Message object + - A dict (converted to a message) + - A sequence of any of the above + """ + func_name = name or fn.__name__ + + if func_name == "": + raise ValueError("You must provide a name for lambda functions") + + # Get schema from TypeAdapter - will fail if function isn't properly typed + parameters = TypeAdapter(fn).json_schema() + + # Convert parameters to PromptArguments + arguments: list[PromptArgument] = [] + if "properties" in parameters: + for param_name, param in parameters["properties"].items(): + required = param_name in parameters.get("required", []) + arguments.append( + PromptArgument( + name=param_name, + description=param.get("description"), + required=required, + ) + ) + + # ensure the arguments are properly cast + fn = validate_call(fn) + + return cls( + name=func_name, + description=description or fn.__doc__ or "", + arguments=arguments, + fn=fn, + ) + + async def render(self, arguments: dict[str, Any] | None = None) -> list[Message]: + """Render the prompt with arguments.""" + # Validate required arguments + if self.arguments: + required = {arg.name for arg in 
self.arguments if arg.required} + provided = set(arguments or {}) + missing = required - provided + if missing: + raise ValueError(f"Missing required arguments: {missing}") + + try: + # Call function and check if result is a coroutine + result = self.fn(**(arguments or {})) + if inspect.iscoroutine(result): + result = await result + + # Validate messages + if not isinstance(result, list | tuple): + result = [result] + + # Convert result to messages + messages: list[Message] = [] + for msg in result: # type: ignore[reportUnknownVariableType] + try: + if isinstance(msg, Message): + messages.append(msg) + elif isinstance(msg, dict): + messages.append(message_validator.validate_python(msg)) + elif isinstance(msg, str): + content = TextContent(type="text", text=msg) + messages.append(UserMessage(content=content)) + else: + content = pydantic_core.to_json( + msg, fallback=str, indent=2 + ).decode() + messages.append(Message(role="user", content=content)) + except Exception: + raise ValueError( + f"Could not convert prompt result to message: {msg}" + ) + + return messages + except Exception as e: + raise ValueError(f"Error rendering prompt {self.name}: {e}") diff --git a/src/mcp/server/fastmcp/prompts/manager.py b/src/mcp/server/fastmcp/prompts/manager.py index 7ccbdef36..0dabbd550 100644 --- a/src/mcp/server/fastmcp/prompts/manager.py +++ b/src/mcp/server/fastmcp/prompts/manager.py @@ -1,50 +1,50 @@ -"""Prompt management functionality.""" - -from typing import Any - -from mcp.server.fastmcp.prompts.base import Message, Prompt -from mcp.server.fastmcp.utilities.logging import get_logger - -logger = get_logger(__name__) - - -class PromptManager: - """Manages FastMCP prompts.""" - - def __init__(self, warn_on_duplicate_prompts: bool = True): - self._prompts: dict[str, Prompt] = {} - self.warn_on_duplicate_prompts = warn_on_duplicate_prompts - - def get_prompt(self, name: str) -> Prompt | None: - """Get prompt by name.""" - return self._prompts.get(name) - - def 
list_prompts(self) -> list[Prompt]: - """List all registered prompts.""" - return list(self._prompts.values()) - - def add_prompt( - self, - prompt: Prompt, - ) -> Prompt: - """Add a prompt to the manager.""" - - # Check for duplicates - existing = self._prompts.get(prompt.name) - if existing: - if self.warn_on_duplicate_prompts: - logger.warning(f"Prompt already exists: {prompt.name}") - return existing - - self._prompts[prompt.name] = prompt - return prompt - - async def render_prompt( - self, name: str, arguments: dict[str, Any] | None = None - ) -> list[Message]: - """Render a prompt by name with arguments.""" - prompt = self.get_prompt(name) - if not prompt: - raise ValueError(f"Unknown prompt: {name}") - - return await prompt.render(arguments) +"""Prompt management functionality.""" + +from typing import Any + +from mcp.server.fastmcp.prompts.base import Message, Prompt +from mcp.server.fastmcp.utilities.logging import get_logger + +logger = get_logger(__name__) + + +class PromptManager: + """Manages FastMCP prompts.""" + + def __init__(self, warn_on_duplicate_prompts: bool = True): + self._prompts: dict[str, Prompt] = {} + self.warn_on_duplicate_prompts = warn_on_duplicate_prompts + + def get_prompt(self, name: str) -> Prompt | None: + """Get prompt by name.""" + return self._prompts.get(name) + + def list_prompts(self) -> list[Prompt]: + """List all registered prompts.""" + return list(self._prompts.values()) + + def add_prompt( + self, + prompt: Prompt, + ) -> Prompt: + """Add a prompt to the manager.""" + + # Check for duplicates + existing = self._prompts.get(prompt.name) + if existing: + if self.warn_on_duplicate_prompts: + logger.warning(f"Prompt already exists: {prompt.name}") + return existing + + self._prompts[prompt.name] = prompt + return prompt + + async def render_prompt( + self, name: str, arguments: dict[str, Any] | None = None + ) -> list[Message]: + """Render a prompt by name with arguments.""" + prompt = self.get_prompt(name) + if not 
prompt: + raise ValueError(f"Unknown prompt: {name}") + + return await prompt.render(arguments) diff --git a/src/mcp/server/fastmcp/prompts/prompt_manager.py b/src/mcp/server/fastmcp/prompts/prompt_manager.py index 389e89624..b8fcd036d 100644 --- a/src/mcp/server/fastmcp/prompts/prompt_manager.py +++ b/src/mcp/server/fastmcp/prompts/prompt_manager.py @@ -1,33 +1,33 @@ -"""Prompt management functionality.""" - -from mcp.server.fastmcp.prompts.base import Prompt -from mcp.server.fastmcp.utilities.logging import get_logger - -logger = get_logger(__name__) - - -class PromptManager: - """Manages FastMCP prompts.""" - - def __init__(self, warn_on_duplicate_prompts: bool = True): - self._prompts: dict[str, Prompt] = {} - self.warn_on_duplicate_prompts = warn_on_duplicate_prompts - - def add_prompt(self, prompt: Prompt) -> Prompt: - """Add a prompt to the manager.""" - logger.debug(f"Adding prompt: {prompt.name}") - existing = self._prompts.get(prompt.name) - if existing: - if self.warn_on_duplicate_prompts: - logger.warning(f"Prompt already exists: {prompt.name}") - return existing - self._prompts[prompt.name] = prompt - return prompt - - def get_prompt(self, name: str) -> Prompt | None: - """Get prompt by name.""" - return self._prompts.get(name) - - def list_prompts(self) -> list[Prompt]: - """List all registered prompts.""" - return list(self._prompts.values()) +"""Prompt management functionality.""" + +from mcp.server.fastmcp.prompts.base import Prompt +from mcp.server.fastmcp.utilities.logging import get_logger + +logger = get_logger(__name__) + + +class PromptManager: + """Manages FastMCP prompts.""" + + def __init__(self, warn_on_duplicate_prompts: bool = True): + self._prompts: dict[str, Prompt] = {} + self.warn_on_duplicate_prompts = warn_on_duplicate_prompts + + def add_prompt(self, prompt: Prompt) -> Prompt: + """Add a prompt to the manager.""" + logger.debug(f"Adding prompt: {prompt.name}") + existing = self._prompts.get(prompt.name) + if existing: + if 
self.warn_on_duplicate_prompts: + logger.warning(f"Prompt already exists: {prompt.name}") + return existing + self._prompts[prompt.name] = prompt + return prompt + + def get_prompt(self, name: str) -> Prompt | None: + """Get prompt by name.""" + return self._prompts.get(name) + + def list_prompts(self) -> list[Prompt]: + """List all registered prompts.""" + return list(self._prompts.values()) diff --git a/src/mcp/server/fastmcp/resources/__init__.py b/src/mcp/server/fastmcp/resources/__init__.py index b5805fb34..7ba213967 100644 --- a/src/mcp/server/fastmcp/resources/__init__.py +++ b/src/mcp/server/fastmcp/resources/__init__.py @@ -1,23 +1,23 @@ -from .base import Resource -from .resource_manager import ResourceManager -from .templates import ResourceTemplate -from .types import ( - BinaryResource, - DirectoryResource, - FileResource, - FunctionResource, - HttpResource, - TextResource, -) - -__all__ = [ - "Resource", - "TextResource", - "BinaryResource", - "FunctionResource", - "FileResource", - "HttpResource", - "DirectoryResource", - "ResourceTemplate", - "ResourceManager", -] +from .base import Resource +from .resource_manager import ResourceManager +from .templates import ResourceTemplate +from .types import ( + BinaryResource, + DirectoryResource, + FileResource, + FunctionResource, + HttpResource, + TextResource, +) + +__all__ = [ + "Resource", + "TextResource", + "BinaryResource", + "FunctionResource", + "FileResource", + "HttpResource", + "DirectoryResource", + "ResourceTemplate", + "ResourceManager", +] diff --git a/src/mcp/server/fastmcp/resources/base.py b/src/mcp/server/fastmcp/resources/base.py index b2050e7f8..7faa48674 100644 --- a/src/mcp/server/fastmcp/resources/base.py +++ b/src/mcp/server/fastmcp/resources/base.py @@ -1,48 +1,48 @@ -"""Base classes and interfaces for FastMCP resources.""" - -import abc -from typing import Annotated - -from pydantic import ( - AnyUrl, - BaseModel, - ConfigDict, - Field, - UrlConstraints, - ValidationInfo, - 
field_validator, -) - - -class Resource(BaseModel, abc.ABC): - """Base class for all resources.""" - - model_config = ConfigDict(validate_default=True) - - uri: Annotated[AnyUrl, UrlConstraints(host_required=False)] = Field( - default=..., description="URI of the resource" - ) - name: str | None = Field(description="Name of the resource", default=None) - description: str | None = Field( - description="Description of the resource", default=None - ) - mime_type: str = Field( - default="text/plain", - description="MIME type of the resource content", - pattern=r"^[a-zA-Z0-9]+/[a-zA-Z0-9\-+.]+$", - ) - - @field_validator("name", mode="before") - @classmethod - def set_default_name(cls, name: str | None, info: ValidationInfo) -> str: - """Set default name from URI if not provided.""" - if name: - return name - if uri := info.data.get("uri"): - return str(uri) - raise ValueError("Either name or uri must be provided") - - @abc.abstractmethod - async def read(self) -> str | bytes: - """Read the resource content.""" - pass +"""Base classes and interfaces for FastMCP resources.""" + +import abc +from typing import Annotated + +from pydantic import ( + AnyUrl, + BaseModel, + ConfigDict, + Field, + UrlConstraints, + ValidationInfo, + field_validator, +) + + +class Resource(BaseModel, abc.ABC): + """Base class for all resources.""" + + model_config = ConfigDict(validate_default=True) + + uri: Annotated[AnyUrl, UrlConstraints(host_required=False)] = Field( + default=..., description="URI of the resource" + ) + name: str | None = Field(description="Name of the resource", default=None) + description: str | None = Field( + description="Description of the resource", default=None + ) + mime_type: str = Field( + default="text/plain", + description="MIME type of the resource content", + pattern=r"^[a-zA-Z0-9]+/[a-zA-Z0-9\-+.]+$", + ) + + @field_validator("name", mode="before") + @classmethod + def set_default_name(cls, name: str | None, info: ValidationInfo) -> str: + """Set default 
name from URI if not provided.""" + if name: + return name + if uri := info.data.get("uri"): + return str(uri) + raise ValueError("Either name or uri must be provided") + + @abc.abstractmethod + async def read(self) -> str | bytes: + """Read the resource content.""" + pass diff --git a/src/mcp/server/fastmcp/resources/resource_manager.py b/src/mcp/server/fastmcp/resources/resource_manager.py index d27e6ac12..5ef99d93a 100644 --- a/src/mcp/server/fastmcp/resources/resource_manager.py +++ b/src/mcp/server/fastmcp/resources/resource_manager.py @@ -1,95 +1,95 @@ -"""Resource manager functionality.""" - -from collections.abc import Callable -from typing import Any - -from pydantic import AnyUrl - -from mcp.server.fastmcp.resources.base import Resource -from mcp.server.fastmcp.resources.templates import ResourceTemplate -from mcp.server.fastmcp.utilities.logging import get_logger - -logger = get_logger(__name__) - - -class ResourceManager: - """Manages FastMCP resources.""" - - def __init__(self, warn_on_duplicate_resources: bool = True): - self._resources: dict[str, Resource] = {} - self._templates: dict[str, ResourceTemplate] = {} - self.warn_on_duplicate_resources = warn_on_duplicate_resources - - def add_resource(self, resource: Resource) -> Resource: - """Add a resource to the manager. - - Args: - resource: A Resource instance to add - - Returns: - The added resource. If a resource with the same URI already exists, - returns the existing resource. 
- """ - logger.debug( - "Adding resource", - extra={ - "uri": resource.uri, - "type": type(resource).__name__, - "resource_name": resource.name, - }, - ) - existing = self._resources.get(str(resource.uri)) - if existing: - if self.warn_on_duplicate_resources: - logger.warning(f"Resource already exists: {resource.uri}") - return existing - self._resources[str(resource.uri)] = resource - return resource - - def add_template( - self, - fn: Callable[..., Any], - uri_template: str, - name: str | None = None, - description: str | None = None, - mime_type: str | None = None, - ) -> ResourceTemplate: - """Add a template from a function.""" - template = ResourceTemplate.from_function( - fn, - uri_template=uri_template, - name=name, - description=description, - mime_type=mime_type, - ) - self._templates[template.uri_template] = template - return template - - async def get_resource(self, uri: AnyUrl | str) -> Resource | None: - """Get resource by URI, checking concrete resources first, then templates.""" - uri_str = str(uri) - logger.debug("Getting resource", extra={"uri": uri_str}) - - # First check concrete resources - if resource := self._resources.get(uri_str): - return resource - - # Then check templates - for template in self._templates.values(): - if params := template.matches(uri_str): - try: - return await template.create_resource(uri_str, params) - except Exception as e: - raise ValueError(f"Error creating resource from template: {e}") - - raise ValueError(f"Unknown resource: {uri}") - - def list_resources(self) -> list[Resource]: - """List all registered resources.""" - logger.debug("Listing resources", extra={"count": len(self._resources)}) - return list(self._resources.values()) - - def list_templates(self) -> list[ResourceTemplate]: - """List all registered templates.""" - logger.debug("Listing templates", extra={"count": len(self._templates)}) - return list(self._templates.values()) +"""Resource manager functionality.""" + +from collections.abc import Callable 
+from typing import Any + +from pydantic import AnyUrl + +from mcp.server.fastmcp.resources.base import Resource +from mcp.server.fastmcp.resources.templates import ResourceTemplate +from mcp.server.fastmcp.utilities.logging import get_logger + +logger = get_logger(__name__) + + +class ResourceManager: + """Manages FastMCP resources.""" + + def __init__(self, warn_on_duplicate_resources: bool = True): + self._resources: dict[str, Resource] = {} + self._templates: dict[str, ResourceTemplate] = {} + self.warn_on_duplicate_resources = warn_on_duplicate_resources + + def add_resource(self, resource: Resource) -> Resource: + """Add a resource to the manager. + + Args: + resource: A Resource instance to add + + Returns: + The added resource. If a resource with the same URI already exists, + returns the existing resource. + """ + logger.debug( + "Adding resource", + extra={ + "uri": resource.uri, + "type": type(resource).__name__, + "resource_name": resource.name, + }, + ) + existing = self._resources.get(str(resource.uri)) + if existing: + if self.warn_on_duplicate_resources: + logger.warning(f"Resource already exists: {resource.uri}") + return existing + self._resources[str(resource.uri)] = resource + return resource + + def add_template( + self, + fn: Callable[..., Any], + uri_template: str, + name: str | None = None, + description: str | None = None, + mime_type: str | None = None, + ) -> ResourceTemplate: + """Add a template from a function.""" + template = ResourceTemplate.from_function( + fn, + uri_template=uri_template, + name=name, + description=description, + mime_type=mime_type, + ) + self._templates[template.uri_template] = template + return template + + async def get_resource(self, uri: AnyUrl | str) -> Resource | None: + """Get resource by URI, checking concrete resources first, then templates.""" + uri_str = str(uri) + logger.debug("Getting resource", extra={"uri": uri_str}) + + # First check concrete resources + if resource := self._resources.get(uri_str): 
+ return resource + + # Then check templates + for template in self._templates.values(): + if params := template.matches(uri_str): + try: + return await template.create_resource(uri_str, params) + except Exception as e: + raise ValueError(f"Error creating resource from template: {e}") + + raise ValueError(f"Unknown resource: {uri}") + + def list_resources(self) -> list[Resource]: + """List all registered resources.""" + logger.debug("Listing resources", extra={"count": len(self._resources)}) + return list(self._resources.values()) + + def list_templates(self) -> list[ResourceTemplate]: + """List all registered templates.""" + logger.debug("Listing templates", extra={"count": len(self._templates)}) + return list(self._templates.values()) diff --git a/src/mcp/server/fastmcp/resources/templates.py b/src/mcp/server/fastmcp/resources/templates.py index a30b18253..abfcbf576 100644 --- a/src/mcp/server/fastmcp/resources/templates.py +++ b/src/mcp/server/fastmcp/resources/templates.py @@ -1,85 +1,85 @@ -"""Resource template functionality.""" - -from __future__ import annotations - -import inspect -import re -from collections.abc import Callable -from typing import Any - -from pydantic import BaseModel, Field, TypeAdapter, validate_call - -from mcp.server.fastmcp.resources.types import FunctionResource, Resource - - -class ResourceTemplate(BaseModel): - """A template for dynamically creating resources.""" - - uri_template: str = Field( - description="URI template with parameters (e.g. 
weather://{city}/current)" - ) - name: str = Field(description="Name of the resource") - description: str | None = Field(description="Description of what the resource does") - mime_type: str = Field( - default="text/plain", description="MIME type of the resource content" - ) - fn: Callable[..., Any] = Field(exclude=True) - parameters: dict[str, Any] = Field( - description="JSON schema for function parameters" - ) - - @classmethod - def from_function( - cls, - fn: Callable[..., Any], - uri_template: str, - name: str | None = None, - description: str | None = None, - mime_type: str | None = None, - ) -> ResourceTemplate: - """Create a template from a function.""" - func_name = name or fn.__name__ - if func_name == "": - raise ValueError("You must provide a name for lambda functions") - - # Get schema from TypeAdapter - will fail if function isn't properly typed - parameters = TypeAdapter(fn).json_schema() - - # ensure the arguments are properly cast - fn = validate_call(fn) - - return cls( - uri_template=uri_template, - name=func_name, - description=description or fn.__doc__ or "", - mime_type=mime_type or "text/plain", - fn=fn, - parameters=parameters, - ) - - def matches(self, uri: str) -> dict[str, Any] | None: - """Check if URI matches template and extract parameters.""" - # Convert template to regex pattern - pattern = self.uri_template.replace("{", "(?P<").replace("}", ">[^/]+)") - match = re.match(f"^{pattern}$", uri) - if match: - return match.groupdict() - return None - - async def create_resource(self, uri: str, params: dict[str, Any]) -> Resource: - """Create a resource from the template with the given parameters.""" - try: - # Call function and check if result is a coroutine - result = self.fn(**params) - if inspect.iscoroutine(result): - result = await result - - return FunctionResource( - uri=uri, # type: ignore - name=self.name, - description=self.description, - mime_type=self.mime_type, - fn=lambda: result, # Capture result in closure - ) - except 
Exception as e: - raise ValueError(f"Error creating resource from template: {e}") +"""Resource template functionality.""" + +from __future__ import annotations + +import inspect +import re +from collections.abc import Callable +from typing import Any + +from pydantic import BaseModel, Field, TypeAdapter, validate_call + +from mcp.server.fastmcp.resources.types import FunctionResource, Resource + + +class ResourceTemplate(BaseModel): + """A template for dynamically creating resources.""" + + uri_template: str = Field( + description="URI template with parameters (e.g. weather://{city}/current)" + ) + name: str = Field(description="Name of the resource") + description: str | None = Field(description="Description of what the resource does") + mime_type: str = Field( + default="text/plain", description="MIME type of the resource content" + ) + fn: Callable[..., Any] = Field(exclude=True) + parameters: dict[str, Any] = Field( + description="JSON schema for function parameters" + ) + + @classmethod + def from_function( + cls, + fn: Callable[..., Any], + uri_template: str, + name: str | None = None, + description: str | None = None, + mime_type: str | None = None, + ) -> ResourceTemplate: + """Create a template from a function.""" + func_name = name or fn.__name__ + if func_name == "": + raise ValueError("You must provide a name for lambda functions") + + # Get schema from TypeAdapter - will fail if function isn't properly typed + parameters = TypeAdapter(fn).json_schema() + + # ensure the arguments are properly cast + fn = validate_call(fn) + + return cls( + uri_template=uri_template, + name=func_name, + description=description or fn.__doc__ or "", + mime_type=mime_type or "text/plain", + fn=fn, + parameters=parameters, + ) + + def matches(self, uri: str) -> dict[str, Any] | None: + """Check if URI matches template and extract parameters.""" + # Convert template to regex pattern + pattern = self.uri_template.replace("{", "(?P<").replace("}", ">[^/]+)") + match = 
re.match(f"^{pattern}$", uri) + if match: + return match.groupdict() + return None + + async def create_resource(self, uri: str, params: dict[str, Any]) -> Resource: + """Create a resource from the template with the given parameters.""" + try: + # Call function and check if result is a coroutine + result = self.fn(**params) + if inspect.iscoroutine(result): + result = await result + + return FunctionResource( + uri=uri, # type: ignore + name=self.name, + description=self.description, + mime_type=self.mime_type, + fn=lambda: result, # Capture result in closure + ) + except Exception as e: + raise ValueError(f"Error creating resource from template: {e}") diff --git a/src/mcp/server/fastmcp/tools/__init__.py b/src/mcp/server/fastmcp/tools/__init__.py index ae9c65619..d20c0de65 100644 --- a/src/mcp/server/fastmcp/tools/__init__.py +++ b/src/mcp/server/fastmcp/tools/__init__.py @@ -1,4 +1,4 @@ -from .base import Tool -from .tool_manager import ToolManager - -__all__ = ["Tool", "ToolManager"] +from .base import Tool +from .tool_manager import ToolManager + +__all__ = ["Tool", "ToolManager"] diff --git a/src/mcp/server/fastmcp/tools/base.py b/src/mcp/server/fastmcp/tools/base.py index 21eb1841d..fe518ab92 100644 --- a/src/mcp/server/fastmcp/tools/base.py +++ b/src/mcp/server/fastmcp/tools/base.py @@ -1,100 +1,100 @@ -from __future__ import annotations as _annotations - -import inspect -from collections.abc import Callable -from typing import TYPE_CHECKING, Any, get_origin - -from pydantic import BaseModel, Field - -from mcp.server.fastmcp.exceptions import ToolError -from mcp.server.fastmcp.utilities.func_metadata import FuncMetadata, func_metadata -from mcp.types import ToolAnnotations - -if TYPE_CHECKING: - from mcp.server.fastmcp.server import Context - from mcp.server.session import ServerSessionT - from mcp.shared.context import LifespanContextT - - -class Tool(BaseModel): - """Internal tool registration info.""" - - fn: Callable[..., Any] = Field(exclude=True) - 
name: str = Field(description="Name of the tool") - description: str = Field(description="Description of what the tool does") - parameters: dict[str, Any] = Field(description="JSON schema for tool parameters") - fn_metadata: FuncMetadata = Field( - description="Metadata about the function including a pydantic model for tool" - " arguments" - ) - is_async: bool = Field(description="Whether the tool is async") - context_kwarg: str | None = Field( - None, description="Name of the kwarg that should receive context" - ) - annotations: ToolAnnotations | None = Field( - None, description="Optional annotations for the tool" - ) - - @classmethod - def from_function( - cls, - fn: Callable[..., Any], - name: str | None = None, - description: str | None = None, - context_kwarg: str | None = None, - annotations: ToolAnnotations | None = None, - ) -> Tool: - """Create a Tool from a function.""" - from mcp.server.fastmcp.server import Context - - func_name = name or fn.__name__ - - if func_name == "": - raise ValueError("You must provide a name for lambda functions") - - func_doc = description or fn.__doc__ or "" - is_async = inspect.iscoroutinefunction(fn) - - if context_kwarg is None: - sig = inspect.signature(fn) - for param_name, param in sig.parameters.items(): - if get_origin(param.annotation) is not None: - continue - if issubclass(param.annotation, Context): - context_kwarg = param_name - break - - func_arg_metadata = func_metadata( - fn, - skip_names=[context_kwarg] if context_kwarg is not None else [], - ) - parameters = func_arg_metadata.arg_model.model_json_schema() - - return cls( - fn=fn, - name=func_name, - description=func_doc, - parameters=parameters, - fn_metadata=func_arg_metadata, - is_async=is_async, - context_kwarg=context_kwarg, - annotations=annotations, - ) - - async def run( - self, - arguments: dict[str, Any], - context: Context[ServerSessionT, LifespanContextT] | None = None, - ) -> Any: - """Run the tool with arguments.""" - try: - return await 
self.fn_metadata.call_fn_with_arg_validation( - self.fn, - self.is_async, - arguments, - {self.context_kwarg: context} - if self.context_kwarg is not None - else None, - ) - except Exception as e: - raise ToolError(f"Error executing tool {self.name}: {e}") from e +from __future__ import annotations as _annotations + +import inspect +from collections.abc import Callable +from typing import TYPE_CHECKING, Any, get_origin + +from pydantic import BaseModel, Field + +from mcp.server.fastmcp.exceptions import ToolError +from mcp.server.fastmcp.utilities.func_metadata import FuncMetadata, func_metadata +from mcp.types import ToolAnnotations + +if TYPE_CHECKING: + from mcp.server.fastmcp.server import Context + from mcp.server.session import ServerSessionT + from mcp.shared.context import LifespanContextT + + +class Tool(BaseModel): + """Internal tool registration info.""" + + fn: Callable[..., Any] = Field(exclude=True) + name: str = Field(description="Name of the tool") + description: str = Field(description="Description of what the tool does") + parameters: dict[str, Any] = Field(description="JSON schema for tool parameters") + fn_metadata: FuncMetadata = Field( + description="Metadata about the function including a pydantic model for tool" + " arguments" + ) + is_async: bool = Field(description="Whether the tool is async") + context_kwarg: str | None = Field( + None, description="Name of the kwarg that should receive context" + ) + annotations: ToolAnnotations | None = Field( + None, description="Optional annotations for the tool" + ) + + @classmethod + def from_function( + cls, + fn: Callable[..., Any], + name: str | None = None, + description: str | None = None, + context_kwarg: str | None = None, + annotations: ToolAnnotations | None = None, + ) -> Tool: + """Create a Tool from a function.""" + from mcp.server.fastmcp.server import Context + + func_name = name or fn.__name__ + + if func_name == "": + raise ValueError("You must provide a name for lambda functions") + 
+
+        func_doc = description or fn.__doc__ or ""
+        is_async = inspect.iscoroutinefunction(fn)
+
+        if context_kwarg is None:
+            sig = inspect.signature(fn)
+            for param_name, param in sig.parameters.items():
+                if get_origin(param.annotation) is not None:
+                    continue
+                if issubclass(param.annotation, Context):
+                    context_kwarg = param_name
+                    break
+
+        func_arg_metadata = func_metadata(
+            fn,
+            skip_names=[context_kwarg] if context_kwarg is not None else [],
+        )
+        parameters = func_arg_metadata.arg_model.model_json_schema()
+
+        return cls(
+            fn=fn,
+            name=func_name,
+            description=func_doc,
+            parameters=parameters,
+            fn_metadata=func_arg_metadata,
+            is_async=is_async,
+            context_kwarg=context_kwarg,
+            annotations=annotations,
+        )
+
+    async def run(
+        self,
+        arguments: dict[str, Any],
+        context: Context[ServerSessionT, LifespanContextT] | None = None,
+    ) -> Any:
+        """Run the tool with arguments."""
+        try:
+            return await self.fn_metadata.call_fn_with_arg_validation(
+                self.fn,
+                self.is_async,
+                arguments,
+                {self.context_kwarg: context}
+                if self.context_kwarg is not None
+                else None,
+            )
+        except Exception as e:
+            raise ToolError(f"Error executing tool {self.name}: {e}") from e
diff --git a/src/mcp/server/fastmcp/tools/tool_manager.py b/src/mcp/server/fastmcp/tools/tool_manager.py
index 6ec4fd151..2775423ed 100644
--- a/src/mcp/server/fastmcp/tools/tool_manager.py
+++ b/src/mcp/server/fastmcp/tools/tool_manager.py
@@ -1,75 +1,75 @@
-from __future__ import annotations as _annotations
-
-from collections.abc import Callable
-from typing import TYPE_CHECKING, Any
-
-from mcp.server.fastmcp.exceptions import ToolError
-from mcp.server.fastmcp.tools.base import Tool
-from mcp.server.fastmcp.utilities.logging import get_logger
-from mcp.shared.context import LifespanContextT
-from mcp.types import ToolAnnotations
-
-if TYPE_CHECKING:
-    from mcp.server.fastmcp.server import Context
-    from mcp.server.session import ServerSessionT
-
-logger = get_logger(__name__)
-
-
-class ToolManager:
-    """Manages FastMCP tools."""
-
-    def __init__(
-        self,
-        warn_on_duplicate_tools: bool = True,
-        *,
-        tools: list[Tool] | None = None,
-    ):
-        self._tools: dict[str, Tool] = {}
-        if tools is not None:
-            for tool in tools:
-                if warn_on_duplicate_tools and tool.name in self._tools:
-                    logger.warning(f"Tool already exists: {tool.name}")
-                self._tools[tool.name] = tool
-
-        self.warn_on_duplicate_tools = warn_on_duplicate_tools
-
-    def get_tool(self, name: str) -> Tool | None:
-        """Get tool by name."""
-        return self._tools.get(name)
-
-    def list_tools(self) -> list[Tool]:
-        """List all registered tools."""
-        return list(self._tools.values())
-
-    def add_tool(
-        self,
-        fn: Callable[..., Any],
-        name: str | None = None,
-        description: str | None = None,
-        annotations: ToolAnnotations | None = None,
-    ) -> Tool:
-        """Add a tool to the server."""
-        tool = Tool.from_function(
-            fn, name=name, description=description, annotations=annotations
-        )
-        existing = self._tools.get(tool.name)
-        if existing:
-            if self.warn_on_duplicate_tools:
-                logger.warning(f"Tool already exists: {tool.name}")
-            return existing
-        self._tools[tool.name] = tool
-        return tool
-
-    async def call_tool(
-        self,
-        name: str,
-        arguments: dict[str, Any],
-        context: Context[ServerSessionT, LifespanContextT] | None = None,
-    ) -> Any:
-        """Call a tool by name with arguments."""
-        tool = self.get_tool(name)
-        if not tool:
-            raise ToolError(f"Unknown tool: {name}")
-
-        return await tool.run(arguments, context=context)
+from __future__ import annotations as _annotations
+
+from collections.abc import Callable
+from typing import TYPE_CHECKING, Any
+
+from mcp.server.fastmcp.exceptions import ToolError
+from mcp.server.fastmcp.tools.base import Tool
+from mcp.server.fastmcp.utilities.logging import get_logger
+from mcp.shared.context import LifespanContextT
+from mcp.types import ToolAnnotations
+
+if TYPE_CHECKING:
+    from mcp.server.fastmcp.server import Context
+    from mcp.server.session import ServerSessionT
+
+logger = get_logger(__name__)
+
+
+class ToolManager:
+    """Manages FastMCP tools."""
+
+    def __init__(
+        self,
+        warn_on_duplicate_tools: bool = True,
+        *,
+        tools: list[Tool] | None = None,
+    ):
+        self._tools: dict[str, Tool] = {}
+        if tools is not None:
+            for tool in tools:
+                if warn_on_duplicate_tools and tool.name in self._tools:
+                    logger.warning(f"Tool already exists: {tool.name}")
+                self._tools[tool.name] = tool
+
+        self.warn_on_duplicate_tools = warn_on_duplicate_tools
+
+    def get_tool(self, name: str) -> Tool | None:
+        """Get tool by name."""
+        return self._tools.get(name)
+
+    def list_tools(self) -> list[Tool]:
+        """List all registered tools."""
+        return list(self._tools.values())
+
+    def add_tool(
+        self,
+        fn: Callable[..., Any],
+        name: str | None = None,
+        description: str | None = None,
+        annotations: ToolAnnotations | None = None,
+    ) -> Tool:
+        """Add a tool to the server."""
+        tool = Tool.from_function(
+            fn, name=name, description=description, annotations=annotations
+        )
+        existing = self._tools.get(tool.name)
+        if existing:
+            if self.warn_on_duplicate_tools:
+                logger.warning(f"Tool already exists: {tool.name}")
+            return existing
+        self._tools[tool.name] = tool
+        return tool
+
+    async def call_tool(
+        self,
+        name: str,
+        arguments: dict[str, Any],
+        context: Context[ServerSessionT, LifespanContextT] | None = None,
+    ) -> Any:
+        """Call a tool by name with arguments."""
+        tool = self.get_tool(name)
+        if not tool:
+            raise ToolError(f"Unknown tool: {name}")
+
+        return await tool.run(arguments, context=context)
diff --git a/src/mcp/server/fastmcp/utilities/__init__.py b/src/mcp/server/fastmcp/utilities/__init__.py
index be448f97a..c7d785c61 100644
--- a/src/mcp/server/fastmcp/utilities/__init__.py
+++ b/src/mcp/server/fastmcp/utilities/__init__.py
@@ -1 +1 @@
-"""FastMCP utility modules."""
+"""FastMCP utility modules."""
diff --git a/src/mcp/server/fastmcp/utilities/func_metadata.py b/src/mcp/server/fastmcp/utilities/func_metadata.py
index 374391325..b095318dd 100644
--- a/src/mcp/server/fastmcp/utilities/func_metadata.py
+++ b/src/mcp/server/fastmcp/utilities/func_metadata.py
@@ -1,214 +1,214 @@ -import inspect -import json -from collections.abc import Awaitable, Callable, Sequence -from typing import ( - Annotated, - Any, - ForwardRef, -) - -from pydantic import BaseModel, ConfigDict, Field, WithJsonSchema, create_model -from pydantic._internal._typing_extra import eval_type_backport -from pydantic.fields import FieldInfo -from pydantic_core import PydanticUndefined - -from mcp.server.fastmcp.exceptions import InvalidSignature -from mcp.server.fastmcp.utilities.logging import get_logger - -logger = get_logger(__name__) - - -class ArgModelBase(BaseModel): - """A model representing the arguments to a function.""" - - def model_dump_one_level(self) -> dict[str, Any]: - """Return a dict of the model's fields, one level deep. - - That is, sub-models etc are not dumped - they are kept as pydantic models. - """ - kwargs: dict[str, Any] = {} - for field_name in self.__class__.model_fields.keys(): - kwargs[field_name] = getattr(self, field_name) - return kwargs - - model_config = ConfigDict( - arbitrary_types_allowed=True, - ) - - -class FuncMetadata(BaseModel): - arg_model: Annotated[type[ArgModelBase], WithJsonSchema(None)] - # We can add things in the future like - # - Maybe some args are excluded from attempting to parse from JSON - # - Maybe some args are special (like context) for dependency injection - - async def call_fn_with_arg_validation( - self, - fn: Callable[..., Any] | Awaitable[Any], - fn_is_async: bool, - arguments_to_validate: dict[str, Any], - arguments_to_pass_directly: dict[str, Any] | None, - ) -> Any: - """Call the given function with arguments validated and injected. - - Arguments are first attempted to be parsed from JSON, then validated against - the argument model, before being passed to the function. 
- """ - arguments_pre_parsed = self.pre_parse_json(arguments_to_validate) - arguments_parsed_model = self.arg_model.model_validate(arguments_pre_parsed) - arguments_parsed_dict = arguments_parsed_model.model_dump_one_level() - - arguments_parsed_dict |= arguments_to_pass_directly or {} - - if fn_is_async: - if isinstance(fn, Awaitable): - return await fn - return await fn(**arguments_parsed_dict) - if isinstance(fn, Callable): - return fn(**arguments_parsed_dict) - raise TypeError("fn must be either Callable or Awaitable") - - def pre_parse_json(self, data: dict[str, Any]) -> dict[str, Any]: - """Pre-parse data from JSON. - - Return a dict with same keys as input but with values parsed from JSON - if appropriate. - - This is to handle cases like `["a", "b", "c"]` being passed in as JSON inside - a string rather than an actual list. Claude desktop is prone to this - in fact - it seems incapable of NOT doing this. For sub-models, it tends to pass - dicts (JSON objects) as JSON strings, which can be pre-parsed here. - """ - new_data = data.copy() # Shallow copy - for field_name in self.arg_model.model_fields.keys(): - if field_name not in data.keys(): - continue - if isinstance(data[field_name], str): - try: - pre_parsed = json.loads(data[field_name]) - except json.JSONDecodeError: - continue # Not JSON - skip - if isinstance(pre_parsed, str | int | float): - # This is likely that the raw value is e.g. `"hello"` which we - # Should really be parsed as '"hello"' in Python - but if we parse - # it as JSON it'll turn into just 'hello'. So we skip it. - continue - new_data[field_name] = pre_parsed - assert new_data.keys() == data.keys() - return new_data - - model_config = ConfigDict( - arbitrary_types_allowed=True, - ) - - -def func_metadata( - func: Callable[..., Any], skip_names: Sequence[str] = () -) -> FuncMetadata: - """Given a function, return metadata including a pydantic model representing its - signature. 
- - The use case for this is - ``` - meta = func_to_pyd(func) - validated_args = meta.arg_model.model_validate(some_raw_data_dict) - return func(**validated_args.model_dump_one_level()) - ``` - - **critically** it also provides pre-parse helper to attempt to parse things from - JSON. - - Args: - func: The function to convert to a pydantic model - skip_names: A list of parameter names to skip. These will not be included in - the model. - Returns: - A pydantic model representing the function's signature. - """ - sig = _get_typed_signature(func) - params = sig.parameters - dynamic_pydantic_model_params: dict[str, Any] = {} - globalns = getattr(func, "__globals__", {}) - for param in params.values(): - if param.name.startswith("_"): - raise InvalidSignature( - f"Parameter {param.name} of {func.__name__} cannot start with '_'" - ) - if param.name in skip_names: - continue - annotation = param.annotation - - # `x: None` / `x: None = None` - if annotation is None: - annotation = Annotated[ - None, - Field( - default=param.default - if param.default is not inspect.Parameter.empty - else PydanticUndefined - ), - ] - - # Untyped field - if annotation is inspect.Parameter.empty: - annotation = Annotated[ - Any, - Field(), - # 🤷 - WithJsonSchema({"title": param.name, "type": "string"}), - ] - - field_info = FieldInfo.from_annotated_attribute( - _get_typed_annotation(annotation, globalns), - param.default - if param.default is not inspect.Parameter.empty - else PydanticUndefined, - ) - dynamic_pydantic_model_params[param.name] = (field_info.annotation, field_info) - continue - - arguments_model = create_model( - f"{func.__name__}Arguments", - **dynamic_pydantic_model_params, - __base__=ArgModelBase, - ) - resp = FuncMetadata(arg_model=arguments_model) - return resp - - -def _get_typed_annotation(annotation: Any, globalns: dict[str, Any]) -> Any: - def try_eval_type( - value: Any, globalns: dict[str, Any], localns: dict[str, Any] - ) -> tuple[Any, bool]: - try: - return 
eval_type_backport(value, globalns, localns), True - except NameError: - return value, False - - if isinstance(annotation, str): - annotation = ForwardRef(annotation) - annotation, status = try_eval_type(annotation, globalns, globalns) - - # This check and raise could perhaps be skipped, and we (FastMCP) just call - # model_rebuild right before using it 🤷 - if status is False: - raise InvalidSignature(f"Unable to evaluate type annotation {annotation}") - - return annotation - - -def _get_typed_signature(call: Callable[..., Any]) -> inspect.Signature: - """Get function signature while evaluating forward references""" - signature = inspect.signature(call) - globalns = getattr(call, "__globals__", {}) - typed_params = [ - inspect.Parameter( - name=param.name, - kind=param.kind, - default=param.default, - annotation=_get_typed_annotation(param.annotation, globalns), - ) - for param in signature.parameters.values() - ] - typed_signature = inspect.Signature(typed_params) - return typed_signature +import inspect +import json +from collections.abc import Awaitable, Callable, Sequence +from typing import ( + Annotated, + Any, + ForwardRef, +) + +from pydantic import BaseModel, ConfigDict, Field, WithJsonSchema, create_model +from pydantic._internal._typing_extra import eval_type_backport +from pydantic.fields import FieldInfo +from pydantic_core import PydanticUndefined + +from mcp.server.fastmcp.exceptions import InvalidSignature +from mcp.server.fastmcp.utilities.logging import get_logger + +logger = get_logger(__name__) + + +class ArgModelBase(BaseModel): + """A model representing the arguments to a function.""" + + def model_dump_one_level(self) -> dict[str, Any]: + """Return a dict of the model's fields, one level deep. + + That is, sub-models etc are not dumped - they are kept as pydantic models. 
+ """ + kwargs: dict[str, Any] = {} + for field_name in self.__class__.model_fields.keys(): + kwargs[field_name] = getattr(self, field_name) + return kwargs + + model_config = ConfigDict( + arbitrary_types_allowed=True, + ) + + +class FuncMetadata(BaseModel): + arg_model: Annotated[type[ArgModelBase], WithJsonSchema(None)] + # We can add things in the future like + # - Maybe some args are excluded from attempting to parse from JSON + # - Maybe some args are special (like context) for dependency injection + + async def call_fn_with_arg_validation( + self, + fn: Callable[..., Any] | Awaitable[Any], + fn_is_async: bool, + arguments_to_validate: dict[str, Any], + arguments_to_pass_directly: dict[str, Any] | None, + ) -> Any: + """Call the given function with arguments validated and injected. + + Arguments are first attempted to be parsed from JSON, then validated against + the argument model, before being passed to the function. + """ + arguments_pre_parsed = self.pre_parse_json(arguments_to_validate) + arguments_parsed_model = self.arg_model.model_validate(arguments_pre_parsed) + arguments_parsed_dict = arguments_parsed_model.model_dump_one_level() + + arguments_parsed_dict |= arguments_to_pass_directly or {} + + if fn_is_async: + if isinstance(fn, Awaitable): + return await fn + return await fn(**arguments_parsed_dict) + if isinstance(fn, Callable): + return fn(**arguments_parsed_dict) + raise TypeError("fn must be either Callable or Awaitable") + + def pre_parse_json(self, data: dict[str, Any]) -> dict[str, Any]: + """Pre-parse data from JSON. + + Return a dict with same keys as input but with values parsed from JSON + if appropriate. + + This is to handle cases like `["a", "b", "c"]` being passed in as JSON inside + a string rather than an actual list. Claude desktop is prone to this - in fact + it seems incapable of NOT doing this. For sub-models, it tends to pass + dicts (JSON objects) as JSON strings, which can be pre-parsed here. 
+ """ + new_data = data.copy() # Shallow copy + for field_name in self.arg_model.model_fields.keys(): + if field_name not in data.keys(): + continue + if isinstance(data[field_name], str): + try: + pre_parsed = json.loads(data[field_name]) + except json.JSONDecodeError: + continue # Not JSON - skip + if isinstance(pre_parsed, str | int | float): + # This is likely that the raw value is e.g. `"hello"` which we + # Should really be parsed as '"hello"' in Python - but if we parse + # it as JSON it'll turn into just 'hello'. So we skip it. + continue + new_data[field_name] = pre_parsed + assert new_data.keys() == data.keys() + return new_data + + model_config = ConfigDict( + arbitrary_types_allowed=True, + ) + + +def func_metadata( + func: Callable[..., Any], skip_names: Sequence[str] = () +) -> FuncMetadata: + """Given a function, return metadata including a pydantic model representing its + signature. + + The use case for this is + ``` + meta = func_to_pyd(func) + validated_args = meta.arg_model.model_validate(some_raw_data_dict) + return func(**validated_args.model_dump_one_level()) + ``` + + **critically** it also provides pre-parse helper to attempt to parse things from + JSON. + + Args: + func: The function to convert to a pydantic model + skip_names: A list of parameter names to skip. These will not be included in + the model. + Returns: + A pydantic model representing the function's signature. 
+ """ + sig = _get_typed_signature(func) + params = sig.parameters + dynamic_pydantic_model_params: dict[str, Any] = {} + globalns = getattr(func, "__globals__", {}) + for param in params.values(): + if param.name.startswith("_"): + raise InvalidSignature( + f"Parameter {param.name} of {func.__name__} cannot start with '_'" + ) + if param.name in skip_names: + continue + annotation = param.annotation + + # `x: None` / `x: None = None` + if annotation is None: + annotation = Annotated[ + None, + Field( + default=param.default + if param.default is not inspect.Parameter.empty + else PydanticUndefined + ), + ] + + # Untyped field + if annotation is inspect.Parameter.empty: + annotation = Annotated[ + Any, + Field(), + # 🤷 + WithJsonSchema({"title": param.name, "type": "string"}), + ] + + field_info = FieldInfo.from_annotated_attribute( + _get_typed_annotation(annotation, globalns), + param.default + if param.default is not inspect.Parameter.empty + else PydanticUndefined, + ) + dynamic_pydantic_model_params[param.name] = (field_info.annotation, field_info) + continue + + arguments_model = create_model( + f"{func.__name__}Arguments", + **dynamic_pydantic_model_params, + __base__=ArgModelBase, + ) + resp = FuncMetadata(arg_model=arguments_model) + return resp + + +def _get_typed_annotation(annotation: Any, globalns: dict[str, Any]) -> Any: + def try_eval_type( + value: Any, globalns: dict[str, Any], localns: dict[str, Any] + ) -> tuple[Any, bool]: + try: + return eval_type_backport(value, globalns, localns), True + except NameError: + return value, False + + if isinstance(annotation, str): + annotation = ForwardRef(annotation) + annotation, status = try_eval_type(annotation, globalns, globalns) + + # This check and raise could perhaps be skipped, and we (FastMCP) just call + # model_rebuild right before using it 🤷 + if status is False: + raise InvalidSignature(f"Unable to evaluate type annotation {annotation}") + + return annotation + + +def _get_typed_signature(call: 
Callable[..., Any]) -> inspect.Signature: + """Get function signature while evaluating forward references""" + signature = inspect.signature(call) + globalns = getattr(call, "__globals__", {}) + typed_params = [ + inspect.Parameter( + name=param.name, + kind=param.kind, + default=param.default, + annotation=_get_typed_annotation(param.annotation, globalns), + ) + for param in signature.parameters.values() + ] + typed_signature = inspect.Signature(typed_params) + return typed_signature diff --git a/src/mcp/server/fastmcp/utilities/logging.py b/src/mcp/server/fastmcp/utilities/logging.py index 091d57e69..e40bbd195 100644 --- a/src/mcp/server/fastmcp/utilities/logging.py +++ b/src/mcp/server/fastmcp/utilities/logging.py @@ -1,43 +1,43 @@ -"""Logging utilities for FastMCP.""" - -import logging -from typing import Literal - - -def get_logger(name: str) -> logging.Logger: - """Get a logger nested under MCPnamespace. - - Args: - name: the name of the logger, which will be prefixed with 'FastMCP.' - - Returns: - a configured logger instance - """ - return logging.getLogger(name) - - -def configure_logging( - level: Literal["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] = "INFO", -) -> None: - """Configure logging for MCP. - - Args: - level: the log level to use - """ - handlers: list[logging.Handler] = [] - try: - from rich.console import Console - from rich.logging import RichHandler - - handlers.append(RichHandler(console=Console(stderr=True), rich_tracebacks=True)) - except ImportError: - pass - - if not handlers: - handlers.append(logging.StreamHandler()) - - logging.basicConfig( - level=level, - format="%(message)s", - handlers=handlers, - ) +"""Logging utilities for FastMCP.""" + +import logging +from typing import Literal + + +def get_logger(name: str) -> logging.Logger: + """Get a logger nested under MCPnamespace. + + Args: + name: the name of the logger, which will be prefixed with 'FastMCP.' 
+ + Returns: + a configured logger instance + """ + return logging.getLogger(name) + + +def configure_logging( + level: Literal["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] = "INFO", +) -> None: + """Configure logging for MCP. + + Args: + level: the log level to use + """ + handlers: list[logging.Handler] = [] + try: + from rich.console import Console + from rich.logging import RichHandler + + handlers.append(RichHandler(console=Console(stderr=True), rich_tracebacks=True)) + except ImportError: + pass + + if not handlers: + handlers.append(logging.StreamHandler()) + + logging.basicConfig( + level=level, + format="%(message)s", + handlers=handlers, + ) diff --git a/src/mcp/server/fastmcp/utilities/types.py b/src/mcp/server/fastmcp/utilities/types.py index ccaa3d69a..14a07fd6b 100644 --- a/src/mcp/server/fastmcp/utilities/types.py +++ b/src/mcp/server/fastmcp/utilities/types.py @@ -1,54 +1,54 @@ -"""Common types used across FastMCP.""" - -import base64 -from pathlib import Path - -from mcp.types import ImageContent - - -class Image: - """Helper class for returning images from tools.""" - - def __init__( - self, - path: str | Path | None = None, - data: bytes | None = None, - format: str | None = None, - ): - if path is None and data is None: - raise ValueError("Either path or data must be provided") - if path is not None and data is not None: - raise ValueError("Only one of path or data can be provided") - - self.path = Path(path) if path else None - self.data = data - self._format = format - self._mime_type = self._get_mime_type() - - def _get_mime_type(self) -> str: - """Get MIME type from format or guess from file extension.""" - if self._format: - return f"image/{self._format.lower()}" - - if self.path: - suffix = self.path.suffix.lower() - return { - ".png": "image/png", - ".jpg": "image/jpeg", - ".jpeg": "image/jpeg", - ".gif": "image/gif", - ".webp": "image/webp", - }.get(suffix, "application/octet-stream") - return "image/png" # default for raw binary 
data - - def to_image_content(self) -> ImageContent: - """Convert to MCP ImageContent.""" - if self.path: - with open(self.path, "rb") as f: - data = base64.b64encode(f.read()).decode() - elif self.data is not None: - data = base64.b64encode(self.data).decode() - else: - raise ValueError("No image data available") - - return ImageContent(type="image", data=data, mimeType=self._mime_type) +"""Common types used across FastMCP.""" + +import base64 +from pathlib import Path + +from mcp.types import ImageContent + + +class Image: + """Helper class for returning images from tools.""" + + def __init__( + self, + path: str | Path | None = None, + data: bytes | None = None, + format: str | None = None, + ): + if path is None and data is None: + raise ValueError("Either path or data must be provided") + if path is not None and data is not None: + raise ValueError("Only one of path or data can be provided") + + self.path = Path(path) if path else None + self.data = data + self._format = format + self._mime_type = self._get_mime_type() + + def _get_mime_type(self) -> str: + """Get MIME type from format or guess from file extension.""" + if self._format: + return f"image/{self._format.lower()}" + + if self.path: + suffix = self.path.suffix.lower() + return { + ".png": "image/png", + ".jpg": "image/jpeg", + ".jpeg": "image/jpeg", + ".gif": "image/gif", + ".webp": "image/webp", + }.get(suffix, "application/octet-stream") + return "image/png" # default for raw binary data + + def to_image_content(self) -> ImageContent: + """Convert to MCP ImageContent.""" + if self.path: + with open(self.path, "rb") as f: + data = base64.b64encode(f.read()).decode() + elif self.data is not None: + data = base64.b64encode(self.data).decode() + else: + raise ValueError("No image data available") + + return ImageContent(type="image", data=data, mimeType=self._mime_type) diff --git a/src/mcp/server/lowlevel/__init__.py b/src/mcp/server/lowlevel/__init__.py index 66df38991..e540c21ea 100644 --- 
a/src/mcp/server/lowlevel/__init__.py +++ b/src/mcp/server/lowlevel/__init__.py @@ -1,3 +1,3 @@ -from .server import NotificationOptions, Server - -__all__ = ["Server", "NotificationOptions"] +from .server import NotificationOptions, Server + +__all__ = ["Server", "NotificationOptions"] diff --git a/src/mcp/server/lowlevel/helper_types.py b/src/mcp/server/lowlevel/helper_types.py index 3d09b2505..0a6b3fe0b 100644 --- a/src/mcp/server/lowlevel/helper_types.py +++ b/src/mcp/server/lowlevel/helper_types.py @@ -1,9 +1,9 @@ -from dataclasses import dataclass - - -@dataclass -class ReadResourceContents: - """Contents returned from a read_resource call.""" - - content: str | bytes - mime_type: str | None = None +from dataclasses import dataclass + + +@dataclass +class ReadResourceContents: + """Contents returned from a read_resource call.""" + + content: str | bytes + mime_type: str | None = None diff --git a/src/mcp/server/models.py b/src/mcp/server/models.py index 3b5abba78..990d0791e 100644 --- a/src/mcp/server/models.py +++ b/src/mcp/server/models.py @@ -1,17 +1,17 @@ -""" -This module provides simpler types to use with the server for managing prompts -and tools. -""" - -from pydantic import BaseModel - -from mcp.types import ( - ServerCapabilities, -) - - -class InitializationOptions(BaseModel): - server_name: str - server_version: str - capabilities: ServerCapabilities - instructions: str | None = None +""" +This module provides simpler types to use with the server for managing prompts +and tools. 
+""" + +from pydantic import BaseModel + +from mcp.types import ( + ServerCapabilities, +) + + +class InitializationOptions(BaseModel): + server_name: str + server_version: str + capabilities: ServerCapabilities + instructions: str | None = None diff --git a/src/mcp/server/stdio.py b/src/mcp/server/stdio.py index f0bbe5a31..bf6dc08f3 100644 --- a/src/mcp/server/stdio.py +++ b/src/mcp/server/stdio.py @@ -1,90 +1,90 @@ -""" -Stdio Server Transport Module - -This module provides functionality for creating an stdio-based transport layer -that can be used to communicate with an MCP client through standard input/output -streams. - -Example usage: -``` - async def run_server(): - async with stdio_server() as (read_stream, write_stream): - # read_stream contains incoming JSONRPCMessages from stdin - # write_stream allows sending JSONRPCMessages to stdout - server = await create_my_server() - await server.run(read_stream, write_stream, init_options) - - anyio.run(run_server) -``` -""" - -import sys -from contextlib import asynccontextmanager -from io import TextIOWrapper - -import anyio -import anyio.lowlevel -from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream - -import mcp.types as types -from mcp.shared.message import SessionMessage - - -@asynccontextmanager -async def stdio_server( - stdin: anyio.AsyncFile[str] | None = None, - stdout: anyio.AsyncFile[str] | None = None, -): - """ - Server transport for stdio: this communicates with an MCP client by reading - from the current process' stdin and writing to stdout. - """ - # Purposely not using context managers for these, as we don't want to close - # standard process handles. Encoding of stdin/stdout as text streams on - # python is platform-dependent (Windows is particularly problematic), so we - # re-wrap the underlying binary stream to ensure UTF-8. 
- if not stdin: - stdin = anyio.wrap_file(TextIOWrapper(sys.stdin.buffer, encoding="utf-8")) - if not stdout: - stdout = anyio.wrap_file(TextIOWrapper(sys.stdout.buffer, encoding="utf-8")) - - read_stream: MemoryObjectReceiveStream[SessionMessage | Exception] - read_stream_writer: MemoryObjectSendStream[SessionMessage | Exception] - - write_stream: MemoryObjectSendStream[SessionMessage] - write_stream_reader: MemoryObjectReceiveStream[SessionMessage] - - read_stream_writer, read_stream = anyio.create_memory_object_stream(0) - write_stream, write_stream_reader = anyio.create_memory_object_stream(0) - - async def stdin_reader(): - try: - async with read_stream_writer: - async for line in stdin: - try: - message = types.JSONRPCMessage.model_validate_json(line) - except Exception as exc: - await read_stream_writer.send(exc) - continue - - session_message = SessionMessage(message) - await read_stream_writer.send(session_message) - except anyio.ClosedResourceError: - await anyio.lowlevel.checkpoint() - - async def stdout_writer(): - try: - async with write_stream_reader: - async for session_message in write_stream_reader: - json = session_message.message.model_dump_json( - by_alias=True, exclude_none=True - ) - await stdout.write(json + "\n") - await stdout.flush() - except anyio.ClosedResourceError: - await anyio.lowlevel.checkpoint() - - async with anyio.create_task_group() as tg: - tg.start_soon(stdin_reader) - tg.start_soon(stdout_writer) - yield read_stream, write_stream +""" +Stdio Server Transport Module + +This module provides functionality for creating an stdio-based transport layer +that can be used to communicate with an MCP client through standard input/output +streams. 
+ +Example usage: +``` + async def run_server(): + async with stdio_server() as (read_stream, write_stream): + # read_stream contains incoming JSONRPCMessages from stdin + # write_stream allows sending JSONRPCMessages to stdout + server = await create_my_server() + await server.run(read_stream, write_stream, init_options) + + anyio.run(run_server) +``` +""" + +import sys +from contextlib import asynccontextmanager +from io import TextIOWrapper + +import anyio +import anyio.lowlevel +from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream + +import mcp.types as types +from mcp.shared.message import SessionMessage + + +@asynccontextmanager +async def stdio_server( + stdin: anyio.AsyncFile[str] | None = None, + stdout: anyio.AsyncFile[str] | None = None, +): + """ + Server transport for stdio: this communicates with an MCP client by reading + from the current process' stdin and writing to stdout. + """ + # Purposely not using context managers for these, as we don't want to close + # standard process handles. Encoding of stdin/stdout as text streams on + # python is platform-dependent (Windows is particularly problematic), so we + # re-wrap the underlying binary stream to ensure UTF-8. 
+ if not stdin: + stdin = anyio.wrap_file(TextIOWrapper(sys.stdin.buffer, encoding="utf-8")) + if not stdout: + stdout = anyio.wrap_file(TextIOWrapper(sys.stdout.buffer, encoding="utf-8")) + + read_stream: MemoryObjectReceiveStream[SessionMessage | Exception] + read_stream_writer: MemoryObjectSendStream[SessionMessage | Exception] + + write_stream: MemoryObjectSendStream[SessionMessage] + write_stream_reader: MemoryObjectReceiveStream[SessionMessage] + + read_stream_writer, read_stream = anyio.create_memory_object_stream(0) + write_stream, write_stream_reader = anyio.create_memory_object_stream(0) + + async def stdin_reader(): + try: + async with read_stream_writer: + async for line in stdin: + try: + message = types.JSONRPCMessage.model_validate_json(line) + except Exception as exc: + await read_stream_writer.send(exc) + continue + + session_message = SessionMessage(message) + await read_stream_writer.send(session_message) + except anyio.ClosedResourceError: + await anyio.lowlevel.checkpoint() + + async def stdout_writer(): + try: + async with write_stream_reader: + async for session_message in write_stream_reader: + json = session_message.message.model_dump_json( + by_alias=True, exclude_none=True + ) + await stdout.write(json + "\n") + await stdout.flush() + except anyio.ClosedResourceError: + await anyio.lowlevel.checkpoint() + + async with anyio.create_task_group() as tg: + tg.start_soon(stdin_reader) + tg.start_soon(stdout_writer) + yield read_stream, write_stream diff --git a/src/mcp/server/streaming_asgi_transport.py b/src/mcp/server/streaming_asgi_transport.py index 54a2fdb8c..5db21673e 100644 --- a/src/mcp/server/streaming_asgi_transport.py +++ b/src/mcp/server/streaming_asgi_transport.py @@ -1,213 +1,213 @@ -""" -A modified version of httpx.ASGITransport that supports streaming responses. 
- -This transport runs the ASGI app as a separate anyio task, allowing it to -handle streaming responses like SSE where the app doesn't terminate until -the connection is closed. - -This is only intended for writing tests for the SSE transport. -""" - -import typing -from typing import Any, cast - -import anyio -import anyio.abc -import anyio.streams.memory -from httpx._models import Request, Response -from httpx._transports.base import AsyncBaseTransport -from httpx._types import AsyncByteStream -from starlette.types import ASGIApp, Receive, Scope, Send - - -class StreamingASGITransport(AsyncBaseTransport): - """ - A custom AsyncTransport that handles sending requests directly to an ASGI app - and supports streaming responses like SSE. - - Unlike the standard ASGITransport, this transport runs the ASGI app in a - separate anyio task, allowing it to handle responses from apps that don't - terminate immediately (like SSE endpoints). - - Arguments: - - * `app` - The ASGI application. - * `raise_app_exceptions` - Boolean indicating if exceptions in the application - should be raised. Default to `True`. Can be set to `False` for use cases - such as testing the content of a client 500 response. - * `root_path` - The root path on which the ASGI application should be mounted. - * `client` - A two-tuple indicating the client IP and port of incoming requests. - * `response_timeout` - Timeout in seconds to wait for the initial response. - Default is 10 seconds. - - TODO: https://github.com/encode/httpx/pull/3059 is adding something similar to - upstream httpx. When that merges, we should delete this & switch back to the - upstream implementation. 
- """ - - def __init__( - self, - app: ASGIApp, - task_group: anyio.abc.TaskGroup, - raise_app_exceptions: bool = True, - root_path: str = "", - client: tuple[str, int] = ("127.0.0.1", 123), - ) -> None: - self.app = app - self.raise_app_exceptions = raise_app_exceptions - self.root_path = root_path - self.client = client - self.task_group = task_group - - async def handle_async_request( - self, - request: Request, - ) -> Response: - assert isinstance(request.stream, AsyncByteStream) - - # ASGI scope. - scope = { - "type": "http", - "asgi": {"version": "3.0"}, - "http_version": "1.1", - "method": request.method, - "headers": [(k.lower(), v) for (k, v) in request.headers.raw], - "scheme": request.url.scheme, - "path": request.url.path, - "raw_path": request.url.raw_path.split(b"?")[0], - "query_string": request.url.query, - "server": (request.url.host, request.url.port), - "client": self.client, - "root_path": self.root_path, - } - - # Request body - request_body_chunks = request.stream.__aiter__() - request_complete = False - - # Response state - status_code = 499 - response_headers = None - response_started = False - response_complete = anyio.Event() - initial_response_ready = anyio.Event() - - # Synchronization for streaming response - asgi_send_channel, asgi_receive_channel = anyio.create_memory_object_stream[ - dict[str, Any] - ](100) - content_send_channel, content_receive_channel = ( - anyio.create_memory_object_stream[bytes](100) - ) - - # ASGI callables. 
- async def receive() -> dict[str, Any]: - nonlocal request_complete - - if request_complete: - await response_complete.wait() - return {"type": "http.disconnect"} - - try: - body = await request_body_chunks.__anext__() - except StopAsyncIteration: - request_complete = True - return {"type": "http.request", "body": b"", "more_body": False} - return {"type": "http.request", "body": body, "more_body": True} - - async def send(message: dict[str, Any]) -> None: - nonlocal status_code, response_headers, response_started - - await asgi_send_channel.send(message) - - # Start the ASGI application in a separate task - async def run_app() -> None: - try: - # Cast the receive and send functions to the ASGI types - await self.app( - cast(Scope, scope), cast(Receive, receive), cast(Send, send) - ) - except Exception: - if self.raise_app_exceptions: - raise - - if not response_started: - await asgi_send_channel.send( - {"type": "http.response.start", "status": 500, "headers": []} - ) - - await asgi_send_channel.send( - {"type": "http.response.body", "body": b"", "more_body": False} - ) - finally: - await asgi_send_channel.aclose() - - # Process messages from the ASGI app - async def process_messages() -> None: - nonlocal status_code, response_headers, response_started - - try: - async with asgi_receive_channel: - async for message in asgi_receive_channel: - if message["type"] == "http.response.start": - assert not response_started - status_code = message["status"] - response_headers = message.get("headers", []) - response_started = True - - # As soon as we have headers, we can return a response - initial_response_ready.set() - - elif message["type"] == "http.response.body": - body = message.get("body", b"") - more_body = message.get("more_body", False) - - if body and request.method != "HEAD": - await content_send_channel.send(body) - - if not more_body: - response_complete.set() - await content_send_channel.aclose() - break - finally: - # Ensure events are set even if there's 
an error - initial_response_ready.set() - response_complete.set() - await content_send_channel.aclose() - - # Create tasks for running the app and processing messages - self.task_group.start_soon(run_app) - self.task_group.start_soon(process_messages) - - # Wait for the initial response or timeout - await initial_response_ready.wait() - - # Create a streaming response - return Response( - status_code, - headers=response_headers, - stream=StreamingASGIResponseStream(content_receive_channel), - ) - - -class StreamingASGIResponseStream(AsyncByteStream): - """ - A modified ASGIResponseStream that supports streaming responses. - - This class extends the standard ASGIResponseStream to handle cases where - the response body continues to be generated after the initial response - is returned. - """ - - def __init__( - self, - receive_channel: anyio.streams.memory.MemoryObjectReceiveStream[bytes], - ) -> None: - self.receive_channel = receive_channel - - async def __aiter__(self) -> typing.AsyncIterator[bytes]: - try: - async for chunk in self.receive_channel: - yield chunk - finally: - await self.receive_channel.aclose() +""" +A modified version of httpx.ASGITransport that supports streaming responses. + +This transport runs the ASGI app as a separate anyio task, allowing it to +handle streaming responses like SSE where the app doesn't terminate until +the connection is closed. + +This is only intended for writing tests for the SSE transport. +""" + +import typing +from typing import Any, cast + +import anyio +import anyio.abc +import anyio.streams.memory +from httpx._models import Request, Response +from httpx._transports.base import AsyncBaseTransport +from httpx._types import AsyncByteStream +from starlette.types import ASGIApp, Receive, Scope, Send + + +class StreamingASGITransport(AsyncBaseTransport): + """ + A custom AsyncTransport that handles sending requests directly to an ASGI app + and supports streaming responses like SSE. 
+ + Unlike the standard ASGITransport, this transport runs the ASGI app in a + separate anyio task, allowing it to handle responses from apps that don't + terminate immediately (like SSE endpoints). + + Arguments: + + * `app` - The ASGI application. + * `raise_app_exceptions` - Boolean indicating if exceptions in the application + should be raised. Default to `True`. Can be set to `False` for use cases + such as testing the content of a client 500 response. + * `root_path` - The root path on which the ASGI application should be mounted. + * `client` - A two-tuple indicating the client IP and port of incoming requests. + * `response_timeout` - Timeout in seconds to wait for the initial response. + Default is 10 seconds. + + TODO: https://github.com/encode/httpx/pull/3059 is adding something similar to + upstream httpx. When that merges, we should delete this & switch back to the + upstream implementation. + """ + + def __init__( + self, + app: ASGIApp, + task_group: anyio.abc.TaskGroup, + raise_app_exceptions: bool = True, + root_path: str = "", + client: tuple[str, int] = ("127.0.0.1", 123), + ) -> None: + self.app = app + self.raise_app_exceptions = raise_app_exceptions + self.root_path = root_path + self.client = client + self.task_group = task_group + + async def handle_async_request( + self, + request: Request, + ) -> Response: + assert isinstance(request.stream, AsyncByteStream) + + # ASGI scope. 
+ scope = { + "type": "http", + "asgi": {"version": "3.0"}, + "http_version": "1.1", + "method": request.method, + "headers": [(k.lower(), v) for (k, v) in request.headers.raw], + "scheme": request.url.scheme, + "path": request.url.path, + "raw_path": request.url.raw_path.split(b"?")[0], + "query_string": request.url.query, + "server": (request.url.host, request.url.port), + "client": self.client, + "root_path": self.root_path, + } + + # Request body + request_body_chunks = request.stream.__aiter__() + request_complete = False + + # Response state + status_code = 499 + response_headers = None + response_started = False + response_complete = anyio.Event() + initial_response_ready = anyio.Event() + + # Synchronization for streaming response + asgi_send_channel, asgi_receive_channel = anyio.create_memory_object_stream[ + dict[str, Any] + ](100) + content_send_channel, content_receive_channel = ( + anyio.create_memory_object_stream[bytes](100) + ) + + # ASGI callables. + async def receive() -> dict[str, Any]: + nonlocal request_complete + + if request_complete: + await response_complete.wait() + return {"type": "http.disconnect"} + + try: + body = await request_body_chunks.__anext__() + except StopAsyncIteration: + request_complete = True + return {"type": "http.request", "body": b"", "more_body": False} + return {"type": "http.request", "body": body, "more_body": True} + + async def send(message: dict[str, Any]) -> None: + nonlocal status_code, response_headers, response_started + + await asgi_send_channel.send(message) + + # Start the ASGI application in a separate task + async def run_app() -> None: + try: + # Cast the receive and send functions to the ASGI types + await self.app( + cast(Scope, scope), cast(Receive, receive), cast(Send, send) + ) + except Exception: + if self.raise_app_exceptions: + raise + + if not response_started: + await asgi_send_channel.send( + {"type": "http.response.start", "status": 500, "headers": []} + ) + + await asgi_send_channel.send( 
+ {"type": "http.response.body", "body": b"", "more_body": False} + ) + finally: + await asgi_send_channel.aclose() + + # Process messages from the ASGI app + async def process_messages() -> None: + nonlocal status_code, response_headers, response_started + + try: + async with asgi_receive_channel: + async for message in asgi_receive_channel: + if message["type"] == "http.response.start": + assert not response_started + status_code = message["status"] + response_headers = message.get("headers", []) + response_started = True + + # As soon as we have headers, we can return a response + initial_response_ready.set() + + elif message["type"] == "http.response.body": + body = message.get("body", b"") + more_body = message.get("more_body", False) + + if body and request.method != "HEAD": + await content_send_channel.send(body) + + if not more_body: + response_complete.set() + await content_send_channel.aclose() + break + finally: + # Ensure events are set even if there's an error + initial_response_ready.set() + response_complete.set() + await content_send_channel.aclose() + + # Create tasks for running the app and processing messages + self.task_group.start_soon(run_app) + self.task_group.start_soon(process_messages) + + # Wait for the initial response or timeout + await initial_response_ready.wait() + + # Create a streaming response + return Response( + status_code, + headers=response_headers, + stream=StreamingASGIResponseStream(content_receive_channel), + ) + + +class StreamingASGIResponseStream(AsyncByteStream): + """ + A modified ASGIResponseStream that supports streaming responses. + + This class extends the standard ASGIResponseStream to handle cases where + the response body continues to be generated after the initial response + is returned. 
+ """ + + def __init__( + self, + receive_channel: anyio.streams.memory.MemoryObjectReceiveStream[bytes], + ) -> None: + self.receive_channel = receive_channel + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + try: + async for chunk in self.receive_channel: + yield chunk + finally: + await self.receive_channel.aclose() diff --git a/src/mcp/server/websocket.py b/src/mcp/server/websocket.py index 9dc3f2a25..907e2280f 100644 --- a/src/mcp/server/websocket.py +++ b/src/mcp/server/websocket.py @@ -1,64 +1,64 @@ -import logging -from contextlib import asynccontextmanager - -import anyio -from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream -from pydantic_core import ValidationError -from starlette.types import Receive, Scope, Send -from starlette.websockets import WebSocket - -import mcp.types as types -from mcp.shared.message import SessionMessage - -logger = logging.getLogger(__name__) - - -@asynccontextmanager -async def websocket_server(scope: Scope, receive: Receive, send: Send): - """ - WebSocket server transport for MCP. This is an ASGI application, suitable to be - used with a framework like Starlette and a server like Hypercorn. 
- """ - - websocket = WebSocket(scope, receive, send) - await websocket.accept(subprotocol="mcp") - - read_stream: MemoryObjectReceiveStream[SessionMessage | Exception] - read_stream_writer: MemoryObjectSendStream[SessionMessage | Exception] - - write_stream: MemoryObjectSendStream[SessionMessage] - write_stream_reader: MemoryObjectReceiveStream[SessionMessage] - - read_stream_writer, read_stream = anyio.create_memory_object_stream(0) - write_stream, write_stream_reader = anyio.create_memory_object_stream(0) - - async def ws_reader(): - try: - async with read_stream_writer: - async for msg in websocket.iter_text(): - try: - client_message = types.JSONRPCMessage.model_validate_json(msg) - except ValidationError as exc: - await read_stream_writer.send(exc) - continue - - session_message = SessionMessage(client_message) - await read_stream_writer.send(session_message) - except anyio.ClosedResourceError: - await websocket.close() - - async def ws_writer(): - try: - async with write_stream_reader: - async for session_message in write_stream_reader: - obj = session_message.message.model_dump_json( - by_alias=True, exclude_none=True - ) - await websocket.send_text(obj) - except anyio.ClosedResourceError: - await websocket.close() - - async with anyio.create_task_group() as tg: - tg.start_soon(ws_reader) - tg.start_soon(ws_writer) - yield (read_stream, write_stream) +import logging +from contextlib import asynccontextmanager + +import anyio +from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream +from pydantic_core import ValidationError +from starlette.types import Receive, Scope, Send +from starlette.websockets import WebSocket + +import mcp.types as types +from mcp.shared.message import SessionMessage + +logger = logging.getLogger(__name__) + + +@asynccontextmanager +async def websocket_server(scope: Scope, receive: Receive, send: Send): + """ + WebSocket server transport for MCP. 
This is an ASGI application, suitable to be + used with a framework like Starlette and a server like Hypercorn. + """ + + websocket = WebSocket(scope, receive, send) + await websocket.accept(subprotocol="mcp") + + read_stream: MemoryObjectReceiveStream[SessionMessage | Exception] + read_stream_writer: MemoryObjectSendStream[SessionMessage | Exception] + + write_stream: MemoryObjectSendStream[SessionMessage] + write_stream_reader: MemoryObjectReceiveStream[SessionMessage] + + read_stream_writer, read_stream = anyio.create_memory_object_stream(0) + write_stream, write_stream_reader = anyio.create_memory_object_stream(0) + + async def ws_reader(): + try: + async with read_stream_writer: + async for msg in websocket.iter_text(): + try: + client_message = types.JSONRPCMessage.model_validate_json(msg) + except ValidationError as exc: + await read_stream_writer.send(exc) + continue + + session_message = SessionMessage(client_message) + await read_stream_writer.send(session_message) + except anyio.ClosedResourceError: + await websocket.close() + + async def ws_writer(): + try: + async with write_stream_reader: + async for session_message in write_stream_reader: + obj = session_message.message.model_dump_json( + by_alias=True, exclude_none=True + ) + await websocket.send_text(obj) + except anyio.ClosedResourceError: + await websocket.close() + + async with anyio.create_task_group() as tg: + tg.start_soon(ws_reader) + tg.start_soon(ws_writer) + yield (read_stream, write_stream) diff --git a/src/mcp/shared/auth.py b/src/mcp/shared/auth.py index 22f8a971d..fed22c4dc 100644 --- a/src/mcp/shared/auth.py +++ b/src/mcp/shared/auth.py @@ -1,137 +1,137 @@ -from typing import Any, Literal - -from pydantic import AnyHttpUrl, BaseModel, Field - - -class OAuthToken(BaseModel): - """ - See https://datatracker.ietf.org/doc/html/rfc6749#section-5.1 - """ - - access_token: str - token_type: Literal["bearer"] = "bearer" - expires_in: int | None = None - scope: str | None = None - 
refresh_token: str | None = None - - -class InvalidScopeError(Exception): - def __init__(self, message: str): - self.message = message - - -class InvalidRedirectUriError(Exception): - def __init__(self, message: str): - self.message = message - - -class OAuthClientMetadata(BaseModel): - """ - RFC 7591 OAuth 2.0 Dynamic Client Registration metadata. - See https://datatracker.ietf.org/doc/html/rfc7591#section-2 - for the full specification. - """ - - redirect_uris: list[AnyHttpUrl] = Field(..., min_length=1) - # token_endpoint_auth_method: this implementation only supports none & - # client_secret_post; - # ie: we do not support client_secret_basic - token_endpoint_auth_method: Literal["none", "client_secret_post"] = ( - "client_secret_post" - ) - # grant_types: this implementation only supports authorization_code & refresh_token - grant_types: list[Literal["authorization_code", "refresh_token"]] = [ - "authorization_code", - "refresh_token", - ] - # this implementation only supports code; ie: it does not support implicit grants - response_types: list[Literal["code"]] = ["code"] - scope: str | None = None - - # these fields are currently unused, but we support & store them for potential - # future use - client_name: str | None = None - client_uri: AnyHttpUrl | None = None - logo_uri: AnyHttpUrl | None = None - contacts: list[str] | None = None - tos_uri: AnyHttpUrl | None = None - policy_uri: AnyHttpUrl | None = None - jwks_uri: AnyHttpUrl | None = None - jwks: Any | None = None - software_id: str | None = None - software_version: str | None = None - - def validate_scope(self, requested_scope: str | None) -> list[str] | None: - if requested_scope is None: - return None - requested_scopes = requested_scope.split(" ") - allowed_scopes = [] if self.scope is None else self.scope.split(" ") - for scope in requested_scopes: - if scope not in allowed_scopes: - raise InvalidScopeError(f"Client was not registered with scope {scope}") - return requested_scopes - - def 
validate_redirect_uri(self, redirect_uri: AnyHttpUrl | None) -> AnyHttpUrl: - if redirect_uri is not None: - # Validate redirect_uri against client's registered redirect URIs - if redirect_uri not in self.redirect_uris: - raise InvalidRedirectUriError( - f"Redirect URI '{redirect_uri}' not registered for client" - ) - return redirect_uri - elif len(self.redirect_uris) == 1: - return self.redirect_uris[0] - else: - raise InvalidRedirectUriError( - "redirect_uri must be specified when client " - "has multiple registered URIs" - ) - - -class OAuthClientInformationFull(OAuthClientMetadata): - """ - RFC 7591 OAuth 2.0 Dynamic Client Registration full response - (client information plus metadata). - """ - - client_id: str - client_secret: str | None = None - client_id_issued_at: int | None = None - client_secret_expires_at: int | None = None - - -class OAuthMetadata(BaseModel): - """ - RFC 8414 OAuth 2.0 Authorization Server Metadata. - See https://datatracker.ietf.org/doc/html/rfc8414#section-2 - """ - - issuer: AnyHttpUrl - authorization_endpoint: AnyHttpUrl - token_endpoint: AnyHttpUrl - registration_endpoint: AnyHttpUrl | None = None - scopes_supported: list[str] | None = None - response_types_supported: list[Literal["code"]] = ["code"] - response_modes_supported: list[Literal["query", "fragment"]] | None = None - grant_types_supported: ( - list[Literal["authorization_code", "refresh_token"]] | None - ) = None - token_endpoint_auth_methods_supported: ( - list[Literal["none", "client_secret_post"]] | None - ) = None - token_endpoint_auth_signing_alg_values_supported: None = None - service_documentation: AnyHttpUrl | None = None - ui_locales_supported: list[str] | None = None - op_policy_uri: AnyHttpUrl | None = None - op_tos_uri: AnyHttpUrl | None = None - revocation_endpoint: AnyHttpUrl | None = None - revocation_endpoint_auth_methods_supported: ( - list[Literal["client_secret_post"]] | None - ) = None - revocation_endpoint_auth_signing_alg_values_supported: None = 
None - introspection_endpoint: AnyHttpUrl | None = None - introspection_endpoint_auth_methods_supported: ( - list[Literal["client_secret_post"]] | None - ) = None - introspection_endpoint_auth_signing_alg_values_supported: None = None - code_challenge_methods_supported: list[Literal["S256"]] | None = None +from typing import Any, Literal + +from pydantic import AnyHttpUrl, BaseModel, Field + + +class OAuthToken(BaseModel): + """ + See https://datatracker.ietf.org/doc/html/rfc6749#section-5.1 + """ + + access_token: str + token_type: Literal["bearer"] = "bearer" + expires_in: int | None = None + scope: str | None = None + refresh_token: str | None = None + + +class InvalidScopeError(Exception): + def __init__(self, message: str): + self.message = message + + +class InvalidRedirectUriError(Exception): + def __init__(self, message: str): + self.message = message + + +class OAuthClientMetadata(BaseModel): + """ + RFC 7591 OAuth 2.0 Dynamic Client Registration metadata. + See https://datatracker.ietf.org/doc/html/rfc7591#section-2 + for the full specification. 
+ """ + + redirect_uris: list[AnyHttpUrl] = Field(..., min_length=1) + # token_endpoint_auth_method: this implementation only supports none & + # client_secret_post; + # ie: we do not support client_secret_basic + token_endpoint_auth_method: Literal["none", "client_secret_post"] = ( + "client_secret_post" + ) + # grant_types: this implementation only supports authorization_code & refresh_token + grant_types: list[Literal["authorization_code", "refresh_token"]] = [ + "authorization_code", + "refresh_token", + ] + # this implementation only supports code; ie: it does not support implicit grants + response_types: list[Literal["code"]] = ["code"] + scope: str | None = None + + # these fields are currently unused, but we support & store them for potential + # future use + client_name: str | None = None + client_uri: AnyHttpUrl | None = None + logo_uri: AnyHttpUrl | None = None + contacts: list[str] | None = None + tos_uri: AnyHttpUrl | None = None + policy_uri: AnyHttpUrl | None = None + jwks_uri: AnyHttpUrl | None = None + jwks: Any | None = None + software_id: str | None = None + software_version: str | None = None + + def validate_scope(self, requested_scope: str | None) -> list[str] | None: + if requested_scope is None: + return None + requested_scopes = requested_scope.split(" ") + allowed_scopes = [] if self.scope is None else self.scope.split(" ") + for scope in requested_scopes: + if scope not in allowed_scopes: + raise InvalidScopeError(f"Client was not registered with scope {scope}") + return requested_scopes + + def validate_redirect_uri(self, redirect_uri: AnyHttpUrl | None) -> AnyHttpUrl: + if redirect_uri is not None: + # Validate redirect_uri against client's registered redirect URIs + if redirect_uri not in self.redirect_uris: + raise InvalidRedirectUriError( + f"Redirect URI '{redirect_uri}' not registered for client" + ) + return redirect_uri + elif len(self.redirect_uris) == 1: + return self.redirect_uris[0] + else: + raise InvalidRedirectUriError( + 
"redirect_uri must be specified when client " + "has multiple registered URIs" + ) + + +class OAuthClientInformationFull(OAuthClientMetadata): + """ + RFC 7591 OAuth 2.0 Dynamic Client Registration full response + (client information plus metadata). + """ + + client_id: str + client_secret: str | None = None + client_id_issued_at: int | None = None + client_secret_expires_at: int | None = None + + +class OAuthMetadata(BaseModel): + """ + RFC 8414 OAuth 2.0 Authorization Server Metadata. + See https://datatracker.ietf.org/doc/html/rfc8414#section-2 + """ + + issuer: AnyHttpUrl + authorization_endpoint: AnyHttpUrl + token_endpoint: AnyHttpUrl + registration_endpoint: AnyHttpUrl | None = None + scopes_supported: list[str] | None = None + response_types_supported: list[Literal["code"]] = ["code"] + response_modes_supported: list[Literal["query", "fragment"]] | None = None + grant_types_supported: ( + list[Literal["authorization_code", "refresh_token"]] | None + ) = None + token_endpoint_auth_methods_supported: ( + list[Literal["none", "client_secret_post"]] | None + ) = None + token_endpoint_auth_signing_alg_values_supported: None = None + service_documentation: AnyHttpUrl | None = None + ui_locales_supported: list[str] | None = None + op_policy_uri: AnyHttpUrl | None = None + op_tos_uri: AnyHttpUrl | None = None + revocation_endpoint: AnyHttpUrl | None = None + revocation_endpoint_auth_methods_supported: ( + list[Literal["client_secret_post"]] | None + ) = None + revocation_endpoint_auth_signing_alg_values_supported: None = None + introspection_endpoint: AnyHttpUrl | None = None + introspection_endpoint_auth_methods_supported: ( + list[Literal["client_secret_post"]] | None + ) = None + introspection_endpoint_auth_signing_alg_values_supported: None = None + code_challenge_methods_supported: list[Literal["S256"]] | None = None diff --git a/src/mcp/shared/context.py b/src/mcp/shared/context.py index ae85d3a19..24fcae31c 100644 --- a/src/mcp/shared/context.py +++ 
b/src/mcp/shared/context.py @@ -1,18 +1,18 @@ -from dataclasses import dataclass -from typing import Any, Generic - -from typing_extensions import TypeVar - -from mcp.shared.session import BaseSession -from mcp.types import RequestId, RequestParams - -SessionT = TypeVar("SessionT", bound=BaseSession[Any, Any, Any, Any, Any]) -LifespanContextT = TypeVar("LifespanContextT") - - -@dataclass -class RequestContext(Generic[SessionT, LifespanContextT]): - request_id: RequestId - meta: RequestParams.Meta | None - session: SessionT - lifespan_context: LifespanContextT +from dataclasses import dataclass +from typing import Any, Generic + +from typing_extensions import TypeVar + +from mcp.shared.session import BaseSession +from mcp.types import RequestId, RequestParams + +SessionT = TypeVar("SessionT", bound=BaseSession[Any, Any, Any, Any, Any]) +LifespanContextT = TypeVar("LifespanContextT") + + +@dataclass +class RequestContext(Generic[SessionT, LifespanContextT]): + request_id: RequestId + meta: RequestParams.Meta | None + session: SessionT + lifespan_context: LifespanContextT diff --git a/src/mcp/shared/exceptions.py b/src/mcp/shared/exceptions.py index 97a1c09a9..6aec32cd3 100644 --- a/src/mcp/shared/exceptions.py +++ b/src/mcp/shared/exceptions.py @@ -1,14 +1,14 @@ -from mcp.types import ErrorData - - -class McpError(Exception): - """ - Exception type raised when an error arrives over an MCP connection. - """ - - error: ErrorData - - def __init__(self, error: ErrorData): - """Initialize McpError.""" - super().__init__(error.message) - self.error = error +from mcp.types import ErrorData + + +class McpError(Exception): + """ + Exception type raised when an error arrives over an MCP connection. 
+ """ + + error: ErrorData + + def __init__(self, error: ErrorData): + """Initialize McpError.""" + super().__init__(error.message) + self.error = error diff --git a/src/mcp/shared/memory.py b/src/mcp/shared/memory.py index b53f8dd63..8da81e147 100644 --- a/src/mcp/shared/memory.py +++ b/src/mcp/shared/memory.py @@ -1,105 +1,105 @@ -""" -In-memory transports -""" - -from collections.abc import AsyncGenerator -from contextlib import asynccontextmanager -from datetime import timedelta -from typing import Any - -import anyio -from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream - -import mcp.types as types -from mcp.client.session import ( - ClientSession, - ListRootsFnT, - LoggingFnT, - MessageHandlerFnT, - SamplingFnT, -) -from mcp.server import Server -from mcp.shared.message import SessionMessage - -MessageStream = tuple[ - MemoryObjectReceiveStream[SessionMessage | Exception], - MemoryObjectSendStream[SessionMessage], -] - - -@asynccontextmanager -async def create_client_server_memory_streams() -> ( - AsyncGenerator[tuple[MessageStream, MessageStream], None] -): - """ - Creates a pair of bidirectional memory streams for client-server communication. 
- - Returns: - A tuple of (client_streams, server_streams) where each is a tuple of - (read_stream, write_stream) - """ - # Create streams for both directions - server_to_client_send, server_to_client_receive = anyio.create_memory_object_stream[ - SessionMessage | Exception - ](1) - client_to_server_send, client_to_server_receive = anyio.create_memory_object_stream[ - SessionMessage | Exception - ](1) - - client_streams = (server_to_client_receive, client_to_server_send) - server_streams = (client_to_server_receive, server_to_client_send) - - async with ( - server_to_client_receive, - client_to_server_send, - client_to_server_receive, - server_to_client_send, - ): - yield client_streams, server_streams - - -@asynccontextmanager -async def create_connected_server_and_client_session( - server: Server[Any], - read_timeout_seconds: timedelta | None = None, - sampling_callback: SamplingFnT | None = None, - list_roots_callback: ListRootsFnT | None = None, - logging_callback: LoggingFnT | None = None, - message_handler: MessageHandlerFnT | None = None, - client_info: types.Implementation | None = None, - raise_exceptions: bool = False, -) -> AsyncGenerator[ClientSession, None]: - """Creates a ClientSession that is connected to a running MCP server.""" - async with create_client_server_memory_streams() as ( - client_streams, - server_streams, - ): - client_read, client_write = client_streams - server_read, server_write = server_streams - - # Create a cancel scope for the server task - async with anyio.create_task_group() as tg: - tg.start_soon( - lambda: server.run( - server_read, - server_write, - server.create_initialization_options(), - raise_exceptions=raise_exceptions, - ) - ) - - try: - async with ClientSession( - read_stream=client_read, - write_stream=client_write, - read_timeout_seconds=read_timeout_seconds, - sampling_callback=sampling_callback, - list_roots_callback=list_roots_callback, - logging_callback=logging_callback, - message_handler=message_handler, - 
client_info=client_info, - ) as client_session: - await client_session.initialize() - yield client_session - finally: - tg.cancel_scope.cancel() +""" +In-memory transports +""" + +from collections.abc import AsyncGenerator +from contextlib import asynccontextmanager +from datetime import timedelta +from typing import Any + +import anyio +from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream + +import mcp.types as types +from mcp.client.session import ( + ClientSession, + ListRootsFnT, + LoggingFnT, + MessageHandlerFnT, + SamplingFnT, +) +from mcp.server import Server +from mcp.shared.message import SessionMessage + +MessageStream = tuple[ + MemoryObjectReceiveStream[SessionMessage | Exception], + MemoryObjectSendStream[SessionMessage], +] + + +@asynccontextmanager +async def create_client_server_memory_streams() -> ( + AsyncGenerator[tuple[MessageStream, MessageStream], None] +): + """ + Creates a pair of bidirectional memory streams for client-server communication. 
+ + Returns: + A tuple of (client_streams, server_streams) where each is a tuple of + (read_stream, write_stream) + """ + # Create streams for both directions + server_to_client_send, server_to_client_receive = anyio.create_memory_object_stream[ + SessionMessage | Exception + ](1) + client_to_server_send, client_to_server_receive = anyio.create_memory_object_stream[ + SessionMessage | Exception + ](1) + + client_streams = (server_to_client_receive, client_to_server_send) + server_streams = (client_to_server_receive, server_to_client_send) + + async with ( + server_to_client_receive, + client_to_server_send, + client_to_server_receive, + server_to_client_send, + ): + yield client_streams, server_streams + + +@asynccontextmanager +async def create_connected_server_and_client_session( + server: Server[Any], + read_timeout_seconds: timedelta | None = None, + sampling_callback: SamplingFnT | None = None, + list_roots_callback: ListRootsFnT | None = None, + logging_callback: LoggingFnT | None = None, + message_handler: MessageHandlerFnT | None = None, + client_info: types.Implementation | None = None, + raise_exceptions: bool = False, +) -> AsyncGenerator[ClientSession, None]: + """Creates a ClientSession that is connected to a running MCP server.""" + async with create_client_server_memory_streams() as ( + client_streams, + server_streams, + ): + client_read, client_write = client_streams + server_read, server_write = server_streams + + # Create a cancel scope for the server task + async with anyio.create_task_group() as tg: + tg.start_soon( + lambda: server.run( + server_read, + server_write, + server.create_initialization_options(), + raise_exceptions=raise_exceptions, + ) + ) + + try: + async with ClientSession( + read_stream=client_read, + write_stream=client_write, + read_timeout_seconds=read_timeout_seconds, + sampling_callback=sampling_callback, + list_roots_callback=list_roots_callback, + logging_callback=logging_callback, + message_handler=message_handler, + 
client_info=client_info, + ) as client_session: + await client_session.initialize() + yield client_session + finally: + tg.cancel_scope.cancel() diff --git a/src/mcp/shared/message.py b/src/mcp/shared/message.py index 5583f4795..c4c70831f 100644 --- a/src/mcp/shared/message.py +++ b/src/mcp/shared/message.py @@ -1,43 +1,43 @@ -""" -Message wrapper with metadata support. - -This module defines a wrapper type that combines JSONRPCMessage with metadata -to support transport-specific features like resumability. -""" - -from collections.abc import Awaitable, Callable -from dataclasses import dataclass - -from mcp.types import JSONRPCMessage, RequestId - -ResumptionToken = str - -ResumptionTokenUpdateCallback = Callable[[ResumptionToken], Awaitable[None]] - - -@dataclass -class ClientMessageMetadata: - """Metadata specific to client messages.""" - - resumption_token: ResumptionToken | None = None - on_resumption_token_update: Callable[[ResumptionToken], Awaitable[None]] | None = ( - None - ) - - -@dataclass -class ServerMessageMetadata: - """Metadata specific to server messages.""" - - related_request_id: RequestId | None = None - - -MessageMetadata = ClientMessageMetadata | ServerMessageMetadata | None - - -@dataclass -class SessionMessage: - """A message with specific metadata for transport-specific features.""" - - message: JSONRPCMessage - metadata: MessageMetadata = None +""" +Message wrapper with metadata support. + +This module defines a wrapper type that combines JSONRPCMessage with metadata +to support transport-specific features like resumability. 
+""" + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass + +from mcp.types import JSONRPCMessage, RequestId + +ResumptionToken = str + +ResumptionTokenUpdateCallback = Callable[[ResumptionToken], Awaitable[None]] + + +@dataclass +class ClientMessageMetadata: + """Metadata specific to client messages.""" + + resumption_token: ResumptionToken | None = None + on_resumption_token_update: Callable[[ResumptionToken], Awaitable[None]] | None = ( + None + ) + + +@dataclass +class ServerMessageMetadata: + """Metadata specific to server messages.""" + + related_request_id: RequestId | None = None + + +MessageMetadata = ClientMessageMetadata | ServerMessageMetadata | None + + +@dataclass +class SessionMessage: + """A message with specific metadata for transport-specific features.""" + + message: JSONRPCMessage + metadata: MessageMetadata = None diff --git a/tests/client/test_list_roots_callback.py b/tests/client/test_list_roots_callback.py index f5b598218..defe8f5a7 100644 --- a/tests/client/test_list_roots_callback.py +++ b/tests/client/test_list_roots_callback.py @@ -1,66 +1,66 @@ -import pytest -from pydantic import FileUrl - -from mcp.client.session import ClientSession -from mcp.server.fastmcp.server import Context -from mcp.shared.context import RequestContext -from mcp.shared.memory import ( - create_connected_server_and_client_session as create_session, -) -from mcp.types import ListRootsResult, Root, TextContent - - -@pytest.mark.anyio -async def test_list_roots_callback(): - from mcp.server.fastmcp import FastMCP - - server = FastMCP("test") - - callback_return = ListRootsResult( - roots=[ - Root( - uri=FileUrl("file://users/fake/test"), - name="Test Root 1", - ), - Root( - uri=FileUrl("file://users/fake/test/2"), - name="Test Root 2", - ), - ] - ) - - async def list_roots_callback( - context: RequestContext[ClientSession, None], - ) -> ListRootsResult: - return callback_return - - @server.tool("test_list_roots") - async def 
test_list_roots(context: Context, message: str): # type: ignore[reportUnknownMemberType] - roots = await context.session.list_roots() - assert roots == callback_return - return True - - # Test with list_roots callback - async with create_session( - server._mcp_server, list_roots_callback=list_roots_callback - ) as client_session: - # Make a request to trigger sampling callback - result = await client_session.call_tool( - "test_list_roots", {"message": "test message"} - ) - assert result.isError is False - assert isinstance(result.content[0], TextContent) - assert result.content[0].text == "true" - - # Test without list_roots callback - async with create_session(server._mcp_server) as client_session: - # Make a request to trigger sampling callback - result = await client_session.call_tool( - "test_list_roots", {"message": "test message"} - ) - assert result.isError is True - assert isinstance(result.content[0], TextContent) - assert ( - result.content[0].text - == "Error executing tool test_list_roots: List roots not supported" - ) +import pytest +from pydantic import FileUrl + +from mcp.client.session import ClientSession +from mcp.server.fastmcp.server import Context +from mcp.shared.context import RequestContext +from mcp.shared.memory import ( + create_connected_server_and_client_session as create_session, +) +from mcp.types import ListRootsResult, Root, TextContent + + +@pytest.mark.anyio +async def test_list_roots_callback(): + from mcp.server.fastmcp import FastMCP + + server = FastMCP("test") + + callback_return = ListRootsResult( + roots=[ + Root( + uri=FileUrl("file://users/fake/test"), + name="Test Root 1", + ), + Root( + uri=FileUrl("file://users/fake/test/2"), + name="Test Root 2", + ), + ] + ) + + async def list_roots_callback( + context: RequestContext[ClientSession, None], + ) -> ListRootsResult: + return callback_return + + @server.tool("test_list_roots") + async def test_list_roots(context: Context, message: str): # type: 
ignore[reportUnknownMemberType] + roots = await context.session.list_roots() + assert roots == callback_return + return True + + # Test with list_roots callback + async with create_session( + server._mcp_server, list_roots_callback=list_roots_callback + ) as client_session: + # Make a request to trigger sampling callback + result = await client_session.call_tool( + "test_list_roots", {"message": "test message"} + ) + assert result.isError is False + assert isinstance(result.content[0], TextContent) + assert result.content[0].text == "true" + + # Test without list_roots callback + async with create_session(server._mcp_server) as client_session: + # Make a request to trigger sampling callback + result = await client_session.call_tool( + "test_list_roots", {"message": "test message"} + ) + assert result.isError is True + assert isinstance(result.content[0], TextContent) + assert ( + result.content[0].text + == "Error executing tool test_list_roots: List roots not supported" + ) diff --git a/tests/client/test_logging_callback.py b/tests/client/test_logging_callback.py index 0c9eeb397..da51f67ba 100644 --- a/tests/client/test_logging_callback.py +++ b/tests/client/test_logging_callback.py @@ -1,85 +1,85 @@ -from typing import Literal - -import pytest - -import mcp.types as types -from mcp.shared.memory import ( - create_connected_server_and_client_session as create_session, -) -from mcp.shared.session import RequestResponder -from mcp.types import ( - LoggingMessageNotificationParams, - TextContent, -) - - -class LoggingCollector: - def __init__(self): - self.log_messages: list[LoggingMessageNotificationParams] = [] - - async def __call__(self, params: LoggingMessageNotificationParams) -> None: - self.log_messages.append(params) - - -@pytest.mark.anyio -async def test_logging_callback(): - from mcp.server.fastmcp import FastMCP - - server = FastMCP("test") - logging_collector = LoggingCollector() - - # Create a simple test tool - @server.tool("test_tool") - async def 
test_tool() -> bool: - # The actual tool is very simple and just returns True - return True - - # Create a function that can send a log notification - @server.tool("test_tool_with_log") - async def test_tool_with_log( - message: str, level: Literal["debug", "info", "warning", "error"], logger: str - ) -> bool: - """Send a log notification to the client.""" - await server.get_context().log( - level=level, - message=message, - logger_name=logger, - ) - return True - - # Create a message handler to catch exceptions - async def message_handler( - message: RequestResponder[types.ServerRequest, types.ClientResult] - | types.ServerNotification - | Exception, - ) -> None: - if isinstance(message, Exception): - raise message - - async with create_session( - server._mcp_server, - logging_callback=logging_collector, - message_handler=message_handler, - ) as client_session: - # First verify our test tool works - result = await client_session.call_tool("test_tool", {}) - assert result.isError is False - assert isinstance(result.content[0], TextContent) - assert result.content[0].text == "true" - - # Now send a log message via our tool - log_result = await client_session.call_tool( - "test_tool_with_log", - { - "message": "Test log message", - "level": "info", - "logger": "test_logger", - }, - ) - assert log_result.isError is False - assert len(logging_collector.log_messages) == 1 - # Create meta object with related_request_id added dynamically - log = logging_collector.log_messages[0] - assert log.level == "info" - assert log.logger == "test_logger" - assert log.data == "Test log message" +from typing import Literal + +import pytest + +import mcp.types as types +from mcp.shared.memory import ( + create_connected_server_and_client_session as create_session, +) +from mcp.shared.session import RequestResponder +from mcp.types import ( + LoggingMessageNotificationParams, + TextContent, +) + + +class LoggingCollector: + def __init__(self): + self.log_messages: 
list[LoggingMessageNotificationParams] = [] + + async def __call__(self, params: LoggingMessageNotificationParams) -> None: + self.log_messages.append(params) + + +@pytest.mark.anyio +async def test_logging_callback(): + from mcp.server.fastmcp import FastMCP + + server = FastMCP("test") + logging_collector = LoggingCollector() + + # Create a simple test tool + @server.tool("test_tool") + async def test_tool() -> bool: + # The actual tool is very simple and just returns True + return True + + # Create a function that can send a log notification + @server.tool("test_tool_with_log") + async def test_tool_with_log( + message: str, level: Literal["debug", "info", "warning", "error"], logger: str + ) -> bool: + """Send a log notification to the client.""" + await server.get_context().log( + level=level, + message=message, + logger_name=logger, + ) + return True + + # Create a message handler to catch exceptions + async def message_handler( + message: RequestResponder[types.ServerRequest, types.ClientResult] + | types.ServerNotification + | Exception, + ) -> None: + if isinstance(message, Exception): + raise message + + async with create_session( + server._mcp_server, + logging_callback=logging_collector, + message_handler=message_handler, + ) as client_session: + # First verify our test tool works + result = await client_session.call_tool("test_tool", {}) + assert result.isError is False + assert isinstance(result.content[0], TextContent) + assert result.content[0].text == "true" + + # Now send a log message via our tool + log_result = await client_session.call_tool( + "test_tool_with_log", + { + "message": "Test log message", + "level": "info", + "logger": "test_logger", + }, + ) + assert log_result.isError is False + assert len(logging_collector.log_messages) == 1 + # Create meta object with related_request_id added dynamically + log = logging_collector.log_messages[0] + assert log.level == "info" + assert log.logger == "test_logger" + assert log.data == "Test log 
message" diff --git a/tests/client/test_resource_cleanup.py b/tests/client/test_resource_cleanup.py index 990b3a89a..1a8e3edcb 100644 --- a/tests/client/test_resource_cleanup.py +++ b/tests/client/test_resource_cleanup.py @@ -1,68 +1,68 @@ -from unittest.mock import patch - -import anyio -import pytest - -from mcp.shared.session import BaseSession -from mcp.types import ( - ClientRequest, - EmptyResult, - PingRequest, -) - - -@pytest.mark.anyio -async def test_send_request_stream_cleanup(): - """ - Test that send_request properly cleans up streams when an exception occurs. - - This test mocks out most of the session functionality to focus on stream cleanup. - """ - - # Create a mock session with the minimal required functionality - class TestSession(BaseSession): - async def _send_response(self, request_id, response): - pass - - # Create streams - write_stream_send, write_stream_receive = anyio.create_memory_object_stream(1) - read_stream_send, read_stream_receive = anyio.create_memory_object_stream(1) - - # Create the session - session = TestSession( - read_stream_receive, - write_stream_send, - object, # Request type doesn't matter for this test - object, # Notification type doesn't matter for this test - ) - - # Create a test request - request = ClientRequest( - PingRequest( - method="ping", - ) - ) - - # Patch the _write_stream.send method to raise an exception - async def mock_send(*args, **kwargs): - raise RuntimeError("Simulated network error") - - # Record the response streams before the test - initial_stream_count = len(session._response_streams) - - # Run the test with the patched method - with patch.object(session._write_stream, "send", mock_send): - with pytest.raises(RuntimeError): - await session.send_request(request, EmptyResult) - - # Verify that no response streams were leaked - assert len(session._response_streams) == initial_stream_count, ( - f"Expected {initial_stream_count} response streams after request, " - f"but found 
{len(session._response_streams)}" - ) - - # Clean up - await write_stream_send.aclose() - await write_stream_receive.aclose() - await read_stream_send.aclose() - await read_stream_receive.aclose() +from unittest.mock import patch + +import anyio +import pytest + +from mcp.shared.session import BaseSession +from mcp.types import ( + ClientRequest, + EmptyResult, + PingRequest, +) + + +@pytest.mark.anyio +async def test_send_request_stream_cleanup(): + """ + Test that send_request properly cleans up streams when an exception occurs. + + This test mocks out most of the session functionality to focus on stream cleanup. + """ + + # Create a mock session with the minimal required functionality + class TestSession(BaseSession): + async def _send_response(self, request_id, response): + pass + + # Create streams + write_stream_send, write_stream_receive = anyio.create_memory_object_stream(1) + read_stream_send, read_stream_receive = anyio.create_memory_object_stream(1) + + # Create the session + session = TestSession( + read_stream_receive, + write_stream_send, + object, # Request type doesn't matter for this test + object, # Notification type doesn't matter for this test + ) + + # Create a test request + request = ClientRequest( + PingRequest( + method="ping", + ) + ) + + # Patch the _write_stream.send method to raise an exception + async def mock_send(*args, **kwargs): + raise RuntimeError("Simulated network error") + + # Record the response streams before the test + initial_stream_count = len(session._response_streams) + + # Run the test with the patched method + with patch.object(session._write_stream, "send", mock_send): + with pytest.raises(RuntimeError): + await session.send_request(request, EmptyResult) + + # Verify that no response streams were leaked + assert len(session._response_streams) == initial_stream_count, ( + f"Expected {initial_stream_count} response streams after request, " + f"but found {len(session._response_streams)}" + ) + + # Clean up + await 
write_stream_send.aclose() + await write_stream_receive.aclose() + await read_stream_send.aclose() + await read_stream_receive.aclose() diff --git a/tests/client/test_sampling_callback.py b/tests/client/test_sampling_callback.py index ba586d4a8..554381921 100644 --- a/tests/client/test_sampling_callback.py +++ b/tests/client/test_sampling_callback.py @@ -1,73 +1,73 @@ -import pytest - -from mcp.client.session import ClientSession -from mcp.shared.context import RequestContext -from mcp.shared.memory import ( - create_connected_server_and_client_session as create_session, -) -from mcp.types import ( - CreateMessageRequestParams, - CreateMessageResult, - SamplingMessage, - TextContent, -) - - -@pytest.mark.anyio -async def test_sampling_callback(): - from mcp.server.fastmcp import FastMCP - - server = FastMCP("test") - - callback_return = CreateMessageResult( - role="assistant", - content=TextContent( - type="text", text="This is a response from the sampling callback" - ), - model="test-model", - stopReason="endTurn", - ) - - async def sampling_callback( - context: RequestContext[ClientSession, None], - params: CreateMessageRequestParams, - ) -> CreateMessageResult: - return callback_return - - @server.tool("test_sampling") - async def test_sampling_tool(message: str): - value = await server.get_context().session.create_message( - messages=[ - SamplingMessage( - role="user", content=TextContent(type="text", text=message) - ) - ], - max_tokens=100, - ) - assert value == callback_return - return True - - # Test with sampling callback - async with create_session( - server._mcp_server, sampling_callback=sampling_callback - ) as client_session: - # Make a request to trigger sampling callback - result = await client_session.call_tool( - "test_sampling", {"message": "Test message for sampling"} - ) - assert result.isError is False - assert isinstance(result.content[0], TextContent) - assert result.content[0].text == "true" - - # Test without sampling callback - async with 
create_session(server._mcp_server) as client_session: - # Make a request to trigger sampling callback - result = await client_session.call_tool( - "test_sampling", {"message": "Test message for sampling"} - ) - assert result.isError is True - assert isinstance(result.content[0], TextContent) - assert ( - result.content[0].text - == "Error executing tool test_sampling: Sampling not supported" - ) +import pytest + +from mcp.client.session import ClientSession +from mcp.shared.context import RequestContext +from mcp.shared.memory import ( + create_connected_server_and_client_session as create_session, +) +from mcp.types import ( + CreateMessageRequestParams, + CreateMessageResult, + SamplingMessage, + TextContent, +) + + +@pytest.mark.anyio +async def test_sampling_callback(): + from mcp.server.fastmcp import FastMCP + + server = FastMCP("test") + + callback_return = CreateMessageResult( + role="assistant", + content=TextContent( + type="text", text="This is a response from the sampling callback" + ), + model="test-model", + stopReason="endTurn", + ) + + async def sampling_callback( + context: RequestContext[ClientSession, None], + params: CreateMessageRequestParams, + ) -> CreateMessageResult: + return callback_return + + @server.tool("test_sampling") + async def test_sampling_tool(message: str): + value = await server.get_context().session.create_message( + messages=[ + SamplingMessage( + role="user", content=TextContent(type="text", text=message) + ) + ], + max_tokens=100, + ) + assert value == callback_return + return True + + # Test with sampling callback + async with create_session( + server._mcp_server, sampling_callback=sampling_callback + ) as client_session: + # Make a request to trigger sampling callback + result = await client_session.call_tool( + "test_sampling", {"message": "Test message for sampling"} + ) + assert result.isError is False + assert isinstance(result.content[0], TextContent) + assert result.content[0].text == "true" + + # Test without 
sampling callback + async with create_session(server._mcp_server) as client_session: + # Make a request to trigger sampling callback + result = await client_session.call_tool( + "test_sampling", {"message": "Test message for sampling"} + ) + assert result.isError is True + assert isinstance(result.content[0], TextContent) + assert ( + result.content[0].text + == "Error executing tool test_sampling: Sampling not supported" + ) diff --git a/tests/conftest.py b/tests/conftest.py index af7e47993..395271324 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,6 +1,6 @@ -import pytest - - -@pytest.fixture -def anyio_backend(): - return "asyncio" +import pytest + + +@pytest.fixture +def anyio_backend(): + return "asyncio" diff --git a/tests/issues/test_100_tool_listing.py b/tests/issues/test_100_tool_listing.py index 2bc386c96..ead2d48d3 100644 --- a/tests/issues/test_100_tool_listing.py +++ b/tests/issues/test_100_tool_listing.py @@ -1,35 +1,35 @@ -import pytest - -from mcp.server.fastmcp import FastMCP - -pytestmark = pytest.mark.anyio - - -async def test_list_tools_returns_all_tools(): - mcp = FastMCP("TestTools") - - # Create 100 tools with unique names - num_tools = 100 - for i in range(num_tools): - - @mcp.tool(name=f"tool_{i}") - def dummy_tool_func(): - f"""Tool number {i}""" - return i - - globals()[f"dummy_tool_{i}"] = ( - dummy_tool_func # Keep reference to avoid garbage collection - ) - - # Get all tools - tools = await mcp.list_tools() - - # Verify we get all tools - assert len(tools) == num_tools, f"Expected {num_tools} tools, but got {len(tools)}" - - # Verify each tool is unique and has the correct name - tool_names = [tool.name for tool in tools] - expected_names = [f"tool_{i}" for i in range(num_tools)] - assert sorted(tool_names) == sorted( - expected_names - ), "Tool names don't match expected names" +import pytest + +from mcp.server.fastmcp import FastMCP + +pytestmark = pytest.mark.anyio + + +async def test_list_tools_returns_all_tools(): + 
mcp = FastMCP("TestTools") + + # Create 100 tools with unique names + num_tools = 100 + for i in range(num_tools): + + @mcp.tool(name=f"tool_{i}") + def dummy_tool_func(): + f"""Tool number {i}""" + return i + + globals()[f"dummy_tool_{i}"] = ( + dummy_tool_func # Keep reference to avoid garbage collection + ) + + # Get all tools + tools = await mcp.list_tools() + + # Verify we get all tools + assert len(tools) == num_tools, f"Expected {num_tools} tools, but got {len(tools)}" + + # Verify each tool is unique and has the correct name + tool_names = [tool.name for tool in tools] + expected_names = [f"tool_{i}" for i in range(num_tools)] + assert sorted(tool_names) == sorted( + expected_names + ), "Tool names don't match expected names" diff --git a/tests/issues/test_129_resource_templates.py b/tests/issues/test_129_resource_templates.py index e6eff3d46..bea5df486 100644 --- a/tests/issues/test_129_resource_templates.py +++ b/tests/issues/test_129_resource_templates.py @@ -1,44 +1,44 @@ -import pytest - -from mcp import types -from mcp.server.fastmcp import FastMCP - - -@pytest.mark.anyio -async def test_resource_templates(): - # Create an MCP server - mcp = FastMCP("Demo") - - # Add a dynamic greeting resource - @mcp.resource("greeting://{name}") - def get_greeting(name: str) -> str: - """Get a personalized greeting""" - return f"Hello, {name}!" 
- - @mcp.resource("users://{user_id}/profile") - def get_user_profile(user_id: str) -> str: - """Dynamic user data""" - return f"Profile data for user {user_id}" - - # Get the list of resource templates using the underlying server - # Note: list_resource_templates() returns a decorator that wraps the handler - # The handler returns a ServerResult with a ListResourceTemplatesResult inside - result = await mcp._mcp_server.request_handlers[types.ListResourceTemplatesRequest]( - types.ListResourceTemplatesRequest( - method="resources/templates/list", params=None, cursor=None - ) - ) - assert isinstance(result.root, types.ListResourceTemplatesResult) - templates = result.root.resourceTemplates - - # Verify we get both templates back - assert len(templates) == 2 - - # Verify template details - greeting_template = next(t for t in templates if t.name == "get_greeting") - assert greeting_template.uriTemplate == "greeting://{name}" - assert greeting_template.description == "Get a personalized greeting" - - profile_template = next(t for t in templates if t.name == "get_user_profile") - assert profile_template.uriTemplate == "users://{user_id}/profile" - assert profile_template.description == "Dynamic user data" +import pytest + +from mcp import types +from mcp.server.fastmcp import FastMCP + + +@pytest.mark.anyio +async def test_resource_templates(): + # Create an MCP server + mcp = FastMCP("Demo") + + # Add a dynamic greeting resource + @mcp.resource("greeting://{name}") + def get_greeting(name: str) -> str: + """Get a personalized greeting""" + return f"Hello, {name}!" 
+ + @mcp.resource("users://{user_id}/profile") + def get_user_profile(user_id: str) -> str: + """Dynamic user data""" + return f"Profile data for user {user_id}" + + # Get the list of resource templates using the underlying server + # Note: list_resource_templates() returns a decorator that wraps the handler + # The handler returns a ServerResult with a ListResourceTemplatesResult inside + result = await mcp._mcp_server.request_handlers[types.ListResourceTemplatesRequest]( + types.ListResourceTemplatesRequest( + method="resources/templates/list", params=None, cursor=None + ) + ) + assert isinstance(result.root, types.ListResourceTemplatesResult) + templates = result.root.resourceTemplates + + # Verify we get both templates back + assert len(templates) == 2 + + # Verify template details + greeting_template = next(t for t in templates if t.name == "get_greeting") + assert greeting_template.uriTemplate == "greeting://{name}" + assert greeting_template.description == "Get a personalized greeting" + + profile_template = next(t for t in templates if t.name == "get_user_profile") + assert profile_template.uriTemplate == "users://{user_id}/profile" + assert profile_template.description == "Dynamic user data" diff --git a/tests/issues/test_141_resource_templates.py b/tests/issues/test_141_resource_templates.py index 3c17cd559..54b4bdd57 100644 --- a/tests/issues/test_141_resource_templates.py +++ b/tests/issues/test_141_resource_templates.py @@ -1,120 +1,120 @@ -import pytest -from pydantic import AnyUrl - -from mcp.server.fastmcp import FastMCP -from mcp.shared.memory import ( - create_connected_server_and_client_session as client_session, -) -from mcp.types import ( - ListResourceTemplatesResult, - TextResourceContents, -) - - -@pytest.mark.anyio -async def test_resource_template_edge_cases(): - """Test server-side resource template validation""" - mcp = FastMCP("Demo") - - # Test case 1: Template with multiple parameters - 
@mcp.resource("resource://users/{user_id}/posts/{post_id}") - def get_user_post(user_id: str, post_id: str) -> str: - return f"Post {post_id} by user {user_id}" - - # Test case 2: Template with optional parameter (should fail) - with pytest.raises(ValueError, match="Mismatch between URI parameters"): - - @mcp.resource("resource://users/{user_id}/profile") - def get_user_profile(user_id: str, optional_param: str | None = None) -> str: - return f"Profile for user {user_id}" - - # Test case 3: Template with mismatched parameters - with pytest.raises(ValueError, match="Mismatch between URI parameters"): - - @mcp.resource("resource://users/{user_id}/profile") - def get_user_profile_mismatch(different_param: str) -> str: - return f"Profile for user {different_param}" - - # Test case 4: Template with extra function parameters - with pytest.raises(ValueError, match="Mismatch between URI parameters"): - - @mcp.resource("resource://users/{user_id}/profile") - def get_user_profile_extra(user_id: str, extra_param: str) -> str: - return f"Profile for user {user_id}" - - # Test case 5: Template with missing function parameters - with pytest.raises(ValueError, match="Mismatch between URI parameters"): - - @mcp.resource("resource://users/{user_id}/profile/{section}") - def get_user_profile_missing(user_id: str) -> str: - return f"Profile for user {user_id}" - - # Verify valid template works - result = await mcp.read_resource("resource://users/123/posts/456") - result_list = list(result) - assert len(result_list) == 1 - assert result_list[0].content == "Post 456 by user 123" - assert result_list[0].mime_type == "text/plain" - - # Verify invalid parameters raise error - with pytest.raises(ValueError, match="Unknown resource"): - await mcp.read_resource("resource://users/123/posts") # Missing post_id - - with pytest.raises(ValueError, match="Unknown resource"): - await mcp.read_resource( - "resource://users/123/posts/456/extra" - ) # Extra path component - - -@pytest.mark.anyio 
-async def test_resource_template_client_interaction(): - """Test client-side resource template interaction""" - mcp = FastMCP("Demo") - - # Register some templated resources - @mcp.resource("resource://users/{user_id}/posts/{post_id}") - def get_user_post(user_id: str, post_id: str) -> str: - return f"Post {post_id} by user {user_id}" - - @mcp.resource("resource://users/{user_id}/profile") - def get_user_profile(user_id: str) -> str: - return f"Profile for user {user_id}" - - async with client_session(mcp._mcp_server) as session: - # Initialize the session - await session.initialize() - - # List available resources - resources = await session.list_resource_templates() - assert isinstance(resources, ListResourceTemplatesResult) - assert len(resources.resourceTemplates) == 2 - - # Verify resource templates are listed correctly - templates = [r.uriTemplate for r in resources.resourceTemplates] - assert "resource://users/{user_id}/posts/{post_id}" in templates - assert "resource://users/{user_id}/profile" in templates - - # Read a resource with valid parameters - result = await session.read_resource(AnyUrl("resource://users/123/posts/456")) - contents = result.contents[0] - assert isinstance(contents, TextResourceContents) - assert contents.text == "Post 456 by user 123" - assert contents.mimeType == "text/plain" - - # Read another resource with valid parameters - result = await session.read_resource(AnyUrl("resource://users/789/profile")) - contents = result.contents[0] - assert isinstance(contents, TextResourceContents) - assert contents.text == "Profile for user 789" - assert contents.mimeType == "text/plain" - - # Verify invalid resource URIs raise appropriate errors - with pytest.raises(Exception): # Specific exception type may vary - await session.read_resource( - AnyUrl("resource://users/123/posts") - ) # Missing post_id - - with pytest.raises(Exception): # Specific exception type may vary - await session.read_resource( - AnyUrl("resource://users/123/invalid") 
- ) # Invalid template +import pytest +from pydantic import AnyUrl + +from mcp.server.fastmcp import FastMCP +from mcp.shared.memory import ( + create_connected_server_and_client_session as client_session, +) +from mcp.types import ( + ListResourceTemplatesResult, + TextResourceContents, +) + + +@pytest.mark.anyio +async def test_resource_template_edge_cases(): + """Test server-side resource template validation""" + mcp = FastMCP("Demo") + + # Test case 1: Template with multiple parameters + @mcp.resource("resource://users/{user_id}/posts/{post_id}") + def get_user_post(user_id: str, post_id: str) -> str: + return f"Post {post_id} by user {user_id}" + + # Test case 2: Template with optional parameter (should fail) + with pytest.raises(ValueError, match="Mismatch between URI parameters"): + + @mcp.resource("resource://users/{user_id}/profile") + def get_user_profile(user_id: str, optional_param: str | None = None) -> str: + return f"Profile for user {user_id}" + + # Test case 3: Template with mismatched parameters + with pytest.raises(ValueError, match="Mismatch between URI parameters"): + + @mcp.resource("resource://users/{user_id}/profile") + def get_user_profile_mismatch(different_param: str) -> str: + return f"Profile for user {different_param}" + + # Test case 4: Template with extra function parameters + with pytest.raises(ValueError, match="Mismatch between URI parameters"): + + @mcp.resource("resource://users/{user_id}/profile") + def get_user_profile_extra(user_id: str, extra_param: str) -> str: + return f"Profile for user {user_id}" + + # Test case 5: Template with missing function parameters + with pytest.raises(ValueError, match="Mismatch between URI parameters"): + + @mcp.resource("resource://users/{user_id}/profile/{section}") + def get_user_profile_missing(user_id: str) -> str: + return f"Profile for user {user_id}" + + # Verify valid template works + result = await mcp.read_resource("resource://users/123/posts/456") + result_list = list(result) + 
assert len(result_list) == 1 + assert result_list[0].content == "Post 456 by user 123" + assert result_list[0].mime_type == "text/plain" + + # Verify invalid parameters raise error + with pytest.raises(ValueError, match="Unknown resource"): + await mcp.read_resource("resource://users/123/posts") # Missing post_id + + with pytest.raises(ValueError, match="Unknown resource"): + await mcp.read_resource( + "resource://users/123/posts/456/extra" + ) # Extra path component + + +@pytest.mark.anyio +async def test_resource_template_client_interaction(): + """Test client-side resource template interaction""" + mcp = FastMCP("Demo") + + # Register some templated resources + @mcp.resource("resource://users/{user_id}/posts/{post_id}") + def get_user_post(user_id: str, post_id: str) -> str: + return f"Post {post_id} by user {user_id}" + + @mcp.resource("resource://users/{user_id}/profile") + def get_user_profile(user_id: str) -> str: + return f"Profile for user {user_id}" + + async with client_session(mcp._mcp_server) as session: + # Initialize the session + await session.initialize() + + # List available resources + resources = await session.list_resource_templates() + assert isinstance(resources, ListResourceTemplatesResult) + assert len(resources.resourceTemplates) == 2 + + # Verify resource templates are listed correctly + templates = [r.uriTemplate for r in resources.resourceTemplates] + assert "resource://users/{user_id}/posts/{post_id}" in templates + assert "resource://users/{user_id}/profile" in templates + + # Read a resource with valid parameters + result = await session.read_resource(AnyUrl("resource://users/123/posts/456")) + contents = result.contents[0] + assert isinstance(contents, TextResourceContents) + assert contents.text == "Post 456 by user 123" + assert contents.mimeType == "text/plain" + + # Read another resource with valid parameters + result = await session.read_resource(AnyUrl("resource://users/789/profile")) + contents = result.contents[0] + assert 
isinstance(contents, TextResourceContents) + assert contents.text == "Profile for user 789" + assert contents.mimeType == "text/plain" + + # Verify invalid resource URIs raise appropriate errors + with pytest.raises(Exception): # Specific exception type may vary + await session.read_resource( + AnyUrl("resource://users/123/posts") + ) # Missing post_id + + with pytest.raises(Exception): # Specific exception type may vary + await session.read_resource( + AnyUrl("resource://users/123/invalid") + ) # Invalid template diff --git a/tests/issues/test_152_resource_mime_type.py b/tests/issues/test_152_resource_mime_type.py index 1143195e5..192db031a 100644 --- a/tests/issues/test_152_resource_mime_type.py +++ b/tests/issues/test_152_resource_mime_type.py @@ -1,146 +1,146 @@ -import base64 - -import pytest -from pydantic import AnyUrl - -from mcp import types -from mcp.server.fastmcp import FastMCP -from mcp.server.lowlevel import Server -from mcp.server.lowlevel.helper_types import ReadResourceContents -from mcp.shared.memory import ( - create_connected_server_and_client_session as client_session, -) - -pytestmark = pytest.mark.anyio - - -async def test_fastmcp_resource_mime_type(): - """Test that mime_type parameter is respected for resources.""" - mcp = FastMCP("test") - - # Create a small test image as bytes - image_bytes = b"fake_image_data" - base64_string = base64.b64encode(image_bytes).decode("utf-8") - - @mcp.resource("test://image", mime_type="image/png") - def get_image_as_string() -> str: - """Return a test image as base64 string.""" - return base64_string - - @mcp.resource("test://image_bytes", mime_type="image/png") - def get_image_as_bytes() -> bytes: - """Return a test image as bytes.""" - return image_bytes - - # Test that resources are listed with correct mime type - async with client_session(mcp._mcp_server) as client: - # List resources and verify mime types - resources = await client.list_resources() - assert resources.resources is not None - - mapping 
= {str(r.uri): r for r in resources.resources} - - # Find our resources - string_resource = mapping["test://image"] - bytes_resource = mapping["test://image_bytes"] - - # Verify mime types - assert ( - string_resource.mimeType == "image/png" - ), "String resource mime type not respected" - assert ( - bytes_resource.mimeType == "image/png" - ), "Bytes resource mime type not respected" - - # Also verify the content can be read correctly - string_result = await client.read_resource(AnyUrl("test://image")) - assert len(string_result.contents) == 1 - assert ( - getattr(string_result.contents[0], "text") == base64_string - ), "Base64 string mismatch" - assert ( - string_result.contents[0].mimeType == "image/png" - ), "String content mime type not preserved" - - bytes_result = await client.read_resource(AnyUrl("test://image_bytes")) - assert len(bytes_result.contents) == 1 - assert ( - base64.b64decode(getattr(bytes_result.contents[0], "blob")) == image_bytes - ), "Bytes mismatch" - assert ( - bytes_result.contents[0].mimeType == "image/png" - ), "Bytes content mime type not preserved" - - -async def test_lowlevel_resource_mime_type(): - """Test that mime_type parameter is respected for resources.""" - server = Server("test") - - # Create a small test image as bytes - image_bytes = b"fake_image_data" - base64_string = base64.b64encode(image_bytes).decode("utf-8") - - # Create test resources with specific mime types - test_resources = [ - types.Resource( - uri=AnyUrl("test://image"), name="test image", mimeType="image/png" - ), - types.Resource( - uri=AnyUrl("test://image_bytes"), - name="test image bytes", - mimeType="image/png", - ), - ] - - @server.list_resources() - async def handle_list_resources(): - return test_resources - - @server.read_resource() - async def handle_read_resource(uri: AnyUrl): - if str(uri) == "test://image": - return [ReadResourceContents(content=base64_string, mime_type="image/png")] - elif str(uri) == "test://image_bytes": - return [ - 
ReadResourceContents(content=bytes(image_bytes), mime_type="image/png") - ] - raise Exception(f"Resource not found: {uri}") - - # Test that resources are listed with correct mime type - async with client_session(server) as client: - # List resources and verify mime types - resources = await client.list_resources() - assert resources.resources is not None - - mapping = {str(r.uri): r for r in resources.resources} - - # Find our resources - string_resource = mapping["test://image"] - bytes_resource = mapping["test://image_bytes"] - - # Verify mime types - assert ( - string_resource.mimeType == "image/png" - ), "String resource mime type not respected" - assert ( - bytes_resource.mimeType == "image/png" - ), "Bytes resource mime type not respected" - - # Also verify the content can be read correctly - string_result = await client.read_resource(AnyUrl("test://image")) - assert len(string_result.contents) == 1 - assert ( - getattr(string_result.contents[0], "text") == base64_string - ), "Base64 string mismatch" - assert ( - string_result.contents[0].mimeType == "image/png" - ), "String content mime type not preserved" - - bytes_result = await client.read_resource(AnyUrl("test://image_bytes")) - assert len(bytes_result.contents) == 1 - assert ( - base64.b64decode(getattr(bytes_result.contents[0], "blob")) == image_bytes - ), "Bytes mismatch" - assert ( - bytes_result.contents[0].mimeType == "image/png" - ), "Bytes content mime type not preserved" +import base64 + +import pytest +from pydantic import AnyUrl + +from mcp import types +from mcp.server.fastmcp import FastMCP +from mcp.server.lowlevel import Server +from mcp.server.lowlevel.helper_types import ReadResourceContents +from mcp.shared.memory import ( + create_connected_server_and_client_session as client_session, +) + +pytestmark = pytest.mark.anyio + + +async def test_fastmcp_resource_mime_type(): + """Test that mime_type parameter is respected for resources.""" + mcp = FastMCP("test") + + # Create a small test 
image as bytes + image_bytes = b"fake_image_data" + base64_string = base64.b64encode(image_bytes).decode("utf-8") + + @mcp.resource("test://image", mime_type="image/png") + def get_image_as_string() -> str: + """Return a test image as base64 string.""" + return base64_string + + @mcp.resource("test://image_bytes", mime_type="image/png") + def get_image_as_bytes() -> bytes: + """Return a test image as bytes.""" + return image_bytes + + # Test that resources are listed with correct mime type + async with client_session(mcp._mcp_server) as client: + # List resources and verify mime types + resources = await client.list_resources() + assert resources.resources is not None + + mapping = {str(r.uri): r for r in resources.resources} + + # Find our resources + string_resource = mapping["test://image"] + bytes_resource = mapping["test://image_bytes"] + + # Verify mime types + assert ( + string_resource.mimeType == "image/png" + ), "String resource mime type not respected" + assert ( + bytes_resource.mimeType == "image/png" + ), "Bytes resource mime type not respected" + + # Also verify the content can be read correctly + string_result = await client.read_resource(AnyUrl("test://image")) + assert len(string_result.contents) == 1 + assert ( + getattr(string_result.contents[0], "text") == base64_string + ), "Base64 string mismatch" + assert ( + string_result.contents[0].mimeType == "image/png" + ), "String content mime type not preserved" + + bytes_result = await client.read_resource(AnyUrl("test://image_bytes")) + assert len(bytes_result.contents) == 1 + assert ( + base64.b64decode(getattr(bytes_result.contents[0], "blob")) == image_bytes + ), "Bytes mismatch" + assert ( + bytes_result.contents[0].mimeType == "image/png" + ), "Bytes content mime type not preserved" + + +async def test_lowlevel_resource_mime_type(): + """Test that mime_type parameter is respected for resources.""" + server = Server("test") + + # Create a small test image as bytes + image_bytes = 
b"fake_image_data" + base64_string = base64.b64encode(image_bytes).decode("utf-8") + + # Create test resources with specific mime types + test_resources = [ + types.Resource( + uri=AnyUrl("test://image"), name="test image", mimeType="image/png" + ), + types.Resource( + uri=AnyUrl("test://image_bytes"), + name="test image bytes", + mimeType="image/png", + ), + ] + + @server.list_resources() + async def handle_list_resources(): + return test_resources + + @server.read_resource() + async def handle_read_resource(uri: AnyUrl): + if str(uri) == "test://image": + return [ReadResourceContents(content=base64_string, mime_type="image/png")] + elif str(uri) == "test://image_bytes": + return [ + ReadResourceContents(content=bytes(image_bytes), mime_type="image/png") + ] + raise Exception(f"Resource not found: {uri}") + + # Test that resources are listed with correct mime type + async with client_session(server) as client: + # List resources and verify mime types + resources = await client.list_resources() + assert resources.resources is not None + + mapping = {str(r.uri): r for r in resources.resources} + + # Find our resources + string_resource = mapping["test://image"] + bytes_resource = mapping["test://image_bytes"] + + # Verify mime types + assert ( + string_resource.mimeType == "image/png" + ), "String resource mime type not respected" + assert ( + bytes_resource.mimeType == "image/png" + ), "Bytes resource mime type not respected" + + # Also verify the content can be read correctly + string_result = await client.read_resource(AnyUrl("test://image")) + assert len(string_result.contents) == 1 + assert ( + getattr(string_result.contents[0], "text") == base64_string + ), "Base64 string mismatch" + assert ( + string_result.contents[0].mimeType == "image/png" + ), "String content mime type not preserved" + + bytes_result = await client.read_resource(AnyUrl("test://image_bytes")) + assert len(bytes_result.contents) == 1 + assert ( + 
base64.b64decode(getattr(bytes_result.contents[0], "blob")) == image_bytes + ), "Bytes mismatch" + assert ( + bytes_result.contents[0].mimeType == "image/png" + ), "Bytes content mime type not preserved" diff --git a/tests/issues/test_188_concurrency.py b/tests/issues/test_188_concurrency.py index d0a86885f..f2164aee0 100644 --- a/tests/issues/test_188_concurrency.py +++ b/tests/issues/test_188_concurrency.py @@ -1,51 +1,51 @@ -import anyio -import pytest -from pydantic import AnyUrl - -from mcp.server.fastmcp import FastMCP -from mcp.shared.memory import ( - create_connected_server_and_client_session as create_session, -) - -_sleep_time_seconds = 0.01 -_resource_name = "slow://slow_resource" - - -@pytest.mark.anyio -async def test_messages_are_executed_concurrently(): - server = FastMCP("test") - - @server.tool("sleep") - async def sleep_tool(): - await anyio.sleep(_sleep_time_seconds) - return "done" - - @server.resource(_resource_name) - async def slow_resource(): - await anyio.sleep(_sleep_time_seconds) - return "slow" - - async with create_session(server._mcp_server) as client_session: - start_time = anyio.current_time() - async with anyio.create_task_group() as tg: - for _ in range(10): - tg.start_soon(client_session.call_tool, "sleep") - tg.start_soon(client_session.read_resource, AnyUrl(_resource_name)) - - end_time = anyio.current_time() - - duration = end_time - start_time - assert duration < 6 * _sleep_time_seconds - print(duration) - - -def main(): - anyio.run(test_messages_are_executed_concurrently) - - -if __name__ == "__main__": - import logging - - logging.basicConfig(level=logging.DEBUG) - - main() +import anyio +import pytest +from pydantic import AnyUrl + +from mcp.server.fastmcp import FastMCP +from mcp.shared.memory import ( + create_connected_server_and_client_session as create_session, +) + +_sleep_time_seconds = 0.01 +_resource_name = "slow://slow_resource" + + +@pytest.mark.anyio +async def test_messages_are_executed_concurrently(): + 
server = FastMCP("test") + + @server.tool("sleep") + async def sleep_tool(): + await anyio.sleep(_sleep_time_seconds) + return "done" + + @server.resource(_resource_name) + async def slow_resource(): + await anyio.sleep(_sleep_time_seconds) + return "slow" + + async with create_session(server._mcp_server) as client_session: + start_time = anyio.current_time() + async with anyio.create_task_group() as tg: + for _ in range(10): + tg.start_soon(client_session.call_tool, "sleep") + tg.start_soon(client_session.read_resource, AnyUrl(_resource_name)) + + end_time = anyio.current_time() + + duration = end_time - start_time + assert duration < 6 * _sleep_time_seconds + print(duration) + + +def main(): + anyio.run(test_messages_are_executed_concurrently) + + +if __name__ == "__main__": + import logging + + logging.basicConfig(level=logging.DEBUG) + + main() diff --git a/tests/issues/test_192_request_id.py b/tests/issues/test_192_request_id.py index cf5eb6083..6a6185eb3 100644 --- a/tests/issues/test_192_request_id.py +++ b/tests/issues/test_192_request_id.py @@ -1,99 +1,99 @@ -import anyio -import pytest - -from mcp.server.lowlevel import NotificationOptions, Server -from mcp.server.models import InitializationOptions -from mcp.shared.message import SessionMessage -from mcp.types import ( - LATEST_PROTOCOL_VERSION, - ClientCapabilities, - Implementation, - InitializeRequestParams, - JSONRPCMessage, - JSONRPCNotification, - JSONRPCRequest, - NotificationParams, -) - - -@pytest.mark.anyio -async def test_request_id_match() -> None: - """Test that the server preserves request IDs in responses.""" - server = Server("test") - custom_request_id = "test-123" - - # Create memory streams for communication - client_writer, client_reader = anyio.create_memory_object_stream(1) - server_writer, server_reader = anyio.create_memory_object_stream(1) - - # Server task to process the request - async def run_server(): - async with client_reader, server_writer: - await server.run( - 
client_reader, - server_writer, - InitializationOptions( - server_name="test", - server_version="1.0.0", - capabilities=server.get_capabilities( - notification_options=NotificationOptions(), - experimental_capabilities={}, - ), - ), - raise_exceptions=True, - ) - - # Start server task - async with ( - anyio.create_task_group() as tg, - client_writer, - client_reader, - server_writer, - server_reader, - ): - tg.start_soon(run_server) - - # Send initialize request - init_req = JSONRPCRequest( - id="init-1", - method="initialize", - params=InitializeRequestParams( - protocolVersion=LATEST_PROTOCOL_VERSION, - capabilities=ClientCapabilities(), - clientInfo=Implementation(name="test-client", version="1.0.0"), - ).model_dump(by_alias=True, exclude_none=True), - jsonrpc="2.0", - ) - - await client_writer.send(SessionMessage(JSONRPCMessage(root=init_req))) - response = ( - await server_reader.receive() - ) # Get init response but don't need to check it - - # Send initialized notification - initialized_notification = JSONRPCNotification( - method="notifications/initialized", - params=NotificationParams().model_dump(by_alias=True, exclude_none=True), - jsonrpc="2.0", - ) - await client_writer.send( - SessionMessage(JSONRPCMessage(root=initialized_notification)) - ) - - # Send ping request with custom ID - ping_request = JSONRPCRequest( - id=custom_request_id, method="ping", params={}, jsonrpc="2.0" - ) - - await client_writer.send(SessionMessage(JSONRPCMessage(root=ping_request))) - - # Read response - response = await server_reader.receive() - - # Verify response ID matches request ID - assert ( - response.message.root.id == custom_request_id - ), "Response ID should match request ID" - - # Cancel server task - tg.cancel_scope.cancel() +import anyio +import pytest + +from mcp.server.lowlevel import NotificationOptions, Server +from mcp.server.models import InitializationOptions +from mcp.shared.message import SessionMessage +from mcp.types import ( + 
LATEST_PROTOCOL_VERSION, + ClientCapabilities, + Implementation, + InitializeRequestParams, + JSONRPCMessage, + JSONRPCNotification, + JSONRPCRequest, + NotificationParams, +) + + +@pytest.mark.anyio +async def test_request_id_match() -> None: + """Test that the server preserves request IDs in responses.""" + server = Server("test") + custom_request_id = "test-123" + + # Create memory streams for communication + client_writer, client_reader = anyio.create_memory_object_stream(1) + server_writer, server_reader = anyio.create_memory_object_stream(1) + + # Server task to process the request + async def run_server(): + async with client_reader, server_writer: + await server.run( + client_reader, + server_writer, + InitializationOptions( + server_name="test", + server_version="1.0.0", + capabilities=server.get_capabilities( + notification_options=NotificationOptions(), + experimental_capabilities={}, + ), + ), + raise_exceptions=True, + ) + + # Start server task + async with ( + anyio.create_task_group() as tg, + client_writer, + client_reader, + server_writer, + server_reader, + ): + tg.start_soon(run_server) + + # Send initialize request + init_req = JSONRPCRequest( + id="init-1", + method="initialize", + params=InitializeRequestParams( + protocolVersion=LATEST_PROTOCOL_VERSION, + capabilities=ClientCapabilities(), + clientInfo=Implementation(name="test-client", version="1.0.0"), + ).model_dump(by_alias=True, exclude_none=True), + jsonrpc="2.0", + ) + + await client_writer.send(SessionMessage(JSONRPCMessage(root=init_req))) + response = ( + await server_reader.receive() + ) # Get init response but don't need to check it + + # Send initialized notification + initialized_notification = JSONRPCNotification( + method="notifications/initialized", + params=NotificationParams().model_dump(by_alias=True, exclude_none=True), + jsonrpc="2.0", + ) + await client_writer.send( + SessionMessage(JSONRPCMessage(root=initialized_notification)) + ) + + # Send ping request with custom 
ID + ping_request = JSONRPCRequest( + id=custom_request_id, method="ping", params={}, jsonrpc="2.0" + ) + + await client_writer.send(SessionMessage(JSONRPCMessage(root=ping_request))) + + # Read response + response = await server_reader.receive() + + # Verify response ID matches request ID + assert ( + response.message.root.id == custom_request_id + ), "Response ID should match request ID" + + # Cancel server task + tg.cancel_scope.cancel() diff --git a/tests/issues/test_342_base64_encoding.py b/tests/issues/test_342_base64_encoding.py index cff8ec543..647f49a13 100644 --- a/tests/issues/test_342_base64_encoding.py +++ b/tests/issues/test_342_base64_encoding.py @@ -1,89 +1,89 @@ -"""Test for base64 encoding issue in MCP server. - -This test demonstrates the issue in server.py where the server uses -urlsafe_b64encode but the BlobResourceContents validator expects standard -base64 encoding. - -The test should FAIL before fixing server.py to use b64encode instead of -urlsafe_b64encode. -After the fix, the test should PASS. -""" - -import base64 -from typing import cast - -import pytest -from pydantic import AnyUrl - -from mcp.server.lowlevel.helper_types import ReadResourceContents -from mcp.server.lowlevel.server import Server -from mcp.types import ( - BlobResourceContents, - ReadResourceRequest, - ReadResourceRequestParams, - ReadResourceResult, - ServerResult, -) - - -@pytest.mark.anyio -async def test_server_base64_encoding_issue(): - """Tests that server response can be validated by BlobResourceContents. - - This test will: - 1. Set up a server that returns binary data - 2. Extract the base64-encoded blob from the server's response - 3. 
Verify the encoded data can be properly validated by BlobResourceContents - - BEFORE FIX: The test will fail because server uses urlsafe_b64encode - AFTER FIX: The test will pass because server uses standard b64encode - """ - server = Server("test") - - # Create binary data that will definitely result in + and / characters - # when encoded with standard base64 - binary_data = bytes(list(range(255)) * 4) - - # Register a resource handler that returns our test data - @server.read_resource() - async def read_resource(uri: AnyUrl) -> list[ReadResourceContents]: - return [ - ReadResourceContents( - content=binary_data, mime_type="application/octet-stream" - ) - ] - - # Get the handler directly from the server - handler = server.request_handlers[ReadResourceRequest] - - # Create a request - request = ReadResourceRequest( - method="resources/read", - params=ReadResourceRequestParams(uri=AnyUrl("test://resource")), - ) - - # Call the handler to get the response - result: ServerResult = await handler(request) - - # After (fixed code): - read_result: ReadResourceResult = cast(ReadResourceResult, result.root) - blob_content = read_result.contents[0] - - # First verify our test data actually produces different encodings - urlsafe_b64 = base64.urlsafe_b64encode(binary_data).decode() - standard_b64 = base64.b64encode(binary_data).decode() - assert urlsafe_b64 != standard_b64, "Test data doesn't demonstrate" - " encoding difference" - - # Now validate the server's output with BlobResourceContents.model_validate - # Before the fix: This should fail with "Invalid base64" because server - # uses urlsafe_b64encode - # After the fix: This should pass because server will use standard b64encode - model_dict = blob_content.model_dump() - - # Direct validation - this will fail before fix, pass after fix - blob_model = BlobResourceContents.model_validate(model_dict) - - # Verify we can decode the data back correctly - decoded = base64.b64decode(blob_model.blob) - assert decoded == 
binary_data +"""Test for base64 encoding issue in MCP server. + +This test demonstrates the issue in server.py where the server uses +urlsafe_b64encode but the BlobResourceContents validator expects standard +base64 encoding. + +The test should FAIL before fixing server.py to use b64encode instead of +urlsafe_b64encode. +After the fix, the test should PASS. +""" + +import base64 +from typing import cast + +import pytest +from pydantic import AnyUrl + +from mcp.server.lowlevel.helper_types import ReadResourceContents +from mcp.server.lowlevel.server import Server +from mcp.types import ( + BlobResourceContents, + ReadResourceRequest, + ReadResourceRequestParams, + ReadResourceResult, + ServerResult, +) + + +@pytest.mark.anyio +async def test_server_base64_encoding_issue(): + """Tests that server response can be validated by BlobResourceContents. + + This test will: + 1. Set up a server that returns binary data + 2. Extract the base64-encoded blob from the server's response + 3. Verify the encoded data can be properly validated by BlobResourceContents + + BEFORE FIX: The test will fail because server uses urlsafe_b64encode + AFTER FIX: The test will pass because server uses standard b64encode + """ + server = Server("test") + + # Create binary data that will definitely result in + and / characters + # when encoded with standard base64 + binary_data = bytes(list(range(255)) * 4) + + # Register a resource handler that returns our test data + @server.read_resource() + async def read_resource(uri: AnyUrl) -> list[ReadResourceContents]: + return [ + ReadResourceContents( + content=binary_data, mime_type="application/octet-stream" + ) + ] + + # Get the handler directly from the server + handler = server.request_handlers[ReadResourceRequest] + + # Create a request + request = ReadResourceRequest( + method="resources/read", + params=ReadResourceRequestParams(uri=AnyUrl("test://resource")), + ) + + # Call the handler to get the response + result: ServerResult = await 
handler(request) + + # After (fixed code): + read_result: ReadResourceResult = cast(ReadResourceResult, result.root) + blob_content = read_result.contents[0] + + # First verify our test data actually produces different encodings + urlsafe_b64 = base64.urlsafe_b64encode(binary_data).decode() + standard_b64 = base64.b64encode(binary_data).decode() + assert urlsafe_b64 != standard_b64, "Test data doesn't demonstrate" + " encoding difference" + + # Now validate the server's output with BlobResourceContents.model_validate + # Before the fix: This should fail with "Invalid base64" because server + # uses urlsafe_b64encode + # After the fix: This should pass because server will use standard b64encode + model_dict = blob_content.model_dump() + + # Direct validation - this will fail before fix, pass after fix + blob_model = BlobResourceContents.model_validate(model_dict) + + # Verify we can decode the data back correctly + decoded = base64.b64decode(blob_model.blob) + assert decoded == binary_data diff --git a/tests/issues/test_355_type_error.py b/tests/issues/test_355_type_error.py index 91416e5ca..12a46ed97 100644 --- a/tests/issues/test_355_type_error.py +++ b/tests/issues/test_355_type_error.py @@ -1,50 +1,50 @@ -from collections.abc import AsyncIterator -from contextlib import asynccontextmanager -from dataclasses import dataclass - -from mcp.server.fastmcp import Context, FastMCP - - -class Database: # Replace with your actual DB type - @classmethod - async def connect(cls): - return cls() - - async def disconnect(self): - pass - - def query(self): - return "Hello, World!" 
- - -# Create a named server -mcp = FastMCP("My App") - - -@dataclass -class AppContext: - db: Database - - -@asynccontextmanager -async def app_lifespan(server: FastMCP) -> AsyncIterator[AppContext]: - """Manage application lifecycle with type-safe context""" - # Initialize on startup - db = await Database.connect() - try: - yield AppContext(db=db) - finally: - # Cleanup on shutdown - await db.disconnect() - - -# Pass lifespan to server -mcp = FastMCP("My App", lifespan=app_lifespan) - - -# Access type-safe lifespan context in tools -@mcp.tool() -def query_db(ctx: Context) -> str: - """Tool that uses initialized resources""" - db = ctx.request_context.lifespan_context.db - return db.query() +from collections.abc import AsyncIterator +from contextlib import asynccontextmanager +from dataclasses import dataclass + +from mcp.server.fastmcp import Context, FastMCP + + +class Database: # Replace with your actual DB type + @classmethod + async def connect(cls): + return cls() + + async def disconnect(self): + pass + + def query(self): + return "Hello, World!" 
+ + +# Create a named server +mcp = FastMCP("My App") + + +@dataclass +class AppContext: + db: Database + + +@asynccontextmanager +async def app_lifespan(server: FastMCP) -> AsyncIterator[AppContext]: + """Manage application lifecycle with type-safe context""" + # Initialize on startup + db = await Database.connect() + try: + yield AppContext(db=db) + finally: + # Cleanup on shutdown + await db.disconnect() + + +# Pass lifespan to server +mcp = FastMCP("My App", lifespan=app_lifespan) + + +# Access type-safe lifespan context in tools +@mcp.tool() +def query_db(ctx: Context) -> str: + """Tool that uses initialized resources""" + db = ctx.request_context.lifespan_context.db + return db.query() diff --git a/tests/issues/test_88_random_error.py b/tests/issues/test_88_random_error.py index 88e41d66d..c222b650e 100644 --- a/tests/issues/test_88_random_error.py +++ b/tests/issues/test_88_random_error.py @@ -1,109 +1,109 @@ -"""Test to reproduce issue #88: Random error thrown on response.""" - -from collections.abc import Sequence -from datetime import timedelta -from pathlib import Path - -import anyio -import pytest -from anyio.abc import TaskStatus - -from mcp.client.session import ClientSession -from mcp.server.lowlevel import Server -from mcp.shared.exceptions import McpError -from mcp.types import ( - EmbeddedResource, - ImageContent, - TextContent, -) - - -@pytest.mark.anyio -async def test_notification_validation_error(tmp_path: Path): - """Test that timeouts are handled gracefully and don't break the server. - - This test verifies that when a client request times out: - 1. The server task stays alive - 2. The server can still handle new requests - 3. The client can make new requests - 4. 
No resources are leaked - """ - - server = Server(name="test") - request_count = 0 - slow_request_started = anyio.Event() - slow_request_complete = anyio.Event() - - @server.call_tool() - async def slow_tool( - name: str, arg - ) -> Sequence[TextContent | ImageContent | EmbeddedResource]: - nonlocal request_count - request_count += 1 - - if name == "slow": - # Signal that slow request has started - slow_request_started.set() - # Long enough to ensure timeout - await anyio.sleep(0.2) - # Signal completion - slow_request_complete.set() - return [TextContent(type="text", text=f"slow {request_count}")] - elif name == "fast": - # Fast enough to complete before timeout - await anyio.sleep(0.01) - return [TextContent(type="text", text=f"fast {request_count}")] - return [TextContent(type="text", text=f"unknown {request_count}")] - - async def server_handler( - read_stream, - write_stream, - task_status: TaskStatus[str] = anyio.TASK_STATUS_IGNORED, - ): - with anyio.CancelScope() as scope: - task_status.started(scope) # type: ignore - await server.run( - read_stream, - write_stream, - server.create_initialization_options(), - raise_exceptions=True, - ) - - async def client(read_stream, write_stream, scope): - # Use a timeout that's: - # - Long enough for fast operations (>10ms) - # - Short enough for slow operations (<200ms) - # - Not too short to avoid flakiness - async with ClientSession( - read_stream, write_stream, read_timeout_seconds=timedelta(milliseconds=50) - ) as session: - await session.initialize() - - # First call should work (fast operation) - result = await session.call_tool("fast") - assert result.content == [TextContent(type="text", text="fast 1")] - assert not slow_request_complete.is_set() - - # Second call should timeout (slow operation) - with pytest.raises(McpError) as exc_info: - await session.call_tool("slow") - assert "Timed out while waiting" in str(exc_info.value) - - # Wait for slow request to complete in the background - with 
anyio.fail_after(1): # Timeout after 1 second - await slow_request_complete.wait() - - # Third call should work (fast operation), - # proving server is still responsive - result = await session.call_tool("fast") - assert result.content == [TextContent(type="text", text="fast 3")] - scope.cancel() - - # Run server and client in separate task groups to avoid cancellation - server_writer, server_reader = anyio.create_memory_object_stream(1) - client_writer, client_reader = anyio.create_memory_object_stream(1) - - async with anyio.create_task_group() as tg: - scope = await tg.start(server_handler, server_reader, client_writer) - # Run client in a separate task to avoid cancellation - tg.start_soon(client, client_reader, server_writer, scope) +"""Test to reproduce issue #88: Random error thrown on response.""" + +from collections.abc import Sequence +from datetime import timedelta +from pathlib import Path + +import anyio +import pytest +from anyio.abc import TaskStatus + +from mcp.client.session import ClientSession +from mcp.server.lowlevel import Server +from mcp.shared.exceptions import McpError +from mcp.types import ( + EmbeddedResource, + ImageContent, + TextContent, +) + + +@pytest.mark.anyio +async def test_notification_validation_error(tmp_path: Path): + """Test that timeouts are handled gracefully and don't break the server. + + This test verifies that when a client request times out: + 1. The server task stays alive + 2. The server can still handle new requests + 3. The client can make new requests + 4. 
No resources are leaked + """ + + server = Server(name="test") + request_count = 0 + slow_request_started = anyio.Event() + slow_request_complete = anyio.Event() + + @server.call_tool() + async def slow_tool( + name: str, arg + ) -> Sequence[TextContent | ImageContent | EmbeddedResource]: + nonlocal request_count + request_count += 1 + + if name == "slow": + # Signal that slow request has started + slow_request_started.set() + # Long enough to ensure timeout + await anyio.sleep(0.2) + # Signal completion + slow_request_complete.set() + return [TextContent(type="text", text=f"slow {request_count}")] + elif name == "fast": + # Fast enough to complete before timeout + await anyio.sleep(0.01) + return [TextContent(type="text", text=f"fast {request_count}")] + return [TextContent(type="text", text=f"unknown {request_count}")] + + async def server_handler( + read_stream, + write_stream, + task_status: TaskStatus[str] = anyio.TASK_STATUS_IGNORED, + ): + with anyio.CancelScope() as scope: + task_status.started(scope) # type: ignore + await server.run( + read_stream, + write_stream, + server.create_initialization_options(), + raise_exceptions=True, + ) + + async def client(read_stream, write_stream, scope): + # Use a timeout that's: + # - Long enough for fast operations (>10ms) + # - Short enough for slow operations (<200ms) + # - Not too short to avoid flakiness + async with ClientSession( + read_stream, write_stream, read_timeout_seconds=timedelta(milliseconds=50) + ) as session: + await session.initialize() + + # First call should work (fast operation) + result = await session.call_tool("fast") + assert result.content == [TextContent(type="text", text="fast 1")] + assert not slow_request_complete.is_set() + + # Second call should timeout (slow operation) + with pytest.raises(McpError) as exc_info: + await session.call_tool("slow") + assert "Timed out while waiting" in str(exc_info.value) + + # Wait for slow request to complete in the background + with 
anyio.fail_after(1): # Timeout after 1 second + await slow_request_complete.wait() + + # Third call should work (fast operation), + # proving server is still responsive + result = await session.call_tool("fast") + assert result.content == [TextContent(type="text", text="fast 3")] + scope.cancel() + + # Run server and client in separate task groups to avoid cancellation + server_writer, server_reader = anyio.create_memory_object_stream(1) + client_writer, client_reader = anyio.create_memory_object_stream(1) + + async with anyio.create_task_group() as tg: + scope = await tg.start(server_handler, server_reader, client_writer) + # Run client in a separate task to avoid cancellation + tg.start_soon(client, client_reader, server_writer, scope) diff --git a/tests/mcp/server/auth/test_manager.py b/tests/mcp/server/auth/test_manager.py new file mode 100644 index 000000000..a437ab340 --- /dev/null +++ b/tests/mcp/server/auth/test_manager.py @@ -0,0 +1,36 @@ +import pytest + +from mcp.server.auth.manager import AuthorizationManager + + +@pytest.fixture +def auth_manager(): + """ + Fixture for the AuthorizationManager instance. 
+ """ + return AuthorizationManager("secret_key", "issuer", "audience") + +def test_generate_token(auth_manager): + token = auth_manager.generate_token({"user_id": 123}) + assert isinstance(token, str) + +def test_validate_token(auth_manager): + token = auth_manager.generate_token({"user_id": 123}) + claims = auth_manager.validate_token(token) + assert claims.get("user_id") == 123 + +def test_claim_extraction(auth_manager): + token = auth_manager.generate_token({"user_id": 123, "role": "admin"}) + claim = auth_manager.get_claim(token, "role") + assert claim == "admin" + +def test_expired_token(auth_manager): + token = auth_manager.generate_token({"user_id": 123}, expires_in=-1) + claims = auth_manager.validate_token(token) + assert claims is None + +def test_nonexistent_claim(auth_manager): + token = auth_manager.generate_token({"user_id": 123}) + claim = auth_manager.get_claim(token, "email") + assert claim is None + diff --git a/tests/server/auth/middleware/test_auth_context.py b/tests/server/auth/middleware/test_auth_context.py index 916640714..c3096edea 100644 --- a/tests/server/auth/middleware/test_auth_context.py +++ b/tests/server/auth/middleware/test_auth_context.py @@ -1,122 +1,122 @@ -""" -Tests for the AuthContext middleware components. 
-""" - -import time - -import pytest -from starlette.types import Message, Receive, Scope, Send - -from mcp.server.auth.middleware.auth_context import ( - AuthContextMiddleware, - auth_context_var, - get_access_token, -) -from mcp.server.auth.middleware.bearer_auth import AuthenticatedUser -from mcp.server.auth.provider import AccessToken - - -class MockApp: - """Mock ASGI app for testing.""" - - def __init__(self): - self.called = False - self.scope: Scope | None = None - self.receive: Receive | None = None - self.send: Send | None = None - self.access_token_during_call: AccessToken | None = None - - async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: - self.called = True - self.scope = scope - self.receive = receive - self.send = send - # Check the context during the call - self.access_token_during_call = get_access_token() - - -@pytest.fixture -def valid_access_token() -> AccessToken: - """Create a valid access token.""" - return AccessToken( - token="valid_token", - client_id="test_client", - scopes=["read", "write"], - expires_at=int(time.time()) + 3600, # 1 hour from now - ) - - -@pytest.mark.anyio -class TestAuthContextMiddleware: - """Tests for the AuthContextMiddleware class.""" - - async def test_with_authenticated_user(self, valid_access_token: AccessToken): - """Test middleware with an authenticated user in scope.""" - app = MockApp() - middleware = AuthContextMiddleware(app) - - # Create an authenticated user - user = AuthenticatedUser(valid_access_token) - - scope: Scope = {"type": "http", "user": user} - - # Create dummy async functions for receive and send - async def receive() -> Message: - return {"type": "http.request"} - - async def send(message: Message) -> None: - pass - - # Verify context is empty before middleware - assert auth_context_var.get() is None - assert get_access_token() is None - - # Run the middleware - await middleware(scope, receive, send) - - # Verify the app was called - assert app.called - assert 
app.scope == scope - assert app.receive == receive - assert app.send == send - - # Verify the access token was available during the call - assert app.access_token_during_call == valid_access_token - - # Verify context is reset after middleware - assert auth_context_var.get() is None - assert get_access_token() is None - - async def test_with_no_user(self): - """Test middleware with no user in scope.""" - app = MockApp() - middleware = AuthContextMiddleware(app) - - scope: Scope = {"type": "http"} # No user - - # Create dummy async functions for receive and send - async def receive() -> Message: - return {"type": "http.request"} - - async def send(message: Message) -> None: - pass - - # Verify context is empty before middleware - assert auth_context_var.get() is None - assert get_access_token() is None - - # Run the middleware - await middleware(scope, receive, send) - - # Verify the app was called - assert app.called - assert app.scope == scope - assert app.receive == receive - assert app.send == send - - # Verify the access token was not available during the call - assert app.access_token_during_call is None - - # Verify context is still empty after middleware - assert auth_context_var.get() is None - assert get_access_token() is None +""" +Tests for the AuthContext middleware components. 
+""" + +import time + +import pytest +from starlette.types import Message, Receive, Scope, Send + +from mcp.server.auth.middleware.auth_context import ( + AuthContextMiddleware, + auth_context_var, + get_access_token, +) +from mcp.server.auth.middleware.bearer_auth import AuthenticatedUser +from mcp.server.auth.provider import AccessToken + + +class MockApp: + """Mock ASGI app for testing.""" + + def __init__(self): + self.called = False + self.scope: Scope | None = None + self.receive: Receive | None = None + self.send: Send | None = None + self.access_token_during_call: AccessToken | None = None + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + self.called = True + self.scope = scope + self.receive = receive + self.send = send + # Check the context during the call + self.access_token_during_call = get_access_token() + + +@pytest.fixture +def valid_access_token() -> AccessToken: + """Create a valid access token.""" + return AccessToken( + token="valid_token", + client_id="test_client", + scopes=["read", "write"], + expires_at=int(time.time()) + 3600, # 1 hour from now + ) + + +@pytest.mark.anyio +class TestAuthContextMiddleware: + """Tests for the AuthContextMiddleware class.""" + + async def test_with_authenticated_user(self, valid_access_token: AccessToken): + """Test middleware with an authenticated user in scope.""" + app = MockApp() + middleware = AuthContextMiddleware(app) + + # Create an authenticated user + user = AuthenticatedUser(valid_access_token) + + scope: Scope = {"type": "http", "user": user} + + # Create dummy async functions for receive and send + async def receive() -> Message: + return {"type": "http.request"} + + async def send(message: Message) -> None: + pass + + # Verify context is empty before middleware + assert auth_context_var.get() is None + assert get_access_token() is None + + # Run the middleware + await middleware(scope, receive, send) + + # Verify the app was called + assert app.called + assert 
app.scope == scope + assert app.receive == receive + assert app.send == send + + # Verify the access token was available during the call + assert app.access_token_during_call == valid_access_token + + # Verify context is reset after middleware + assert auth_context_var.get() is None + assert get_access_token() is None + + async def test_with_no_user(self): + """Test middleware with no user in scope.""" + app = MockApp() + middleware = AuthContextMiddleware(app) + + scope: Scope = {"type": "http"} # No user + + # Create dummy async functions for receive and send + async def receive() -> Message: + return {"type": "http.request"} + + async def send(message: Message) -> None: + pass + + # Verify context is empty before middleware + assert auth_context_var.get() is None + assert get_access_token() is None + + # Run the middleware + await middleware(scope, receive, send) + + # Verify the app was called + assert app.called + assert app.scope == scope + assert app.receive == receive + assert app.send == send + + # Verify the access token was not available during the call + assert app.access_token_during_call is None + + # Verify context is still empty after middleware + assert auth_context_var.get() is None + assert get_access_token() is None diff --git a/tests/server/auth/test_error_handling.py b/tests/server/auth/test_error_handling.py index 18e9933e7..5d82a52b0 100644 --- a/tests/server/auth/test_error_handling.py +++ b/tests/server/auth/test_error_handling.py @@ -1,294 +1,294 @@ -""" -Tests for OAuth error handling in the auth handlers. 
-""" - -import unittest.mock -from urllib.parse import parse_qs, urlparse - -import httpx -import pytest -from httpx import ASGITransport -from pydantic import AnyHttpUrl -from starlette.applications import Starlette - -from mcp.server.auth.provider import ( - AuthorizeError, - RegistrationError, - TokenError, -) -from mcp.server.auth.routes import create_auth_routes -from tests.server.fastmcp.auth.test_auth_integration import ( - MockOAuthProvider, -) - - -@pytest.fixture -def oauth_provider(): - """Return a MockOAuthProvider instance that can be configured to raise errors.""" - return MockOAuthProvider() - - -@pytest.fixture -def app(oauth_provider): - from mcp.server.auth.settings import ClientRegistrationOptions, RevocationOptions - - # Enable client registration - client_registration_options = ClientRegistrationOptions(enabled=True) - revocation_options = RevocationOptions(enabled=True) - - # Create auth routes - auth_routes = create_auth_routes( - oauth_provider, - issuer_url=AnyHttpUrl("http://localhost"), - client_registration_options=client_registration_options, - revocation_options=revocation_options, - ) - - # Create Starlette app with routes directly - return Starlette(routes=auth_routes) - - -@pytest.fixture -def client(app): - transport = ASGITransport(app=app) - # Use base_url without a path since routes are directly on the app - return httpx.AsyncClient(transport=transport, base_url="http://localhost") - - -@pytest.fixture -def pkce_challenge(): - """Create a PKCE challenge with code_verifier and code_challenge.""" - import base64 - import hashlib - import secrets - - # Generate a code verifier - code_verifier = secrets.token_urlsafe(64)[:128] - - # Create code challenge using S256 method - code_verifier_bytes = code_verifier.encode("ascii") - sha256 = hashlib.sha256(code_verifier_bytes).digest() - code_challenge = base64.urlsafe_b64encode(sha256).decode().rstrip("=") - - return {"code_verifier": code_verifier, "code_challenge": code_challenge} - - 
-@pytest.fixture -async def registered_client(client): - """Create and register a test client.""" - # Default client metadata - client_metadata = { - "redirect_uris": ["https://client.example.com/callback"], - "token_endpoint_auth_method": "client_secret_post", - "grant_types": ["authorization_code", "refresh_token"], - "response_types": ["code"], - "client_name": "Test Client", - } - - response = await client.post("/register", json=client_metadata) - assert response.status_code == 201, f"Failed to register client: {response.content}" - - client_info = response.json() - return client_info - - -class TestRegistrationErrorHandling: - @pytest.mark.anyio - async def test_registration_error_handling(self, client, oauth_provider): - # Mock the register_client method to raise a registration error - with unittest.mock.patch.object( - oauth_provider, - "register_client", - side_effect=RegistrationError( - error="invalid_redirect_uri", - error_description="The redirect URI is invalid", - ), - ): - # Prepare a client registration request - client_data = { - "redirect_uris": ["https://client.example.com/callback"], - "token_endpoint_auth_method": "client_secret_post", - "grant_types": ["authorization_code", "refresh_token"], - "response_types": ["code"], - "client_name": "Test Client", - } - - # Send the registration request - response = await client.post( - "/register", - json=client_data, - ) - - # Verify the response - assert response.status_code == 400, response.content - data = response.json() - assert data["error"] == "invalid_redirect_uri" - assert data["error_description"] == "The redirect URI is invalid" - - -class TestAuthorizeErrorHandling: - @pytest.mark.anyio - async def test_authorize_error_handling( - self, client, oauth_provider, registered_client, pkce_challenge - ): - # Mock the authorize method to raise an authorize error - with unittest.mock.patch.object( - oauth_provider, - "authorize", - side_effect=AuthorizeError( - error="access_denied", 
error_description="The user denied the request" - ), - ): - # Register the client - client_id = registered_client["client_id"] - redirect_uri = registered_client["redirect_uris"][0] - - # Prepare an authorization request - params = { - "client_id": client_id, - "redirect_uri": redirect_uri, - "response_type": "code", - "code_challenge": pkce_challenge["code_challenge"], - "code_challenge_method": "S256", - "state": "test_state", - } - - # Send the authorization request - response = await client.get("/authorize", params=params) - - # Verify the response is a redirect with error parameters - assert response.status_code == 302 - redirect_url = response.headers["location"] - parsed_url = urlparse(redirect_url) - query_params = parse_qs(parsed_url.query) - - assert query_params["error"][0] == "access_denied" - assert "error_description" in query_params - assert query_params["state"][0] == "test_state" - - -class TestTokenErrorHandling: - @pytest.mark.anyio - async def test_token_error_handling_auth_code( - self, client, oauth_provider, registered_client, pkce_challenge - ): - # Register the client and get an auth code - client_id = registered_client["client_id"] - client_secret = registered_client["client_secret"] - redirect_uri = registered_client["redirect_uris"][0] - - # First get an authorization code - auth_response = await client.get( - "/authorize", - params={ - "client_id": client_id, - "redirect_uri": redirect_uri, - "response_type": "code", - "code_challenge": pkce_challenge["code_challenge"], - "code_challenge_method": "S256", - "state": "test_state", - }, - ) - - redirect_url = auth_response.headers["location"] - parsed_url = urlparse(redirect_url) - query_params = parse_qs(parsed_url.query) - code = query_params["code"][0] - - # Mock the exchange_authorization_code method to raise a token error - with unittest.mock.patch.object( - oauth_provider, - "exchange_authorization_code", - side_effect=TokenError( - error="invalid_grant", - error_description="The 
authorization code is invalid", - ), - ): - # Try to exchange the code for tokens - token_response = await client.post( - "/token", - data={ - "grant_type": "authorization_code", - "code": code, - "redirect_uri": redirect_uri, - "client_id": client_id, - "client_secret": client_secret, - "code_verifier": pkce_challenge["code_verifier"], - }, - ) - - # Verify the response - assert token_response.status_code == 400 - data = token_response.json() - assert data["error"] == "invalid_grant" - assert data["error_description"] == "The authorization code is invalid" - - @pytest.mark.anyio - async def test_token_error_handling_refresh_token( - self, client, oauth_provider, registered_client, pkce_challenge - ): - # Register the client and get tokens - client_id = registered_client["client_id"] - client_secret = registered_client["client_secret"] - redirect_uri = registered_client["redirect_uris"][0] - - # First get an authorization code - auth_response = await client.get( - "/authorize", - params={ - "client_id": client_id, - "redirect_uri": redirect_uri, - "response_type": "code", - "code_challenge": pkce_challenge["code_challenge"], - "code_challenge_method": "S256", - "state": "test_state", - }, - ) - assert auth_response.status_code == 302, auth_response.content - - redirect_url = auth_response.headers["location"] - parsed_url = urlparse(redirect_url) - query_params = parse_qs(parsed_url.query) - code = query_params["code"][0] - - # Exchange the code for tokens - token_response = await client.post( - "/token", - data={ - "grant_type": "authorization_code", - "code": code, - "redirect_uri": redirect_uri, - "client_id": client_id, - "client_secret": client_secret, - "code_verifier": pkce_challenge["code_verifier"], - }, - ) - - tokens = token_response.json() - refresh_token = tokens["refresh_token"] - - # Mock the exchange_refresh_token method to raise a token error - with unittest.mock.patch.object( - oauth_provider, - "exchange_refresh_token", - side_effect=TokenError( - 
error="invalid_scope", - error_description="The requested scope is invalid", - ), - ): - # Try to use the refresh token - refresh_response = await client.post( - "/token", - data={ - "grant_type": "refresh_token", - "refresh_token": refresh_token, - "client_id": client_id, - "client_secret": client_secret, - }, - ) - - # Verify the response - assert refresh_response.status_code == 400 - data = refresh_response.json() - assert data["error"] == "invalid_scope" - assert data["error_description"] == "The requested scope is invalid" +""" +Tests for OAuth error handling in the auth handlers. +""" + +import unittest.mock +from urllib.parse import parse_qs, urlparse + +import httpx +import pytest +from httpx import ASGITransport +from pydantic import AnyHttpUrl +from starlette.applications import Starlette + +from mcp.server.auth.provider import ( + AuthorizeError, + RegistrationError, + TokenError, +) +from mcp.server.auth.routes import create_auth_routes +from tests.server.fastmcp.auth.test_auth_integration import ( + MockOAuthProvider, +) + + +@pytest.fixture +def oauth_provider(): + """Return a MockOAuthProvider instance that can be configured to raise errors.""" + return MockOAuthProvider() + + +@pytest.fixture +def app(oauth_provider): + from mcp.server.auth.settings import ClientRegistrationOptions, RevocationOptions + + # Enable client registration + client_registration_options = ClientRegistrationOptions(enabled=True) + revocation_options = RevocationOptions(enabled=True) + + # Create auth routes + auth_routes = create_auth_routes( + oauth_provider, + issuer_url=AnyHttpUrl("http://localhost"), + client_registration_options=client_registration_options, + revocation_options=revocation_options, + ) + + # Create Starlette app with routes directly + return Starlette(routes=auth_routes) + + +@pytest.fixture +def client(app): + transport = ASGITransport(app=app) + # Use base_url without a path since routes are directly on the app + return 
httpx.AsyncClient(transport=transport, base_url="http://localhost") + + +@pytest.fixture +def pkce_challenge(): + """Create a PKCE challenge with code_verifier and code_challenge.""" + import base64 + import hashlib + import secrets + + # Generate a code verifier + code_verifier = secrets.token_urlsafe(64)[:128] + + # Create code challenge using S256 method + code_verifier_bytes = code_verifier.encode("ascii") + sha256 = hashlib.sha256(code_verifier_bytes).digest() + code_challenge = base64.urlsafe_b64encode(sha256).decode().rstrip("=") + + return {"code_verifier": code_verifier, "code_challenge": code_challenge} + + +@pytest.fixture +async def registered_client(client): + """Create and register a test client.""" + # Default client metadata + client_metadata = { + "redirect_uris": ["https://client.example.com/callback"], + "token_endpoint_auth_method": "client_secret_post", + "grant_types": ["authorization_code", "refresh_token"], + "response_types": ["code"], + "client_name": "Test Client", + } + + response = await client.post("/register", json=client_metadata) + assert response.status_code == 201, f"Failed to register client: {response.content}" + + client_info = response.json() + return client_info + + +class TestRegistrationErrorHandling: + @pytest.mark.anyio + async def test_registration_error_handling(self, client, oauth_provider): + # Mock the register_client method to raise a registration error + with unittest.mock.patch.object( + oauth_provider, + "register_client", + side_effect=RegistrationError( + error="invalid_redirect_uri", + error_description="The redirect URI is invalid", + ), + ): + # Prepare a client registration request + client_data = { + "redirect_uris": ["https://client.example.com/callback"], + "token_endpoint_auth_method": "client_secret_post", + "grant_types": ["authorization_code", "refresh_token"], + "response_types": ["code"], + "client_name": "Test Client", + } + + # Send the registration request + response = await client.post( + 
"/register", + json=client_data, + ) + + # Verify the response + assert response.status_code == 400, response.content + data = response.json() + assert data["error"] == "invalid_redirect_uri" + assert data["error_description"] == "The redirect URI is invalid" + + +class TestAuthorizeErrorHandling: + @pytest.mark.anyio + async def test_authorize_error_handling( + self, client, oauth_provider, registered_client, pkce_challenge + ): + # Mock the authorize method to raise an authorize error + with unittest.mock.patch.object( + oauth_provider, + "authorize", + side_effect=AuthorizeError( + error="access_denied", error_description="The user denied the request" + ), + ): + # Register the client + client_id = registered_client["client_id"] + redirect_uri = registered_client["redirect_uris"][0] + + # Prepare an authorization request + params = { + "client_id": client_id, + "redirect_uri": redirect_uri, + "response_type": "code", + "code_challenge": pkce_challenge["code_challenge"], + "code_challenge_method": "S256", + "state": "test_state", + } + + # Send the authorization request + response = await client.get("/authorize", params=params) + + # Verify the response is a redirect with error parameters + assert response.status_code == 302 + redirect_url = response.headers["location"] + parsed_url = urlparse(redirect_url) + query_params = parse_qs(parsed_url.query) + + assert query_params["error"][0] == "access_denied" + assert "error_description" in query_params + assert query_params["state"][0] == "test_state" + + +class TestTokenErrorHandling: + @pytest.mark.anyio + async def test_token_error_handling_auth_code( + self, client, oauth_provider, registered_client, pkce_challenge + ): + # Register the client and get an auth code + client_id = registered_client["client_id"] + client_secret = registered_client["client_secret"] + redirect_uri = registered_client["redirect_uris"][0] + + # First get an authorization code + auth_response = await client.get( + "/authorize", + params={ 
+ "client_id": client_id, + "redirect_uri": redirect_uri, + "response_type": "code", + "code_challenge": pkce_challenge["code_challenge"], + "code_challenge_method": "S256", + "state": "test_state", + }, + ) + + redirect_url = auth_response.headers["location"] + parsed_url = urlparse(redirect_url) + query_params = parse_qs(parsed_url.query) + code = query_params["code"][0] + + # Mock the exchange_authorization_code method to raise a token error + with unittest.mock.patch.object( + oauth_provider, + "exchange_authorization_code", + side_effect=TokenError( + error="invalid_grant", + error_description="The authorization code is invalid", + ), + ): + # Try to exchange the code for tokens + token_response = await client.post( + "/token", + data={ + "grant_type": "authorization_code", + "code": code, + "redirect_uri": redirect_uri, + "client_id": client_id, + "client_secret": client_secret, + "code_verifier": pkce_challenge["code_verifier"], + }, + ) + + # Verify the response + assert token_response.status_code == 400 + data = token_response.json() + assert data["error"] == "invalid_grant" + assert data["error_description"] == "The authorization code is invalid" + + @pytest.mark.anyio + async def test_token_error_handling_refresh_token( + self, client, oauth_provider, registered_client, pkce_challenge + ): + # Register the client and get tokens + client_id = registered_client["client_id"] + client_secret = registered_client["client_secret"] + redirect_uri = registered_client["redirect_uris"][0] + + # First get an authorization code + auth_response = await client.get( + "/authorize", + params={ + "client_id": client_id, + "redirect_uri": redirect_uri, + "response_type": "code", + "code_challenge": pkce_challenge["code_challenge"], + "code_challenge_method": "S256", + "state": "test_state", + }, + ) + assert auth_response.status_code == 302, auth_response.content + + redirect_url = auth_response.headers["location"] + parsed_url = urlparse(redirect_url) + query_params = 
parse_qs(parsed_url.query) + code = query_params["code"][0] + + # Exchange the code for tokens + token_response = await client.post( + "/token", + data={ + "grant_type": "authorization_code", + "code": code, + "redirect_uri": redirect_uri, + "client_id": client_id, + "client_secret": client_secret, + "code_verifier": pkce_challenge["code_verifier"], + }, + ) + + tokens = token_response.json() + refresh_token = tokens["refresh_token"] + + # Mock the exchange_refresh_token method to raise a token error + with unittest.mock.patch.object( + oauth_provider, + "exchange_refresh_token", + side_effect=TokenError( + error="invalid_scope", + error_description="The requested scope is invalid", + ), + ): + # Try to use the refresh token + refresh_response = await client.post( + "/token", + data={ + "grant_type": "refresh_token", + "refresh_token": refresh_token, + "client_id": client_id, + "client_secret": client_secret, + }, + ) + + # Verify the response + assert refresh_response.status_code == 400 + data = refresh_response.json() + assert data["error"] == "invalid_scope" + assert data["error_description"] == "The requested scope is invalid" diff --git a/tests/server/fastmcp/auth/__init__.py b/tests/server/fastmcp/auth/__init__.py index 64d318ec4..48e4f336f 100644 --- a/tests/server/fastmcp/auth/__init__.py +++ b/tests/server/fastmcp/auth/__init__.py @@ -1,3 +1,3 @@ -""" -Tests for the MCP server auth components. -""" +""" +Tests for the MCP server auth components. +""" diff --git a/tests/server/fastmcp/auth/test_auth_integration.py b/tests/server/fastmcp/auth/test_auth_integration.py index d237e860e..29885321c 100644 --- a/tests/server/fastmcp/auth/test_auth_integration.py +++ b/tests/server/fastmcp/auth/test_auth_integration.py @@ -1,1267 +1,1267 @@ -""" -Integration tests for MCP authorization components. 
-""" - -import base64 -import hashlib -import secrets -import time -import unittest.mock -from urllib.parse import parse_qs, urlparse - -import httpx -import pytest -from pydantic import AnyHttpUrl -from starlette.applications import Starlette - -from mcp.server.auth.provider import ( - AccessToken, - AuthorizationCode, - AuthorizationParams, - OAuthAuthorizationServerProvider, - RefreshToken, - construct_redirect_uri, -) -from mcp.server.auth.routes import ( - ClientRegistrationOptions, - RevocationOptions, - create_auth_routes, -) -from mcp.shared.auth import ( - OAuthClientInformationFull, - OAuthToken, -) - - -# Mock OAuth provider for testing -class MockOAuthProvider(OAuthAuthorizationServerProvider): - def __init__(self): - self.clients = {} - self.auth_codes = {} # code -> {client_id, code_challenge, redirect_uri} - self.tokens = {} # token -> {client_id, scopes, expires_at} - self.refresh_tokens = {} # refresh_token -> access_token - - async def get_client(self, client_id: str) -> OAuthClientInformationFull | None: - return self.clients.get(client_id) - - async def register_client(self, client_info: OAuthClientInformationFull): - self.clients[client_info.client_id] = client_info - - async def authorize( - self, client: OAuthClientInformationFull, params: AuthorizationParams - ) -> str: - # toy authorize implementation which just immediately generates an authorization - # code and completes the redirect - code = AuthorizationCode( - code=f"code_{int(time.time())}", - client_id=client.client_id, - code_challenge=params.code_challenge, - redirect_uri=params.redirect_uri, - redirect_uri_provided_explicitly=params.redirect_uri_provided_explicitly, - expires_at=time.time() + 300, - scopes=params.scopes or ["read", "write"], - ) - self.auth_codes[code.code] = code - - return construct_redirect_uri( - str(params.redirect_uri), code=code.code, state=params.state - ) - - async def load_authorization_code( - self, client: OAuthClientInformationFull, 
authorization_code: str - ) -> AuthorizationCode | None: - return self.auth_codes.get(authorization_code) - - async def exchange_authorization_code( - self, client: OAuthClientInformationFull, authorization_code: AuthorizationCode - ) -> OAuthToken: - assert authorization_code.code in self.auth_codes - - # Generate an access token and refresh token - access_token = f"access_{secrets.token_hex(32)}" - refresh_token = f"refresh_{secrets.token_hex(32)}" - - # Store the tokens - self.tokens[access_token] = AccessToken( - token=access_token, - client_id=client.client_id, - scopes=authorization_code.scopes, - expires_at=int(time.time()) + 3600, - ) - - self.refresh_tokens[refresh_token] = access_token - - # Remove the used code - del self.auth_codes[authorization_code.code] - - return OAuthToken( - access_token=access_token, - token_type="bearer", - expires_in=3600, - scope="read write", - refresh_token=refresh_token, - ) - - async def load_refresh_token( - self, client: OAuthClientInformationFull, refresh_token: str - ) -> RefreshToken | None: - old_access_token = self.refresh_tokens.get(refresh_token) - if old_access_token is None: - return None - token_info = self.tokens.get(old_access_token) - if token_info is None: - return None - - # Create a RefreshToken object that matches what is expected in later code - refresh_obj = RefreshToken( - token=refresh_token, - client_id=token_info.client_id, - scopes=token_info.scopes, - expires_at=token_info.expires_at, - ) - - return refresh_obj - - async def exchange_refresh_token( - self, - client: OAuthClientInformationFull, - refresh_token: RefreshToken, - scopes: list[str], - ) -> OAuthToken: - # Check if refresh token exists - assert refresh_token.token in self.refresh_tokens - - old_access_token = self.refresh_tokens[refresh_token.token] - - # Check if the access token exists - assert old_access_token in self.tokens - - # Check if the token was issued to this client - token_info = self.tokens[old_access_token] - assert 
token_info.client_id == client.client_id - - # Generate a new access token and refresh token - new_access_token = f"access_{secrets.token_hex(32)}" - new_refresh_token = f"refresh_{secrets.token_hex(32)}" - - # Store the new tokens - self.tokens[new_access_token] = AccessToken( - token=new_access_token, - client_id=client.client_id, - scopes=scopes or token_info.scopes, - expires_at=int(time.time()) + 3600, - ) - - self.refresh_tokens[new_refresh_token] = new_access_token - - # Remove the old tokens - del self.refresh_tokens[refresh_token.token] - del self.tokens[old_access_token] - - return OAuthToken( - access_token=new_access_token, - token_type="bearer", - expires_in=3600, - scope=" ".join(scopes) if scopes else " ".join(token_info.scopes), - refresh_token=new_refresh_token, - ) - - async def load_access_token(self, token: str) -> AccessToken | None: - token_info = self.tokens.get(token) - - # Check if token is expired - # if token_info.expires_at < int(time.time()): - # raise InvalidTokenError("Access token has expired") - - return token_info and AccessToken( - token=token, - client_id=token_info.client_id, - scopes=token_info.scopes, - expires_at=token_info.expires_at, - ) - - async def revoke_token(self, token: AccessToken | RefreshToken) -> None: - match token: - case RefreshToken(): - # Remove the refresh token - del self.refresh_tokens[token.token] - - case AccessToken(): - # Remove the access token - del self.tokens[token.token] - - # Also remove any refresh tokens that point to this access token - for refresh_token, access_token in list(self.refresh_tokens.items()): - if access_token == token.token: - del self.refresh_tokens[refresh_token] - - -@pytest.fixture -def mock_oauth_provider(): - return MockOAuthProvider() - - -@pytest.fixture -def auth_app(mock_oauth_provider): - # Create auth router - auth_routes = create_auth_routes( - mock_oauth_provider, - AnyHttpUrl("https://auth.example.com"), - AnyHttpUrl("https://docs.example.com"), - 
client_registration_options=ClientRegistrationOptions( - enabled=True, - valid_scopes=["read", "write", "profile"], - default_scopes=["read", "write"], - ), - revocation_options=RevocationOptions(enabled=True), - ) - - # Create Starlette app - app = Starlette(routes=auth_routes) - - return app - - -@pytest.fixture -async def test_client(auth_app): - async with httpx.AsyncClient( - transport=httpx.ASGITransport(app=auth_app), base_url="https://mcptest.com" - ) as client: - yield client - - -@pytest.fixture -async def registered_client(test_client: httpx.AsyncClient, request): - """Create and register a test client. - - Parameters can be customized via indirect parameterization: - @pytest.mark.parametrize("registered_client", - [{"grant_types": ["authorization_code"]}], - indirect=True) - """ - # Default client metadata - client_metadata = { - "redirect_uris": ["https://client.example.com/callback"], - "client_name": "Test Client", - "grant_types": ["authorization_code", "refresh_token"], - } - - # Override with any parameters from the test - if hasattr(request, "param") and request.param: - client_metadata.update(request.param) - - response = await test_client.post("/register", json=client_metadata) - assert response.status_code == 201, f"Failed to register client: {response.content}" - - client_info = response.json() - return client_info - - -@pytest.fixture -def pkce_challenge(): - """Create a PKCE challenge with code_verifier and code_challenge.""" - code_verifier = "some_random_verifier_string" - code_challenge = ( - base64.urlsafe_b64encode(hashlib.sha256(code_verifier.encode()).digest()) - .decode() - .rstrip("=") - ) - - return {"code_verifier": code_verifier, "code_challenge": code_challenge} - - -@pytest.fixture -async def auth_code(test_client, registered_client, pkce_challenge, request): - """Get an authorization code. 
- - Parameters can be customized via indirect parameterization: - @pytest.mark.parametrize("auth_code", - [{"redirect_uri": "https://client.example.com/other-callback"}], - indirect=True) - """ - # Default authorize params - auth_params = { - "response_type": "code", - "client_id": registered_client["client_id"], - "redirect_uri": "https://client.example.com/callback", - "code_challenge": pkce_challenge["code_challenge"], - "code_challenge_method": "S256", - "state": "test_state", - } - - # Override with any parameters from the test - if hasattr(request, "param") and request.param: - auth_params.update(request.param) - - response = await test_client.get("/authorize", params=auth_params) - assert response.status_code == 302, f"Failed to get auth code: {response.content}" - - # Extract the authorization code - redirect_url = response.headers["location"] - parsed_url = urlparse(redirect_url) - query_params = parse_qs(parsed_url.query) - - assert "code" in query_params, f"No code in response: {query_params}" - auth_code = query_params["code"][0] - - return { - "code": auth_code, - "redirect_uri": auth_params["redirect_uri"], - "state": query_params.get("state", [None])[0], - } - - -@pytest.fixture -async def tokens(test_client, registered_client, auth_code, pkce_challenge, request): - """Exchange authorization code for tokens. 
- - Parameters can be customized via indirect parameterization: - @pytest.mark.parametrize("tokens", - [{"code_verifier": "wrong_verifier"}], - indirect=True) - """ - # Default token request params - token_params = { - "grant_type": "authorization_code", - "client_id": registered_client["client_id"], - "client_secret": registered_client["client_secret"], - "code": auth_code["code"], - "code_verifier": pkce_challenge["code_verifier"], - "redirect_uri": auth_code["redirect_uri"], - } - - # Override with any parameters from the test - if hasattr(request, "param") and request.param: - token_params.update(request.param) - - response = await test_client.post("/token", data=token_params) - - # Don't assert success here since some tests will intentionally cause errors - return { - "response": response, - "params": token_params, - } - - -class TestAuthEndpoints: - @pytest.mark.anyio - async def test_metadata_endpoint(self, test_client: httpx.AsyncClient): - """Test the OAuth 2.0 metadata endpoint.""" - print("Sending request to metadata endpoint") - response = await test_client.get("/.well-known/oauth-authorization-server") - print(f"Got response: {response.status_code}") - if response.status_code != 200: - print(f"Response content: {response.content}") - assert response.status_code == 200 - - metadata = response.json() - assert metadata["issuer"] == "https://auth.example.com/" - assert ( - metadata["authorization_endpoint"] == "https://auth.example.com/authorize" - ) - assert metadata["token_endpoint"] == "https://auth.example.com/token" - assert metadata["registration_endpoint"] == "https://auth.example.com/register" - assert metadata["revocation_endpoint"] == "https://auth.example.com/revoke" - assert metadata["response_types_supported"] == ["code"] - assert metadata["code_challenge_methods_supported"] == ["S256"] - assert metadata["token_endpoint_auth_methods_supported"] == [ - "client_secret_post" - ] - assert metadata["grant_types_supported"] == [ - 
"authorization_code", - "refresh_token", - ] - assert metadata["service_documentation"] == "https://docs.example.com/" - - @pytest.mark.anyio - async def test_token_validation_error(self, test_client: httpx.AsyncClient): - """Test token endpoint error - validation error.""" - # Missing required fields - response = await test_client.post( - "/token", - data={ - "grant_type": "authorization_code", - # Missing code, code_verifier, client_id, etc. - }, - ) - error_response = response.json() - assert error_response["error"] == "invalid_request" - assert ( - "error_description" in error_response - ) # Contains validation error messages - - @pytest.mark.anyio - async def test_token_invalid_auth_code( - self, test_client, registered_client, pkce_challenge - ): - """Test token endpoint error - authorization code does not exist.""" - # Try to use a non-existent authorization code - response = await test_client.post( - "/token", - data={ - "grant_type": "authorization_code", - "client_id": registered_client["client_id"], - "client_secret": registered_client["client_secret"], - "code": "non_existent_auth_code", - "code_verifier": pkce_challenge["code_verifier"], - "redirect_uri": "https://client.example.com/callback", - }, - ) - print(f"Status code: {response.status_code}") - print(f"Response body: {response.content}") - print(f"Response JSON: {response.json()}") - assert response.status_code == 400 - error_response = response.json() - assert error_response["error"] == "invalid_grant" - assert ( - "authorization code does not exist" in error_response["error_description"] - ) - - @pytest.mark.anyio - async def test_token_expired_auth_code( - self, - test_client, - registered_client, - auth_code, - pkce_challenge, - mock_oauth_provider, - ): - """Test token endpoint error - authorization code has expired.""" - # Get the current time for our time mocking - current_time = time.time() - - # Find the auth code object - code_value = auth_code["code"] - found_code = None - for 
code_obj in mock_oauth_provider.auth_codes.values(): - if code_obj.code == code_value: - found_code = code_obj - break - - assert found_code is not None - - # Authorization codes are typically short-lived (5 minutes = 300 seconds) - # So we'll mock time to be 10 minutes (600 seconds) in the future - with unittest.mock.patch("time.time", return_value=current_time + 600): - # Try to use the expired authorization code - response = await test_client.post( - "/token", - data={ - "grant_type": "authorization_code", - "client_id": registered_client["client_id"], - "client_secret": registered_client["client_secret"], - "code": code_value, - "code_verifier": pkce_challenge["code_verifier"], - "redirect_uri": auth_code["redirect_uri"], - }, - ) - assert response.status_code == 400 - error_response = response.json() - assert error_response["error"] == "invalid_grant" - assert ( - "authorization code has expired" in error_response["error_description"] - ) - - @pytest.mark.anyio - @pytest.mark.parametrize( - "registered_client", - [ - { - "redirect_uris": [ - "https://client.example.com/callback", - "https://client.example.com/other-callback", - ] - } - ], - indirect=True, - ) - async def test_token_redirect_uri_mismatch( - self, test_client, registered_client, auth_code, pkce_challenge - ): - """Test token endpoint error - redirect URI mismatch.""" - # Try to use the code with a different redirect URI - response = await test_client.post( - "/token", - data={ - "grant_type": "authorization_code", - "client_id": registered_client["client_id"], - "client_secret": registered_client["client_secret"], - "code": auth_code["code"], - "code_verifier": pkce_challenge["code_verifier"], - # Different from the one used in /authorize - "redirect_uri": "https://client.example.com/other-callback", - }, - ) - assert response.status_code == 400 - error_response = response.json() - assert error_response["error"] == "invalid_request" - assert "redirect_uri did not match" in 
error_response["error_description"] - - @pytest.mark.anyio - async def test_token_code_verifier_mismatch( - self, test_client, registered_client, auth_code - ): - """Test token endpoint error - PKCE code verifier mismatch.""" - # Try to use the code with an incorrect code verifier - response = await test_client.post( - "/token", - data={ - "grant_type": "authorization_code", - "client_id": registered_client["client_id"], - "client_secret": registered_client["client_secret"], - "code": auth_code["code"], - # Different from the one used to create challenge - "code_verifier": "incorrect_code_verifier", - "redirect_uri": auth_code["redirect_uri"], - }, - ) - assert response.status_code == 400 - error_response = response.json() - assert error_response["error"] == "invalid_grant" - assert "incorrect code_verifier" in error_response["error_description"] - - @pytest.mark.anyio - async def test_token_invalid_refresh_token(self, test_client, registered_client): - """Test token endpoint error - refresh token does not exist.""" - # Try to use a non-existent refresh token - response = await test_client.post( - "/token", - data={ - "grant_type": "refresh_token", - "client_id": registered_client["client_id"], - "client_secret": registered_client["client_secret"], - "refresh_token": "non_existent_refresh_token", - }, - ) - assert response.status_code == 400 - error_response = response.json() - assert error_response["error"] == "invalid_grant" - assert "refresh token does not exist" in error_response["error_description"] - - @pytest.mark.anyio - async def test_token_expired_refresh_token( - self, - test_client, - registered_client, - auth_code, - pkce_challenge, - mock_oauth_provider, - ): - """Test token endpoint error - refresh token has expired.""" - # Step 1: First, let's create a token and refresh token at the current time - current_time = time.time() - - # Exchange authorization code for tokens normally - token_response = await test_client.post( - "/token", - data={ - 
"grant_type": "authorization_code", - "client_id": registered_client["client_id"], - "client_secret": registered_client["client_secret"], - "code": auth_code["code"], - "code_verifier": pkce_challenge["code_verifier"], - "redirect_uri": auth_code["redirect_uri"], - }, - ) - assert token_response.status_code == 200 - tokens = token_response.json() - refresh_token = tokens["refresh_token"] - - # Step 2: Time travel forward 4 hours (tokens expire in 1 hour by default) - # Mock the time.time() function to return a value 4 hours in the future - with unittest.mock.patch( - "time.time", return_value=current_time + 14400 - ): # 4 hours = 14400 seconds - # Try to use the refresh token which should now be considered expired - response = await test_client.post( - "/token", - data={ - "grant_type": "refresh_token", - "client_id": registered_client["client_id"], - "client_secret": registered_client["client_secret"], - "refresh_token": refresh_token, - }, - ) - - # In the "future", the token should be considered expired - assert response.status_code == 400 - error_response = response.json() - assert error_response["error"] == "invalid_grant" - assert "refresh token has expired" in error_response["error_description"] - - @pytest.mark.anyio - async def test_token_invalid_scope( - self, test_client, registered_client, auth_code, pkce_challenge - ): - """Test token endpoint error - invalid scope in refresh token request.""" - # Exchange authorization code for tokens - token_response = await test_client.post( - "/token", - data={ - "grant_type": "authorization_code", - "client_id": registered_client["client_id"], - "client_secret": registered_client["client_secret"], - "code": auth_code["code"], - "code_verifier": pkce_challenge["code_verifier"], - "redirect_uri": auth_code["redirect_uri"], - }, - ) - assert token_response.status_code == 200 - - tokens = token_response.json() - refresh_token = tokens["refresh_token"] - - # Try to use refresh token with an invalid scope - response = 
await test_client.post( - "/token", - data={ - "grant_type": "refresh_token", - "client_id": registered_client["client_id"], - "client_secret": registered_client["client_secret"], - "refresh_token": refresh_token, - "scope": "read write invalid_scope", # Adding an invalid scope - }, - ) - assert response.status_code == 400 - error_response = response.json() - assert error_response["error"] == "invalid_scope" - assert "cannot request scope" in error_response["error_description"] - - @pytest.mark.anyio - async def test_client_registration( - self, test_client: httpx.AsyncClient, mock_oauth_provider: MockOAuthProvider - ): - """Test client registration.""" - client_metadata = { - "redirect_uris": ["https://client.example.com/callback"], - "client_name": "Test Client", - "client_uri": "https://client.example.com", - } - - response = await test_client.post( - "/register", - json=client_metadata, - ) - assert response.status_code == 201, response.content - - client_info = response.json() - assert "client_id" in client_info - assert "client_secret" in client_info - assert client_info["client_name"] == "Test Client" - assert client_info["redirect_uris"] == ["https://client.example.com/callback"] - - # Verify that the client was registered - # assert await mock_oauth_provider.clients_store.get_client( - # client_info["client_id"] - # ) is not None - - @pytest.mark.anyio - async def test_client_registration_missing_required_fields( - self, test_client: httpx.AsyncClient - ): - """Test client registration with missing required fields.""" - # Missing redirect_uris which is a required field - client_metadata = { - "client_name": "Test Client", - "client_uri": "https://client.example.com", - } - - response = await test_client.post( - "/register", - json=client_metadata, - ) - assert response.status_code == 400 - error_data = response.json() - assert "error" in error_data - assert error_data["error"] == "invalid_client_metadata" - assert error_data["error_description"] == 
"redirect_uris: Field required" - - @pytest.mark.anyio - async def test_client_registration_invalid_uri( - self, test_client: httpx.AsyncClient - ): - """Test client registration with invalid URIs.""" - # Invalid redirect_uri format - client_metadata = { - "redirect_uris": ["not-a-valid-uri"], - "client_name": "Test Client", - } - - response = await test_client.post( - "/register", - json=client_metadata, - ) - assert response.status_code == 400 - error_data = response.json() - assert "error" in error_data - assert error_data["error"] == "invalid_client_metadata" - assert error_data["error_description"] == ( - "redirect_uris.0: Input should be a valid URL, " - "relative URL without a base" - ) - - @pytest.mark.anyio - async def test_client_registration_empty_redirect_uris( - self, test_client: httpx.AsyncClient - ): - """Test client registration with empty redirect_uris array.""" - client_metadata = { - "redirect_uris": [], # Empty array - "client_name": "Test Client", - } - - response = await test_client.post( - "/register", - json=client_metadata, - ) - assert response.status_code == 400 - error_data = response.json() - assert "error" in error_data - assert error_data["error"] == "invalid_client_metadata" - assert ( - error_data["error_description"] - == "redirect_uris: List should have at least 1 item after validation, not 0" - ) - - @pytest.mark.anyio - async def test_authorize_form_post( - self, - test_client: httpx.AsyncClient, - mock_oauth_provider: MockOAuthProvider, - pkce_challenge, - ): - """Test the authorization endpoint using POST with form-encoded data.""" - # Register a client - client_metadata = { - "redirect_uris": ["https://client.example.com/callback"], - "client_name": "Test Client", - "grant_types": ["authorization_code", "refresh_token"], - } - - response = await test_client.post( - "/register", - json=client_metadata, - ) - assert response.status_code == 201 - client_info = response.json() - - # Use POST with form-encoded data for 
authorization - response = await test_client.post( - "/authorize", - data={ - "response_type": "code", - "client_id": client_info["client_id"], - "redirect_uri": "https://client.example.com/callback", - "code_challenge": pkce_challenge["code_challenge"], - "code_challenge_method": "S256", - "state": "test_form_state", - }, - ) - assert response.status_code == 302 - - # Extract the authorization code from the redirect URL - redirect_url = response.headers["location"] - parsed_url = urlparse(redirect_url) - query_params = parse_qs(parsed_url.query) - - assert "code" in query_params - assert query_params["state"][0] == "test_form_state" - - @pytest.mark.anyio - async def test_authorization_get( - self, - test_client: httpx.AsyncClient, - mock_oauth_provider: MockOAuthProvider, - pkce_challenge, - ): - """Test the full authorization flow.""" - # 1. Register a client - client_metadata = { - "redirect_uris": ["https://client.example.com/callback"], - "client_name": "Test Client", - "grant_types": ["authorization_code", "refresh_token"], - } - - response = await test_client.post( - "/register", - json=client_metadata, - ) - assert response.status_code == 201 - client_info = response.json() - - # 2. Request authorization using GET with query params - response = await test_client.get( - "/authorize", - params={ - "response_type": "code", - "client_id": client_info["client_id"], - "redirect_uri": "https://client.example.com/callback", - "code_challenge": pkce_challenge["code_challenge"], - "code_challenge_method": "S256", - "state": "test_state", - }, - ) - assert response.status_code == 302 - - # 3. Extract the authorization code from the redirect URL - redirect_url = response.headers["location"] - parsed_url = urlparse(redirect_url) - query_params = parse_qs(parsed_url.query) - - assert "code" in query_params - assert query_params["state"][0] == "test_state" - auth_code = query_params["code"][0] - - # 4. 
Exchange the authorization code for tokens - response = await test_client.post( - "/token", - data={ - "grant_type": "authorization_code", - "client_id": client_info["client_id"], - "client_secret": client_info["client_secret"], - "code": auth_code, - "code_verifier": pkce_challenge["code_verifier"], - "redirect_uri": "https://client.example.com/callback", - }, - ) - assert response.status_code == 200 - - token_response = response.json() - assert "access_token" in token_response - assert "token_type" in token_response - assert "refresh_token" in token_response - assert "expires_in" in token_response - assert token_response["token_type"] == "bearer" - - # 5. Verify the access token - access_token = token_response["access_token"] - refresh_token = token_response["refresh_token"] - - # Create a test client with the token - auth_info = await mock_oauth_provider.load_access_token(access_token) - assert auth_info - assert auth_info.client_id == client_info["client_id"] - assert "read" in auth_info.scopes - assert "write" in auth_info.scopes - - # 6. Refresh the token - response = await test_client.post( - "/token", - data={ - "grant_type": "refresh_token", - "client_id": client_info["client_id"], - "client_secret": client_info["client_secret"], - "refresh_token": refresh_token, - "redirect_uri": "https://client.example.com/callback", - }, - ) - assert response.status_code == 200 - - new_token_response = response.json() - assert "access_token" in new_token_response - assert "refresh_token" in new_token_response - assert new_token_response["access_token"] != access_token - assert new_token_response["refresh_token"] != refresh_token - - # 7. 
Revoke the token - response = await test_client.post( - "/revoke", - data={ - "client_id": client_info["client_id"], - "client_secret": client_info["client_secret"], - "token": new_token_response["access_token"], - }, - ) - assert response.status_code == 200 - - # Verify that the token was revoked - assert ( - await mock_oauth_provider.load_access_token( - new_token_response["access_token"] - ) - is None - ) - - @pytest.mark.anyio - async def test_revoke_invalid_token(self, test_client, registered_client): - """Test revoking an invalid token.""" - response = await test_client.post( - "/revoke", - data={ - "client_id": registered_client["client_id"], - "client_secret": registered_client["client_secret"], - "token": "invalid_token", - }, - ) - # per RFC, this should return 200 even if the token is invalid - assert response.status_code == 200 - - @pytest.mark.anyio - async def test_revoke_with_malformed_token(self, test_client, registered_client): - response = await test_client.post( - "/revoke", - data={ - "client_id": registered_client["client_id"], - "client_secret": registered_client["client_secret"], - "token": 123, - "token_type_hint": "asdf", - }, - ) - assert response.status_code == 400 - error_response = response.json() - assert error_response["error"] == "invalid_request" - assert "token_type_hint" in error_response["error_description"] - - @pytest.mark.anyio - async def test_client_registration_disallowed_scopes( - self, test_client: httpx.AsyncClient - ): - """Test client registration with scopes that are not allowed.""" - client_metadata = { - "redirect_uris": ["https://client.example.com/callback"], - "client_name": "Test Client", - "scope": "read write profile admin", # 'admin' is not in valid_scopes - } - - response = await test_client.post( - "/register", - json=client_metadata, - ) - assert response.status_code == 400 - error_data = response.json() - assert "error" in error_data - assert error_data["error"] == "invalid_client_metadata" - assert 
"scope" in error_data["error_description"] - assert "admin" in error_data["error_description"] - - @pytest.mark.anyio - async def test_client_registration_default_scopes( - self, test_client: httpx.AsyncClient, mock_oauth_provider: MockOAuthProvider - ): - client_metadata = { - "redirect_uris": ["https://client.example.com/callback"], - "client_name": "Test Client", - # No scope specified - } - - response = await test_client.post( - "/register", - json=client_metadata, - ) - assert response.status_code == 201 - client_info = response.json() - - # Verify client was registered successfully - assert client_info["scope"] == "read write" - - # Retrieve the client from the store to verify default scopes - registered_client = await mock_oauth_provider.get_client( - client_info["client_id"] - ) - assert registered_client is not None - - # Check that default scopes were applied - assert registered_client.scope == "read write" - - @pytest.mark.anyio - async def test_client_registration_invalid_grant_type( - self, test_client: httpx.AsyncClient - ): - client_metadata = { - "redirect_uris": ["https://client.example.com/callback"], - "client_name": "Test Client", - "grant_types": ["authorization_code"], - } - - response = await test_client.post( - "/register", - json=client_metadata, - ) - assert response.status_code == 400 - error_data = response.json() - assert "error" in error_data - assert error_data["error"] == "invalid_client_metadata" - assert ( - error_data["error_description"] - == "grant_types must be authorization_code and refresh_token" - ) - - -class TestAuthorizeEndpointErrors: - """Test error handling in the OAuth authorization endpoint.""" - - @pytest.mark.anyio - async def test_authorize_missing_client_id( - self, test_client: httpx.AsyncClient, pkce_challenge - ): - """Test authorization endpoint with missing client_id. - - According to the OAuth2.0 spec, if client_id is missing, the server should - inform the resource owner and NOT redirect. 
- """ - response = await test_client.get( - "/authorize", - params={ - "response_type": "code", - # Missing client_id - "redirect_uri": "https://client.example.com/callback", - "state": "test_state", - "code_challenge": pkce_challenge["code_challenge"], - "code_challenge_method": "S256", - }, - ) - - # Should NOT redirect, should show an error page - assert response.status_code == 400 - # The response should include an error message about missing client_id - assert "client_id" in response.text.lower() - - @pytest.mark.anyio - async def test_authorize_invalid_client_id( - self, test_client: httpx.AsyncClient, pkce_challenge - ): - """Test authorization endpoint with invalid client_id. - - According to the OAuth2.0 spec, if client_id is invalid, the server should - inform the resource owner and NOT redirect. - """ - response = await test_client.get( - "/authorize", - params={ - "response_type": "code", - "client_id": "invalid_client_id_that_does_not_exist", - "redirect_uri": "https://client.example.com/callback", - "state": "test_state", - "code_challenge": pkce_challenge["code_challenge"], - "code_challenge_method": "S256", - }, - ) - - # Should NOT redirect, should show an error page - assert response.status_code == 400 - # The response should include an error message about invalid client_id - assert "client" in response.text.lower() - - @pytest.mark.anyio - async def test_authorize_missing_redirect_uri( - self, test_client: httpx.AsyncClient, registered_client, pkce_challenge - ): - """Test authorization endpoint with missing redirect_uri. - - If client has only one registered redirect_uri, it can be omitted. 
- """ - - response = await test_client.get( - "/authorize", - params={ - "response_type": "code", - "client_id": registered_client["client_id"], - # Missing redirect_uri - "code_challenge": pkce_challenge["code_challenge"], - "code_challenge_method": "S256", - "state": "test_state", - }, - ) - - # Should redirect to the registered redirect_uri - assert response.status_code == 302, response.content - redirect_url = response.headers["location"] - assert redirect_url.startswith("https://client.example.com/callback") - - @pytest.mark.anyio - async def test_authorize_invalid_redirect_uri( - self, test_client: httpx.AsyncClient, registered_client, pkce_challenge - ): - """Test authorization endpoint with invalid redirect_uri. - - According to the OAuth2.0 spec, if redirect_uri is invalid or doesn't match, - the server should inform the resource owner and NOT redirect. - """ - - response = await test_client.get( - "/authorize", - params={ - "response_type": "code", - "client_id": registered_client["client_id"], - # Non-matching URI - "redirect_uri": "https://attacker.example.com/callback", - "code_challenge": pkce_challenge["code_challenge"], - "code_challenge_method": "S256", - "state": "test_state", - }, - ) - - # Should NOT redirect, should show an error page - assert response.status_code == 400, response.content - # The response should include an error message about redirect_uri mismatch - assert "redirect" in response.text.lower() - - @pytest.mark.anyio - @pytest.mark.parametrize( - "registered_client", - [ - { - "redirect_uris": [ - "https://client.example.com/callback", - "https://client.example.com/other-callback", - ] - } - ], - indirect=True, - ) - async def test_authorize_missing_redirect_uri_multiple_registered( - self, test_client: httpx.AsyncClient, registered_client, pkce_challenge - ): - """Test endpoint with missing redirect_uri with multiple registered URIs. - - If client has multiple registered redirect_uris, redirect_uri must be provided. 
- """ - - response = await test_client.get( - "/authorize", - params={ - "response_type": "code", - "client_id": registered_client["client_id"], - # Missing redirect_uri - "code_challenge": pkce_challenge["code_challenge"], - "code_challenge_method": "S256", - "state": "test_state", - }, - ) - - # Should NOT redirect, should return a 400 error - assert response.status_code == 400 - # The response should include an error message about missing redirect_uri - assert "redirect_uri" in response.text.lower() - - @pytest.mark.anyio - async def test_authorize_unsupported_response_type( - self, test_client: httpx.AsyncClient, registered_client, pkce_challenge - ): - """Test authorization endpoint with unsupported response_type. - - According to the OAuth2.0 spec, for other errors like unsupported_response_type, - the server should redirect with error parameters. - """ - - response = await test_client.get( - "/authorize", - params={ - "response_type": "token", # Unsupported (we only support "code") - "client_id": registered_client["client_id"], - "redirect_uri": "https://client.example.com/callback", - "code_challenge": pkce_challenge["code_challenge"], - "code_challenge_method": "S256", - "state": "test_state", - }, - ) - - # Should redirect with error parameters - assert response.status_code == 302 - redirect_url = response.headers["location"] - parsed_url = urlparse(redirect_url) - query_params = parse_qs(parsed_url.query) - - assert "error" in query_params - assert query_params["error"][0] == "unsupported_response_type" - # State should be preserved - assert "state" in query_params - assert query_params["state"][0] == "test_state" - - @pytest.mark.anyio - async def test_authorize_missing_response_type( - self, test_client: httpx.AsyncClient, registered_client, pkce_challenge - ): - """Test authorization endpoint with missing response_type. - - Missing required parameter should result in invalid_request error. 
- """ - - response = await test_client.get( - "/authorize", - params={ - # Missing response_type - "client_id": registered_client["client_id"], - "redirect_uri": "https://client.example.com/callback", - "code_challenge": pkce_challenge["code_challenge"], - "code_challenge_method": "S256", - "state": "test_state", - }, - ) - - # Should redirect with error parameters - assert response.status_code == 302 - redirect_url = response.headers["location"] - parsed_url = urlparse(redirect_url) - query_params = parse_qs(parsed_url.query) - - assert "error" in query_params - assert query_params["error"][0] == "invalid_request" - # State should be preserved - assert "state" in query_params - assert query_params["state"][0] == "test_state" - - @pytest.mark.anyio - async def test_authorize_missing_pkce_challenge( - self, test_client: httpx.AsyncClient, registered_client - ): - """Test authorization endpoint with missing PKCE code_challenge. - - Missing PKCE parameters should result in invalid_request error. - """ - response = await test_client.get( - "/authorize", - params={ - "response_type": "code", - "client_id": registered_client["client_id"], - # Missing code_challenge - "state": "test_state", - # using default URL - }, - ) - - # Should redirect with error parameters - assert response.status_code == 302 - redirect_url = response.headers["location"] - parsed_url = urlparse(redirect_url) - query_params = parse_qs(parsed_url.query) - - assert "error" in query_params - assert query_params["error"][0] == "invalid_request" - # State should be preserved - assert "state" in query_params - assert query_params["state"][0] == "test_state" - - @pytest.mark.anyio - async def test_authorize_invalid_scope( - self, test_client: httpx.AsyncClient, registered_client, pkce_challenge - ): - """Test authorization endpoint with invalid scope. - - Invalid scope should redirect with invalid_scope error. 
- """ - - response = await test_client.get( - "/authorize", - params={ - "response_type": "code", - "client_id": registered_client["client_id"], - "redirect_uri": "https://client.example.com/callback", - "code_challenge": pkce_challenge["code_challenge"], - "code_challenge_method": "S256", - "scope": "invalid_scope_that_does_not_exist", - "state": "test_state", - }, - ) - - # Should redirect with error parameters - assert response.status_code == 302 - redirect_url = response.headers["location"] - parsed_url = urlparse(redirect_url) - query_params = parse_qs(parsed_url.query) - - assert "error" in query_params - assert query_params["error"][0] == "invalid_scope" - # State should be preserved - assert "state" in query_params - assert query_params["state"][0] == "test_state" +""" +Integration tests for MCP authorization components. +""" + +import base64 +import hashlib +import secrets +import time +import unittest.mock +from urllib.parse import parse_qs, urlparse + +import httpx +import pytest +from pydantic import AnyHttpUrl +from starlette.applications import Starlette + +from mcp.server.auth.provider import ( + AccessToken, + AuthorizationCode, + AuthorizationParams, + OAuthAuthorizationServerProvider, + RefreshToken, + construct_redirect_uri, +) +from mcp.server.auth.routes import ( + ClientRegistrationOptions, + RevocationOptions, + create_auth_routes, +) +from mcp.shared.auth import ( + OAuthClientInformationFull, + OAuthToken, +) + + +# Mock OAuth provider for testing +class MockOAuthProvider(OAuthAuthorizationServerProvider): + def __init__(self): + self.clients = {} + self.auth_codes = {} # code -> {client_id, code_challenge, redirect_uri} + self.tokens = {} # token -> {client_id, scopes, expires_at} + self.refresh_tokens = {} # refresh_token -> access_token + + async def get_client(self, client_id: str) -> OAuthClientInformationFull | None: + return self.clients.get(client_id) + + async def register_client(self, client_info: OAuthClientInformationFull): + 
self.clients[client_info.client_id] = client_info + + async def authorize( + self, client: OAuthClientInformationFull, params: AuthorizationParams + ) -> str: + # toy authorize implementation which just immediately generates an authorization + # code and completes the redirect + code = AuthorizationCode( + code=f"code_{int(time.time())}", + client_id=client.client_id, + code_challenge=params.code_challenge, + redirect_uri=params.redirect_uri, + redirect_uri_provided_explicitly=params.redirect_uri_provided_explicitly, + expires_at=time.time() + 300, + scopes=params.scopes or ["read", "write"], + ) + self.auth_codes[code.code] = code + + return construct_redirect_uri( + str(params.redirect_uri), code=code.code, state=params.state + ) + + async def load_authorization_code( + self, client: OAuthClientInformationFull, authorization_code: str + ) -> AuthorizationCode | None: + return self.auth_codes.get(authorization_code) + + async def exchange_authorization_code( + self, client: OAuthClientInformationFull, authorization_code: AuthorizationCode + ) -> OAuthToken: + assert authorization_code.code in self.auth_codes + + # Generate an access token and refresh token + access_token = f"access_{secrets.token_hex(32)}" + refresh_token = f"refresh_{secrets.token_hex(32)}" + + # Store the tokens + self.tokens[access_token] = AccessToken( + token=access_token, + client_id=client.client_id, + scopes=authorization_code.scopes, + expires_at=int(time.time()) + 3600, + ) + + self.refresh_tokens[refresh_token] = access_token + + # Remove the used code + del self.auth_codes[authorization_code.code] + + return OAuthToken( + access_token=access_token, + token_type="bearer", + expires_in=3600, + scope="read write", + refresh_token=refresh_token, + ) + + async def load_refresh_token( + self, client: OAuthClientInformationFull, refresh_token: str + ) -> RefreshToken | None: + old_access_token = self.refresh_tokens.get(refresh_token) + if old_access_token is None: + return None + token_info = 
self.tokens.get(old_access_token) + if token_info is None: + return None + + # Create a RefreshToken object that matches what is expected in later code + refresh_obj = RefreshToken( + token=refresh_token, + client_id=token_info.client_id, + scopes=token_info.scopes, + expires_at=token_info.expires_at, + ) + + return refresh_obj + + async def exchange_refresh_token( + self, + client: OAuthClientInformationFull, + refresh_token: RefreshToken, + scopes: list[str], + ) -> OAuthToken: + # Check if refresh token exists + assert refresh_token.token in self.refresh_tokens + + old_access_token = self.refresh_tokens[refresh_token.token] + + # Check if the access token exists + assert old_access_token in self.tokens + + # Check if the token was issued to this client + token_info = self.tokens[old_access_token] + assert token_info.client_id == client.client_id + + # Generate a new access token and refresh token + new_access_token = f"access_{secrets.token_hex(32)}" + new_refresh_token = f"refresh_{secrets.token_hex(32)}" + + # Store the new tokens + self.tokens[new_access_token] = AccessToken( + token=new_access_token, + client_id=client.client_id, + scopes=scopes or token_info.scopes, + expires_at=int(time.time()) + 3600, + ) + + self.refresh_tokens[new_refresh_token] = new_access_token + + # Remove the old tokens + del self.refresh_tokens[refresh_token.token] + del self.tokens[old_access_token] + + return OAuthToken( + access_token=new_access_token, + token_type="bearer", + expires_in=3600, + scope=" ".join(scopes) if scopes else " ".join(token_info.scopes), + refresh_token=new_refresh_token, + ) + + async def load_access_token(self, token: str) -> AccessToken | None: + token_info = self.tokens.get(token) + + # Check if token is expired + # if token_info.expires_at < int(time.time()): + # raise InvalidTokenError("Access token has expired") + + return token_info and AccessToken( + token=token, + client_id=token_info.client_id, + scopes=token_info.scopes, + 
expires_at=token_info.expires_at, + ) + + async def revoke_token(self, token: AccessToken | RefreshToken) -> None: + match token: + case RefreshToken(): + # Remove the refresh token + del self.refresh_tokens[token.token] + + case AccessToken(): + # Remove the access token + del self.tokens[token.token] + + # Also remove any refresh tokens that point to this access token + for refresh_token, access_token in list(self.refresh_tokens.items()): + if access_token == token.token: + del self.refresh_tokens[refresh_token] + + +@pytest.fixture +def mock_oauth_provider(): + return MockOAuthProvider() + + +@pytest.fixture +def auth_app(mock_oauth_provider): + # Create auth router + auth_routes = create_auth_routes( + mock_oauth_provider, + AnyHttpUrl("https://auth.example.com"), + AnyHttpUrl("https://docs.example.com"), + client_registration_options=ClientRegistrationOptions( + enabled=True, + valid_scopes=["read", "write", "profile"], + default_scopes=["read", "write"], + ), + revocation_options=RevocationOptions(enabled=True), + ) + + # Create Starlette app + app = Starlette(routes=auth_routes) + + return app + + +@pytest.fixture +async def test_client(auth_app): + async with httpx.AsyncClient( + transport=httpx.ASGITransport(app=auth_app), base_url="https://mcptest.com" + ) as client: + yield client + + +@pytest.fixture +async def registered_client(test_client: httpx.AsyncClient, request): + """Create and register a test client. 
+ + Parameters can be customized via indirect parameterization: + @pytest.mark.parametrize("registered_client", + [{"grant_types": ["authorization_code"]}], + indirect=True) + """ + # Default client metadata + client_metadata = { + "redirect_uris": ["https://client.example.com/callback"], + "client_name": "Test Client", + "grant_types": ["authorization_code", "refresh_token"], + } + + # Override with any parameters from the test + if hasattr(request, "param") and request.param: + client_metadata.update(request.param) + + response = await test_client.post("/register", json=client_metadata) + assert response.status_code == 201, f"Failed to register client: {response.content}" + + client_info = response.json() + return client_info + + +@pytest.fixture +def pkce_challenge(): + """Create a PKCE challenge with code_verifier and code_challenge.""" + code_verifier = "some_random_verifier_string" + code_challenge = ( + base64.urlsafe_b64encode(hashlib.sha256(code_verifier.encode()).digest()) + .decode() + .rstrip("=") + ) + + return {"code_verifier": code_verifier, "code_challenge": code_challenge} + + +@pytest.fixture +async def auth_code(test_client, registered_client, pkce_challenge, request): + """Get an authorization code. 
+ + Parameters can be customized via indirect parameterization: + @pytest.mark.parametrize("auth_code", + [{"redirect_uri": "https://client.example.com/other-callback"}], + indirect=True) + """ + # Default authorize params + auth_params = { + "response_type": "code", + "client_id": registered_client["client_id"], + "redirect_uri": "https://client.example.com/callback", + "code_challenge": pkce_challenge["code_challenge"], + "code_challenge_method": "S256", + "state": "test_state", + } + + # Override with any parameters from the test + if hasattr(request, "param") and request.param: + auth_params.update(request.param) + + response = await test_client.get("/authorize", params=auth_params) + assert response.status_code == 302, f"Failed to get auth code: {response.content}" + + # Extract the authorization code + redirect_url = response.headers["location"] + parsed_url = urlparse(redirect_url) + query_params = parse_qs(parsed_url.query) + + assert "code" in query_params, f"No code in response: {query_params}" + auth_code = query_params["code"][0] + + return { + "code": auth_code, + "redirect_uri": auth_params["redirect_uri"], + "state": query_params.get("state", [None])[0], + } + + +@pytest.fixture +async def tokens(test_client, registered_client, auth_code, pkce_challenge, request): + """Exchange authorization code for tokens. 
+ + Parameters can be customized via indirect parameterization: + @pytest.mark.parametrize("tokens", + [{"code_verifier": "wrong_verifier"}], + indirect=True) + """ + # Default token request params + token_params = { + "grant_type": "authorization_code", + "client_id": registered_client["client_id"], + "client_secret": registered_client["client_secret"], + "code": auth_code["code"], + "code_verifier": pkce_challenge["code_verifier"], + "redirect_uri": auth_code["redirect_uri"], + } + + # Override with any parameters from the test + if hasattr(request, "param") and request.param: + token_params.update(request.param) + + response = await test_client.post("/token", data=token_params) + + # Don't assert success here since some tests will intentionally cause errors + return { + "response": response, + "params": token_params, + } + + +class TestAuthEndpoints: + @pytest.mark.anyio + async def test_metadata_endpoint(self, test_client: httpx.AsyncClient): + """Test the OAuth 2.0 metadata endpoint.""" + print("Sending request to metadata endpoint") + response = await test_client.get("/.well-known/oauth-authorization-server") + print(f"Got response: {response.status_code}") + if response.status_code != 200: + print(f"Response content: {response.content}") + assert response.status_code == 200 + + metadata = response.json() + assert metadata["issuer"] == "https://auth.example.com/" + assert ( + metadata["authorization_endpoint"] == "https://auth.example.com/authorize" + ) + assert metadata["token_endpoint"] == "https://auth.example.com/token" + assert metadata["registration_endpoint"] == "https://auth.example.com/register" + assert metadata["revocation_endpoint"] == "https://auth.example.com/revoke" + assert metadata["response_types_supported"] == ["code"] + assert metadata["code_challenge_methods_supported"] == ["S256"] + assert metadata["token_endpoint_auth_methods_supported"] == [ + "client_secret_post" + ] + assert metadata["grant_types_supported"] == [ + 
"authorization_code", + "refresh_token", + ] + assert metadata["service_documentation"] == "https://docs.example.com/" + + @pytest.mark.anyio + async def test_token_validation_error(self, test_client: httpx.AsyncClient): + """Test token endpoint error - validation error.""" + # Missing required fields + response = await test_client.post( + "/token", + data={ + "grant_type": "authorization_code", + # Missing code, code_verifier, client_id, etc. + }, + ) + error_response = response.json() + assert error_response["error"] == "invalid_request" + assert ( + "error_description" in error_response + ) # Contains validation error messages + + @pytest.mark.anyio + async def test_token_invalid_auth_code( + self, test_client, registered_client, pkce_challenge + ): + """Test token endpoint error - authorization code does not exist.""" + # Try to use a non-existent authorization code + response = await test_client.post( + "/token", + data={ + "grant_type": "authorization_code", + "client_id": registered_client["client_id"], + "client_secret": registered_client["client_secret"], + "code": "non_existent_auth_code", + "code_verifier": pkce_challenge["code_verifier"], + "redirect_uri": "https://client.example.com/callback", + }, + ) + print(f"Status code: {response.status_code}") + print(f"Response body: {response.content}") + print(f"Response JSON: {response.json()}") + assert response.status_code == 400 + error_response = response.json() + assert error_response["error"] == "invalid_grant" + assert ( + "authorization code does not exist" in error_response["error_description"] + ) + + @pytest.mark.anyio + async def test_token_expired_auth_code( + self, + test_client, + registered_client, + auth_code, + pkce_challenge, + mock_oauth_provider, + ): + """Test token endpoint error - authorization code has expired.""" + # Get the current time for our time mocking + current_time = time.time() + + # Find the auth code object + code_value = auth_code["code"] + found_code = None + for 
code_obj in mock_oauth_provider.auth_codes.values(): + if code_obj.code == code_value: + found_code = code_obj + break + + assert found_code is not None + + # Authorization codes are typically short-lived (5 minutes = 300 seconds) + # So we'll mock time to be 10 minutes (600 seconds) in the future + with unittest.mock.patch("time.time", return_value=current_time + 600): + # Try to use the expired authorization code + response = await test_client.post( + "/token", + data={ + "grant_type": "authorization_code", + "client_id": registered_client["client_id"], + "client_secret": registered_client["client_secret"], + "code": code_value, + "code_verifier": pkce_challenge["code_verifier"], + "redirect_uri": auth_code["redirect_uri"], + }, + ) + assert response.status_code == 400 + error_response = response.json() + assert error_response["error"] == "invalid_grant" + assert ( + "authorization code has expired" in error_response["error_description"] + ) + + @pytest.mark.anyio + @pytest.mark.parametrize( + "registered_client", + [ + { + "redirect_uris": [ + "https://client.example.com/callback", + "https://client.example.com/other-callback", + ] + } + ], + indirect=True, + ) + async def test_token_redirect_uri_mismatch( + self, test_client, registered_client, auth_code, pkce_challenge + ): + """Test token endpoint error - redirect URI mismatch.""" + # Try to use the code with a different redirect URI + response = await test_client.post( + "/token", + data={ + "grant_type": "authorization_code", + "client_id": registered_client["client_id"], + "client_secret": registered_client["client_secret"], + "code": auth_code["code"], + "code_verifier": pkce_challenge["code_verifier"], + # Different from the one used in /authorize + "redirect_uri": "https://client.example.com/other-callback", + }, + ) + assert response.status_code == 400 + error_response = response.json() + assert error_response["error"] == "invalid_request" + assert "redirect_uri did not match" in 
error_response["error_description"] + + @pytest.mark.anyio + async def test_token_code_verifier_mismatch( + self, test_client, registered_client, auth_code + ): + """Test token endpoint error - PKCE code verifier mismatch.""" + # Try to use the code with an incorrect code verifier + response = await test_client.post( + "/token", + data={ + "grant_type": "authorization_code", + "client_id": registered_client["client_id"], + "client_secret": registered_client["client_secret"], + "code": auth_code["code"], + # Different from the one used to create challenge + "code_verifier": "incorrect_code_verifier", + "redirect_uri": auth_code["redirect_uri"], + }, + ) + assert response.status_code == 400 + error_response = response.json() + assert error_response["error"] == "invalid_grant" + assert "incorrect code_verifier" in error_response["error_description"] + + @pytest.mark.anyio + async def test_token_invalid_refresh_token(self, test_client, registered_client): + """Test token endpoint error - refresh token does not exist.""" + # Try to use a non-existent refresh token + response = await test_client.post( + "/token", + data={ + "grant_type": "refresh_token", + "client_id": registered_client["client_id"], + "client_secret": registered_client["client_secret"], + "refresh_token": "non_existent_refresh_token", + }, + ) + assert response.status_code == 400 + error_response = response.json() + assert error_response["error"] == "invalid_grant" + assert "refresh token does not exist" in error_response["error_description"] + + @pytest.mark.anyio + async def test_token_expired_refresh_token( + self, + test_client, + registered_client, + auth_code, + pkce_challenge, + mock_oauth_provider, + ): + """Test token endpoint error - refresh token has expired.""" + # Step 1: First, let's create a token and refresh token at the current time + current_time = time.time() + + # Exchange authorization code for tokens normally + token_response = await test_client.post( + "/token", + data={ + 
"grant_type": "authorization_code", + "client_id": registered_client["client_id"], + "client_secret": registered_client["client_secret"], + "code": auth_code["code"], + "code_verifier": pkce_challenge["code_verifier"], + "redirect_uri": auth_code["redirect_uri"], + }, + ) + assert token_response.status_code == 200 + tokens = token_response.json() + refresh_token = tokens["refresh_token"] + + # Step 2: Time travel forward 4 hours (tokens expire in 1 hour by default) + # Mock the time.time() function to return a value 4 hours in the future + with unittest.mock.patch( + "time.time", return_value=current_time + 14400 + ): # 4 hours = 14400 seconds + # Try to use the refresh token which should now be considered expired + response = await test_client.post( + "/token", + data={ + "grant_type": "refresh_token", + "client_id": registered_client["client_id"], + "client_secret": registered_client["client_secret"], + "refresh_token": refresh_token, + }, + ) + + # In the "future", the token should be considered expired + assert response.status_code == 400 + error_response = response.json() + assert error_response["error"] == "invalid_grant" + assert "refresh token has expired" in error_response["error_description"] + + @pytest.mark.anyio + async def test_token_invalid_scope( + self, test_client, registered_client, auth_code, pkce_challenge + ): + """Test token endpoint error - invalid scope in refresh token request.""" + # Exchange authorization code for tokens + token_response = await test_client.post( + "/token", + data={ + "grant_type": "authorization_code", + "client_id": registered_client["client_id"], + "client_secret": registered_client["client_secret"], + "code": auth_code["code"], + "code_verifier": pkce_challenge["code_verifier"], + "redirect_uri": auth_code["redirect_uri"], + }, + ) + assert token_response.status_code == 200 + + tokens = token_response.json() + refresh_token = tokens["refresh_token"] + + # Try to use refresh token with an invalid scope + response = 
await test_client.post( + "/token", + data={ + "grant_type": "refresh_token", + "client_id": registered_client["client_id"], + "client_secret": registered_client["client_secret"], + "refresh_token": refresh_token, + "scope": "read write invalid_scope", # Adding an invalid scope + }, + ) + assert response.status_code == 400 + error_response = response.json() + assert error_response["error"] == "invalid_scope" + assert "cannot request scope" in error_response["error_description"] + + @pytest.mark.anyio + async def test_client_registration( + self, test_client: httpx.AsyncClient, mock_oauth_provider: MockOAuthProvider + ): + """Test client registration.""" + client_metadata = { + "redirect_uris": ["https://client.example.com/callback"], + "client_name": "Test Client", + "client_uri": "https://client.example.com", + } + + response = await test_client.post( + "/register", + json=client_metadata, + ) + assert response.status_code == 201, response.content + + client_info = response.json() + assert "client_id" in client_info + assert "client_secret" in client_info + assert client_info["client_name"] == "Test Client" + assert client_info["redirect_uris"] == ["https://client.example.com/callback"] + + # Verify that the client was registered + # assert await mock_oauth_provider.clients_store.get_client( + # client_info["client_id"] + # ) is not None + + @pytest.mark.anyio + async def test_client_registration_missing_required_fields( + self, test_client: httpx.AsyncClient + ): + """Test client registration with missing required fields.""" + # Missing redirect_uris which is a required field + client_metadata = { + "client_name": "Test Client", + "client_uri": "https://client.example.com", + } + + response = await test_client.post( + "/register", + json=client_metadata, + ) + assert response.status_code == 400 + error_data = response.json() + assert "error" in error_data + assert error_data["error"] == "invalid_client_metadata" + assert error_data["error_description"] == 
"redirect_uris: Field required" + + @pytest.mark.anyio + async def test_client_registration_invalid_uri( + self, test_client: httpx.AsyncClient + ): + """Test client registration with invalid URIs.""" + # Invalid redirect_uri format + client_metadata = { + "redirect_uris": ["not-a-valid-uri"], + "client_name": "Test Client", + } + + response = await test_client.post( + "/register", + json=client_metadata, + ) + assert response.status_code == 400 + error_data = response.json() + assert "error" in error_data + assert error_data["error"] == "invalid_client_metadata" + assert error_data["error_description"] == ( + "redirect_uris.0: Input should be a valid URL, " + "relative URL without a base" + ) + + @pytest.mark.anyio + async def test_client_registration_empty_redirect_uris( + self, test_client: httpx.AsyncClient + ): + """Test client registration with empty redirect_uris array.""" + client_metadata = { + "redirect_uris": [], # Empty array + "client_name": "Test Client", + } + + response = await test_client.post( + "/register", + json=client_metadata, + ) + assert response.status_code == 400 + error_data = response.json() + assert "error" in error_data + assert error_data["error"] == "invalid_client_metadata" + assert ( + error_data["error_description"] + == "redirect_uris: List should have at least 1 item after validation, not 0" + ) + + @pytest.mark.anyio + async def test_authorize_form_post( + self, + test_client: httpx.AsyncClient, + mock_oauth_provider: MockOAuthProvider, + pkce_challenge, + ): + """Test the authorization endpoint using POST with form-encoded data.""" + # Register a client + client_metadata = { + "redirect_uris": ["https://client.example.com/callback"], + "client_name": "Test Client", + "grant_types": ["authorization_code", "refresh_token"], + } + + response = await test_client.post( + "/register", + json=client_metadata, + ) + assert response.status_code == 201 + client_info = response.json() + + # Use POST with form-encoded data for 
authorization + response = await test_client.post( + "/authorize", + data={ + "response_type": "code", + "client_id": client_info["client_id"], + "redirect_uri": "https://client.example.com/callback", + "code_challenge": pkce_challenge["code_challenge"], + "code_challenge_method": "S256", + "state": "test_form_state", + }, + ) + assert response.status_code == 302 + + # Extract the authorization code from the redirect URL + redirect_url = response.headers["location"] + parsed_url = urlparse(redirect_url) + query_params = parse_qs(parsed_url.query) + + assert "code" in query_params + assert query_params["state"][0] == "test_form_state" + + @pytest.mark.anyio + async def test_authorization_get( + self, + test_client: httpx.AsyncClient, + mock_oauth_provider: MockOAuthProvider, + pkce_challenge, + ): + """Test the full authorization flow.""" + # 1. Register a client + client_metadata = { + "redirect_uris": ["https://client.example.com/callback"], + "client_name": "Test Client", + "grant_types": ["authorization_code", "refresh_token"], + } + + response = await test_client.post( + "/register", + json=client_metadata, + ) + assert response.status_code == 201 + client_info = response.json() + + # 2. Request authorization using GET with query params + response = await test_client.get( + "/authorize", + params={ + "response_type": "code", + "client_id": client_info["client_id"], + "redirect_uri": "https://client.example.com/callback", + "code_challenge": pkce_challenge["code_challenge"], + "code_challenge_method": "S256", + "state": "test_state", + }, + ) + assert response.status_code == 302 + + # 3. Extract the authorization code from the redirect URL + redirect_url = response.headers["location"] + parsed_url = urlparse(redirect_url) + query_params = parse_qs(parsed_url.query) + + assert "code" in query_params + assert query_params["state"][0] == "test_state" + auth_code = query_params["code"][0] + + # 4. 
Exchange the authorization code for tokens + response = await test_client.post( + "/token", + data={ + "grant_type": "authorization_code", + "client_id": client_info["client_id"], + "client_secret": client_info["client_secret"], + "code": auth_code, + "code_verifier": pkce_challenge["code_verifier"], + "redirect_uri": "https://client.example.com/callback", + }, + ) + assert response.status_code == 200 + + token_response = response.json() + assert "access_token" in token_response + assert "token_type" in token_response + assert "refresh_token" in token_response + assert "expires_in" in token_response + assert token_response["token_type"] == "bearer" + + # 5. Verify the access token + access_token = token_response["access_token"] + refresh_token = token_response["refresh_token"] + + # Create a test client with the token + auth_info = await mock_oauth_provider.load_access_token(access_token) + assert auth_info + assert auth_info.client_id == client_info["client_id"] + assert "read" in auth_info.scopes + assert "write" in auth_info.scopes + + # 6. Refresh the token + response = await test_client.post( + "/token", + data={ + "grant_type": "refresh_token", + "client_id": client_info["client_id"], + "client_secret": client_info["client_secret"], + "refresh_token": refresh_token, + "redirect_uri": "https://client.example.com/callback", + }, + ) + assert response.status_code == 200 + + new_token_response = response.json() + assert "access_token" in new_token_response + assert "refresh_token" in new_token_response + assert new_token_response["access_token"] != access_token + assert new_token_response["refresh_token"] != refresh_token + + # 7. 
Revoke the token + response = await test_client.post( + "/revoke", + data={ + "client_id": client_info["client_id"], + "client_secret": client_info["client_secret"], + "token": new_token_response["access_token"], + }, + ) + assert response.status_code == 200 + + # Verify that the token was revoked + assert ( + await mock_oauth_provider.load_access_token( + new_token_response["access_token"] + ) + is None + ) + + @pytest.mark.anyio + async def test_revoke_invalid_token(self, test_client, registered_client): + """Test revoking an invalid token.""" + response = await test_client.post( + "/revoke", + data={ + "client_id": registered_client["client_id"], + "client_secret": registered_client["client_secret"], + "token": "invalid_token", + }, + ) + # per RFC, this should return 200 even if the token is invalid + assert response.status_code == 200 + + @pytest.mark.anyio + async def test_revoke_with_malformed_token(self, test_client, registered_client): + response = await test_client.post( + "/revoke", + data={ + "client_id": registered_client["client_id"], + "client_secret": registered_client["client_secret"], + "token": 123, + "token_type_hint": "asdf", + }, + ) + assert response.status_code == 400 + error_response = response.json() + assert error_response["error"] == "invalid_request" + assert "token_type_hint" in error_response["error_description"] + + @pytest.mark.anyio + async def test_client_registration_disallowed_scopes( + self, test_client: httpx.AsyncClient + ): + """Test client registration with scopes that are not allowed.""" + client_metadata = { + "redirect_uris": ["https://client.example.com/callback"], + "client_name": "Test Client", + "scope": "read write profile admin", # 'admin' is not in valid_scopes + } + + response = await test_client.post( + "/register", + json=client_metadata, + ) + assert response.status_code == 400 + error_data = response.json() + assert "error" in error_data + assert error_data["error"] == "invalid_client_metadata" + assert 
"scope" in error_data["error_description"] + assert "admin" in error_data["error_description"] + + @pytest.mark.anyio + async def test_client_registration_default_scopes( + self, test_client: httpx.AsyncClient, mock_oauth_provider: MockOAuthProvider + ): + client_metadata = { + "redirect_uris": ["https://client.example.com/callback"], + "client_name": "Test Client", + # No scope specified + } + + response = await test_client.post( + "/register", + json=client_metadata, + ) + assert response.status_code == 201 + client_info = response.json() + + # Verify client was registered successfully + assert client_info["scope"] == "read write" + + # Retrieve the client from the store to verify default scopes + registered_client = await mock_oauth_provider.get_client( + client_info["client_id"] + ) + assert registered_client is not None + + # Check that default scopes were applied + assert registered_client.scope == "read write" + + @pytest.mark.anyio + async def test_client_registration_invalid_grant_type( + self, test_client: httpx.AsyncClient + ): + client_metadata = { + "redirect_uris": ["https://client.example.com/callback"], + "client_name": "Test Client", + "grant_types": ["authorization_code"], + } + + response = await test_client.post( + "/register", + json=client_metadata, + ) + assert response.status_code == 400 + error_data = response.json() + assert "error" in error_data + assert error_data["error"] == "invalid_client_metadata" + assert ( + error_data["error_description"] + == "grant_types must be authorization_code and refresh_token" + ) + + +class TestAuthorizeEndpointErrors: + """Test error handling in the OAuth authorization endpoint.""" + + @pytest.mark.anyio + async def test_authorize_missing_client_id( + self, test_client: httpx.AsyncClient, pkce_challenge + ): + """Test authorization endpoint with missing client_id. + + According to the OAuth2.0 spec, if client_id is missing, the server should + inform the resource owner and NOT redirect. 
+ """ + response = await test_client.get( + "/authorize", + params={ + "response_type": "code", + # Missing client_id + "redirect_uri": "https://client.example.com/callback", + "state": "test_state", + "code_challenge": pkce_challenge["code_challenge"], + "code_challenge_method": "S256", + }, + ) + + # Should NOT redirect, should show an error page + assert response.status_code == 400 + # The response should include an error message about missing client_id + assert "client_id" in response.text.lower() + + @pytest.mark.anyio + async def test_authorize_invalid_client_id( + self, test_client: httpx.AsyncClient, pkce_challenge + ): + """Test authorization endpoint with invalid client_id. + + According to the OAuth2.0 spec, if client_id is invalid, the server should + inform the resource owner and NOT redirect. + """ + response = await test_client.get( + "/authorize", + params={ + "response_type": "code", + "client_id": "invalid_client_id_that_does_not_exist", + "redirect_uri": "https://client.example.com/callback", + "state": "test_state", + "code_challenge": pkce_challenge["code_challenge"], + "code_challenge_method": "S256", + }, + ) + + # Should NOT redirect, should show an error page + assert response.status_code == 400 + # The response should include an error message about invalid client_id + assert "client" in response.text.lower() + + @pytest.mark.anyio + async def test_authorize_missing_redirect_uri( + self, test_client: httpx.AsyncClient, registered_client, pkce_challenge + ): + """Test authorization endpoint with missing redirect_uri. + + If client has only one registered redirect_uri, it can be omitted. 
+ """ + + response = await test_client.get( + "/authorize", + params={ + "response_type": "code", + "client_id": registered_client["client_id"], + # Missing redirect_uri + "code_challenge": pkce_challenge["code_challenge"], + "code_challenge_method": "S256", + "state": "test_state", + }, + ) + + # Should redirect to the registered redirect_uri + assert response.status_code == 302, response.content + redirect_url = response.headers["location"] + assert redirect_url.startswith("https://client.example.com/callback") + + @pytest.mark.anyio + async def test_authorize_invalid_redirect_uri( + self, test_client: httpx.AsyncClient, registered_client, pkce_challenge + ): + """Test authorization endpoint with invalid redirect_uri. + + According to the OAuth2.0 spec, if redirect_uri is invalid or doesn't match, + the server should inform the resource owner and NOT redirect. + """ + + response = await test_client.get( + "/authorize", + params={ + "response_type": "code", + "client_id": registered_client["client_id"], + # Non-matching URI + "redirect_uri": "https://attacker.example.com/callback", + "code_challenge": pkce_challenge["code_challenge"], + "code_challenge_method": "S256", + "state": "test_state", + }, + ) + + # Should NOT redirect, should show an error page + assert response.status_code == 400, response.content + # The response should include an error message about redirect_uri mismatch + assert "redirect" in response.text.lower() + + @pytest.mark.anyio + @pytest.mark.parametrize( + "registered_client", + [ + { + "redirect_uris": [ + "https://client.example.com/callback", + "https://client.example.com/other-callback", + ] + } + ], + indirect=True, + ) + async def test_authorize_missing_redirect_uri_multiple_registered( + self, test_client: httpx.AsyncClient, registered_client, pkce_challenge + ): + """Test endpoint with missing redirect_uri with multiple registered URIs. + + If client has multiple registered redirect_uris, redirect_uri must be provided. 
+ """ + + response = await test_client.get( + "/authorize", + params={ + "response_type": "code", + "client_id": registered_client["client_id"], + # Missing redirect_uri + "code_challenge": pkce_challenge["code_challenge"], + "code_challenge_method": "S256", + "state": "test_state", + }, + ) + + # Should NOT redirect, should return a 400 error + assert response.status_code == 400 + # The response should include an error message about missing redirect_uri + assert "redirect_uri" in response.text.lower() + + @pytest.mark.anyio + async def test_authorize_unsupported_response_type( + self, test_client: httpx.AsyncClient, registered_client, pkce_challenge + ): + """Test authorization endpoint with unsupported response_type. + + According to the OAuth2.0 spec, for other errors like unsupported_response_type, + the server should redirect with error parameters. + """ + + response = await test_client.get( + "/authorize", + params={ + "response_type": "token", # Unsupported (we only support "code") + "client_id": registered_client["client_id"], + "redirect_uri": "https://client.example.com/callback", + "code_challenge": pkce_challenge["code_challenge"], + "code_challenge_method": "S256", + "state": "test_state", + }, + ) + + # Should redirect with error parameters + assert response.status_code == 302 + redirect_url = response.headers["location"] + parsed_url = urlparse(redirect_url) + query_params = parse_qs(parsed_url.query) + + assert "error" in query_params + assert query_params["error"][0] == "unsupported_response_type" + # State should be preserved + assert "state" in query_params + assert query_params["state"][0] == "test_state" + + @pytest.mark.anyio + async def test_authorize_missing_response_type( + self, test_client: httpx.AsyncClient, registered_client, pkce_challenge + ): + """Test authorization endpoint with missing response_type. + + Missing required parameter should result in invalid_request error. 
+ """ + + response = await test_client.get( + "/authorize", + params={ + # Missing response_type + "client_id": registered_client["client_id"], + "redirect_uri": "https://client.example.com/callback", + "code_challenge": pkce_challenge["code_challenge"], + "code_challenge_method": "S256", + "state": "test_state", + }, + ) + + # Should redirect with error parameters + assert response.status_code == 302 + redirect_url = response.headers["location"] + parsed_url = urlparse(redirect_url) + query_params = parse_qs(parsed_url.query) + + assert "error" in query_params + assert query_params["error"][0] == "invalid_request" + # State should be preserved + assert "state" in query_params + assert query_params["state"][0] == "test_state" + + @pytest.mark.anyio + async def test_authorize_missing_pkce_challenge( + self, test_client: httpx.AsyncClient, registered_client + ): + """Test authorization endpoint with missing PKCE code_challenge. + + Missing PKCE parameters should result in invalid_request error. + """ + response = await test_client.get( + "/authorize", + params={ + "response_type": "code", + "client_id": registered_client["client_id"], + # Missing code_challenge + "state": "test_state", + # using default URL + }, + ) + + # Should redirect with error parameters + assert response.status_code == 302 + redirect_url = response.headers["location"] + parsed_url = urlparse(redirect_url) + query_params = parse_qs(parsed_url.query) + + assert "error" in query_params + assert query_params["error"][0] == "invalid_request" + # State should be preserved + assert "state" in query_params + assert query_params["state"][0] == "test_state" + + @pytest.mark.anyio + async def test_authorize_invalid_scope( + self, test_client: httpx.AsyncClient, registered_client, pkce_challenge + ): + """Test authorization endpoint with invalid scope. + + Invalid scope should redirect with invalid_scope error. 
+ """ + + response = await test_client.get( + "/authorize", + params={ + "response_type": "code", + "client_id": registered_client["client_id"], + "redirect_uri": "https://client.example.com/callback", + "code_challenge": pkce_challenge["code_challenge"], + "code_challenge_method": "S256", + "scope": "invalid_scope_that_does_not_exist", + "state": "test_state", + }, + ) + + # Should redirect with error parameters + assert response.status_code == 302 + redirect_url = response.headers["location"] + parsed_url = urlparse(redirect_url) + query_params = parse_qs(parsed_url.query) + + assert "error" in query_params + assert query_params["error"][0] == "invalid_scope" + # State should be preserved + assert "state" in query_params + assert query_params["state"][0] == "test_state" diff --git a/tests/server/fastmcp/prompts/test_base.py b/tests/server/fastmcp/prompts/test_base.py index c4af044a6..589c2adc4 100644 --- a/tests/server/fastmcp/prompts/test_base.py +++ b/tests/server/fastmcp/prompts/test_base.py @@ -1,206 +1,206 @@ -import pytest -from pydantic import FileUrl - -from mcp.server.fastmcp.prompts.base import ( - AssistantMessage, - Message, - Prompt, - TextContent, - UserMessage, -) -from mcp.types import EmbeddedResource, TextResourceContents - - -class TestRenderPrompt: - @pytest.mark.anyio - async def test_basic_fn(self): - def fn() -> str: - return "Hello, world!" - - prompt = Prompt.from_function(fn) - assert await prompt.render() == [ - UserMessage(content=TextContent(type="text", text="Hello, world!")) - ] - - @pytest.mark.anyio - async def test_async_fn(self): - async def fn() -> str: - return "Hello, world!" - - prompt = Prompt.from_function(fn) - assert await prompt.render() == [ - UserMessage(content=TextContent(type="text", text="Hello, world!")) - ] - - @pytest.mark.anyio - async def test_fn_with_args(self): - async def fn(name: str, age: int = 30) -> str: - return f"Hello, {name}! You're {age} years old." 
- - prompt = Prompt.from_function(fn) - assert await prompt.render(arguments={"name": "World"}) == [ - UserMessage( - content=TextContent( - type="text", text="Hello, World! You're 30 years old." - ) - ) - ] - - @pytest.mark.anyio - async def test_fn_with_invalid_kwargs(self): - async def fn(name: str, age: int = 30) -> str: - return f"Hello, {name}! You're {age} years old." - - prompt = Prompt.from_function(fn) - with pytest.raises(ValueError): - await prompt.render(arguments={"age": 40}) - - @pytest.mark.anyio - async def test_fn_returns_message(self): - async def fn() -> UserMessage: - return UserMessage(content="Hello, world!") - - prompt = Prompt.from_function(fn) - assert await prompt.render() == [ - UserMessage(content=TextContent(type="text", text="Hello, world!")) - ] - - @pytest.mark.anyio - async def test_fn_returns_assistant_message(self): - async def fn() -> AssistantMessage: - return AssistantMessage( - content=TextContent(type="text", text="Hello, world!") - ) - - prompt = Prompt.from_function(fn) - assert await prompt.render() == [ - AssistantMessage(content=TextContent(type="text", text="Hello, world!")) - ] - - @pytest.mark.anyio - async def test_fn_returns_multiple_messages(self): - expected = [ - UserMessage("Hello, world!"), - AssistantMessage("How can I help you today?"), - UserMessage("I'm looking for a restaurant in the center of town."), - ] - - async def fn() -> list[Message]: - return expected - - prompt = Prompt.from_function(fn) - assert await prompt.render() == expected - - @pytest.mark.anyio - async def test_fn_returns_list_of_strings(self): - expected = [ - "Hello, world!", - "I'm looking for a restaurant in the center of town.", - ] - - async def fn() -> list[str]: - return expected - - prompt = Prompt.from_function(fn) - assert await prompt.render() == [UserMessage(t) for t in expected] - - @pytest.mark.anyio - async def test_fn_returns_resource_content(self): - """Test returning a message with resource content.""" - - async def 
fn() -> UserMessage: - return UserMessage( - content=EmbeddedResource( - type="resource", - resource=TextResourceContents( - uri=FileUrl("file://file.txt"), - text="File contents", - mimeType="text/plain", - ), - ) - ) - - prompt = Prompt.from_function(fn) - assert await prompt.render() == [ - UserMessage( - content=EmbeddedResource( - type="resource", - resource=TextResourceContents( - uri=FileUrl("file://file.txt"), - text="File contents", - mimeType="text/plain", - ), - ) - ) - ] - - @pytest.mark.anyio - async def test_fn_returns_mixed_content(self): - """Test returning messages with mixed content types.""" - - async def fn() -> list[Message]: - return [ - UserMessage(content="Please analyze this file:"), - UserMessage( - content=EmbeddedResource( - type="resource", - resource=TextResourceContents( - uri=FileUrl("file://file.txt"), - text="File contents", - mimeType="text/plain", - ), - ) - ), - AssistantMessage(content="I'll help analyze that file."), - ] - - prompt = Prompt.from_function(fn) - assert await prompt.render() == [ - UserMessage( - content=TextContent(type="text", text="Please analyze this file:") - ), - UserMessage( - content=EmbeddedResource( - type="resource", - resource=TextResourceContents( - uri=FileUrl("file://file.txt"), - text="File contents", - mimeType="text/plain", - ), - ) - ), - AssistantMessage( - content=TextContent(type="text", text="I'll help analyze that file.") - ), - ] - - @pytest.mark.anyio - async def test_fn_returns_dict_with_resource(self): - """Test returning a dict with resource content.""" - - async def fn() -> dict: - return { - "role": "user", - "content": { - "type": "resource", - "resource": { - "uri": FileUrl("file://file.txt"), - "text": "File contents", - "mimeType": "text/plain", - }, - }, - } - - prompt = Prompt.from_function(fn) - assert await prompt.render() == [ - UserMessage( - content=EmbeddedResource( - type="resource", - resource=TextResourceContents( - uri=FileUrl("file://file.txt"), - text="File 
contents", - mimeType="text/plain", - ), - ) - ) - ] +import pytest +from pydantic import FileUrl + +from mcp.server.fastmcp.prompts.base import ( + AssistantMessage, + Message, + Prompt, + TextContent, + UserMessage, +) +from mcp.types import EmbeddedResource, TextResourceContents + + +class TestRenderPrompt: + @pytest.mark.anyio + async def test_basic_fn(self): + def fn() -> str: + return "Hello, world!" + + prompt = Prompt.from_function(fn) + assert await prompt.render() == [ + UserMessage(content=TextContent(type="text", text="Hello, world!")) + ] + + @pytest.mark.anyio + async def test_async_fn(self): + async def fn() -> str: + return "Hello, world!" + + prompt = Prompt.from_function(fn) + assert await prompt.render() == [ + UserMessage(content=TextContent(type="text", text="Hello, world!")) + ] + + @pytest.mark.anyio + async def test_fn_with_args(self): + async def fn(name: str, age: int = 30) -> str: + return f"Hello, {name}! You're {age} years old." + + prompt = Prompt.from_function(fn) + assert await prompt.render(arguments={"name": "World"}) == [ + UserMessage( + content=TextContent( + type="text", text="Hello, World! You're 30 years old." + ) + ) + ] + + @pytest.mark.anyio + async def test_fn_with_invalid_kwargs(self): + async def fn(name: str, age: int = 30) -> str: + return f"Hello, {name}! You're {age} years old." 
+ + prompt = Prompt.from_function(fn) + with pytest.raises(ValueError): + await prompt.render(arguments={"age": 40}) + + @pytest.mark.anyio + async def test_fn_returns_message(self): + async def fn() -> UserMessage: + return UserMessage(content="Hello, world!") + + prompt = Prompt.from_function(fn) + assert await prompt.render() == [ + UserMessage(content=TextContent(type="text", text="Hello, world!")) + ] + + @pytest.mark.anyio + async def test_fn_returns_assistant_message(self): + async def fn() -> AssistantMessage: + return AssistantMessage( + content=TextContent(type="text", text="Hello, world!") + ) + + prompt = Prompt.from_function(fn) + assert await prompt.render() == [ + AssistantMessage(content=TextContent(type="text", text="Hello, world!")) + ] + + @pytest.mark.anyio + async def test_fn_returns_multiple_messages(self): + expected = [ + UserMessage("Hello, world!"), + AssistantMessage("How can I help you today?"), + UserMessage("I'm looking for a restaurant in the center of town."), + ] + + async def fn() -> list[Message]: + return expected + + prompt = Prompt.from_function(fn) + assert await prompt.render() == expected + + @pytest.mark.anyio + async def test_fn_returns_list_of_strings(self): + expected = [ + "Hello, world!", + "I'm looking for a restaurant in the center of town.", + ] + + async def fn() -> list[str]: + return expected + + prompt = Prompt.from_function(fn) + assert await prompt.render() == [UserMessage(t) for t in expected] + + @pytest.mark.anyio + async def test_fn_returns_resource_content(self): + """Test returning a message with resource content.""" + + async def fn() -> UserMessage: + return UserMessage( + content=EmbeddedResource( + type="resource", + resource=TextResourceContents( + uri=FileUrl("file://file.txt"), + text="File contents", + mimeType="text/plain", + ), + ) + ) + + prompt = Prompt.from_function(fn) + assert await prompt.render() == [ + UserMessage( + content=EmbeddedResource( + type="resource", + 
resource=TextResourceContents( + uri=FileUrl("file://file.txt"), + text="File contents", + mimeType="text/plain", + ), + ) + ) + ] + + @pytest.mark.anyio + async def test_fn_returns_mixed_content(self): + """Test returning messages with mixed content types.""" + + async def fn() -> list[Message]: + return [ + UserMessage(content="Please analyze this file:"), + UserMessage( + content=EmbeddedResource( + type="resource", + resource=TextResourceContents( + uri=FileUrl("file://file.txt"), + text="File contents", + mimeType="text/plain", + ), + ) + ), + AssistantMessage(content="I'll help analyze that file."), + ] + + prompt = Prompt.from_function(fn) + assert await prompt.render() == [ + UserMessage( + content=TextContent(type="text", text="Please analyze this file:") + ), + UserMessage( + content=EmbeddedResource( + type="resource", + resource=TextResourceContents( + uri=FileUrl("file://file.txt"), + text="File contents", + mimeType="text/plain", + ), + ) + ), + AssistantMessage( + content=TextContent(type="text", text="I'll help analyze that file.") + ), + ] + + @pytest.mark.anyio + async def test_fn_returns_dict_with_resource(self): + """Test returning a dict with resource content.""" + + async def fn() -> dict: + return { + "role": "user", + "content": { + "type": "resource", + "resource": { + "uri": FileUrl("file://file.txt"), + "text": "File contents", + "mimeType": "text/plain", + }, + }, + } + + prompt = Prompt.from_function(fn) + assert await prompt.render() == [ + UserMessage( + content=EmbeddedResource( + type="resource", + resource=TextResourceContents( + uri=FileUrl("file://file.txt"), + text="File contents", + mimeType="text/plain", + ), + ) + ) + ] diff --git a/tests/server/fastmcp/prompts/test_manager.py b/tests/server/fastmcp/prompts/test_manager.py index c64a4a564..bb5d55168 100644 --- a/tests/server/fastmcp/prompts/test_manager.py +++ b/tests/server/fastmcp/prompts/test_manager.py @@ -1,112 +1,112 @@ -import pytest - -from 
mcp.server.fastmcp.prompts.base import Prompt, TextContent, UserMessage -from mcp.server.fastmcp.prompts.manager import PromptManager - - -class TestPromptManager: - def test_add_prompt(self): - """Test adding a prompt to the manager.""" - - def fn() -> str: - return "Hello, world!" - - manager = PromptManager() - prompt = Prompt.from_function(fn) - added = manager.add_prompt(prompt) - assert added == prompt - assert manager.get_prompt("fn") == prompt - - def test_add_duplicate_prompt(self, caplog): - """Test adding the same prompt twice.""" - - def fn() -> str: - return "Hello, world!" - - manager = PromptManager() - prompt = Prompt.from_function(fn) - first = manager.add_prompt(prompt) - second = manager.add_prompt(prompt) - assert first == second - assert "Prompt already exists" in caplog.text - - def test_disable_warn_on_duplicate_prompts(self, caplog): - """Test disabling warning on duplicate prompts.""" - - def fn() -> str: - return "Hello, world!" - - manager = PromptManager(warn_on_duplicate_prompts=False) - prompt = Prompt.from_function(fn) - first = manager.add_prompt(prompt) - second = manager.add_prompt(prompt) - assert first == second - assert "Prompt already exists" not in caplog.text - - def test_list_prompts(self): - """Test listing all prompts.""" - - def fn1() -> str: - return "Hello, world!" - - def fn2() -> str: - return "Goodbye, world!" - - manager = PromptManager() - prompt1 = Prompt.from_function(fn1) - prompt2 = Prompt.from_function(fn2) - manager.add_prompt(prompt1) - manager.add_prompt(prompt2) - prompts = manager.list_prompts() - assert len(prompts) == 2 - assert prompts == [prompt1, prompt2] - - @pytest.mark.anyio - async def test_render_prompt(self): - """Test rendering a prompt.""" - - def fn() -> str: - return "Hello, world!" 
- - manager = PromptManager() - prompt = Prompt.from_function(fn) - manager.add_prompt(prompt) - messages = await manager.render_prompt("fn") - assert messages == [ - UserMessage(content=TextContent(type="text", text="Hello, world!")) - ] - - @pytest.mark.anyio - async def test_render_prompt_with_args(self): - """Test rendering a prompt with arguments.""" - - def fn(name: str) -> str: - return f"Hello, {name}!" - - manager = PromptManager() - prompt = Prompt.from_function(fn) - manager.add_prompt(prompt) - messages = await manager.render_prompt("fn", arguments={"name": "World"}) - assert messages == [ - UserMessage(content=TextContent(type="text", text="Hello, World!")) - ] - - @pytest.mark.anyio - async def test_render_unknown_prompt(self): - """Test rendering a non-existent prompt.""" - manager = PromptManager() - with pytest.raises(ValueError, match="Unknown prompt: unknown"): - await manager.render_prompt("unknown") - - @pytest.mark.anyio - async def test_render_prompt_with_missing_args(self): - """Test rendering a prompt with missing required arguments.""" - - def fn(name: str) -> str: - return f"Hello, {name}!" - - manager = PromptManager() - prompt = Prompt.from_function(fn) - manager.add_prompt(prompt) - with pytest.raises(ValueError, match="Missing required arguments"): - await manager.render_prompt("fn") +import pytest + +from mcp.server.fastmcp.prompts.base import Prompt, TextContent, UserMessage +from mcp.server.fastmcp.prompts.manager import PromptManager + + +class TestPromptManager: + def test_add_prompt(self): + """Test adding a prompt to the manager.""" + + def fn() -> str: + return "Hello, world!" + + manager = PromptManager() + prompt = Prompt.from_function(fn) + added = manager.add_prompt(prompt) + assert added == prompt + assert manager.get_prompt("fn") == prompt + + def test_add_duplicate_prompt(self, caplog): + """Test adding the same prompt twice.""" + + def fn() -> str: + return "Hello, world!" 
+ + manager = PromptManager() + prompt = Prompt.from_function(fn) + first = manager.add_prompt(prompt) + second = manager.add_prompt(prompt) + assert first == second + assert "Prompt already exists" in caplog.text + + def test_disable_warn_on_duplicate_prompts(self, caplog): + """Test disabling warning on duplicate prompts.""" + + def fn() -> str: + return "Hello, world!" + + manager = PromptManager(warn_on_duplicate_prompts=False) + prompt = Prompt.from_function(fn) + first = manager.add_prompt(prompt) + second = manager.add_prompt(prompt) + assert first == second + assert "Prompt already exists" not in caplog.text + + def test_list_prompts(self): + """Test listing all prompts.""" + + def fn1() -> str: + return "Hello, world!" + + def fn2() -> str: + return "Goodbye, world!" + + manager = PromptManager() + prompt1 = Prompt.from_function(fn1) + prompt2 = Prompt.from_function(fn2) + manager.add_prompt(prompt1) + manager.add_prompt(prompt2) + prompts = manager.list_prompts() + assert len(prompts) == 2 + assert prompts == [prompt1, prompt2] + + @pytest.mark.anyio + async def test_render_prompt(self): + """Test rendering a prompt.""" + + def fn() -> str: + return "Hello, world!" + + manager = PromptManager() + prompt = Prompt.from_function(fn) + manager.add_prompt(prompt) + messages = await manager.render_prompt("fn") + assert messages == [ + UserMessage(content=TextContent(type="text", text="Hello, world!")) + ] + + @pytest.mark.anyio + async def test_render_prompt_with_args(self): + """Test rendering a prompt with arguments.""" + + def fn(name: str) -> str: + return f"Hello, {name}!" 
+ + manager = PromptManager() + prompt = Prompt.from_function(fn) + manager.add_prompt(prompt) + messages = await manager.render_prompt("fn", arguments={"name": "World"}) + assert messages == [ + UserMessage(content=TextContent(type="text", text="Hello, World!")) + ] + + @pytest.mark.anyio + async def test_render_unknown_prompt(self): + """Test rendering a non-existent prompt.""" + manager = PromptManager() + with pytest.raises(ValueError, match="Unknown prompt: unknown"): + await manager.render_prompt("unknown") + + @pytest.mark.anyio + async def test_render_prompt_with_missing_args(self): + """Test rendering a prompt with missing required arguments.""" + + def fn(name: str) -> str: + return f"Hello, {name}!" + + manager = PromptManager() + prompt = Prompt.from_function(fn) + manager.add_prompt(prompt) + with pytest.raises(ValueError, match="Missing required arguments"): + await manager.render_prompt("fn") diff --git a/tests/server/fastmcp/resources/test_file_resources.py b/tests/server/fastmcp/resources/test_file_resources.py index 36cbca32c..f9ff3e6f8 100644 --- a/tests/server/fastmcp/resources/test_file_resources.py +++ b/tests/server/fastmcp/resources/test_file_resources.py @@ -1,119 +1,119 @@ -import os -from pathlib import Path -from tempfile import NamedTemporaryFile - -import pytest -from pydantic import FileUrl - -from mcp.server.fastmcp.resources import FileResource - - -@pytest.fixture -def temp_file(): - """Create a temporary file for testing. - - File is automatically cleaned up after the test if it still exists. 
- """ - content = "test content" - with NamedTemporaryFile(mode="w", delete=False) as f: - f.write(content) - path = Path(f.name).resolve() - yield path - try: - path.unlink() - except FileNotFoundError: - pass # File was already deleted by the test - - -class TestFileResource: - """Test FileResource functionality.""" - - def test_file_resource_creation(self, temp_file: Path): - """Test creating a FileResource.""" - resource = FileResource( - uri=FileUrl(temp_file.as_uri()), - name="test", - description="test file", - path=temp_file, - ) - assert str(resource.uri) == temp_file.as_uri() - assert resource.name == "test" - assert resource.description == "test file" - assert resource.mime_type == "text/plain" # default - assert resource.path == temp_file - assert resource.is_binary is False # default - - def test_file_resource_str_path_conversion(self, temp_file: Path): - """Test FileResource handles string paths.""" - resource = FileResource( - uri=FileUrl(f"file://{temp_file}"), - name="test", - path=Path(str(temp_file)), - ) - assert isinstance(resource.path, Path) - assert resource.path.is_absolute() - - @pytest.mark.anyio - async def test_read_text_file(self, temp_file: Path): - """Test reading a text file.""" - resource = FileResource( - uri=FileUrl(f"file://{temp_file}"), - name="test", - path=temp_file, - ) - content = await resource.read() - assert content == "test content" - assert resource.mime_type == "text/plain" - - @pytest.mark.anyio - async def test_read_binary_file(self, temp_file: Path): - """Test reading a file as binary.""" - resource = FileResource( - uri=FileUrl(f"file://{temp_file}"), - name="test", - path=temp_file, - is_binary=True, - ) - content = await resource.read() - assert isinstance(content, bytes) - assert content == b"test content" - - def test_relative_path_error(self): - """Test error on relative path.""" - with pytest.raises(ValueError, match="Path must be absolute"): - FileResource( - uri=FileUrl("file:///test.txt"), - name="test", 
- path=Path("test.txt"), - ) - - @pytest.mark.anyio - async def test_missing_file_error(self, temp_file: Path): - """Test error when file doesn't exist.""" - # Create path to non-existent file - missing = temp_file.parent / "missing.txt" - resource = FileResource( - uri=FileUrl("file:///missing.txt"), - name="test", - path=missing, - ) - with pytest.raises(ValueError, match="Error reading file"): - await resource.read() - - @pytest.mark.skipif( - os.name == "nt", reason="File permissions behave differently on Windows" - ) - @pytest.mark.anyio - async def test_permission_error(self, temp_file: Path): - """Test reading a file without permissions.""" - temp_file.chmod(0o000) # Remove all permissions - try: - resource = FileResource( - uri=FileUrl(temp_file.as_uri()), - name="test", - path=temp_file, - ) - with pytest.raises(ValueError, match="Error reading file"): - await resource.read() - finally: - temp_file.chmod(0o644) # Restore permissions +import os +from pathlib import Path +from tempfile import NamedTemporaryFile + +import pytest +from pydantic import FileUrl + +from mcp.server.fastmcp.resources import FileResource + + +@pytest.fixture +def temp_file(): + """Create a temporary file for testing. + + File is automatically cleaned up after the test if it still exists. 
+ """ + content = "test content" + with NamedTemporaryFile(mode="w", delete=False) as f: + f.write(content) + path = Path(f.name).resolve() + yield path + try: + path.unlink() + except FileNotFoundError: + pass # File was already deleted by the test + + +class TestFileResource: + """Test FileResource functionality.""" + + def test_file_resource_creation(self, temp_file: Path): + """Test creating a FileResource.""" + resource = FileResource( + uri=FileUrl(temp_file.as_uri()), + name="test", + description="test file", + path=temp_file, + ) + assert str(resource.uri) == temp_file.as_uri() + assert resource.name == "test" + assert resource.description == "test file" + assert resource.mime_type == "text/plain" # default + assert resource.path == temp_file + assert resource.is_binary is False # default + + def test_file_resource_str_path_conversion(self, temp_file: Path): + """Test FileResource handles string paths.""" + resource = FileResource( + uri=FileUrl(f"file://{temp_file}"), + name="test", + path=Path(str(temp_file)), + ) + assert isinstance(resource.path, Path) + assert resource.path.is_absolute() + + @pytest.mark.anyio + async def test_read_text_file(self, temp_file: Path): + """Test reading a text file.""" + resource = FileResource( + uri=FileUrl(f"file://{temp_file}"), + name="test", + path=temp_file, + ) + content = await resource.read() + assert content == "test content" + assert resource.mime_type == "text/plain" + + @pytest.mark.anyio + async def test_read_binary_file(self, temp_file: Path): + """Test reading a file as binary.""" + resource = FileResource( + uri=FileUrl(f"file://{temp_file}"), + name="test", + path=temp_file, + is_binary=True, + ) + content = await resource.read() + assert isinstance(content, bytes) + assert content == b"test content" + + def test_relative_path_error(self): + """Test error on relative path.""" + with pytest.raises(ValueError, match="Path must be absolute"): + FileResource( + uri=FileUrl("file:///test.txt"), + name="test", 
+ path=Path("test.txt"), + ) + + @pytest.mark.anyio + async def test_missing_file_error(self, temp_file: Path): + """Test error when file doesn't exist.""" + # Create path to non-existent file + missing = temp_file.parent / "missing.txt" + resource = FileResource( + uri=FileUrl("file:///missing.txt"), + name="test", + path=missing, + ) + with pytest.raises(ValueError, match="Error reading file"): + await resource.read() + + @pytest.mark.skipif( + os.name == "nt", reason="File permissions behave differently on Windows" + ) + @pytest.mark.anyio + async def test_permission_error(self, temp_file: Path): + """Test reading a file without permissions.""" + temp_file.chmod(0o000) # Remove all permissions + try: + resource = FileResource( + uri=FileUrl(temp_file.as_uri()), + name="test", + path=temp_file, + ) + with pytest.raises(ValueError, match="Error reading file"): + await resource.read() + finally: + temp_file.chmod(0o644) # Restore permissions diff --git a/tests/server/fastmcp/resources/test_resource_manager.py b/tests/server/fastmcp/resources/test_resource_manager.py index 4423e5315..0e94dcc05 100644 --- a/tests/server/fastmcp/resources/test_resource_manager.py +++ b/tests/server/fastmcp/resources/test_resource_manager.py @@ -1,141 +1,141 @@ -from pathlib import Path -from tempfile import NamedTemporaryFile - -import pytest -from pydantic import AnyUrl, FileUrl - -from mcp.server.fastmcp.resources import ( - FileResource, - FunctionResource, - ResourceManager, - ResourceTemplate, -) - - -@pytest.fixture -def temp_file(): - """Create a temporary file for testing. - - File is automatically cleaned up after the test if it still exists. 
- """ - content = "test content" - with NamedTemporaryFile(mode="w", delete=False) as f: - f.write(content) - path = Path(f.name).resolve() - yield path - try: - path.unlink() - except FileNotFoundError: - pass # File was already deleted by the test - - -class TestResourceManager: - """Test ResourceManager functionality.""" - - def test_add_resource(self, temp_file: Path): - """Test adding a resource.""" - manager = ResourceManager() - resource = FileResource( - uri=FileUrl(f"file://{temp_file}"), - name="test", - path=temp_file, - ) - added = manager.add_resource(resource) - assert added == resource - assert manager.list_resources() == [resource] - - def test_add_duplicate_resource(self, temp_file: Path): - """Test adding the same resource twice.""" - manager = ResourceManager() - resource = FileResource( - uri=FileUrl(f"file://{temp_file}"), - name="test", - path=temp_file, - ) - first = manager.add_resource(resource) - second = manager.add_resource(resource) - assert first == second - assert manager.list_resources() == [resource] - - def test_warn_on_duplicate_resources(self, temp_file: Path, caplog): - """Test warning on duplicate resources.""" - manager = ResourceManager() - resource = FileResource( - uri=FileUrl(f"file://{temp_file}"), - name="test", - path=temp_file, - ) - manager.add_resource(resource) - manager.add_resource(resource) - assert "Resource already exists" in caplog.text - - def test_disable_warn_on_duplicate_resources(self, temp_file: Path, caplog): - """Test disabling warning on duplicate resources.""" - manager = ResourceManager(warn_on_duplicate_resources=False) - resource = FileResource( - uri=FileUrl(f"file://{temp_file}"), - name="test", - path=temp_file, - ) - manager.add_resource(resource) - manager.add_resource(resource) - assert "Resource already exists" not in caplog.text - - @pytest.mark.anyio - async def test_get_resource(self, temp_file: Path): - """Test getting a resource by URI.""" - manager = ResourceManager() - resource = 
FileResource( - uri=FileUrl(f"file://{temp_file}"), - name="test", - path=temp_file, - ) - manager.add_resource(resource) - retrieved = await manager.get_resource(resource.uri) - assert retrieved == resource - - @pytest.mark.anyio - async def test_get_resource_from_template(self): - """Test getting a resource through a template.""" - manager = ResourceManager() - - def greet(name: str) -> str: - return f"Hello, {name}!" - - template = ResourceTemplate.from_function( - fn=greet, - uri_template="greet://{name}", - name="greeter", - ) - manager._templates[template.uri_template] = template - - resource = await manager.get_resource(AnyUrl("greet://world")) - assert isinstance(resource, FunctionResource) - content = await resource.read() - assert content == "Hello, world!" - - @pytest.mark.anyio - async def test_get_unknown_resource(self): - """Test getting a non-existent resource.""" - manager = ResourceManager() - with pytest.raises(ValueError, match="Unknown resource"): - await manager.get_resource(AnyUrl("unknown://test")) - - def test_list_resources(self, temp_file: Path): - """Test listing all resources.""" - manager = ResourceManager() - resource1 = FileResource( - uri=FileUrl(f"file://{temp_file}"), - name="test1", - path=temp_file, - ) - resource2 = FileResource( - uri=FileUrl(f"file://{temp_file}2"), - name="test2", - path=temp_file, - ) - manager.add_resource(resource1) - manager.add_resource(resource2) - resources = manager.list_resources() - assert len(resources) == 2 - assert resources == [resource1, resource2] +from pathlib import Path +from tempfile import NamedTemporaryFile + +import pytest +from pydantic import AnyUrl, FileUrl + +from mcp.server.fastmcp.resources import ( + FileResource, + FunctionResource, + ResourceManager, + ResourceTemplate, +) + + +@pytest.fixture +def temp_file(): + """Create a temporary file for testing. + + File is automatically cleaned up after the test if it still exists. 
+ """ + content = "test content" + with NamedTemporaryFile(mode="w", delete=False) as f: + f.write(content) + path = Path(f.name).resolve() + yield path + try: + path.unlink() + except FileNotFoundError: + pass # File was already deleted by the test + + +class TestResourceManager: + """Test ResourceManager functionality.""" + + def test_add_resource(self, temp_file: Path): + """Test adding a resource.""" + manager = ResourceManager() + resource = FileResource( + uri=FileUrl(f"file://{temp_file}"), + name="test", + path=temp_file, + ) + added = manager.add_resource(resource) + assert added == resource + assert manager.list_resources() == [resource] + + def test_add_duplicate_resource(self, temp_file: Path): + """Test adding the same resource twice.""" + manager = ResourceManager() + resource = FileResource( + uri=FileUrl(f"file://{temp_file}"), + name="test", + path=temp_file, + ) + first = manager.add_resource(resource) + second = manager.add_resource(resource) + assert first == second + assert manager.list_resources() == [resource] + + def test_warn_on_duplicate_resources(self, temp_file: Path, caplog): + """Test warning on duplicate resources.""" + manager = ResourceManager() + resource = FileResource( + uri=FileUrl(f"file://{temp_file}"), + name="test", + path=temp_file, + ) + manager.add_resource(resource) + manager.add_resource(resource) + assert "Resource already exists" in caplog.text + + def test_disable_warn_on_duplicate_resources(self, temp_file: Path, caplog): + """Test disabling warning on duplicate resources.""" + manager = ResourceManager(warn_on_duplicate_resources=False) + resource = FileResource( + uri=FileUrl(f"file://{temp_file}"), + name="test", + path=temp_file, + ) + manager.add_resource(resource) + manager.add_resource(resource) + assert "Resource already exists" not in caplog.text + + @pytest.mark.anyio + async def test_get_resource(self, temp_file: Path): + """Test getting a resource by URI.""" + manager = ResourceManager() + resource = 
FileResource( + uri=FileUrl(f"file://{temp_file}"), + name="test", + path=temp_file, + ) + manager.add_resource(resource) + retrieved = await manager.get_resource(resource.uri) + assert retrieved == resource + + @pytest.mark.anyio + async def test_get_resource_from_template(self): + """Test getting a resource through a template.""" + manager = ResourceManager() + + def greet(name: str) -> str: + return f"Hello, {name}!" + + template = ResourceTemplate.from_function( + fn=greet, + uri_template="greet://{name}", + name="greeter", + ) + manager._templates[template.uri_template] = template + + resource = await manager.get_resource(AnyUrl("greet://world")) + assert isinstance(resource, FunctionResource) + content = await resource.read() + assert content == "Hello, world!" + + @pytest.mark.anyio + async def test_get_unknown_resource(self): + """Test getting a non-existent resource.""" + manager = ResourceManager() + with pytest.raises(ValueError, match="Unknown resource"): + await manager.get_resource(AnyUrl("unknown://test")) + + def test_list_resources(self, temp_file: Path): + """Test listing all resources.""" + manager = ResourceManager() + resource1 = FileResource( + uri=FileUrl(f"file://{temp_file}"), + name="test1", + path=temp_file, + ) + resource2 = FileResource( + uri=FileUrl(f"file://{temp_file}2"), + name="test2", + path=temp_file, + ) + manager.add_resource(resource1) + manager.add_resource(resource2) + resources = manager.list_resources() + assert len(resources) == 2 + assert resources == [resource1, resource2] diff --git a/tests/server/fastmcp/resources/test_resource_template.py b/tests/server/fastmcp/resources/test_resource_template.py index f47244361..ef8f7e809 100644 --- a/tests/server/fastmcp/resources/test_resource_template.py +++ b/tests/server/fastmcp/resources/test_resource_template.py @@ -1,188 +1,188 @@ -import json - -import pytest -from pydantic import BaseModel - -from mcp.server.fastmcp.resources import FunctionResource, ResourceTemplate - - 
-class TestResourceTemplate: - """Test ResourceTemplate functionality.""" - - def test_template_creation(self): - """Test creating a template from a function.""" - - def my_func(key: str, value: int) -> dict: - return {"key": key, "value": value} - - template = ResourceTemplate.from_function( - fn=my_func, - uri_template="test://{key}/{value}", - name="test", - ) - assert template.uri_template == "test://{key}/{value}" - assert template.name == "test" - assert template.mime_type == "text/plain" # default - test_input = {"key": "test", "value": 42} - assert template.fn(**test_input) == my_func(**test_input) - - def test_template_matches(self): - """Test matching URIs against a template.""" - - def my_func(key: str, value: int) -> dict: - return {"key": key, "value": value} - - template = ResourceTemplate.from_function( - fn=my_func, - uri_template="test://{key}/{value}", - name="test", - ) - - # Valid match - params = template.matches("test://foo/123") - assert params == {"key": "foo", "value": "123"} - - # No match - assert template.matches("test://foo") is None - assert template.matches("other://foo/123") is None - - @pytest.mark.anyio - async def test_create_resource(self): - """Test creating a resource from a template.""" - - def my_func(key: str, value: int) -> dict: - return {"key": key, "value": value} - - template = ResourceTemplate.from_function( - fn=my_func, - uri_template="test://{key}/{value}", - name="test", - ) - - resource = await template.create_resource( - "test://foo/123", - {"key": "foo", "value": 123}, - ) - - assert isinstance(resource, FunctionResource) - content = await resource.read() - assert isinstance(content, str) - data = json.loads(content) - assert data == {"key": "foo", "value": 123} - - @pytest.mark.anyio - async def test_template_error(self): - """Test error handling in template resource creation.""" - - def failing_func(x: str) -> str: - raise ValueError("Test error") - - template = ResourceTemplate.from_function( - 
fn=failing_func, - uri_template="fail://{x}", - name="fail", - ) - - with pytest.raises(ValueError, match="Error creating resource from template"): - await template.create_resource("fail://test", {"x": "test"}) - - @pytest.mark.anyio - async def test_async_text_resource(self): - """Test creating a text resource from async function.""" - - async def greet(name: str) -> str: - return f"Hello, {name}!" - - template = ResourceTemplate.from_function( - fn=greet, - uri_template="greet://{name}", - name="greeter", - ) - - resource = await template.create_resource( - "greet://world", - {"name": "world"}, - ) - - assert isinstance(resource, FunctionResource) - content = await resource.read() - assert content == "Hello, world!" - - @pytest.mark.anyio - async def test_async_binary_resource(self): - """Test creating a binary resource from async function.""" - - async def get_bytes(value: str) -> bytes: - return value.encode() - - template = ResourceTemplate.from_function( - fn=get_bytes, - uri_template="bytes://{value}", - name="bytes", - ) - - resource = await template.create_resource( - "bytes://test", - {"value": "test"}, - ) - - assert isinstance(resource, FunctionResource) - content = await resource.read() - assert content == b"test" - - @pytest.mark.anyio - async def test_basemodel_conversion(self): - """Test handling of BaseModel types.""" - - class MyModel(BaseModel): - key: str - value: int - - def get_data(key: str, value: int) -> MyModel: - return MyModel(key=key, value=value) - - template = ResourceTemplate.from_function( - fn=get_data, - uri_template="test://{key}/{value}", - name="test", - ) - - resource = await template.create_resource( - "test://foo/123", - {"key": "foo", "value": 123}, - ) - - assert isinstance(resource, FunctionResource) - content = await resource.read() - assert isinstance(content, str) - data = json.loads(content) - assert data == {"key": "foo", "value": 123} - - @pytest.mark.anyio - async def test_custom_type_conversion(self): - """Test 
handling of custom types.""" - - class CustomData: - def __init__(self, value: str): - self.value = value - - def __str__(self) -> str: - return self.value - - def get_data(value: str) -> CustomData: - return CustomData(value) - - template = ResourceTemplate.from_function( - fn=get_data, - uri_template="test://{value}", - name="test", - ) - - resource = await template.create_resource( - "test://hello", - {"value": "hello"}, - ) - - assert isinstance(resource, FunctionResource) - content = await resource.read() - assert content == '"hello"' +import json + +import pytest +from pydantic import BaseModel + +from mcp.server.fastmcp.resources import FunctionResource, ResourceTemplate + + +class TestResourceTemplate: + """Test ResourceTemplate functionality.""" + + def test_template_creation(self): + """Test creating a template from a function.""" + + def my_func(key: str, value: int) -> dict: + return {"key": key, "value": value} + + template = ResourceTemplate.from_function( + fn=my_func, + uri_template="test://{key}/{value}", + name="test", + ) + assert template.uri_template == "test://{key}/{value}" + assert template.name == "test" + assert template.mime_type == "text/plain" # default + test_input = {"key": "test", "value": 42} + assert template.fn(**test_input) == my_func(**test_input) + + def test_template_matches(self): + """Test matching URIs against a template.""" + + def my_func(key: str, value: int) -> dict: + return {"key": key, "value": value} + + template = ResourceTemplate.from_function( + fn=my_func, + uri_template="test://{key}/{value}", + name="test", + ) + + # Valid match + params = template.matches("test://foo/123") + assert params == {"key": "foo", "value": "123"} + + # No match + assert template.matches("test://foo") is None + assert template.matches("other://foo/123") is None + + @pytest.mark.anyio + async def test_create_resource(self): + """Test creating a resource from a template.""" + + def my_func(key: str, value: int) -> dict: + return {"key": 
key, "value": value} + + template = ResourceTemplate.from_function( + fn=my_func, + uri_template="test://{key}/{value}", + name="test", + ) + + resource = await template.create_resource( + "test://foo/123", + {"key": "foo", "value": 123}, + ) + + assert isinstance(resource, FunctionResource) + content = await resource.read() + assert isinstance(content, str) + data = json.loads(content) + assert data == {"key": "foo", "value": 123} + + @pytest.mark.anyio + async def test_template_error(self): + """Test error handling in template resource creation.""" + + def failing_func(x: str) -> str: + raise ValueError("Test error") + + template = ResourceTemplate.from_function( + fn=failing_func, + uri_template="fail://{x}", + name="fail", + ) + + with pytest.raises(ValueError, match="Error creating resource from template"): + await template.create_resource("fail://test", {"x": "test"}) + + @pytest.mark.anyio + async def test_async_text_resource(self): + """Test creating a text resource from async function.""" + + async def greet(name: str) -> str: + return f"Hello, {name}!" + + template = ResourceTemplate.from_function( + fn=greet, + uri_template="greet://{name}", + name="greeter", + ) + + resource = await template.create_resource( + "greet://world", + {"name": "world"}, + ) + + assert isinstance(resource, FunctionResource) + content = await resource.read() + assert content == "Hello, world!" 
+ + @pytest.mark.anyio + async def test_async_binary_resource(self): + """Test creating a binary resource from async function.""" + + async def get_bytes(value: str) -> bytes: + return value.encode() + + template = ResourceTemplate.from_function( + fn=get_bytes, + uri_template="bytes://{value}", + name="bytes", + ) + + resource = await template.create_resource( + "bytes://test", + {"value": "test"}, + ) + + assert isinstance(resource, FunctionResource) + content = await resource.read() + assert content == b"test" + + @pytest.mark.anyio + async def test_basemodel_conversion(self): + """Test handling of BaseModel types.""" + + class MyModel(BaseModel): + key: str + value: int + + def get_data(key: str, value: int) -> MyModel: + return MyModel(key=key, value=value) + + template = ResourceTemplate.from_function( + fn=get_data, + uri_template="test://{key}/{value}", + name="test", + ) + + resource = await template.create_resource( + "test://foo/123", + {"key": "foo", "value": 123}, + ) + + assert isinstance(resource, FunctionResource) + content = await resource.read() + assert isinstance(content, str) + data = json.loads(content) + assert data == {"key": "foo", "value": 123} + + @pytest.mark.anyio + async def test_custom_type_conversion(self): + """Test handling of custom types.""" + + class CustomData: + def __init__(self, value: str): + self.value = value + + def __str__(self) -> str: + return self.value + + def get_data(value: str) -> CustomData: + return CustomData(value) + + template = ResourceTemplate.from_function( + fn=get_data, + uri_template="test://{value}", + name="test", + ) + + resource = await template.create_resource( + "test://hello", + {"value": "hello"}, + ) + + assert isinstance(resource, FunctionResource) + content = await resource.read() + assert content == '"hello"' diff --git a/tests/server/fastmcp/resources/test_resources.py b/tests/server/fastmcp/resources/test_resources.py index 08b3e65e1..1732e32c7 100644 --- 
a/tests/server/fastmcp/resources/test_resources.py +++ b/tests/server/fastmcp/resources/test_resources.py @@ -1,101 +1,101 @@ -import pytest -from pydantic import AnyUrl - -from mcp.server.fastmcp.resources import FunctionResource, Resource - - -class TestResourceValidation: - """Test base Resource validation.""" - - def test_resource_uri_validation(self): - """Test URI validation.""" - - def dummy_func() -> str: - return "data" - - # Valid URI - resource = FunctionResource( - uri=AnyUrl("http://example.com/data"), - name="test", - fn=dummy_func, - ) - assert str(resource.uri) == "http://example.com/data" - - # Missing protocol - with pytest.raises(ValueError, match="Input should be a valid URL"): - FunctionResource( - uri=AnyUrl("invalid"), - name="test", - fn=dummy_func, - ) - - # Missing host - with pytest.raises(ValueError, match="Input should be a valid URL"): - FunctionResource( - uri=AnyUrl("http://"), - name="test", - fn=dummy_func, - ) - - def test_resource_name_from_uri(self): - """Test name is extracted from URI if not provided.""" - - def dummy_func() -> str: - return "data" - - resource = FunctionResource( - uri=AnyUrl("resource://my-resource"), - fn=dummy_func, - ) - assert resource.name == "resource://my-resource" - - def test_resource_name_validation(self): - """Test name validation.""" - - def dummy_func() -> str: - return "data" - - # Must provide either name or URI - with pytest.raises(ValueError, match="Either name or uri must be provided"): - FunctionResource( - fn=dummy_func, - ) - - # Explicit name takes precedence over URI - resource = FunctionResource( - uri=AnyUrl("resource://uri-name"), - name="explicit-name", - fn=dummy_func, - ) - assert resource.name == "explicit-name" - - def test_resource_mime_type(self): - """Test mime type handling.""" - - def dummy_func() -> str: - return "data" - - # Default mime type - resource = FunctionResource( - uri=AnyUrl("resource://test"), - fn=dummy_func, - ) - assert resource.mime_type == "text/plain" - 
- # Custom mime type - resource = FunctionResource( - uri=AnyUrl("resource://test"), - fn=dummy_func, - mime_type="application/json", - ) - assert resource.mime_type == "application/json" - - @pytest.mark.anyio - async def test_resource_read_abstract(self): - """Test that Resource.read() is abstract.""" - - class ConcreteResource(Resource): - pass - - with pytest.raises(TypeError, match="abstract method"): - ConcreteResource(uri=AnyUrl("test://test"), name="test") # type: ignore +import pytest +from pydantic import AnyUrl + +from mcp.server.fastmcp.resources import FunctionResource, Resource + + +class TestResourceValidation: + """Test base Resource validation.""" + + def test_resource_uri_validation(self): + """Test URI validation.""" + + def dummy_func() -> str: + return "data" + + # Valid URI + resource = FunctionResource( + uri=AnyUrl("http://example.com/data"), + name="test", + fn=dummy_func, + ) + assert str(resource.uri) == "http://example.com/data" + + # Missing protocol + with pytest.raises(ValueError, match="Input should be a valid URL"): + FunctionResource( + uri=AnyUrl("invalid"), + name="test", + fn=dummy_func, + ) + + # Missing host + with pytest.raises(ValueError, match="Input should be a valid URL"): + FunctionResource( + uri=AnyUrl("http://"), + name="test", + fn=dummy_func, + ) + + def test_resource_name_from_uri(self): + """Test name is extracted from URI if not provided.""" + + def dummy_func() -> str: + return "data" + + resource = FunctionResource( + uri=AnyUrl("resource://my-resource"), + fn=dummy_func, + ) + assert resource.name == "resource://my-resource" + + def test_resource_name_validation(self): + """Test name validation.""" + + def dummy_func() -> str: + return "data" + + # Must provide either name or URI + with pytest.raises(ValueError, match="Either name or uri must be provided"): + FunctionResource( + fn=dummy_func, + ) + + # Explicit name takes precedence over URI + resource = FunctionResource( + uri=AnyUrl("resource://uri-name"), 
+ name="explicit-name", + fn=dummy_func, + ) + assert resource.name == "explicit-name" + + def test_resource_mime_type(self): + """Test mime type handling.""" + + def dummy_func() -> str: + return "data" + + # Default mime type + resource = FunctionResource( + uri=AnyUrl("resource://test"), + fn=dummy_func, + ) + assert resource.mime_type == "text/plain" + + # Custom mime type + resource = FunctionResource( + uri=AnyUrl("resource://test"), + fn=dummy_func, + mime_type="application/json", + ) + assert resource.mime_type == "application/json" + + @pytest.mark.anyio + async def test_resource_read_abstract(self): + """Test that Resource.read() is abstract.""" + + class ConcreteResource(Resource): + pass + + with pytest.raises(TypeError, match="abstract method"): + ConcreteResource(uri=AnyUrl("test://test"), name="test") # type: ignore diff --git a/tests/server/fastmcp/servers/test_file_server.py b/tests/server/fastmcp/servers/test_file_server.py index b40778ea8..d899b2618 100644 --- a/tests/server/fastmcp/servers/test_file_server.py +++ b/tests/server/fastmcp/servers/test_file_server.py @@ -1,128 +1,128 @@ -import json -from pathlib import Path - -import pytest - -from mcp.server.fastmcp import FastMCP - - -@pytest.fixture() -def test_dir(tmp_path_factory) -> Path: - """Create a temporary directory with test files.""" - tmp = tmp_path_factory.mktemp("test_files") - - # Create test files - (tmp / "example.py").write_text("print('hello world')") - (tmp / "readme.md").write_text("# Test Directory\nThis is a test.") - (tmp / "config.json").write_text('{"test": true}') - - return tmp - - -@pytest.fixture -def mcp() -> FastMCP: - mcp = FastMCP() - - return mcp - - -@pytest.fixture(autouse=True) -def resources(mcp: FastMCP, test_dir: Path) -> FastMCP: - @mcp.resource("dir://test_dir") - def list_test_dir() -> list[str]: - """List the files in the test directory""" - return [str(f) for f in test_dir.iterdir()] - - @mcp.resource("file://test_dir/example.py") - def 
read_example_py() -> str: - """Read the example.py file""" - try: - return (test_dir / "example.py").read_text() - except FileNotFoundError: - return "File not found" - - @mcp.resource("file://test_dir/readme.md") - def read_readme_md() -> str: - """Read the readme.md file""" - try: - return (test_dir / "readme.md").read_text() - except FileNotFoundError: - return "File not found" - - @mcp.resource("file://test_dir/config.json") - def read_config_json() -> str: - """Read the config.json file""" - try: - return (test_dir / "config.json").read_text() - except FileNotFoundError: - return "File not found" - - return mcp - - -@pytest.fixture(autouse=True) -def tools(mcp: FastMCP, test_dir: Path) -> FastMCP: - @mcp.tool() - def delete_file(path: str) -> bool: - # ensure path is in test_dir - if Path(path).resolve().parent != test_dir: - raise ValueError(f"Path must be in test_dir: {path}") - Path(path).unlink() - return True - - return mcp - - -@pytest.mark.anyio -async def test_list_resources(mcp: FastMCP): - resources = await mcp.list_resources() - assert len(resources) == 4 - - assert [str(r.uri) for r in resources] == [ - "dir://test_dir", - "file://test_dir/example.py", - "file://test_dir/readme.md", - "file://test_dir/config.json", - ] - - -@pytest.mark.anyio -async def test_read_resource_dir(mcp: FastMCP): - res_iter = await mcp.read_resource("dir://test_dir") - res_list = list(res_iter) - assert len(res_list) == 1 - res = res_list[0] - assert res.mime_type == "text/plain" - - files = json.loads(res.content) - - assert sorted([Path(f).name for f in files]) == [ - "config.json", - "example.py", - "readme.md", - ] - - -@pytest.mark.anyio -async def test_read_resource_file(mcp: FastMCP): - res_iter = await mcp.read_resource("file://test_dir/example.py") - res_list = list(res_iter) - assert len(res_list) == 1 - res = res_list[0] - assert res.content == "print('hello world')" - - -@pytest.mark.anyio -async def test_delete_file(mcp: FastMCP, test_dir: Path): - await 
mcp.call_tool("delete_file", arguments={"path": str(test_dir / "example.py")}) - assert not (test_dir / "example.py").exists() - - -@pytest.mark.anyio -async def test_delete_file_and_check_resources(mcp: FastMCP, test_dir: Path): - await mcp.call_tool("delete_file", arguments={"path": str(test_dir / "example.py")}) - res_iter = await mcp.read_resource("file://test_dir/example.py") - res_list = list(res_iter) - assert len(res_list) == 1 - res = res_list[0] - assert res.content == "File not found" +import json +from pathlib import Path + +import pytest + +from mcp.server.fastmcp import FastMCP + + +@pytest.fixture() +def test_dir(tmp_path_factory) -> Path: + """Create a temporary directory with test files.""" + tmp = tmp_path_factory.mktemp("test_files") + + # Create test files + (tmp / "example.py").write_text("print('hello world')") + (tmp / "readme.md").write_text("# Test Directory\nThis is a test.") + (tmp / "config.json").write_text('{"test": true}') + + return tmp + + +@pytest.fixture +def mcp() -> FastMCP: + mcp = FastMCP() + + return mcp + + +@pytest.fixture(autouse=True) +def resources(mcp: FastMCP, test_dir: Path) -> FastMCP: + @mcp.resource("dir://test_dir") + def list_test_dir() -> list[str]: + """List the files in the test directory""" + return [str(f) for f in test_dir.iterdir()] + + @mcp.resource("file://test_dir/example.py") + def read_example_py() -> str: + """Read the example.py file""" + try: + return (test_dir / "example.py").read_text() + except FileNotFoundError: + return "File not found" + + @mcp.resource("file://test_dir/readme.md") + def read_readme_md() -> str: + """Read the readme.md file""" + try: + return (test_dir / "readme.md").read_text() + except FileNotFoundError: + return "File not found" + + @mcp.resource("file://test_dir/config.json") + def read_config_json() -> str: + """Read the config.json file""" + try: + return (test_dir / "config.json").read_text() + except FileNotFoundError: + return "File not found" + + return mcp + + 
+@pytest.fixture(autouse=True) +def tools(mcp: FastMCP, test_dir: Path) -> FastMCP: + @mcp.tool() + def delete_file(path: str) -> bool: + # ensure path is in test_dir + if Path(path).resolve().parent != test_dir: + raise ValueError(f"Path must be in test_dir: {path}") + Path(path).unlink() + return True + + return mcp + + +@pytest.mark.anyio +async def test_list_resources(mcp: FastMCP): + resources = await mcp.list_resources() + assert len(resources) == 4 + + assert [str(r.uri) for r in resources] == [ + "dir://test_dir", + "file://test_dir/example.py", + "file://test_dir/readme.md", + "file://test_dir/config.json", + ] + + +@pytest.mark.anyio +async def test_read_resource_dir(mcp: FastMCP): + res_iter = await mcp.read_resource("dir://test_dir") + res_list = list(res_iter) + assert len(res_list) == 1 + res = res_list[0] + assert res.mime_type == "text/plain" + + files = json.loads(res.content) + + assert sorted([Path(f).name for f in files]) == [ + "config.json", + "example.py", + "readme.md", + ] + + +@pytest.mark.anyio +async def test_read_resource_file(mcp: FastMCP): + res_iter = await mcp.read_resource("file://test_dir/example.py") + res_list = list(res_iter) + assert len(res_list) == 1 + res = res_list[0] + assert res.content == "print('hello world')" + + +@pytest.mark.anyio +async def test_delete_file(mcp: FastMCP, test_dir: Path): + await mcp.call_tool("delete_file", arguments={"path": str(test_dir / "example.py")}) + assert not (test_dir / "example.py").exists() + + +@pytest.mark.anyio +async def test_delete_file_and_check_resources(mcp: FastMCP, test_dir: Path): + await mcp.call_tool("delete_file", arguments={"path": str(test_dir / "example.py")}) + res_iter = await mcp.read_resource("file://test_dir/example.py") + res_list = list(res_iter) + assert len(res_list) == 1 + res = res_list[0] + assert res.content == "File not found" diff --git a/tests/server/fastmcp/test_func_metadata.py b/tests/server/fastmcp/test_func_metadata.py index b1828ffe9..542ed94ff 
100644 --- a/tests/server/fastmcp/test_func_metadata.py +++ b/tests/server/fastmcp/test_func_metadata.py @@ -1,416 +1,416 @@ -from typing import Annotated - -import annotated_types -import pytest -from pydantic import BaseModel, Field - -from mcp.server.fastmcp.utilities.func_metadata import func_metadata - - -class SomeInputModelA(BaseModel): - pass - - -class SomeInputModelB(BaseModel): - class InnerModel(BaseModel): - x: int - - how_many_shrimp: Annotated[int, Field(description="How many shrimp in the tank???")] - ok: InnerModel - y: None - - -def complex_arguments_fn( - an_int: int, - must_be_none: None, - must_be_none_dumb_annotation: Annotated[None, "blah"], - list_of_ints: list[int], - # list[str] | str is an interesting case because if it comes in as JSON like - # "[\"a\", \"b\"]" then it will be naively parsed as a string. - list_str_or_str: list[str] | str, - an_int_annotated_with_field: Annotated[ - int, Field(description="An int with a field") - ], - an_int_annotated_with_field_and_others: Annotated[ - int, - str, # Should be ignored, really - Field(description="An int with a field"), - annotated_types.Gt(1), - ], - an_int_annotated_with_junk: Annotated[ - int, - "123", - 456, - ], - field_with_default_via_field_annotation_before_nondefault_arg: Annotated[ - int, Field(1) - ], - unannotated, - my_model_a: SomeInputModelA, - my_model_a_forward_ref: "SomeInputModelA", - my_model_b: SomeInputModelB, - an_int_annotated_with_field_default: Annotated[ - int, - Field(1, description="An int with a field"), - ], - unannotated_with_default=5, - my_model_a_with_default: SomeInputModelA = SomeInputModelA(), # noqa: B008 - an_int_with_default: int = 1, - must_be_none_with_default: None = None, - an_int_with_equals_field: int = Field(1, ge=0), - int_annotated_with_default: Annotated[int, Field(description="hey")] = 5, -) -> str: - _ = ( - an_int, - must_be_none, - must_be_none_dumb_annotation, - list_of_ints, - list_str_or_str, - an_int_annotated_with_field, - 
an_int_annotated_with_field_and_others, - an_int_annotated_with_junk, - field_with_default_via_field_annotation_before_nondefault_arg, - unannotated, - an_int_annotated_with_field_default, - unannotated_with_default, - my_model_a, - my_model_a_forward_ref, - my_model_b, - my_model_a_with_default, - an_int_with_default, - must_be_none_with_default, - an_int_with_equals_field, - int_annotated_with_default, - ) - return "ok!" - - -@pytest.mark.anyio -async def test_complex_function_runtime_arg_validation_non_json(): - """Test that basic non-JSON arguments are validated correctly""" - meta = func_metadata(complex_arguments_fn) - - # Test with minimum required arguments - result = await meta.call_fn_with_arg_validation( - complex_arguments_fn, - fn_is_async=False, - arguments_to_validate={ - "an_int": 1, - "must_be_none": None, - "must_be_none_dumb_annotation": None, - "list_of_ints": [1, 2, 3], - "list_str_or_str": "hello", - "an_int_annotated_with_field": 42, - "an_int_annotated_with_field_and_others": 5, - "an_int_annotated_with_junk": 100, - "unannotated": "test", - "my_model_a": {}, - "my_model_a_forward_ref": {}, - "my_model_b": {"how_many_shrimp": 5, "ok": {"x": 1}, "y": None}, - }, - arguments_to_pass_directly=None, - ) - assert result == "ok!" 
- - # Test with invalid types - with pytest.raises(ValueError): - await meta.call_fn_with_arg_validation( - complex_arguments_fn, - fn_is_async=False, - arguments_to_validate={"an_int": "not an int"}, - arguments_to_pass_directly=None, - ) - - -@pytest.mark.anyio -async def test_complex_function_runtime_arg_validation_with_json(): - """Test that JSON string arguments are parsed and validated correctly""" - meta = func_metadata(complex_arguments_fn) - - result = await meta.call_fn_with_arg_validation( - complex_arguments_fn, - fn_is_async=False, - arguments_to_validate={ - "an_int": 1, - "must_be_none": None, - "must_be_none_dumb_annotation": None, - "list_of_ints": "[1, 2, 3]", # JSON string - "list_str_or_str": '["a", "b", "c"]', # JSON string - "an_int_annotated_with_field": 42, - "an_int_annotated_with_field_and_others": "5", # JSON string - "an_int_annotated_with_junk": 100, - "unannotated": "test", - "my_model_a": "{}", # JSON string - "my_model_a_forward_ref": "{}", # JSON string - "my_model_b": '{"how_many_shrimp": 5, "ok": {"x": 1}, "y": null}', - }, - arguments_to_pass_directly=None, - ) - assert result == "ok!" - - -def test_str_vs_list_str(): - """Test handling of string vs list[str] type annotations. - - This is tricky as '"hello"' can be parsed as a JSON string or a Python string. - We want to make sure it's kept as a python string. 
- """ - - def func_with_str_types(str_or_list: str | list[str]): - return str_or_list - - meta = func_metadata(func_with_str_types) - - # Test string input for union type - result = meta.pre_parse_json({"str_or_list": "hello"}) - assert result["str_or_list"] == "hello" - - # Test string input that contains valid JSON for union type - # We want to see here that the JSON-vali string is NOT parsed as JSON, but rather - # kept as a raw string - result = meta.pre_parse_json({"str_or_list": '"hello"'}) - assert result["str_or_list"] == '"hello"' - - # Test list input for union type - result = meta.pre_parse_json({"str_or_list": '["hello", "world"]'}) - assert result["str_or_list"] == ["hello", "world"] - - -def test_skip_names(): - """Test that skipped parameters are not included in the model""" - - def func_with_many_params( - keep_this: int, skip_this: str, also_keep: float, also_skip: bool - ): - return keep_this, skip_this, also_keep, also_skip - - # Skip some parameters - meta = func_metadata(func_with_many_params, skip_names=["skip_this", "also_skip"]) - - # Check model fields - assert "keep_this" in meta.arg_model.model_fields - assert "also_keep" in meta.arg_model.model_fields - assert "skip_this" not in meta.arg_model.model_fields - assert "also_skip" not in meta.arg_model.model_fields - - # Validate that we can call with only non-skipped parameters - model: BaseModel = meta.arg_model.model_validate({"keep_this": 1, "also_keep": 2.5}) # type: ignore - assert model.keep_this == 1 # type: ignore - assert model.also_keep == 2.5 # type: ignore - - -@pytest.mark.anyio -async def test_lambda_function(): - """Test lambda function schema and validation""" - fn = lambda x, y=5: x # noqa: E731 - meta = func_metadata(lambda x, y=5: x) - - # Test schema - assert meta.arg_model.model_json_schema() == { - "properties": { - "x": {"title": "x", "type": "string"}, - "y": {"default": 5, "title": "y", "type": "string"}, - }, - "required": ["x"], - "title": "Arguments", - "type": 
"object", - } - - async def check_call(args): - return await meta.call_fn_with_arg_validation( - fn, - fn_is_async=False, - arguments_to_validate=args, - arguments_to_pass_directly=None, - ) - - # Basic calls - assert await check_call({"x": "hello"}) == "hello" - assert await check_call({"x": "hello", "y": "world"}) == "hello" - assert await check_call({"x": '"hello"'}) == '"hello"' - - # Missing required arg - with pytest.raises(ValueError): - await check_call({"y": "world"}) - - -def test_complex_function_json_schema(): - """Test JSON schema generation for complex function arguments. - - Note: Different versions of pydantic output slightly different - JSON Schema formats for model fields with defaults. The format changed in 2.9.0: - - 1. Before 2.9.0: - { - "allOf": [{"$ref": "#/$defs/Model"}], - "default": {} - } - - 2. Since 2.9.0: - { - "$ref": "#/$defs/Model", - "default": {} - } - - Both formats are valid and functionally equivalent. This test accepts either format - to ensure compatibility across our supported pydantic versions. - - This change in format does not affect runtime behavior since: - 1. Both schemas validate the same way - 2. The actual model classes and validation logic are unchanged - 3. 
func_metadata uses model_validate/model_dump, not the schema directly - """ - meta = func_metadata(complex_arguments_fn) - actual_schema = meta.arg_model.model_json_schema() - - # Create a copy of the actual schema to normalize - normalized_schema = actual_schema.copy() - - # Normalize the my_model_a_with_default field to handle both pydantic formats - if "allOf" in actual_schema["properties"]["my_model_a_with_default"]: - normalized_schema["properties"]["my_model_a_with_default"] = { - "$ref": "#/$defs/SomeInputModelA", - "default": {}, - } - - assert normalized_schema == { - "$defs": { - "InnerModel": { - "properties": {"x": {"title": "X", "type": "integer"}}, - "required": ["x"], - "title": "InnerModel", - "type": "object", - }, - "SomeInputModelA": { - "properties": {}, - "title": "SomeInputModelA", - "type": "object", - }, - "SomeInputModelB": { - "properties": { - "how_many_shrimp": { - "description": "How many shrimp in the tank???", - "title": "How Many Shrimp", - "type": "integer", - }, - "ok": {"$ref": "#/$defs/InnerModel"}, - "y": {"title": "Y", "type": "null"}, - }, - "required": ["how_many_shrimp", "ok", "y"], - "title": "SomeInputModelB", - "type": "object", - }, - }, - "properties": { - "an_int": {"title": "An Int", "type": "integer"}, - "must_be_none": {"title": "Must Be None", "type": "null"}, - "must_be_none_dumb_annotation": { - "title": "Must Be None Dumb Annotation", - "type": "null", - }, - "list_of_ints": { - "items": {"type": "integer"}, - "title": "List Of Ints", - "type": "array", - }, - "list_str_or_str": { - "anyOf": [ - {"items": {"type": "string"}, "type": "array"}, - {"type": "string"}, - ], - "title": "List Str Or Str", - }, - "an_int_annotated_with_field": { - "description": "An int with a field", - "title": "An Int Annotated With Field", - "type": "integer", - }, - "an_int_annotated_with_field_and_others": { - "description": "An int with a field", - "exclusiveMinimum": 1, - "title": "An Int Annotated With Field And Others", - 
"type": "integer", - }, - "an_int_annotated_with_junk": { - "title": "An Int Annotated With Junk", - "type": "integer", - }, - "field_with_default_via_field_annotation_before_nondefault_arg": { - "default": 1, - "title": "Field With Default Via Field Annotation Before Nondefault Arg", - "type": "integer", - }, - "unannotated": {"title": "unannotated", "type": "string"}, - "my_model_a": {"$ref": "#/$defs/SomeInputModelA"}, - "my_model_a_forward_ref": {"$ref": "#/$defs/SomeInputModelA"}, - "my_model_b": {"$ref": "#/$defs/SomeInputModelB"}, - "an_int_annotated_with_field_default": { - "default": 1, - "description": "An int with a field", - "title": "An Int Annotated With Field Default", - "type": "integer", - }, - "unannotated_with_default": { - "default": 5, - "title": "unannotated_with_default", - "type": "string", - }, - "my_model_a_with_default": { - "$ref": "#/$defs/SomeInputModelA", - "default": {}, - }, - "an_int_with_default": { - "default": 1, - "title": "An Int With Default", - "type": "integer", - }, - "must_be_none_with_default": { - "default": None, - "title": "Must Be None With Default", - "type": "null", - }, - "an_int_with_equals_field": { - "default": 1, - "minimum": 0, - "title": "An Int With Equals Field", - "type": "integer", - }, - "int_annotated_with_default": { - "default": 5, - "description": "hey", - "title": "Int Annotated With Default", - "type": "integer", - }, - }, - "required": [ - "an_int", - "must_be_none", - "must_be_none_dumb_annotation", - "list_of_ints", - "list_str_or_str", - "an_int_annotated_with_field", - "an_int_annotated_with_field_and_others", - "an_int_annotated_with_junk", - "unannotated", - "my_model_a", - "my_model_a_forward_ref", - "my_model_b", - ], - "title": "complex_arguments_fnArguments", - "type": "object", - } - - -def test_str_vs_int(): - """ - Test that string values are kept as strings even when they contain numbers, - while numbers are parsed correctly. 
- """ - - def func_with_str_and_int(a: str, b: int): - return a - - meta = func_metadata(func_with_str_and_int) - result = meta.pre_parse_json({"a": "123", "b": 123}) - assert result["a"] == "123" - assert result["b"] == 123 +from typing import Annotated + +import annotated_types +import pytest +from pydantic import BaseModel, Field + +from mcp.server.fastmcp.utilities.func_metadata import func_metadata + + +class SomeInputModelA(BaseModel): + pass + + +class SomeInputModelB(BaseModel): + class InnerModel(BaseModel): + x: int + + how_many_shrimp: Annotated[int, Field(description="How many shrimp in the tank???")] + ok: InnerModel + y: None + + +def complex_arguments_fn( + an_int: int, + must_be_none: None, + must_be_none_dumb_annotation: Annotated[None, "blah"], + list_of_ints: list[int], + # list[str] | str is an interesting case because if it comes in as JSON like + # "[\"a\", \"b\"]" then it will be naively parsed as a string. + list_str_or_str: list[str] | str, + an_int_annotated_with_field: Annotated[ + int, Field(description="An int with a field") + ], + an_int_annotated_with_field_and_others: Annotated[ + int, + str, # Should be ignored, really + Field(description="An int with a field"), + annotated_types.Gt(1), + ], + an_int_annotated_with_junk: Annotated[ + int, + "123", + 456, + ], + field_with_default_via_field_annotation_before_nondefault_arg: Annotated[ + int, Field(1) + ], + unannotated, + my_model_a: SomeInputModelA, + my_model_a_forward_ref: "SomeInputModelA", + my_model_b: SomeInputModelB, + an_int_annotated_with_field_default: Annotated[ + int, + Field(1, description="An int with a field"), + ], + unannotated_with_default=5, + my_model_a_with_default: SomeInputModelA = SomeInputModelA(), # noqa: B008 + an_int_with_default: int = 1, + must_be_none_with_default: None = None, + an_int_with_equals_field: int = Field(1, ge=0), + int_annotated_with_default: Annotated[int, Field(description="hey")] = 5, +) -> str: + _ = ( + an_int, + must_be_none, + 
must_be_none_dumb_annotation, + list_of_ints, + list_str_or_str, + an_int_annotated_with_field, + an_int_annotated_with_field_and_others, + an_int_annotated_with_junk, + field_with_default_via_field_annotation_before_nondefault_arg, + unannotated, + an_int_annotated_with_field_default, + unannotated_with_default, + my_model_a, + my_model_a_forward_ref, + my_model_b, + my_model_a_with_default, + an_int_with_default, + must_be_none_with_default, + an_int_with_equals_field, + int_annotated_with_default, + ) + return "ok!" + + +@pytest.mark.anyio +async def test_complex_function_runtime_arg_validation_non_json(): + """Test that basic non-JSON arguments are validated correctly""" + meta = func_metadata(complex_arguments_fn) + + # Test with minimum required arguments + result = await meta.call_fn_with_arg_validation( + complex_arguments_fn, + fn_is_async=False, + arguments_to_validate={ + "an_int": 1, + "must_be_none": None, + "must_be_none_dumb_annotation": None, + "list_of_ints": [1, 2, 3], + "list_str_or_str": "hello", + "an_int_annotated_with_field": 42, + "an_int_annotated_with_field_and_others": 5, + "an_int_annotated_with_junk": 100, + "unannotated": "test", + "my_model_a": {}, + "my_model_a_forward_ref": {}, + "my_model_b": {"how_many_shrimp": 5, "ok": {"x": 1}, "y": None}, + }, + arguments_to_pass_directly=None, + ) + assert result == "ok!" 
+ + # Test with invalid types + with pytest.raises(ValueError): + await meta.call_fn_with_arg_validation( + complex_arguments_fn, + fn_is_async=False, + arguments_to_validate={"an_int": "not an int"}, + arguments_to_pass_directly=None, + ) + + +@pytest.mark.anyio +async def test_complex_function_runtime_arg_validation_with_json(): + """Test that JSON string arguments are parsed and validated correctly""" + meta = func_metadata(complex_arguments_fn) + + result = await meta.call_fn_with_arg_validation( + complex_arguments_fn, + fn_is_async=False, + arguments_to_validate={ + "an_int": 1, + "must_be_none": None, + "must_be_none_dumb_annotation": None, + "list_of_ints": "[1, 2, 3]", # JSON string + "list_str_or_str": '["a", "b", "c"]', # JSON string + "an_int_annotated_with_field": 42, + "an_int_annotated_with_field_and_others": "5", # JSON string + "an_int_annotated_with_junk": 100, + "unannotated": "test", + "my_model_a": "{}", # JSON string + "my_model_a_forward_ref": "{}", # JSON string + "my_model_b": '{"how_many_shrimp": 5, "ok": {"x": 1}, "y": null}', + }, + arguments_to_pass_directly=None, + ) + assert result == "ok!" + + +def test_str_vs_list_str(): + """Test handling of string vs list[str] type annotations. + + This is tricky as '"hello"' can be parsed as a JSON string or a Python string. + We want to make sure it's kept as a python string. 
+ """ + + def func_with_str_types(str_or_list: str | list[str]): + return str_or_list + + meta = func_metadata(func_with_str_types) + + # Test string input for union type + result = meta.pre_parse_json({"str_or_list": "hello"}) + assert result["str_or_list"] == "hello" + + # Test string input that contains valid JSON for union type + # We want to see here that the JSON-vali string is NOT parsed as JSON, but rather + # kept as a raw string + result = meta.pre_parse_json({"str_or_list": '"hello"'}) + assert result["str_or_list"] == '"hello"' + + # Test list input for union type + result = meta.pre_parse_json({"str_or_list": '["hello", "world"]'}) + assert result["str_or_list"] == ["hello", "world"] + + +def test_skip_names(): + """Test that skipped parameters are not included in the model""" + + def func_with_many_params( + keep_this: int, skip_this: str, also_keep: float, also_skip: bool + ): + return keep_this, skip_this, also_keep, also_skip + + # Skip some parameters + meta = func_metadata(func_with_many_params, skip_names=["skip_this", "also_skip"]) + + # Check model fields + assert "keep_this" in meta.arg_model.model_fields + assert "also_keep" in meta.arg_model.model_fields + assert "skip_this" not in meta.arg_model.model_fields + assert "also_skip" not in meta.arg_model.model_fields + + # Validate that we can call with only non-skipped parameters + model: BaseModel = meta.arg_model.model_validate({"keep_this": 1, "also_keep": 2.5}) # type: ignore + assert model.keep_this == 1 # type: ignore + assert model.also_keep == 2.5 # type: ignore + + +@pytest.mark.anyio +async def test_lambda_function(): + """Test lambda function schema and validation""" + fn = lambda x, y=5: x # noqa: E731 + meta = func_metadata(lambda x, y=5: x) + + # Test schema + assert meta.arg_model.model_json_schema() == { + "properties": { + "x": {"title": "x", "type": "string"}, + "y": {"default": 5, "title": "y", "type": "string"}, + }, + "required": ["x"], + "title": "Arguments", + "type": 
"object", + } + + async def check_call(args): + return await meta.call_fn_with_arg_validation( + fn, + fn_is_async=False, + arguments_to_validate=args, + arguments_to_pass_directly=None, + ) + + # Basic calls + assert await check_call({"x": "hello"}) == "hello" + assert await check_call({"x": "hello", "y": "world"}) == "hello" + assert await check_call({"x": '"hello"'}) == '"hello"' + + # Missing required arg + with pytest.raises(ValueError): + await check_call({"y": "world"}) + + +def test_complex_function_json_schema(): + """Test JSON schema generation for complex function arguments. + + Note: Different versions of pydantic output slightly different + JSON Schema formats for model fields with defaults. The format changed in 2.9.0: + + 1. Before 2.9.0: + { + "allOf": [{"$ref": "#/$defs/Model"}], + "default": {} + } + + 2. Since 2.9.0: + { + "$ref": "#/$defs/Model", + "default": {} + } + + Both formats are valid and functionally equivalent. This test accepts either format + to ensure compatibility across our supported pydantic versions. + + This change in format does not affect runtime behavior since: + 1. Both schemas validate the same way + 2. The actual model classes and validation logic are unchanged + 3. 
func_metadata uses model_validate/model_dump, not the schema directly + """ + meta = func_metadata(complex_arguments_fn) + actual_schema = meta.arg_model.model_json_schema() + + # Create a copy of the actual schema to normalize + normalized_schema = actual_schema.copy() + + # Normalize the my_model_a_with_default field to handle both pydantic formats + if "allOf" in actual_schema["properties"]["my_model_a_with_default"]: + normalized_schema["properties"]["my_model_a_with_default"] = { + "$ref": "#/$defs/SomeInputModelA", + "default": {}, + } + + assert normalized_schema == { + "$defs": { + "InnerModel": { + "properties": {"x": {"title": "X", "type": "integer"}}, + "required": ["x"], + "title": "InnerModel", + "type": "object", + }, + "SomeInputModelA": { + "properties": {}, + "title": "SomeInputModelA", + "type": "object", + }, + "SomeInputModelB": { + "properties": { + "how_many_shrimp": { + "description": "How many shrimp in the tank???", + "title": "How Many Shrimp", + "type": "integer", + }, + "ok": {"$ref": "#/$defs/InnerModel"}, + "y": {"title": "Y", "type": "null"}, + }, + "required": ["how_many_shrimp", "ok", "y"], + "title": "SomeInputModelB", + "type": "object", + }, + }, + "properties": { + "an_int": {"title": "An Int", "type": "integer"}, + "must_be_none": {"title": "Must Be None", "type": "null"}, + "must_be_none_dumb_annotation": { + "title": "Must Be None Dumb Annotation", + "type": "null", + }, + "list_of_ints": { + "items": {"type": "integer"}, + "title": "List Of Ints", + "type": "array", + }, + "list_str_or_str": { + "anyOf": [ + {"items": {"type": "string"}, "type": "array"}, + {"type": "string"}, + ], + "title": "List Str Or Str", + }, + "an_int_annotated_with_field": { + "description": "An int with a field", + "title": "An Int Annotated With Field", + "type": "integer", + }, + "an_int_annotated_with_field_and_others": { + "description": "An int with a field", + "exclusiveMinimum": 1, + "title": "An Int Annotated With Field And Others", + 
"type": "integer", + }, + "an_int_annotated_with_junk": { + "title": "An Int Annotated With Junk", + "type": "integer", + }, + "field_with_default_via_field_annotation_before_nondefault_arg": { + "default": 1, + "title": "Field With Default Via Field Annotation Before Nondefault Arg", + "type": "integer", + }, + "unannotated": {"title": "unannotated", "type": "string"}, + "my_model_a": {"$ref": "#/$defs/SomeInputModelA"}, + "my_model_a_forward_ref": {"$ref": "#/$defs/SomeInputModelA"}, + "my_model_b": {"$ref": "#/$defs/SomeInputModelB"}, + "an_int_annotated_with_field_default": { + "default": 1, + "description": "An int with a field", + "title": "An Int Annotated With Field Default", + "type": "integer", + }, + "unannotated_with_default": { + "default": 5, + "title": "unannotated_with_default", + "type": "string", + }, + "my_model_a_with_default": { + "$ref": "#/$defs/SomeInputModelA", + "default": {}, + }, + "an_int_with_default": { + "default": 1, + "title": "An Int With Default", + "type": "integer", + }, + "must_be_none_with_default": { + "default": None, + "title": "Must Be None With Default", + "type": "null", + }, + "an_int_with_equals_field": { + "default": 1, + "minimum": 0, + "title": "An Int With Equals Field", + "type": "integer", + }, + "int_annotated_with_default": { + "default": 5, + "description": "hey", + "title": "Int Annotated With Default", + "type": "integer", + }, + }, + "required": [ + "an_int", + "must_be_none", + "must_be_none_dumb_annotation", + "list_of_ints", + "list_str_or_str", + "an_int_annotated_with_field", + "an_int_annotated_with_field_and_others", + "an_int_annotated_with_junk", + "unannotated", + "my_model_a", + "my_model_a_forward_ref", + "my_model_b", + ], + "title": "complex_arguments_fnArguments", + "type": "object", + } + + +def test_str_vs_int(): + """ + Test that string values are kept as strings even when they contain numbers, + while numbers are parsed correctly. 
+ """ + + def func_with_str_and_int(a: str, b: int): + return a + + meta = func_metadata(func_with_str_and_int) + result = meta.pre_parse_json({"a": "123", "b": 123}) + assert result["a"] == "123" + assert result["b"] == 123 diff --git a/tests/server/fastmcp/test_parameter_descriptions.py b/tests/server/fastmcp/test_parameter_descriptions.py index 29470ed19..67a59492e 100644 --- a/tests/server/fastmcp/test_parameter_descriptions.py +++ b/tests/server/fastmcp/test_parameter_descriptions.py @@ -1,30 +1,30 @@ -"""Test that parameter descriptions are properly exposed through list_tools""" - -import pytest -from pydantic import Field - -from mcp.server.fastmcp import FastMCP - - -@pytest.mark.anyio -async def test_parameter_descriptions(): - mcp = FastMCP("Test Server") - - @mcp.tool() - def greet( - name: str = Field(description="The name to greet"), - title: str = Field(description="Optional title", default=""), - ) -> str: - """A greeting tool""" - return f"Hello {title} {name}" - - tools = await mcp.list_tools() - assert len(tools) == 1 - tool = tools[0] - - # Check that parameter descriptions are present in the schema - properties = tool.inputSchema["properties"] - assert "name" in properties - assert properties["name"]["description"] == "The name to greet" - assert "title" in properties - assert properties["title"]["description"] == "Optional title" +"""Test that parameter descriptions are properly exposed through list_tools""" + +import pytest +from pydantic import Field + +from mcp.server.fastmcp import FastMCP + + +@pytest.mark.anyio +async def test_parameter_descriptions(): + mcp = FastMCP("Test Server") + + @mcp.tool() + def greet( + name: str = Field(description="The name to greet"), + title: str = Field(description="Optional title", default=""), + ) -> str: + """A greeting tool""" + return f"Hello {title} {name}" + + tools = await mcp.list_tools() + assert len(tools) == 1 + tool = tools[0] + + # Check that parameter descriptions are present in the schema + 
properties = tool.inputSchema["properties"] + assert "name" in properties + assert properties["name"]["description"] == "The name to greet" + assert "title" in properties + assert properties["title"]["description"] == "Optional title" diff --git a/tests/server/fastmcp/test_tool_manager.py b/tests/server/fastmcp/test_tool_manager.py index 015974eb0..7cce464b2 100644 --- a/tests/server/fastmcp/test_tool_manager.py +++ b/tests/server/fastmcp/test_tool_manager.py @@ -1,394 +1,759 @@ -import json -import logging - -import pytest -from pydantic import BaseModel - -from mcp.server.fastmcp import Context, FastMCP -from mcp.server.fastmcp.exceptions import ToolError -from mcp.server.fastmcp.tools import Tool, ToolManager -from mcp.server.fastmcp.utilities.func_metadata import ArgModelBase, FuncMetadata -from mcp.server.session import ServerSessionT -from mcp.shared.context import LifespanContextT -from mcp.types import ToolAnnotations - - -class TestAddTools: - def test_basic_function(self): - """Test registering and running a basic function.""" - - def add(a: int, b: int) -> int: - """Add two numbers.""" - return a + b - - manager = ToolManager() - manager.add_tool(add) - - tool = manager.get_tool("add") - assert tool is not None - assert tool.name == "add" - assert tool.description == "Add two numbers." 
- assert tool.is_async is False - assert tool.parameters["properties"]["a"]["type"] == "integer" - assert tool.parameters["properties"]["b"]["type"] == "integer" - - def test_init_with_tools(self, caplog): - def add(a: int, b: int) -> int: - return a + b - - class AddArguments(ArgModelBase): - a: int - b: int - - fn_metadata = FuncMetadata(arg_model=AddArguments) - - original_tool = Tool( - name="add", - description="Add two numbers.", - fn=add, - fn_metadata=fn_metadata, - is_async=False, - parameters=AddArguments.model_json_schema(), - context_kwarg=None, - annotations=None, - ) - manager = ToolManager(tools=[original_tool]) - saved_tool = manager.get_tool("add") - assert saved_tool == original_tool - - # warn on duplicate tools - with caplog.at_level(logging.WARNING): - manager = ToolManager(True, tools=[original_tool, original_tool]) - assert "Tool already exists: add" in caplog.text - - @pytest.mark.anyio - async def test_async_function(self): - """Test registering and running an async function.""" - - async def fetch_data(url: str) -> str: - """Fetch data from URL.""" - return f"Data from {url}" - - manager = ToolManager() - manager.add_tool(fetch_data) - - tool = manager.get_tool("fetch_data") - assert tool is not None - assert tool.name == "fetch_data" - assert tool.description == "Fetch data from URL." - assert tool.is_async is True - assert tool.parameters["properties"]["url"]["type"] == "string" - - def test_pydantic_model_function(self): - """Test registering a function that takes a Pydantic model.""" - - class UserInput(BaseModel): - name: str - age: int - - def create_user(user: UserInput, flag: bool) -> dict: - """Create a new user.""" - return {"id": 1, **user.model_dump()} - - manager = ToolManager() - manager.add_tool(create_user) - - tool = manager.get_tool("create_user") - assert tool is not None - assert tool.name == "create_user" - assert tool.description == "Create a new user." 
- assert tool.is_async is False - assert "name" in tool.parameters["$defs"]["UserInput"]["properties"] - assert "age" in tool.parameters["$defs"]["UserInput"]["properties"] - assert "flag" in tool.parameters["properties"] - - def test_add_invalid_tool(self): - manager = ToolManager() - with pytest.raises(AttributeError): - manager.add_tool(1) # type: ignore - - def test_add_lambda(self): - manager = ToolManager() - tool = manager.add_tool(lambda x: x, name="my_tool") - assert tool.name == "my_tool" - - def test_add_lambda_with_no_name(self): - manager = ToolManager() - with pytest.raises( - ValueError, match="You must provide a name for lambda functions" - ): - manager.add_tool(lambda x: x) - - def test_warn_on_duplicate_tools(self, caplog): - """Test warning on duplicate tools.""" - - def f(x: int) -> int: - return x - - manager = ToolManager() - manager.add_tool(f) - with caplog.at_level(logging.WARNING): - manager.add_tool(f) - assert "Tool already exists: f" in caplog.text - - def test_disable_warn_on_duplicate_tools(self, caplog): - """Test disabling warning on duplicate tools.""" - - def f(x: int) -> int: - return x - - manager = ToolManager() - manager.add_tool(f) - manager.warn_on_duplicate_tools = False - with caplog.at_level(logging.WARNING): - manager.add_tool(f) - assert "Tool already exists: f" not in caplog.text - - -class TestCallTools: - @pytest.mark.anyio - async def test_call_tool(self): - def add(a: int, b: int) -> int: - """Add two numbers.""" - return a + b - - manager = ToolManager() - manager.add_tool(add) - result = await manager.call_tool("add", {"a": 1, "b": 2}) - assert result == 3 - - @pytest.mark.anyio - async def test_call_async_tool(self): - async def double(n: int) -> int: - """Double a number.""" - return n * 2 - - manager = ToolManager() - manager.add_tool(double) - result = await manager.call_tool("double", {"n": 5}) - assert result == 10 - - @pytest.mark.anyio - async def test_call_tool_with_default_args(self): - def add(a: int, 
b: int = 1) -> int: - """Add two numbers.""" - return a + b - - manager = ToolManager() - manager.add_tool(add) - result = await manager.call_tool("add", {"a": 1}) - assert result == 2 - - @pytest.mark.anyio - async def test_call_tool_with_missing_args(self): - def add(a: int, b: int) -> int: - """Add two numbers.""" - return a + b - - manager = ToolManager() - manager.add_tool(add) - with pytest.raises(ToolError): - await manager.call_tool("add", {"a": 1}) - - @pytest.mark.anyio - async def test_call_unknown_tool(self): - manager = ToolManager() - with pytest.raises(ToolError): - await manager.call_tool("unknown", {"a": 1}) - - @pytest.mark.anyio - async def test_call_tool_with_list_int_input(self): - def sum_vals(vals: list[int]) -> int: - return sum(vals) - - manager = ToolManager() - manager.add_tool(sum_vals) - # Try both with plain list and with JSON list - result = await manager.call_tool("sum_vals", {"vals": "[1, 2, 3]"}) - assert result == 6 - result = await manager.call_tool("sum_vals", {"vals": [1, 2, 3]}) - assert result == 6 - - @pytest.mark.anyio - async def test_call_tool_with_list_str_or_str_input(self): - def concat_strs(vals: list[str] | str) -> str: - return vals if isinstance(vals, str) else "".join(vals) - - manager = ToolManager() - manager.add_tool(concat_strs) - # Try both with plain python object and with JSON list - result = await manager.call_tool("concat_strs", {"vals": ["a", "b", "c"]}) - assert result == "abc" - result = await manager.call_tool("concat_strs", {"vals": '["a", "b", "c"]'}) - assert result == "abc" - result = await manager.call_tool("concat_strs", {"vals": "a"}) - assert result == "a" - result = await manager.call_tool("concat_strs", {"vals": '"a"'}) - assert result == '"a"' - - @pytest.mark.anyio - async def test_call_tool_with_complex_model(self): - class MyShrimpTank(BaseModel): - class Shrimp(BaseModel): - name: str - - shrimp: list[Shrimp] - x: None - - def name_shrimp(tank: MyShrimpTank, ctx: Context) -> list[str]: 
- return [x.name for x in tank.shrimp] - - manager = ToolManager() - manager.add_tool(name_shrimp) - result = await manager.call_tool( - "name_shrimp", - {"tank": {"x": None, "shrimp": [{"name": "rex"}, {"name": "gertrude"}]}}, - ) - assert result == ["rex", "gertrude"] - result = await manager.call_tool( - "name_shrimp", - {"tank": '{"x": null, "shrimp": [{"name": "rex"}, {"name": "gertrude"}]}'}, - ) - assert result == ["rex", "gertrude"] - - -class TestToolSchema: - @pytest.mark.anyio - async def test_context_arg_excluded_from_schema(self): - def something(a: int, ctx: Context) -> int: - return a - - manager = ToolManager() - tool = manager.add_tool(something) - assert "ctx" not in json.dumps(tool.parameters) - assert "Context" not in json.dumps(tool.parameters) - assert "ctx" not in tool.fn_metadata.arg_model.model_fields - - -class TestContextHandling: - """Test context handling in the tool manager.""" - - def test_context_parameter_detection(self): - """Test that context parameters are properly detected in - Tool.from_function().""" - - def tool_with_context(x: int, ctx: Context) -> str: - return str(x) - - manager = ToolManager() - tool = manager.add_tool(tool_with_context) - assert tool.context_kwarg == "ctx" - - def tool_without_context(x: int) -> str: - return str(x) - - tool = manager.add_tool(tool_without_context) - assert tool.context_kwarg is None - - def tool_with_parametrized_context( - x: int, ctx: Context[ServerSessionT, LifespanContextT] - ) -> str: - return str(x) - - tool = manager.add_tool(tool_with_parametrized_context) - assert tool.context_kwarg == "ctx" - - @pytest.mark.anyio - async def test_context_injection(self): - """Test that context is properly injected during tool execution.""" - - def tool_with_context(x: int, ctx: Context) -> str: - assert isinstance(ctx, Context) - return str(x) - - manager = ToolManager() - manager.add_tool(tool_with_context) - - mcp = FastMCP() - ctx = mcp.get_context() - result = await 
manager.call_tool("tool_with_context", {"x": 42}, context=ctx) - assert result == "42" - - @pytest.mark.anyio - async def test_context_injection_async(self): - """Test that context is properly injected in async tools.""" - - async def async_tool(x: int, ctx: Context) -> str: - assert isinstance(ctx, Context) - return str(x) - - manager = ToolManager() - manager.add_tool(async_tool) - - mcp = FastMCP() - ctx = mcp.get_context() - result = await manager.call_tool("async_tool", {"x": 42}, context=ctx) - assert result == "42" - - @pytest.mark.anyio - async def test_context_optional(self): - """Test that context is optional when calling tools.""" - - def tool_with_context(x: int, ctx: Context | None = None) -> str: - return str(x) - - manager = ToolManager() - manager.add_tool(tool_with_context) - # Should not raise an error when context is not provided - result = await manager.call_tool("tool_with_context", {"x": 42}) - assert result == "42" - - @pytest.mark.anyio - async def test_context_error_handling(self): - """Test error handling when context injection fails.""" - - def tool_with_context(x: int, ctx: Context) -> str: - raise ValueError("Test error") - - manager = ToolManager() - manager.add_tool(tool_with_context) - - mcp = FastMCP() - ctx = mcp.get_context() - with pytest.raises(ToolError, match="Error executing tool tool_with_context"): - await manager.call_tool("tool_with_context", {"x": 42}, context=ctx) - - -class TestToolAnnotations: - def test_tool_annotations(self): - """Test that tool annotations are correctly added to tools.""" - - def read_data(path: str) -> str: - """Read data from a file.""" - return f"Data from {path}" - - annotations = ToolAnnotations( - title="File Reader", - readOnlyHint=True, - openWorldHint=False, - ) - - manager = ToolManager() - tool = manager.add_tool(read_data, annotations=annotations) - - assert tool.annotations is not None - assert tool.annotations.title == "File Reader" - assert tool.annotations.readOnlyHint is True - 
assert tool.annotations.openWorldHint is False - - @pytest.mark.anyio - async def test_tool_annotations_in_fastmcp(self): - """Test that tool annotations are included in MCPTool conversion.""" - - app = FastMCP() - - @app.tool(annotations=ToolAnnotations(title="Echo Tool", readOnlyHint=True)) - def echo(message: str) -> str: - """Echo a message back.""" - return message - - tools = await app.list_tools() - assert len(tools) == 1 - assert tools[0].annotations is not None - assert tools[0].annotations.title == "Echo Tool" - assert tools[0].annotations.readOnlyHint is True +import json +import logging + +import pytest +from pydantic import BaseModel + +from mcp.server.fastmcp import Context, FastMCP +from mcp.server.fastmcp.exceptions import ToolError +from mcp.server.fastmcp.tools import ToolManager +from mcp.server.session import ServerSessionT +from mcp.shared.context import LifespanContextT +from mcp.types import ToolAnnotations + + +class TestAddTools: + def test_basic_function(self): + """Test registering and running a basic function.""" + + def add(a: int, b: int) -> int: + """Add two numbers.""" + return a + b + + manager = ToolManager() + manager.add_tool(add) + + tool = manager.get_tool("add") + assert tool is not None + assert tool.name == "add" + assert tool.description == "Add two numbers." + assert tool.is_async is False + assert tool.parameters["properties"]["a"]["type"] == "integer" + assert tool.parameters["properties"]["b"]["type"] == "integer" + + @pytest.mark.anyio + async def test_async_function(self): + """Test registering and running an async function.""" + + async def fetch_data(url: str) -> str: + """Fetch data from URL.""" + return f"Data from {url}" + + manager = ToolManager() + manager.add_tool(fetch_data) + + tool = manager.get_tool("fetch_data") + assert tool is not None + assert tool.name == "fetch_data" + assert tool.description == "Fetch data from URL." 
+ assert tool.is_async is True + assert tool.parameters["properties"]["url"]["type"] == "string" + + def test_pydantic_model_function(self): + """Test registering a function that takes a Pydantic model.""" + + class UserInput(BaseModel): + name: str + age: int + + def create_user(user: UserInput, flag: bool) -> dict: + """Create a new user.""" + return {"id": 1, **user.model_dump()} + + manager = ToolManager() + manager.add_tool(create_user) + + tool = manager.get_tool("create_user") + assert tool is not None + assert tool.name == "create_user" + assert tool.description == "Create a new user." + assert tool.is_async is False + assert "name" in tool.parameters["$defs"]["UserInput"]["properties"] + assert "age" in tool.parameters["$defs"]["UserInput"]["properties"] + assert "flag" in tool.parameters["properties"] + + def test_add_invalid_tool(self): + manager = ToolManager() + with pytest.raises(AttributeError): + manager.add_tool(1) # type: ignore + + def test_add_lambda(self): + manager = ToolManager() + tool = manager.add_tool(lambda x: x, name="my_tool") + assert tool.name == "my_tool" + + def test_add_lambda_with_no_name(self): + manager = ToolManager() + with pytest.raises( + ValueError, match="You must provide a name for lambda functions" + ): + manager.add_tool(lambda x: x) + + def test_warn_on_duplicate_tools(self, caplog): + """Test warning on duplicate tools.""" + + def f(x: int) -> int: + return x + + manager = ToolManager() + manager.add_tool(f) + with caplog.at_level(logging.WARNING): + manager.add_tool(f) + assert "Tool already exists: f" in caplog.text + + def test_disable_warn_on_duplicate_tools(self, caplog): + """Test disabling warning on duplicate tools.""" + + def f(x: int) -> int: + return x + + manager = ToolManager() + manager.add_tool(f) + manager.warn_on_duplicate_tools = False + with caplog.at_level(logging.WARNING): + manager.add_tool(f) + assert "Tool already exists: f" not in caplog.text + + +class TestCallTools: + @pytest.mark.anyio + 
async def test_call_tool(self): + def add(a: int, b: int) -> int: + """Add two numbers.""" + return a + b + + manager = ToolManager() + manager.add_tool(add) + result = await manager.call_tool("add", {"a": 1, "b": 2}) + assert result == 3 + + @pytest.mark.anyio + async def test_call_async_tool(self): + async def double(n: int) -> int: + """Double a number.""" + return n * 2 + + manager = ToolManager() + manager.add_tool(double) + result = await manager.call_tool("double", {"n": 5}) + assert result == 10 + + @pytest.mark.anyio + async def test_call_tool_with_default_args(self): + def add(a: int, b: int = 1) -> int: + """Add two numbers.""" + return a + b + + manager = ToolManager() + manager.add_tool(add) + result = await manager.call_tool("add", {"a": 1}) + assert result == 2 + + @pytest.mark.anyio + async def test_call_tool_with_missing_args(self): + def add(a: int, b: int) -> int: + """Add two numbers.""" + return a + b + + manager = ToolManager() + manager.add_tool(add) + with pytest.raises(ToolError): + await manager.call_tool("add", {"a": 1}) + + @pytest.mark.anyio + async def test_call_unknown_tool(self): + manager = ToolManager() + with pytest.raises(ToolError): + await manager.call_tool("unknown", {"a": 1}) + + @pytest.mark.anyio + async def test_call_tool_with_list_int_input(self): + def sum_vals(vals: list[int]) -> int: + return sum(vals) + + manager = ToolManager() + manager.add_tool(sum_vals) + # Try both with plain list and with JSON list + result = await manager.call_tool("sum_vals", {"vals": "[1, 2, 3]"}) + assert result == 6 + result = await manager.call_tool("sum_vals", {"vals": [1, 2, 3]}) + assert result == 6 + + @pytest.mark.anyio + async def test_call_tool_with_list_str_or_str_input(self): + def concat_strs(vals: list[str] | str) -> str: + return vals if isinstance(vals, str) else "".join(vals) + + manager = ToolManager() + manager.add_tool(concat_strs) + # Try both with plain python object and with JSON list + result = await 
manager.call_tool("concat_strs", {"vals": ["a", "b", "c"]}) + assert result == "abc" + result = await manager.call_tool("concat_strs", {"vals": '["a", "b", "c"]'}) + assert result == "abc" + result = await manager.call_tool("concat_strs", {"vals": "a"}) + assert result == "a" + result = await manager.call_tool("concat_strs", {"vals": '"a"'}) + assert result == '"a"' + + @pytest.mark.anyio + async def test_call_tool_with_complex_model(self): + class MyShrimpTank(BaseModel): + class Shrimp(BaseModel): + name: str + + shrimp: list[Shrimp] + x: None + + def name_shrimp(tank: MyShrimpTank, ctx: Context) -> list[str]: + return [x.name for x in tank.shrimp] + + manager = ToolManager() + manager.add_tool(name_shrimp) + result = await manager.call_tool( + "name_shrimp", + {"tank": {"x": None, "shrimp": [{"name": "rex"}, {"name": "gertrude"}]}}, + ) + assert result == ["rex", "gertrude"] + result = await manager.call_tool( + "name_shrimp", + {"tank": '{"x": null, "shrimp": [{"name": "rex"}, {"name": "gertrude"}]}'}, + ) + assert result == ["rex", "gertrude"] + + +class TestToolSchema: + @pytest.mark.anyio + async def test_context_arg_excluded_from_schema(self): + def something(a: int, ctx: Context) -> int: + return a + + manager = ToolManager() + tool = manager.add_tool(something) + assert "ctx" not in json.dumps(tool.parameters) + assert "Context" not in json.dumps(tool.parameters) + assert "ctx" not in tool.fn_metadata.arg_model.model_fields + + +class TestContextHandling: + """Test context handling in the tool manager.""" + + def test_context_parameter_detection(self): + """Test that context parameters are properly detected in + Tool.from_function().""" + + def tool_with_context(x: int, ctx: Context) -> str: + return str(x) + + manager = ToolManager() + tool = manager.add_tool(tool_with_context) + assert tool.context_kwarg == "ctx" + + def tool_without_context(x: int) -> str: + return str(x) + + tool = manager.add_tool(tool_without_context) + assert tool.context_kwarg is 
None + + def tool_with_parametrized_context( + x: int, ctx: Context[ServerSessionT, LifespanContextT] + ) -> str: + return str(x) + + tool = manager.add_tool(tool_with_parametrized_context) + assert tool.context_kwarg == "ctx" + + @pytest.mark.anyio + async def test_context_injection(self): + """Test that context is properly injected during tool execution.""" + + def tool_with_context(x: int, ctx: Context) -> str: + assert isinstance(ctx, Context) + return str(x) + + manager = ToolManager() + manager.add_tool(tool_with_context) + + mcp = FastMCP() + ctx = mcp.get_context() + result = await manager.call_tool("tool_with_context", {"x": 42}, context=ctx) + assert result == "42" + + @pytest.mark.anyio + async def test_context_injection_async(self): + """Test that context is properly injected in async tools.""" + + async def async_tool(x: int, ctx: Context) -> str: + assert isinstance(ctx, Context) + return str(x) + + manager = ToolManager() + manager.add_tool(async_tool) + + mcp = FastMCP() + ctx = mcp.get_context() + result = await manager.call_tool("async_tool", {"x": 42}, context=ctx) + assert result == "42" + + @pytest.mark.anyio + async def test_context_optional(self): + """Test that context is optional when calling tools.""" + + def tool_with_context(x: int, ctx: Context | None = None) -> str: + return str(x) + + manager = ToolManager() + manager.add_tool(tool_with_context) + # Should not raise an error when context is not provided + result = await manager.call_tool("tool_with_context", {"x": 42}) + assert result == "42" + + @pytest.mark.anyio + async def test_context_error_handling(self): + """Test error handling when context injection fails.""" + + def tool_with_context(x: int, ctx: Context) -> str: + raise ValueError("Test error") + + manager = ToolManager() + manager.add_tool(tool_with_context) + + mcp = FastMCP() + ctx = mcp.get_context() + with pytest.raises(ToolError, match="Error executing tool tool_with_context"): + await 
manager.call_tool("tool_with_context", {"x": 42}, context=ctx) + + +class TestToolAnnotations: + def test_tool_annotations(self): + """Test that tool annotations are correctly added to tools.""" + + def read_data(path: str) -> str: + """Read data from a file.""" + return f"Data from {path}" + + annotations = ToolAnnotations( + title="File Reader", + readOnlyHint=True, + openWorldHint=False, + ) + + manager = ToolManager() + tool = manager.add_tool(read_data, annotations=annotations) + + assert tool.annotations is not None + assert tool.annotations.title == "File Reader" + assert tool.annotations.readOnlyHint is True + assert tool.annotations.openWorldHint is False + + @pytest.mark.anyio + async def test_tool_annotations_in_fastmcp(self): + """Test that tool annotations are included in MCPTool conversion.""" + + app = FastMCP() + + @app.tool(annotations=ToolAnnotations(title="Echo Tool", readOnlyHint=True)) + def echo(message: str) -> str: + """Echo a message back.""" + return message + + tools = await app.list_tools() + assert len(tools) == 1 + assert tools[0].annotations is not None + assert tools[0].annotations.title == "Echo Tool" + assert tools[0].annotations.readOnlyHint is True +======= +import json +import logging + +import pytest +from pydantic import BaseModel + +from mcp.server.fastmcp import Context, FastMCP +from mcp.server.fastmcp.exceptions import ToolError +from mcp.server.fastmcp.tools import Tool, ToolManager +from mcp.server.fastmcp.utilities.func_metadata import ArgModelBase, FuncMetadata +from mcp.server.session import ServerSessionT +from mcp.shared.context import LifespanContextT +from mcp.types import ToolAnnotations + + +class TestAddTools: + def test_basic_function(self): + """Test registering and running a basic function.""" + + def add(a: int, b: int) -> int: + """Add two numbers.""" + return a + b + + manager = ToolManager() + manager.add_tool(add) + + tool = manager.get_tool("add") + assert tool is not None + assert tool.name == "add" + 
assert tool.description == "Add two numbers." + assert tool.is_async is False + assert tool.parameters["properties"]["a"]["type"] == "integer" + assert tool.parameters["properties"]["b"]["type"] == "integer" + + def test_init_with_tools(self, caplog): + def add(a: int, b: int) -> int: + return a + b + + class AddArguments(ArgModelBase): + a: int + b: int + + fn_metadata = FuncMetadata(arg_model=AddArguments) + + original_tool = Tool( + name="add", + description="Add two numbers.", + fn=add, + fn_metadata=fn_metadata, + is_async=False, + parameters=AddArguments.model_json_schema(), + context_kwarg=None, + annotations=None, + ) + manager = ToolManager(tools=[original_tool]) + saved_tool = manager.get_tool("add") + assert saved_tool == original_tool + + # warn on duplicate tools + with caplog.at_level(logging.WARNING): + manager = ToolManager(True, tools=[original_tool, original_tool]) + assert "Tool already exists: add" in caplog.text + + @pytest.mark.anyio + async def test_async_function(self): + """Test registering and running an async function.""" + + async def fetch_data(url: str) -> str: + """Fetch data from URL.""" + return f"Data from {url}" + + manager = ToolManager() + manager.add_tool(fetch_data) + + tool = manager.get_tool("fetch_data") + assert tool is not None + assert tool.name == "fetch_data" + assert tool.description == "Fetch data from URL." + assert tool.is_async is True + assert tool.parameters["properties"]["url"]["type"] == "string" + + def test_pydantic_model_function(self): + """Test registering a function that takes a Pydantic model.""" + + class UserInput(BaseModel): + name: str + age: int + + def create_user(user: UserInput, flag: bool) -> dict: + """Create a new user.""" + return {"id": 1, **user.model_dump()} + + manager = ToolManager() + manager.add_tool(create_user) + + tool = manager.get_tool("create_user") + assert tool is not None + assert tool.name == "create_user" + assert tool.description == "Create a new user." 
+ assert tool.is_async is False + assert "name" in tool.parameters["$defs"]["UserInput"]["properties"] + assert "age" in tool.parameters["$defs"]["UserInput"]["properties"] + assert "flag" in tool.parameters["properties"] + + def test_add_invalid_tool(self): + manager = ToolManager() + with pytest.raises(AttributeError): + manager.add_tool(1) # type: ignore + + def test_add_lambda(self): + manager = ToolManager() + tool = manager.add_tool(lambda x: x, name="my_tool") + assert tool.name == "my_tool" + + def test_add_lambda_with_no_name(self): + manager = ToolManager() + with pytest.raises( + ValueError, match="You must provide a name for lambda functions" + ): + manager.add_tool(lambda x: x) + + def test_warn_on_duplicate_tools(self, caplog): + """Test warning on duplicate tools.""" + + def f(x: int) -> int: + return x + + manager = ToolManager() + manager.add_tool(f) + with caplog.at_level(logging.WARNING): + manager.add_tool(f) + assert "Tool already exists: f" in caplog.text + + def test_disable_warn_on_duplicate_tools(self, caplog): + """Test disabling warning on duplicate tools.""" + + def f(x: int) -> int: + return x + + manager = ToolManager() + manager.add_tool(f) + manager.warn_on_duplicate_tools = False + with caplog.at_level(logging.WARNING): + manager.add_tool(f) + assert "Tool already exists: f" not in caplog.text + + +class TestCallTools: + @pytest.mark.anyio + async def test_call_tool(self): + def add(a: int, b: int) -> int: + """Add two numbers.""" + return a + b + + manager = ToolManager() + manager.add_tool(add) + result = await manager.call_tool("add", {"a": 1, "b": 2}) + assert result == 3 + + @pytest.mark.anyio + async def test_call_async_tool(self): + async def double(n: int) -> int: + """Double a number.""" + return n * 2 + + manager = ToolManager() + manager.add_tool(double) + result = await manager.call_tool("double", {"n": 5}) + assert result == 10 + + @pytest.mark.anyio + async def test_call_tool_with_default_args(self): + def add(a: int, 
b: int = 1) -> int: + """Add two numbers.""" + return a + b + + manager = ToolManager() + manager.add_tool(add) + result = await manager.call_tool("add", {"a": 1}) + assert result == 2 + + @pytest.mark.anyio + async def test_call_tool_with_missing_args(self): + def add(a: int, b: int) -> int: + """Add two numbers.""" + return a + b + + manager = ToolManager() + manager.add_tool(add) + with pytest.raises(ToolError): + await manager.call_tool("add", {"a": 1}) + + @pytest.mark.anyio + async def test_call_unknown_tool(self): + manager = ToolManager() + with pytest.raises(ToolError): + await manager.call_tool("unknown", {"a": 1}) + + @pytest.mark.anyio + async def test_call_tool_with_list_int_input(self): + def sum_vals(vals: list[int]) -> int: + return sum(vals) + + manager = ToolManager() + manager.add_tool(sum_vals) + # Try both with plain list and with JSON list + result = await manager.call_tool("sum_vals", {"vals": "[1, 2, 3]"}) + assert result == 6 + result = await manager.call_tool("sum_vals", {"vals": [1, 2, 3]}) + assert result == 6 + + @pytest.mark.anyio + async def test_call_tool_with_list_str_or_str_input(self): + def concat_strs(vals: list[str] | str) -> str: + return vals if isinstance(vals, str) else "".join(vals) + + manager = ToolManager() + manager.add_tool(concat_strs) + # Try both with plain python object and with JSON list + result = await manager.call_tool("concat_strs", {"vals": ["a", "b", "c"]}) + assert result == "abc" + result = await manager.call_tool("concat_strs", {"vals": '["a", "b", "c"]'}) + assert result == "abc" + result = await manager.call_tool("concat_strs", {"vals": "a"}) + assert result == "a" + result = await manager.call_tool("concat_strs", {"vals": '"a"'}) + assert result == '"a"' + + @pytest.mark.anyio + async def test_call_tool_with_complex_model(self): + class MyShrimpTank(BaseModel): + class Shrimp(BaseModel): + name: str + + shrimp: list[Shrimp] + x: None + + def name_shrimp(tank: MyShrimpTank, ctx: Context) -> list[str]: 
+ return [x.name for x in tank.shrimp] + + manager = ToolManager() + manager.add_tool(name_shrimp) + result = await manager.call_tool( + "name_shrimp", + {"tank": {"x": None, "shrimp": [{"name": "rex"}, {"name": "gertrude"}]}}, + ) + assert result == ["rex", "gertrude"] + result = await manager.call_tool( + "name_shrimp", + {"tank": '{"x": null, "shrimp": [{"name": "rex"}, {"name": "gertrude"}]}'}, + ) + assert result == ["rex", "gertrude"] + + +class TestToolSchema: + @pytest.mark.anyio + async def test_context_arg_excluded_from_schema(self): + def something(a: int, ctx: Context) -> int: + return a + + manager = ToolManager() + tool = manager.add_tool(something) + assert "ctx" not in json.dumps(tool.parameters) + assert "Context" not in json.dumps(tool.parameters) + assert "ctx" not in tool.fn_metadata.arg_model.model_fields + + +class TestContextHandling: + """Test context handling in the tool manager.""" + + def test_context_parameter_detection(self): + """Test that context parameters are properly detected in + Tool.from_function().""" + + def tool_with_context(x: int, ctx: Context) -> str: + return str(x) + + manager = ToolManager() + tool = manager.add_tool(tool_with_context) + assert tool.context_kwarg == "ctx" + + def tool_without_context(x: int) -> str: + return str(x) + + tool = manager.add_tool(tool_without_context) + assert tool.context_kwarg is None + + def tool_with_parametrized_context( + x: int, ctx: Context[ServerSessionT, LifespanContextT] + ) -> str: + return str(x) + + tool = manager.add_tool(tool_with_parametrized_context) + assert tool.context_kwarg == "ctx" + + @pytest.mark.anyio + async def test_context_injection(self): + """Test that context is properly injected during tool execution.""" + + def tool_with_context(x: int, ctx: Context) -> str: + assert isinstance(ctx, Context) + return str(x) + + manager = ToolManager() + manager.add_tool(tool_with_context) + + mcp = FastMCP() + ctx = mcp.get_context() + result = await 
manager.call_tool("tool_with_context", {"x": 42}, context=ctx) + assert result == "42" + + @pytest.mark.anyio + async def test_context_injection_async(self): + """Test that context is properly injected in async tools.""" + + async def async_tool(x: int, ctx: Context) -> str: + assert isinstance(ctx, Context) + return str(x) + + manager = ToolManager() + manager.add_tool(async_tool) + + mcp = FastMCP() + ctx = mcp.get_context() + result = await manager.call_tool("async_tool", {"x": 42}, context=ctx) + assert result == "42" + + @pytest.mark.anyio + async def test_context_optional(self): + """Test that context is optional when calling tools.""" + + def tool_with_context(x: int, ctx: Context | None = None) -> str: + return str(x) + + manager = ToolManager() + manager.add_tool(tool_with_context) + # Should not raise an error when context is not provided + result = await manager.call_tool("tool_with_context", {"x": 42}) + assert result == "42" + + @pytest.mark.anyio + async def test_context_error_handling(self): + """Test error handling when context injection fails.""" + + def tool_with_context(x: int, ctx: Context) -> str: + raise ValueError("Test error") + + manager = ToolManager() + manager.add_tool(tool_with_context) + + mcp = FastMCP() + ctx = mcp.get_context() + with pytest.raises(ToolError, match="Error executing tool tool_with_context"): + await manager.call_tool("tool_with_context", {"x": 42}, context=ctx) + + +class TestToolAnnotations: + def test_tool_annotations(self): + """Test that tool annotations are correctly added to tools.""" + + def read_data(path: str) -> str: + """Read data from a file.""" + return f"Data from {path}" + + annotations = ToolAnnotations( + title="File Reader", + readOnlyHint=True, + openWorldHint=False, + ) + + manager = ToolManager() + tool = manager.add_tool(read_data, annotations=annotations) + + assert tool.annotations is not None + assert tool.annotations.title == "File Reader" + assert tool.annotations.readOnlyHint is True + 
assert tool.annotations.openWorldHint is False + + @pytest.mark.anyio + async def test_tool_annotations_in_fastmcp(self): + """Test that tool annotations are included in MCPTool conversion.""" + + app = FastMCP() + + @app.tool(annotations=ToolAnnotations(title="Echo Tool", readOnlyHint=True)) + def echo(message: str) -> str: + """Echo a message back.""" + return message + + tools = await app.list_tools() + assert len(tools) == 1 + assert tools[0].annotations is not None + assert tools[0].annotations.title == "Echo Tool" + assert tools[0].annotations.readOnlyHint is True \ No newline at end of file diff --git a/tests/server/test_lifespan.py b/tests/server/test_lifespan.py index a3ff59bc1..e7fd62f10 100644 --- a/tests/server/test_lifespan.py +++ b/tests/server/test_lifespan.py @@ -1,236 +1,236 @@ -"""Tests for lifespan functionality in both low-level and FastMCP servers.""" - -from collections.abc import AsyncIterator -from contextlib import asynccontextmanager - -import anyio -import pytest -from pydantic import TypeAdapter - -from mcp.server.fastmcp import Context, FastMCP -from mcp.server.lowlevel.server import NotificationOptions, Server -from mcp.server.models import InitializationOptions -from mcp.shared.message import SessionMessage -from mcp.types import ( - ClientCapabilities, - Implementation, - InitializeRequestParams, - JSONRPCMessage, - JSONRPCNotification, - JSONRPCRequest, -) - - -@pytest.mark.anyio -async def test_lowlevel_server_lifespan(): - """Test that lifespan works in low-level server.""" - - @asynccontextmanager - async def test_lifespan(server: Server) -> AsyncIterator[dict[str, bool]]: - """Test lifespan context that tracks startup/shutdown.""" - context = {"started": False, "shutdown": False} - try: - context["started"] = True - yield context - finally: - context["shutdown"] = True - - server = Server("test", lifespan=test_lifespan) - - # Create memory streams for testing - send_stream1, receive_stream1 = 
anyio.create_memory_object_stream(100) - send_stream2, receive_stream2 = anyio.create_memory_object_stream(100) - - # Create a tool that accesses lifespan context - @server.call_tool() - async def check_lifespan(name: str, arguments: dict) -> list: - ctx = server.request_context - assert isinstance(ctx.lifespan_context, dict) - assert ctx.lifespan_context["started"] - assert not ctx.lifespan_context["shutdown"] - return [{"type": "text", "text": "true"}] - - # Run server in background task - async with ( - anyio.create_task_group() as tg, - send_stream1, - receive_stream1, - send_stream2, - receive_stream2, - ): - - async def run_server(): - await server.run( - receive_stream1, - send_stream2, - InitializationOptions( - server_name="test", - server_version="0.1.0", - capabilities=server.get_capabilities( - notification_options=NotificationOptions(), - experimental_capabilities={}, - ), - ), - raise_exceptions=True, - ) - - tg.start_soon(run_server) - - # Initialize the server - params = InitializeRequestParams( - protocolVersion="2024-11-05", - capabilities=ClientCapabilities(), - clientInfo=Implementation(name="test-client", version="0.1.0"), - ) - await send_stream1.send( - SessionMessage( - JSONRPCMessage( - root=JSONRPCRequest( - jsonrpc="2.0", - id=1, - method="initialize", - params=TypeAdapter(InitializeRequestParams).dump_python(params), - ) - ) - ) - ) - response = await receive_stream2.receive() - response = response.message - - # Send initialized notification - await send_stream1.send( - SessionMessage( - JSONRPCMessage( - root=JSONRPCNotification( - jsonrpc="2.0", - method="notifications/initialized", - ) - ) - ) - ) - - # Call the tool to verify lifespan context - await send_stream1.send( - SessionMessage( - JSONRPCMessage( - root=JSONRPCRequest( - jsonrpc="2.0", - id=2, - method="tools/call", - params={"name": "check_lifespan", "arguments": {}}, - ) - ) - ) - ) - - # Get response and verify - response = await receive_stream2.receive() - response = 
response.message - assert response.root.result["content"][0]["text"] == "true" - - # Cancel server task - tg.cancel_scope.cancel() - - -@pytest.mark.anyio -async def test_fastmcp_server_lifespan(): - """Test that lifespan works in FastMCP server.""" - - @asynccontextmanager - async def test_lifespan(server: FastMCP) -> AsyncIterator[dict]: - """Test lifespan context that tracks startup/shutdown.""" - context = {"started": False, "shutdown": False} - try: - context["started"] = True - yield context - finally: - context["shutdown"] = True - - server = FastMCP("test", lifespan=test_lifespan) - - # Create memory streams for testing - send_stream1, receive_stream1 = anyio.create_memory_object_stream(100) - send_stream2, receive_stream2 = anyio.create_memory_object_stream(100) - - # Add a tool that checks lifespan context - @server.tool() - def check_lifespan(ctx: Context) -> bool: - """Tool that checks lifespan context.""" - assert isinstance(ctx.request_context.lifespan_context, dict) - assert ctx.request_context.lifespan_context["started"] - assert not ctx.request_context.lifespan_context["shutdown"] - return True - - # Run server in background task - async with ( - anyio.create_task_group() as tg, - send_stream1, - receive_stream1, - send_stream2, - receive_stream2, - ): - - async def run_server(): - await server._mcp_server.run( - receive_stream1, - send_stream2, - server._mcp_server.create_initialization_options(), - raise_exceptions=True, - ) - - tg.start_soon(run_server) - - # Initialize the server - params = InitializeRequestParams( - protocolVersion="2024-11-05", - capabilities=ClientCapabilities(), - clientInfo=Implementation(name="test-client", version="0.1.0"), - ) - await send_stream1.send( - SessionMessage( - JSONRPCMessage( - root=JSONRPCRequest( - jsonrpc="2.0", - id=1, - method="initialize", - params=TypeAdapter(InitializeRequestParams).dump_python(params), - ) - ) - ) - ) - response = await receive_stream2.receive() - response = response.message - - # 
Send initialized notification - await send_stream1.send( - SessionMessage( - JSONRPCMessage( - root=JSONRPCNotification( - jsonrpc="2.0", - method="notifications/initialized", - ) - ) - ) - ) - - # Call the tool to verify lifespan context - await send_stream1.send( - SessionMessage( - JSONRPCMessage( - root=JSONRPCRequest( - jsonrpc="2.0", - id=2, - method="tools/call", - params={"name": "check_lifespan", "arguments": {}}, - ) - ) - ) - ) - - # Get response and verify - response = await receive_stream2.receive() - response = response.message - assert response.root.result["content"][0]["text"] == "true" - - # Cancel server task - tg.cancel_scope.cancel() +"""Tests for lifespan functionality in both low-level and FastMCP servers.""" + +from collections.abc import AsyncIterator +from contextlib import asynccontextmanager + +import anyio +import pytest +from pydantic import TypeAdapter + +from mcp.server.fastmcp import Context, FastMCP +from mcp.server.lowlevel.server import NotificationOptions, Server +from mcp.server.models import InitializationOptions +from mcp.shared.message import SessionMessage +from mcp.types import ( + ClientCapabilities, + Implementation, + InitializeRequestParams, + JSONRPCMessage, + JSONRPCNotification, + JSONRPCRequest, +) + + +@pytest.mark.anyio +async def test_lowlevel_server_lifespan(): + """Test that lifespan works in low-level server.""" + + @asynccontextmanager + async def test_lifespan(server: Server) -> AsyncIterator[dict[str, bool]]: + """Test lifespan context that tracks startup/shutdown.""" + context = {"started": False, "shutdown": False} + try: + context["started"] = True + yield context + finally: + context["shutdown"] = True + + server = Server("test", lifespan=test_lifespan) + + # Create memory streams for testing + send_stream1, receive_stream1 = anyio.create_memory_object_stream(100) + send_stream2, receive_stream2 = anyio.create_memory_object_stream(100) + + # Create a tool that accesses lifespan context + 
@server.call_tool() + async def check_lifespan(name: str, arguments: dict) -> list: + ctx = server.request_context + assert isinstance(ctx.lifespan_context, dict) + assert ctx.lifespan_context["started"] + assert not ctx.lifespan_context["shutdown"] + return [{"type": "text", "text": "true"}] + + # Run server in background task + async with ( + anyio.create_task_group() as tg, + send_stream1, + receive_stream1, + send_stream2, + receive_stream2, + ): + + async def run_server(): + await server.run( + receive_stream1, + send_stream2, + InitializationOptions( + server_name="test", + server_version="0.1.0", + capabilities=server.get_capabilities( + notification_options=NotificationOptions(), + experimental_capabilities={}, + ), + ), + raise_exceptions=True, + ) + + tg.start_soon(run_server) + + # Initialize the server + params = InitializeRequestParams( + protocolVersion="2024-11-05", + capabilities=ClientCapabilities(), + clientInfo=Implementation(name="test-client", version="0.1.0"), + ) + await send_stream1.send( + SessionMessage( + JSONRPCMessage( + root=JSONRPCRequest( + jsonrpc="2.0", + id=1, + method="initialize", + params=TypeAdapter(InitializeRequestParams).dump_python(params), + ) + ) + ) + ) + response = await receive_stream2.receive() + response = response.message + + # Send initialized notification + await send_stream1.send( + SessionMessage( + JSONRPCMessage( + root=JSONRPCNotification( + jsonrpc="2.0", + method="notifications/initialized", + ) + ) + ) + ) + + # Call the tool to verify lifespan context + await send_stream1.send( + SessionMessage( + JSONRPCMessage( + root=JSONRPCRequest( + jsonrpc="2.0", + id=2, + method="tools/call", + params={"name": "check_lifespan", "arguments": {}}, + ) + ) + ) + ) + + # Get response and verify + response = await receive_stream2.receive() + response = response.message + assert response.root.result["content"][0]["text"] == "true" + + # Cancel server task + tg.cancel_scope.cancel() + + +@pytest.mark.anyio +async def 
test_fastmcp_server_lifespan(): + """Test that lifespan works in FastMCP server.""" + + @asynccontextmanager + async def test_lifespan(server: FastMCP) -> AsyncIterator[dict]: + """Test lifespan context that tracks startup/shutdown.""" + context = {"started": False, "shutdown": False} + try: + context["started"] = True + yield context + finally: + context["shutdown"] = True + + server = FastMCP("test", lifespan=test_lifespan) + + # Create memory streams for testing + send_stream1, receive_stream1 = anyio.create_memory_object_stream(100) + send_stream2, receive_stream2 = anyio.create_memory_object_stream(100) + + # Add a tool that checks lifespan context + @server.tool() + def check_lifespan(ctx: Context) -> bool: + """Tool that checks lifespan context.""" + assert isinstance(ctx.request_context.lifespan_context, dict) + assert ctx.request_context.lifespan_context["started"] + assert not ctx.request_context.lifespan_context["shutdown"] + return True + + # Run server in background task + async with ( + anyio.create_task_group() as tg, + send_stream1, + receive_stream1, + send_stream2, + receive_stream2, + ): + + async def run_server(): + await server._mcp_server.run( + receive_stream1, + send_stream2, + server._mcp_server.create_initialization_options(), + raise_exceptions=True, + ) + + tg.start_soon(run_server) + + # Initialize the server + params = InitializeRequestParams( + protocolVersion="2024-11-05", + capabilities=ClientCapabilities(), + clientInfo=Implementation(name="test-client", version="0.1.0"), + ) + await send_stream1.send( + SessionMessage( + JSONRPCMessage( + root=JSONRPCRequest( + jsonrpc="2.0", + id=1, + method="initialize", + params=TypeAdapter(InitializeRequestParams).dump_python(params), + ) + ) + ) + ) + response = await receive_stream2.receive() + response = response.message + + # Send initialized notification + await send_stream1.send( + SessionMessage( + JSONRPCMessage( + root=JSONRPCNotification( + jsonrpc="2.0", + 
method="notifications/initialized", + ) + ) + ) + ) + + # Call the tool to verify lifespan context + await send_stream1.send( + SessionMessage( + JSONRPCMessage( + root=JSONRPCRequest( + jsonrpc="2.0", + id=2, + method="tools/call", + params={"name": "check_lifespan", "arguments": {}}, + ) + ) + ) + ) + + # Get response and verify + response = await receive_stream2.receive() + response = response.message + assert response.root.result["content"][0]["text"] == "true" + + # Cancel server task + tg.cancel_scope.cancel() diff --git a/tests/server/test_lowlevel_tool_annotations.py b/tests/server/test_lowlevel_tool_annotations.py index e9eff9ed0..0fbc23b27 100644 --- a/tests/server/test_lowlevel_tool_annotations.py +++ b/tests/server/test_lowlevel_tool_annotations.py @@ -1,111 +1,111 @@ -"""Tests for tool annotations in low-level server.""" - -import anyio -import pytest - -from mcp.client.session import ClientSession -from mcp.server import Server -from mcp.server.lowlevel import NotificationOptions -from mcp.server.models import InitializationOptions -from mcp.server.session import ServerSession -from mcp.shared.message import SessionMessage -from mcp.shared.session import RequestResponder -from mcp.types import ( - ClientResult, - ServerNotification, - ServerRequest, - Tool, - ToolAnnotations, -) - - -@pytest.mark.anyio -async def test_lowlevel_server_tool_annotations(): - """Test that tool annotations work in low-level server.""" - server = Server("test") - - # Create a tool with annotations - @server.list_tools() - async def list_tools(): - return [ - Tool( - name="echo", - description="Echo a message back", - inputSchema={ - "type": "object", - "properties": { - "message": {"type": "string"}, - }, - "required": ["message"], - }, - annotations=ToolAnnotations( - title="Echo Tool", - readOnlyHint=True, - ), - ) - ] - - server_to_client_send, server_to_client_receive = anyio.create_memory_object_stream[ - SessionMessage - ](10) - client_to_server_send, 
client_to_server_receive = anyio.create_memory_object_stream[ - SessionMessage - ](10) - - # Message handler for client - async def message_handler( - message: RequestResponder[ServerRequest, ClientResult] - | ServerNotification - | Exception, - ) -> None: - if isinstance(message, Exception): - raise message - - # Server task - async def run_server(): - async with ServerSession( - client_to_server_receive, - server_to_client_send, - InitializationOptions( - server_name="test-server", - server_version="1.0.0", - capabilities=server.get_capabilities( - notification_options=NotificationOptions(), - experimental_capabilities={}, - ), - ), - ) as server_session: - async with anyio.create_task_group() as tg: - - async def handle_messages(): - async for message in server_session.incoming_messages: - await server._handle_message(message, server_session, {}, False) - - tg.start_soon(handle_messages) - await anyio.sleep_forever() - - # Run the test - async with anyio.create_task_group() as tg: - tg.start_soon(run_server) - - async with ClientSession( - server_to_client_receive, - client_to_server_send, - message_handler=message_handler, - ) as client_session: - # Initialize the session - await client_session.initialize() - - # List tools - tools_result = await client_session.list_tools() - - # Cancel the server task - tg.cancel_scope.cancel() - - # Verify results - assert tools_result is not None - assert len(tools_result.tools) == 1 - assert tools_result.tools[0].name == "echo" - assert tools_result.tools[0].annotations is not None - assert tools_result.tools[0].annotations.title == "Echo Tool" - assert tools_result.tools[0].annotations.readOnlyHint is True +"""Tests for tool annotations in low-level server.""" + +import anyio +import pytest + +from mcp.client.session import ClientSession +from mcp.server import Server +from mcp.server.lowlevel import NotificationOptions +from mcp.server.models import InitializationOptions +from mcp.server.session import ServerSession +from 
mcp.shared.message import SessionMessage +from mcp.shared.session import RequestResponder +from mcp.types import ( + ClientResult, + ServerNotification, + ServerRequest, + Tool, + ToolAnnotations, +) + + +@pytest.mark.anyio +async def test_lowlevel_server_tool_annotations(): + """Test that tool annotations work in low-level server.""" + server = Server("test") + + # Create a tool with annotations + @server.list_tools() + async def list_tools(): + return [ + Tool( + name="echo", + description="Echo a message back", + inputSchema={ + "type": "object", + "properties": { + "message": {"type": "string"}, + }, + "required": ["message"], + }, + annotations=ToolAnnotations( + title="Echo Tool", + readOnlyHint=True, + ), + ) + ] + + server_to_client_send, server_to_client_receive = anyio.create_memory_object_stream[ + SessionMessage + ](10) + client_to_server_send, client_to_server_receive = anyio.create_memory_object_stream[ + SessionMessage + ](10) + + # Message handler for client + async def message_handler( + message: RequestResponder[ServerRequest, ClientResult] + | ServerNotification + | Exception, + ) -> None: + if isinstance(message, Exception): + raise message + + # Server task + async def run_server(): + async with ServerSession( + client_to_server_receive, + server_to_client_send, + InitializationOptions( + server_name="test-server", + server_version="1.0.0", + capabilities=server.get_capabilities( + notification_options=NotificationOptions(), + experimental_capabilities={}, + ), + ), + ) as server_session: + async with anyio.create_task_group() as tg: + + async def handle_messages(): + async for message in server_session.incoming_messages: + await server._handle_message(message, server_session, {}, False) + + tg.start_soon(handle_messages) + await anyio.sleep_forever() + + # Run the test + async with anyio.create_task_group() as tg: + tg.start_soon(run_server) + + async with ClientSession( + server_to_client_receive, + client_to_server_send, + 
message_handler=message_handler, + ) as client_session: + # Initialize the session + await client_session.initialize() + + # List tools + tools_result = await client_session.list_tools() + + # Cancel the server task + tg.cancel_scope.cancel() + + # Verify results + assert tools_result is not None + assert len(tools_result.tools) == 1 + assert tools_result.tools[0].name == "echo" + assert tools_result.tools[0].annotations is not None + assert tools_result.tools[0].annotations.title == "Echo Tool" + assert tools_result.tools[0].annotations.readOnlyHint is True diff --git a/tests/server/test_read_resource.py b/tests/server/test_read_resource.py index 469eef857..fb7d644fa 100644 --- a/tests/server/test_read_resource.py +++ b/tests/server/test_read_resource.py @@ -1,114 +1,114 @@ -from collections.abc import Iterable -from pathlib import Path -from tempfile import NamedTemporaryFile - -import pytest -from pydantic import AnyUrl, FileUrl - -import mcp.types as types -from mcp.server.lowlevel.server import ReadResourceContents, Server - - -@pytest.fixture -def temp_file(): - """Create a temporary file for testing.""" - with NamedTemporaryFile(mode="w", delete=False) as f: - f.write("test content") - path = Path(f.name).resolve() - yield path - try: - path.unlink() - except FileNotFoundError: - pass - - -@pytest.mark.anyio -async def test_read_resource_text(temp_file: Path): - server = Server("test") - - @server.read_resource() - async def read_resource(uri: AnyUrl) -> Iterable[ReadResourceContents]: - return [ReadResourceContents(content="Hello World", mime_type="text/plain")] - - # Get the handler directly from the server - handler = server.request_handlers[types.ReadResourceRequest] - - # Create a request - request = types.ReadResourceRequest( - method="resources/read", - params=types.ReadResourceRequestParams(uri=FileUrl(temp_file.as_uri())), - ) - - # Call the handler - result = await handler(request) - assert isinstance(result.root, types.ReadResourceResult) - assert 
len(result.root.contents) == 1 - - content = result.root.contents[0] - assert isinstance(content, types.TextResourceContents) - assert content.text == "Hello World" - assert content.mimeType == "text/plain" - - -@pytest.mark.anyio -async def test_read_resource_binary(temp_file: Path): - server = Server("test") - - @server.read_resource() - async def read_resource(uri: AnyUrl) -> Iterable[ReadResourceContents]: - return [ - ReadResourceContents( - content=b"Hello World", mime_type="application/octet-stream" - ) - ] - - # Get the handler directly from the server - handler = server.request_handlers[types.ReadResourceRequest] - - # Create a request - request = types.ReadResourceRequest( - method="resources/read", - params=types.ReadResourceRequestParams(uri=FileUrl(temp_file.as_uri())), - ) - - # Call the handler - result = await handler(request) - assert isinstance(result.root, types.ReadResourceResult) - assert len(result.root.contents) == 1 - - content = result.root.contents[0] - assert isinstance(content, types.BlobResourceContents) - assert content.mimeType == "application/octet-stream" - - -@pytest.mark.anyio -async def test_read_resource_default_mime(temp_file: Path): - server = Server("test") - - @server.read_resource() - async def read_resource(uri: AnyUrl) -> Iterable[ReadResourceContents]: - return [ - ReadResourceContents( - content="Hello World", - # No mime_type specified, should default to text/plain - ) - ] - - # Get the handler directly from the server - handler = server.request_handlers[types.ReadResourceRequest] - - # Create a request - request = types.ReadResourceRequest( - method="resources/read", - params=types.ReadResourceRequestParams(uri=FileUrl(temp_file.as_uri())), - ) - - # Call the handler - result = await handler(request) - assert isinstance(result.root, types.ReadResourceResult) - assert len(result.root.contents) == 1 - - content = result.root.contents[0] - assert isinstance(content, types.TextResourceContents) - assert content.text == 
"Hello World" - assert content.mimeType == "text/plain" +from collections.abc import Iterable +from pathlib import Path +from tempfile import NamedTemporaryFile + +import pytest +from pydantic import AnyUrl, FileUrl + +import mcp.types as types +from mcp.server.lowlevel.server import ReadResourceContents, Server + + +@pytest.fixture +def temp_file(): + """Create a temporary file for testing.""" + with NamedTemporaryFile(mode="w", delete=False) as f: + f.write("test content") + path = Path(f.name).resolve() + yield path + try: + path.unlink() + except FileNotFoundError: + pass + + +@pytest.mark.anyio +async def test_read_resource_text(temp_file: Path): + server = Server("test") + + @server.read_resource() + async def read_resource(uri: AnyUrl) -> Iterable[ReadResourceContents]: + return [ReadResourceContents(content="Hello World", mime_type="text/plain")] + + # Get the handler directly from the server + handler = server.request_handlers[types.ReadResourceRequest] + + # Create a request + request = types.ReadResourceRequest( + method="resources/read", + params=types.ReadResourceRequestParams(uri=FileUrl(temp_file.as_uri())), + ) + + # Call the handler + result = await handler(request) + assert isinstance(result.root, types.ReadResourceResult) + assert len(result.root.contents) == 1 + + content = result.root.contents[0] + assert isinstance(content, types.TextResourceContents) + assert content.text == "Hello World" + assert content.mimeType == "text/plain" + + +@pytest.mark.anyio +async def test_read_resource_binary(temp_file: Path): + server = Server("test") + + @server.read_resource() + async def read_resource(uri: AnyUrl) -> Iterable[ReadResourceContents]: + return [ + ReadResourceContents( + content=b"Hello World", mime_type="application/octet-stream" + ) + ] + + # Get the handler directly from the server + handler = server.request_handlers[types.ReadResourceRequest] + + # Create a request + request = types.ReadResourceRequest( + method="resources/read", + 
params=types.ReadResourceRequestParams(uri=FileUrl(temp_file.as_uri())), + ) + + # Call the handler + result = await handler(request) + assert isinstance(result.root, types.ReadResourceResult) + assert len(result.root.contents) == 1 + + content = result.root.contents[0] + assert isinstance(content, types.BlobResourceContents) + assert content.mimeType == "application/octet-stream" + + +@pytest.mark.anyio +async def test_read_resource_default_mime(temp_file: Path): + server = Server("test") + + @server.read_resource() + async def read_resource(uri: AnyUrl) -> Iterable[ReadResourceContents]: + return [ + ReadResourceContents( + content="Hello World", + # No mime_type specified, should default to text/plain + ) + ] + + # Get the handler directly from the server + handler = server.request_handlers[types.ReadResourceRequest] + + # Create a request + request = types.ReadResourceRequest( + method="resources/read", + params=types.ReadResourceRequestParams(uri=FileUrl(temp_file.as_uri())), + ) + + # Call the handler + result = await handler(request) + assert isinstance(result.root, types.ReadResourceResult) + assert len(result.root.contents) == 1 + + content = result.root.contents[0] + assert isinstance(content, types.TextResourceContents) + assert content.text == "Hello World" + assert content.mimeType == "text/plain" diff --git a/tests/server/test_stdio.py b/tests/server/test_stdio.py index c546a7167..b2d5234f4 100644 --- a/tests/server/test_stdio.py +++ b/tests/server/test_stdio.py @@ -1,70 +1,70 @@ -import io - -import anyio -import pytest - -from mcp.server.stdio import stdio_server -from mcp.shared.message import SessionMessage -from mcp.types import JSONRPCMessage, JSONRPCRequest, JSONRPCResponse - - -@pytest.mark.anyio -async def test_stdio_server(): - stdin = io.StringIO() - stdout = io.StringIO() - - messages = [ - JSONRPCMessage(root=JSONRPCRequest(jsonrpc="2.0", id=1, method="ping")), - JSONRPCMessage(root=JSONRPCResponse(jsonrpc="2.0", id=2, result={})), - ] - 
- for message in messages: - stdin.write(message.model_dump_json(by_alias=True, exclude_none=True) + "\n") - stdin.seek(0) - - async with stdio_server( - stdin=anyio.AsyncFile(stdin), stdout=anyio.AsyncFile(stdout) - ) as (read_stream, write_stream): - received_messages = [] - async with read_stream: - async for message in read_stream: - if isinstance(message, Exception): - raise message - received_messages.append(message.message) - if len(received_messages) == 2: - break - - # Verify received messages - assert len(received_messages) == 2 - assert received_messages[0] == JSONRPCMessage( - root=JSONRPCRequest(jsonrpc="2.0", id=1, method="ping") - ) - assert received_messages[1] == JSONRPCMessage( - root=JSONRPCResponse(jsonrpc="2.0", id=2, result={}) - ) - - # Test sending responses from the server - responses = [ - JSONRPCMessage(root=JSONRPCRequest(jsonrpc="2.0", id=3, method="ping")), - JSONRPCMessage(root=JSONRPCResponse(jsonrpc="2.0", id=4, result={})), - ] - - async with write_stream: - for response in responses: - session_message = SessionMessage(response) - await write_stream.send(session_message) - - stdout.seek(0) - output_lines = stdout.readlines() - assert len(output_lines) == 2 - - received_responses = [ - JSONRPCMessage.model_validate_json(line.strip()) for line in output_lines - ] - assert len(received_responses) == 2 - assert received_responses[0] == JSONRPCMessage( - root=JSONRPCRequest(jsonrpc="2.0", id=3, method="ping") - ) - assert received_responses[1] == JSONRPCMessage( - root=JSONRPCResponse(jsonrpc="2.0", id=4, result={}) - ) +import io + +import anyio +import pytest + +from mcp.server.stdio import stdio_server +from mcp.shared.message import SessionMessage +from mcp.types import JSONRPCMessage, JSONRPCRequest, JSONRPCResponse + + +@pytest.mark.anyio +async def test_stdio_server(): + stdin = io.StringIO() + stdout = io.StringIO() + + messages = [ + JSONRPCMessage(root=JSONRPCRequest(jsonrpc="2.0", id=1, method="ping")), + 
JSONRPCMessage(root=JSONRPCResponse(jsonrpc="2.0", id=2, result={})), + ] + + for message in messages: + stdin.write(message.model_dump_json(by_alias=True, exclude_none=True) + "\n") + stdin.seek(0) + + async with stdio_server( + stdin=anyio.AsyncFile(stdin), stdout=anyio.AsyncFile(stdout) + ) as (read_stream, write_stream): + received_messages = [] + async with read_stream: + async for message in read_stream: + if isinstance(message, Exception): + raise message + received_messages.append(message.message) + if len(received_messages) == 2: + break + + # Verify received messages + assert len(received_messages) == 2 + assert received_messages[0] == JSONRPCMessage( + root=JSONRPCRequest(jsonrpc="2.0", id=1, method="ping") + ) + assert received_messages[1] == JSONRPCMessage( + root=JSONRPCResponse(jsonrpc="2.0", id=2, result={}) + ) + + # Test sending responses from the server + responses = [ + JSONRPCMessage(root=JSONRPCRequest(jsonrpc="2.0", id=3, method="ping")), + JSONRPCMessage(root=JSONRPCResponse(jsonrpc="2.0", id=4, result={})), + ] + + async with write_stream: + for response in responses: + session_message = SessionMessage(response) + await write_stream.send(session_message) + + stdout.seek(0) + output_lines = stdout.readlines() + assert len(output_lines) == 2 + + received_responses = [ + JSONRPCMessage.model_validate_json(line.strip()) for line in output_lines + ] + assert len(received_responses) == 2 + assert received_responses[0] == JSONRPCMessage( + root=JSONRPCRequest(jsonrpc="2.0", id=3, method="ping") + ) + assert received_responses[1] == JSONRPCMessage( + root=JSONRPCResponse(jsonrpc="2.0", id=4, result={}) + ) diff --git a/tests/shared/test_memory.py b/tests/shared/test_memory.py index a0c32f556..2403d310e 100644 --- a/tests/shared/test_memory.py +++ b/tests/shared/test_memory.py @@ -1,47 +1,47 @@ -import pytest -from pydantic import AnyUrl -from typing_extensions import AsyncGenerator - -from mcp.client.session import ClientSession -from mcp.server 
import Server -from mcp.shared.memory import ( - create_connected_server_and_client_session, -) -from mcp.types import ( - EmptyResult, - Resource, -) - - -@pytest.fixture -def mcp_server() -> Server: - server = Server(name="test_server") - - @server.list_resources() - async def handle_list_resources(): - return [ - Resource( - uri=AnyUrl("memory://test"), - name="Test Resource", - description="A test resource", - ) - ] - - return server - - -@pytest.fixture -async def client_connected_to_server( - mcp_server: Server, -) -> AsyncGenerator[ClientSession, None]: - async with create_connected_server_and_client_session(mcp_server) as client_session: - yield client_session - - -@pytest.mark.anyio -async def test_memory_server_and_client_connection( - client_connected_to_server: ClientSession, -): - """Shows how a client and server can communicate over memory streams.""" - response = await client_connected_to_server.send_ping() - assert isinstance(response, EmptyResult) +import pytest +from pydantic import AnyUrl +from typing_extensions import AsyncGenerator + +from mcp.client.session import ClientSession +from mcp.server import Server +from mcp.shared.memory import ( + create_connected_server_and_client_session, +) +from mcp.types import ( + EmptyResult, + Resource, +) + + +@pytest.fixture +def mcp_server() -> Server: + server = Server(name="test_server") + + @server.list_resources() + async def handle_list_resources(): + return [ + Resource( + uri=AnyUrl("memory://test"), + name="Test Resource", + description="A test resource", + ) + ] + + return server + + +@pytest.fixture +async def client_connected_to_server( + mcp_server: Server, +) -> AsyncGenerator[ClientSession, None]: + async with create_connected_server_and_client_session(mcp_server) as client_session: + yield client_session + + +@pytest.mark.anyio +async def test_memory_server_and_client_connection( + client_connected_to_server: ClientSession, +): + """Shows how a client and server can communicate over memory 
streams.""" + response = await client_connected_to_server.send_ping() + assert isinstance(response, EmptyResult) diff --git a/tests/shared/test_session.py b/tests/shared/test_session.py index 59cb30c86..26b003761 100644 --- a/tests/shared/test_session.py +++ b/tests/shared/test_session.py @@ -1,126 +1,126 @@ -from collections.abc import AsyncGenerator - -import anyio -import pytest - -import mcp.types as types -from mcp.client.session import ClientSession -from mcp.server.lowlevel.server import Server -from mcp.shared.exceptions import McpError -from mcp.shared.memory import create_connected_server_and_client_session -from mcp.types import ( - CancelledNotification, - CancelledNotificationParams, - ClientNotification, - ClientRequest, - EmptyResult, -) - - -@pytest.fixture -def mcp_server() -> Server: - return Server(name="test server") - - -@pytest.fixture -async def client_connected_to_server( - mcp_server: Server, -) -> AsyncGenerator[ClientSession, None]: - async with create_connected_server_and_client_session(mcp_server) as client_session: - yield client_session - - -@pytest.mark.anyio -async def test_in_flight_requests_cleared_after_completion( - client_connected_to_server: ClientSession, -): - """Verify that _in_flight is empty after all requests complete.""" - # Send a request and wait for response - response = await client_connected_to_server.send_ping() - assert isinstance(response, EmptyResult) - - # Verify _in_flight is empty - assert len(client_connected_to_server._in_flight) == 0 - - -@pytest.mark.anyio -async def test_request_cancellation(): - """Test that requests can be cancelled while in-flight.""" - # The tool is already registered in the fixture - - ev_tool_called = anyio.Event() - ev_cancelled = anyio.Event() - request_id = None - - # Start the request in a separate task so we can cancel it - def make_server() -> Server: - server = Server(name="TestSessionServer") - - # Register the tool handler - @server.call_tool() - async def 
handle_call_tool(name: str, arguments: dict | None) -> list: - nonlocal request_id, ev_tool_called - if name == "slow_tool": - request_id = server.request_context.request_id - ev_tool_called.set() - await anyio.sleep(10) # Long enough to ensure we can cancel - return [] - raise ValueError(f"Unknown tool: {name}") - - # Register the tool so it shows up in list_tools - @server.list_tools() - async def handle_list_tools() -> list[types.Tool]: - return [ - types.Tool( - name="slow_tool", - description="A slow tool that takes 10 seconds to complete", - inputSchema={}, - ) - ] - - return server - - async def make_request(client_session): - nonlocal ev_cancelled - try: - await client_session.send_request( - ClientRequest( - types.CallToolRequest( - method="tools/call", - params=types.CallToolRequestParams( - name="slow_tool", arguments={} - ), - ) - ), - types.CallToolResult, - ) - pytest.fail("Request should have been cancelled") - except McpError as e: - # Expected - request was cancelled - assert "Request cancelled" in str(e) - ev_cancelled.set() - - async with create_connected_server_and_client_session( - make_server() - ) as client_session: - async with anyio.create_task_group() as tg: - tg.start_soon(make_request, client_session) - - # Wait for the request to be in-flight - with anyio.fail_after(1): # Timeout after 1 second - await ev_tool_called.wait() - - # Send cancellation notification - assert request_id is not None - await client_session.send_notification( - ClientNotification( - CancelledNotification( - method="notifications/cancelled", - params=CancelledNotificationParams(requestId=request_id), - ) - ) - ) - - # Give cancellation time to process - with anyio.fail_after(1): - await ev_cancelled.wait() +from collections.abc import AsyncGenerator + +import anyio +import pytest + +import mcp.types as types +from mcp.client.session import ClientSession +from mcp.server.lowlevel.server import Server +from mcp.shared.exceptions import McpError +from 
mcp.shared.memory import create_connected_server_and_client_session +from mcp.types import ( + CancelledNotification, + CancelledNotificationParams, + ClientNotification, + ClientRequest, + EmptyResult, +) + + +@pytest.fixture +def mcp_server() -> Server: + return Server(name="test server") + + +@pytest.fixture +async def client_connected_to_server( + mcp_server: Server, +) -> AsyncGenerator[ClientSession, None]: + async with create_connected_server_and_client_session(mcp_server) as client_session: + yield client_session + + +@pytest.mark.anyio +async def test_in_flight_requests_cleared_after_completion( + client_connected_to_server: ClientSession, +): + """Verify that _in_flight is empty after all requests complete.""" + # Send a request and wait for response + response = await client_connected_to_server.send_ping() + assert isinstance(response, EmptyResult) + + # Verify _in_flight is empty + assert len(client_connected_to_server._in_flight) == 0 + + +@pytest.mark.anyio +async def test_request_cancellation(): + """Test that requests can be cancelled while in-flight.""" + # The tool is already registered in the fixture + + ev_tool_called = anyio.Event() + ev_cancelled = anyio.Event() + request_id = None + + # Start the request in a separate task so we can cancel it + def make_server() -> Server: + server = Server(name="TestSessionServer") + + # Register the tool handler + @server.call_tool() + async def handle_call_tool(name: str, arguments: dict | None) -> list: + nonlocal request_id, ev_tool_called + if name == "slow_tool": + request_id = server.request_context.request_id + ev_tool_called.set() + await anyio.sleep(10) # Long enough to ensure we can cancel + return [] + raise ValueError(f"Unknown tool: {name}") + + # Register the tool so it shows up in list_tools + @server.list_tools() + async def handle_list_tools() -> list[types.Tool]: + return [ + types.Tool( + name="slow_tool", + description="A slow tool that takes 10 seconds to complete", + inputSchema={}, + 
) + ] + + return server + + async def make_request(client_session): + nonlocal ev_cancelled + try: + await client_session.send_request( + ClientRequest( + types.CallToolRequest( + method="tools/call", + params=types.CallToolRequestParams( + name="slow_tool", arguments={} + ), + ) + ), + types.CallToolResult, + ) + pytest.fail("Request should have been cancelled") + except McpError as e: + # Expected - request was cancelled + assert "Request cancelled" in str(e) + ev_cancelled.set() + + async with create_connected_server_and_client_session( + make_server() + ) as client_session: + async with anyio.create_task_group() as tg: + tg.start_soon(make_request, client_session) + + # Wait for the request to be in-flight + with anyio.fail_after(1): # Timeout after 1 second + await ev_tool_called.wait() + + # Send cancellation notification + assert request_id is not None + await client_session.send_notification( + ClientNotification( + CancelledNotification( + method="notifications/cancelled", + params=CancelledNotificationParams(requestId=request_id), + ) + ) + ) + + # Give cancellation time to process + with anyio.fail_after(1): + await ev_cancelled.wait() diff --git a/tests/shared/test_ws.py b/tests/shared/test_ws.py index 1381c8153..490ba288e 100644 --- a/tests/shared/test_ws.py +++ b/tests/shared/test_ws.py @@ -1,228 +1,228 @@ -import multiprocessing -import socket -import time -from collections.abc import AsyncGenerator, Generator - -import anyio -import pytest -import uvicorn -from pydantic import AnyUrl -from starlette.applications import Starlette -from starlette.routing import WebSocketRoute - -from mcp.client.session import ClientSession -from mcp.client.websocket import websocket_client -from mcp.server import Server -from mcp.server.websocket import websocket_server -from mcp.shared.exceptions import McpError -from mcp.types import ( - EmptyResult, - ErrorData, - InitializeResult, - ReadResourceResult, - TextContent, - TextResourceContents, - Tool, -) - 
-SERVER_NAME = "test_server_for_WS" - - -@pytest.fixture -def server_port() -> int: - with socket.socket() as s: - s.bind(("127.0.0.1", 0)) - return s.getsockname()[1] - - -@pytest.fixture -def server_url(server_port: int) -> str: - return f"ws://127.0.0.1:{server_port}" - - -# Test server implementation -class ServerTest(Server): - def __init__(self): - super().__init__(SERVER_NAME) - - @self.read_resource() - async def handle_read_resource(uri: AnyUrl) -> str | bytes: - if uri.scheme == "foobar": - return f"Read {uri.host}" - elif uri.scheme == "slow": - # Simulate a slow resource - await anyio.sleep(2.0) - return f"Slow response from {uri.host}" - - raise McpError( - error=ErrorData( - code=404, message="OOPS! no resource with that URI was found" - ) - ) - - @self.list_tools() - async def handle_list_tools() -> list[Tool]: - return [ - Tool( - name="test_tool", - description="A test tool", - inputSchema={"type": "object", "properties": {}}, - ) - ] - - @self.call_tool() - async def handle_call_tool(name: str, args: dict) -> list[TextContent]: - return [TextContent(type="text", text=f"Called {name}")] - - -# Test fixtures -def make_server_app() -> Starlette: - """Create test Starlette app with WebSocket transport""" - server = ServerTest() - - async def handle_ws(websocket): - async with websocket_server( - websocket.scope, websocket.receive, websocket.send - ) as streams: - await server.run( - streams[0], streams[1], server.create_initialization_options() - ) - - app = Starlette( - routes=[ - WebSocketRoute("/ws", endpoint=handle_ws), - ] - ) - - return app - - -def run_server(server_port: int) -> None: - app = make_server_app() - server = uvicorn.Server( - config=uvicorn.Config( - app=app, host="127.0.0.1", port=server_port, log_level="error" - ) - ) - print(f"starting server on {server_port}") - server.run() - - # Give server time to start - while not server.started: - print("waiting for server to start") - time.sleep(0.5) - - -@pytest.fixture() -def 
server(server_port: int) -> Generator[None, None, None]: - proc = multiprocessing.Process( - target=run_server, kwargs={"server_port": server_port}, daemon=True - ) - print("starting process") - proc.start() - - # Wait for server to be running - max_attempts = 20 - attempt = 0 - print("waiting for server to start") - while attempt < max_attempts: - try: - with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: - s.connect(("127.0.0.1", server_port)) - break - except ConnectionRefusedError: - time.sleep(0.1) - attempt += 1 - else: - raise RuntimeError(f"Server failed to start after {max_attempts} attempts") - - yield - - print("killing server") - # Signal the server to stop - proc.kill() - proc.join(timeout=2) - if proc.is_alive(): - print("server process failed to terminate") - - -@pytest.fixture() -async def initialized_ws_client_session( - server, server_url: str -) -> AsyncGenerator[ClientSession, None]: - """Create and initialize a WebSocket client session""" - async with websocket_client(server_url + "/ws") as streams: - async with ClientSession(*streams) as session: - # Test initialization - result = await session.initialize() - assert isinstance(result, InitializeResult) - assert result.serverInfo.name == SERVER_NAME - - # Test ping - ping_result = await session.send_ping() - assert isinstance(ping_result, EmptyResult) - - yield session - - -# Tests -@pytest.mark.anyio -async def test_ws_client_basic_connection(server: None, server_url: str) -> None: - """Test the WebSocket connection establishment""" - async with websocket_client(server_url + "/ws") as streams: - async with ClientSession(*streams) as session: - # Test initialization - result = await session.initialize() - assert isinstance(result, InitializeResult) - assert result.serverInfo.name == SERVER_NAME - - # Test ping - ping_result = await session.send_ping() - assert isinstance(ping_result, EmptyResult) - - -@pytest.mark.anyio -async def test_ws_client_happy_request_and_response( - 
initialized_ws_client_session: ClientSession, -) -> None: - """Test a successful request and response via WebSocket""" - result = await initialized_ws_client_session.read_resource( - AnyUrl("foobar://example") - ) - assert isinstance(result, ReadResourceResult) - assert isinstance(result.contents, list) - assert len(result.contents) > 0 - assert isinstance(result.contents[0], TextResourceContents) - assert result.contents[0].text == "Read example" - - -@pytest.mark.anyio -async def test_ws_client_exception_handling( - initialized_ws_client_session: ClientSession, -) -> None: - """Test exception handling in WebSocket communication""" - with pytest.raises(McpError) as exc_info: - await initialized_ws_client_session.read_resource(AnyUrl("unknown://example")) - assert exc_info.value.error.code == 404 - - -@pytest.mark.anyio -async def test_ws_client_timeout( - initialized_ws_client_session: ClientSession, -) -> None: - """Test timeout handling in WebSocket communication""" - # Set a very short timeout to trigger a timeout exception - with pytest.raises(TimeoutError): - with anyio.fail_after(0.1): # 100ms timeout - await initialized_ws_client_session.read_resource(AnyUrl("slow://example")) - - # Now test that we can still use the session after a timeout - with anyio.fail_after(5): # Longer timeout to allow completion - result = await initialized_ws_client_session.read_resource( - AnyUrl("foobar://example") - ) - assert isinstance(result, ReadResourceResult) - assert isinstance(result.contents, list) - assert len(result.contents) > 0 - assert isinstance(result.contents[0], TextResourceContents) - assert result.contents[0].text == "Read example" +import multiprocessing +import socket +import time +from collections.abc import AsyncGenerator, Generator + +import anyio +import pytest +import uvicorn +from pydantic import AnyUrl +from starlette.applications import Starlette +from starlette.routing import WebSocketRoute + +from mcp.client.session import ClientSession +from 
mcp.client.websocket import websocket_client +from mcp.server import Server +from mcp.server.websocket import websocket_server +from mcp.shared.exceptions import McpError +from mcp.types import ( + EmptyResult, + ErrorData, + InitializeResult, + ReadResourceResult, + TextContent, + TextResourceContents, + Tool, +) + +SERVER_NAME = "test_server_for_WS" + + +@pytest.fixture +def server_port() -> int: + with socket.socket() as s: + s.bind(("127.0.0.1", 0)) + return s.getsockname()[1] + + +@pytest.fixture +def server_url(server_port: int) -> str: + return f"ws://127.0.0.1:{server_port}" + + +# Test server implementation +class ServerTest(Server): + def __init__(self): + super().__init__(SERVER_NAME) + + @self.read_resource() + async def handle_read_resource(uri: AnyUrl) -> str | bytes: + if uri.scheme == "foobar": + return f"Read {uri.host}" + elif uri.scheme == "slow": + # Simulate a slow resource + await anyio.sleep(2.0) + return f"Slow response from {uri.host}" + + raise McpError( + error=ErrorData( + code=404, message="OOPS! 
no resource with that URI was found" + ) + ) + + @self.list_tools() + async def handle_list_tools() -> list[Tool]: + return [ + Tool( + name="test_tool", + description="A test tool", + inputSchema={"type": "object", "properties": {}}, + ) + ] + + @self.call_tool() + async def handle_call_tool(name: str, args: dict) -> list[TextContent]: + return [TextContent(type="text", text=f"Called {name}")] + + +# Test fixtures +def make_server_app() -> Starlette: + """Create test Starlette app with WebSocket transport""" + server = ServerTest() + + async def handle_ws(websocket): + async with websocket_server( + websocket.scope, websocket.receive, websocket.send + ) as streams: + await server.run( + streams[0], streams[1], server.create_initialization_options() + ) + + app = Starlette( + routes=[ + WebSocketRoute("/ws", endpoint=handle_ws), + ] + ) + + return app + + +def run_server(server_port: int) -> None: + app = make_server_app() + server = uvicorn.Server( + config=uvicorn.Config( + app=app, host="127.0.0.1", port=server_port, log_level="error" + ) + ) + print(f"starting server on {server_port}") + server.run() + + # Give server time to start + while not server.started: + print("waiting for server to start") + time.sleep(0.5) + + +@pytest.fixture() +def server(server_port: int) -> Generator[None, None, None]: + proc = multiprocessing.Process( + target=run_server, kwargs={"server_port": server_port}, daemon=True + ) + print("starting process") + proc.start() + + # Wait for server to be running + max_attempts = 20 + attempt = 0 + print("waiting for server to start") + while attempt < max_attempts: + try: + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: + s.connect(("127.0.0.1", server_port)) + break + except ConnectionRefusedError: + time.sleep(0.1) + attempt += 1 + else: + raise RuntimeError(f"Server failed to start after {max_attempts} attempts") + + yield + + print("killing server") + # Signal the server to stop + proc.kill() + proc.join(timeout=2) + if 
proc.is_alive(): + print("server process failed to terminate") + + +@pytest.fixture() +async def initialized_ws_client_session( + server, server_url: str +) -> AsyncGenerator[ClientSession, None]: + """Create and initialize a WebSocket client session""" + async with websocket_client(server_url + "/ws") as streams: + async with ClientSession(*streams) as session: + # Test initialization + result = await session.initialize() + assert isinstance(result, InitializeResult) + assert result.serverInfo.name == SERVER_NAME + + # Test ping + ping_result = await session.send_ping() + assert isinstance(ping_result, EmptyResult) + + yield session + + +# Tests +@pytest.mark.anyio +async def test_ws_client_basic_connection(server: None, server_url: str) -> None: + """Test the WebSocket connection establishment""" + async with websocket_client(server_url + "/ws") as streams: + async with ClientSession(*streams) as session: + # Test initialization + result = await session.initialize() + assert isinstance(result, InitializeResult) + assert result.serverInfo.name == SERVER_NAME + + # Test ping + ping_result = await session.send_ping() + assert isinstance(ping_result, EmptyResult) + + +@pytest.mark.anyio +async def test_ws_client_happy_request_and_response( + initialized_ws_client_session: ClientSession, +) -> None: + """Test a successful request and response via WebSocket""" + result = await initialized_ws_client_session.read_resource( + AnyUrl("foobar://example") + ) + assert isinstance(result, ReadResourceResult) + assert isinstance(result.contents, list) + assert len(result.contents) > 0 + assert isinstance(result.contents[0], TextResourceContents) + assert result.contents[0].text == "Read example" + + +@pytest.mark.anyio +async def test_ws_client_exception_handling( + initialized_ws_client_session: ClientSession, +) -> None: + """Test exception handling in WebSocket communication""" + with pytest.raises(McpError) as exc_info: + await 
initialized_ws_client_session.read_resource(AnyUrl("unknown://example")) + assert exc_info.value.error.code == 404 + + +@pytest.mark.anyio +async def test_ws_client_timeout( + initialized_ws_client_session: ClientSession, +) -> None: + """Test timeout handling in WebSocket communication""" + # Set a very short timeout to trigger a timeout exception + with pytest.raises(TimeoutError): + with anyio.fail_after(0.1): # 100ms timeout + await initialized_ws_client_session.read_resource(AnyUrl("slow://example")) + + # Now test that we can still use the session after a timeout + with anyio.fail_after(5): # Longer timeout to allow completion + result = await initialized_ws_client_session.read_resource( + AnyUrl("foobar://example") + ) + assert isinstance(result, ReadResourceResult) + assert isinstance(result.contents, list) + assert len(result.contents) > 0 + assert isinstance(result.contents[0], TextResourceContents) + assert result.contents[0].text == "Read example" diff --git a/tests/test_types.py b/tests/test_types.py index a39d33412..8e8cdc71b 100644 --- a/tests/test_types.py +++ b/tests/test_types.py @@ -1,32 +1,32 @@ -import pytest - -from mcp.types import ( - LATEST_PROTOCOL_VERSION, - ClientRequest, - JSONRPCMessage, - JSONRPCRequest, -) - - -@pytest.mark.anyio -async def test_jsonrpc_request(): - json_data = { - "jsonrpc": "2.0", - "id": 1, - "method": "initialize", - "params": { - "protocolVersion": LATEST_PROTOCOL_VERSION, - "capabilities": {"batch": None, "sampling": None}, - "clientInfo": {"name": "mcp", "version": "0.1.0"}, - }, - } - - request = JSONRPCMessage.model_validate(json_data) - assert isinstance(request.root, JSONRPCRequest) - ClientRequest.model_validate(request.model_dump(by_alias=True, exclude_none=True)) - - assert request.root.jsonrpc == "2.0" - assert request.root.id == 1 - assert request.root.method == "initialize" - assert request.root.params is not None - assert request.root.params["protocolVersion"] == LATEST_PROTOCOL_VERSION +import 
pytest + +from mcp.types import ( + LATEST_PROTOCOL_VERSION, + ClientRequest, + JSONRPCMessage, + JSONRPCRequest, +) + + +@pytest.mark.anyio +async def test_jsonrpc_request(): + json_data = { + "jsonrpc": "2.0", + "id": 1, + "method": "initialize", + "params": { + "protocolVersion": LATEST_PROTOCOL_VERSION, + "capabilities": {"batch": None, "sampling": None}, + "clientInfo": {"name": "mcp", "version": "0.1.0"}, + }, + } + + request = JSONRPCMessage.model_validate(json_data) + assert isinstance(request.root, JSONRPCRequest) + ClientRequest.model_validate(request.model_dump(by_alias=True, exclude_none=True)) + + assert request.root.jsonrpc == "2.0" + assert request.root.id == 1 + assert request.root.method == "initialize" + assert request.root.params is not None + assert request.root.params["protocolVersion"] == LATEST_PROTOCOL_VERSION