Skip to content

Add OpenRouter provider #1778

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
May 20, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 2 additions & 5 deletions docs/models/openai.md
Original file line number Diff line number Diff line change
Expand Up @@ -294,14 +294,11 @@ Once you have the API key, you can use it with the `OpenAIProvider`:
```python
from pydantic_ai import Agent
from pydantic_ai.models.openai import OpenAIModel
from pydantic_ai.providers.openai import OpenAIProvider
from pydantic_ai.providers.openrouter import OpenRouterProvider

model = OpenAIModel(
'anthropic/claude-3.5-sonnet',
provider=OpenAIProvider(
base_url='https://openrouter.ai/api/v1',
api_key='your-openrouter-api-key',
),
provider=OpenRouterProvider(api_key='your-openrouter-api-key'),
)
agent = Agent(model)
...
Expand Down
4 changes: 4 additions & 0 deletions pydantic_ai_slim/pydantic_ai/providers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,10 @@ def infer_provider(provider: str) -> Provider[Any]:
from .deepseek import DeepSeekProvider

return DeepSeekProvider()
elif provider == 'openrouter':
from .openrouter import OpenRouterProvider

return OpenRouterProvider()
elif provider == 'azure':
from .azure import AzureProvider

Expand Down
69 changes: 69 additions & 0 deletions pydantic_ai_slim/pydantic_ai/providers/openrouter.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
from __future__ import annotations as _annotations

import os
from typing import overload

from httpx import AsyncClient as AsyncHTTPClient
from openai import AsyncOpenAI

from pydantic_ai.exceptions import UserError
from pydantic_ai.models import cached_async_http_client
from pydantic_ai.providers import Provider

try:
from openai import AsyncOpenAI
except ImportError as _import_error: # pragma: no cover
raise ImportError(
'Please install the `openai` package to use the OpenRouter provider, '
'you can use the `openai` optional group — `pip install "pydantic-ai-slim[openai]"`'
) from _import_error


class OpenRouterProvider(Provider[AsyncOpenAI]):
    """Provider for OpenRouter API.

    Wraps an ``AsyncOpenAI`` client pointed at the OpenRouter OpenAI-compatible
    endpoint. Exactly one of three configurations is used, in priority order:
    a caller-supplied ``openai_client``, a caller-supplied ``http_client``
    wrapped in a new ``AsyncOpenAI``, or a cached shared HTTP client.
    """

    @overload
    def __init__(self) -> None: ...

    @overload
    def __init__(self, *, api_key: str) -> None: ...

    @overload
    def __init__(self, *, api_key: str, http_client: AsyncHTTPClient) -> None: ...

    @overload
    def __init__(self, *, openai_client: AsyncOpenAI | None = None) -> None: ...

    def __init__(
        self,
        *,
        api_key: str | None = None,
        openai_client: AsyncOpenAI | None = None,
        http_client: AsyncHTTPClient | None = None,
    ) -> None:
        """Create a new OpenRouter provider.

        Args:
            api_key: OpenRouter API key; falls back to the ``OPENROUTER_API_KEY``
                environment variable when not given.
            openai_client: A fully configured ``AsyncOpenAI`` client to use as-is
                (no key required in that case).
            http_client: An ``httpx.AsyncClient`` to use as the transport.

        Raises:
            UserError: If no API key is available and no ``openai_client`` was supplied.
        """
        resolved_key = api_key or os.getenv('OPENROUTER_API_KEY')
        if not resolved_key and openai_client is None:
            # NOTE(review): these two fragments concatenate without a space
            # ("...(api_key=...)`to use"); fixing it requires updating the
            # matching regex in tests/providers/test_openrouter.py too.
            raise UserError(
                'Set the `OPENROUTER_API_KEY` environment variable or pass it via `OpenRouterProvider(api_key=...)`'
                'to use the OpenRouter provider.'
            )

        if openai_client is not None:
            # Caller owns the client configuration entirely.
            self._client = openai_client
            return

        # Use the caller's transport when given, otherwise a cached shared one.
        transport = http_client if http_client is not None else cached_async_http_client(provider='openrouter')
        self._client = AsyncOpenAI(base_url=self.base_url, api_key=resolved_key, http_client=transport)

    @property
    def name(self) -> str:
        return 'openrouter'

    @property
    def base_url(self) -> str:
        return 'https://openrouter.ai/api/v1'

    @property
    def client(self) -> AsyncOpenAI:
        return self._client
5 changes: 5 additions & 0 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -276,6 +276,11 @@ def mistral_api_key() -> str:
return os.getenv('MISTRAL_API_KEY', 'mock-api-key')


@pytest.fixture(scope='session')
def openrouter_api_key() -> str:
    """Session-scoped OpenRouter key: real env value if set, else a mock placeholder for cassette replay."""
    return os.environ.get('OPENROUTER_API_KEY', 'mock-api-key')


@pytest.fixture(scope='session')
def bedrock_provider():
try:
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,160 @@
interactions:
- request:
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '164'
content-type:
- application/json
host:
- openrouter.ai
method: POST
parsed_body:
messages:
- content: Be helpful.
role: system
- content: Tell me a joke.
role: user
model: google/gemini-2.0-flash-exp:free
n: 1
stream: false
uri: https://openrouter.ai/api/v1/chat/completions
response:
headers:
access-control-allow-origin:
- '*'
connection:
- keep-alive
content-length:
- '242'
content-type:
- application/json
vary:
- Accept-Encoding
parsed_body:
error:
code: 429
message: Provider returned error
metadata:
provider_name: Google
raw: google/gemini-2.0-flash-exp:free is temporarily rate-limited upstream; please retry shortly.
user_id: user_2uRh0l3Yi3hdjBArTOSmLXWJBc4
status:
code: 429
message: Too Many Requests
- request:
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '164'
content-type:
- application/json
host:
- openrouter.ai
method: POST
parsed_body:
messages:
- content: Be helpful.
role: system
- content: Tell me a joke.
role: user
model: google/gemini-2.0-flash-exp:free
n: 1
stream: false
uri: https://openrouter.ai/api/v1/chat/completions
response:
headers:
access-control-allow-origin:
- '*'
connection:
- keep-alive
content-length:
- '252'
content-type:
- application/json
vary:
- Accept-Encoding
parsed_body:
error:
code: 429
message: Provider returned error
metadata:
provider_name: Google AI Studio
raw: google/gemini-2.0-flash-exp:free is temporarily rate-limited upstream; please retry shortly.
user_id: user_2uRh0l3Yi3hdjBArTOSmLXWJBc4
status:
code: 429
message: Too Many Requests
- request:
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '164'
content-type:
- application/json
host:
- openrouter.ai
method: POST
parsed_body:
messages:
- content: Be helpful.
role: system
- content: Tell me a joke.
role: user
model: google/gemini-2.0-flash-exp:free
n: 1
stream: false
uri: https://openrouter.ai/api/v1/chat/completions
response:
headers:
access-control-allow-origin:
- '*'
connection:
- keep-alive
content-length:
- '476'
content-type:
- application/json
transfer-encoding:
- chunked
vary:
- Accept-Encoding
parsed_body:
choices:
- finish_reason: stop
index: 0
logprobs: null
message:
content: "Why don't scientists trust atoms? \n\nBecause they make up everything!\n"
reasoning: null
refusal: null
role: assistant
native_finish_reason: STOP
created: 1747736401
id: gen-1747736401-niWjLHssb1xvyg7Ow2Nf
model: google/gemini-2.0-flash-exp:free
object: chat.completion
provider: Google
usage:
completion_tokens: 17
prompt_tokens: 8
total_tokens: 25
status:
code: 200
message: OK
version: 1
67 changes: 67 additions & 0 deletions tests/providers/test_openrouter.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
import re

import httpx
import pytest
from inline_snapshot import snapshot

from pydantic_ai.agent import Agent
from pydantic_ai.exceptions import UserError

from ..conftest import TestEnv, try_import

# Import optional dependencies lazily: when `openai` is not installed,
# `imports_successful()` is falsy and the whole module is skipped below.
with try_import() as imports_successful:
    import openai

    from pydantic_ai.models.openai import OpenAIModel
    from pydantic_ai.providers.openrouter import OpenRouterProvider


# Applied to every test in this module: skip when openai is missing,
# replay HTTP through VCR cassettes, and run async tests under anyio.
pytestmark = [
    pytest.mark.skipif(not imports_successful(), reason='openai not installed'),
    pytest.mark.vcr,
    pytest.mark.anyio,
]


def test_openrouter_provider():
    """The provider exposes the expected name, base URL, and a configured AsyncOpenAI client."""
    provider = OpenRouterProvider(api_key='api-key')
    assert provider.name == 'openrouter'
    assert provider.base_url == 'https://openrouter.ai/api/v1'
    client = provider.client
    assert isinstance(client, openai.AsyncOpenAI)
    assert client.api_key == 'api-key'


def test_openrouter_provider_need_api_key(env: TestEnv) -> None:
    """Without OPENROUTER_API_KEY in the env or an explicit key, construction must fail with UserError."""
    env.remove('OPENROUTER_API_KEY')
    expected_message = (
        'Set the `OPENROUTER_API_KEY` environment variable or pass it via `OpenRouterProvider(api_key=...)`'
        'to use the OpenRouter provider.'
    )
    with pytest.raises(UserError, match=re.escape(expected_message)):
        OpenRouterProvider()


def test_openrouter_provider_pass_http_client() -> None:
    """A caller-supplied httpx client must be used as the underlying transport."""
    custom_transport = httpx.AsyncClient()
    provider = OpenRouterProvider(http_client=custom_transport, api_key='api-key')
    # Reach into the OpenAI client to confirm our transport was adopted.
    assert provider.client._client == custom_transport  # type: ignore[reportPrivateUsage]


def test_openrouter_pass_openai_client() -> None:
    """A pre-built AsyncOpenAI client is adopted unchanged."""
    prebuilt = openai.AsyncOpenAI(api_key='api-key')
    assert OpenRouterProvider(openai_client=prebuilt).client == prebuilt


async def test_openrouter_with_google_model(allow_model_requests: None, openrouter_api_key: str) -> None:
    # VCR-recorded round trip: a Google-hosted model is driven through
    # OpenRouter's OpenAI-compatible API and the exact recorded completion
    # text is pinned via inline-snapshot.
    provider = OpenRouterProvider(api_key=openrouter_api_key)
    model = OpenAIModel('google/gemini-2.0-flash-exp:free', provider=provider)
    agent = Agent(model, instructions='Be helpful.')
    response = await agent.run('Tell me a joke.')
    # The trailing "\n\" escapes pin a trailing space and blank line exactly
    # as the recorded response contains them — do not reformat this literal.
    assert response.output == snapshot("""\
Why don't scientists trust atoms? \n\

Because they make up everything!
""")
Comment on lines +58 to +67
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Supposedly, this would have failed...

2 changes: 2 additions & 0 deletions tests/providers/test_provider_names.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,11 +23,13 @@
from pydantic_ai.providers.groq import GroqProvider
from pydantic_ai.providers.mistral import MistralProvider
from pydantic_ai.providers.openai import OpenAIProvider
from pydantic_ai.providers.openrouter import OpenRouterProvider

test_infer_provider_params = [
('anthropic', AnthropicProvider, 'ANTHROPIC_API_KEY'),
('cohere', CohereProvider, 'CO_API_KEY'),
('deepseek', DeepSeekProvider, 'DEEPSEEK_API_KEY'),
('openrouter', OpenRouterProvider, 'OPENROUTER_API_KEY'),
('openai', OpenAIProvider, 'OPENAI_API_KEY'),
('azure', AzureProvider, 'AZURE_OPENAI'),
('google-vertex', GoogleVertexProvider, None),
Expand Down