diff --git a/llm_openrouter.py b/llm_openrouter.py
index 059754e..648450d 100644
--- a/llm_openrouter.py
+++ b/llm_openrouter.py
@@ -39,6 +39,11 @@ class ReasoningEffortEnum(str, Enum):
     high = "high"
 
 
+class WebSearchEngineEnum(str, Enum):
+    exa = "exa"
+    native = "native"
+
+
 class _mixin:
     class Options(Chat.Options):
         online: Optional[bool] = Field(
@@ -61,6 +66,10 @@ class Options(Chat.Options):
             description="Set to true to enable reasoning with default parameters",
             default=None,
         )
+        web_search_engine: Optional[WebSearchEngineEnum] = Field(
+            description='Web search engine: "exa" or "native"',
+            default=None,
+        )
 
         @field_validator("provider")
         def validate_provider(cls, provider):
@@ -81,9 +90,12 @@ def build_kwargs(self, prompt, stream):
         kwargs.pop("reasoning_effort", None)
         kwargs.pop("reasoning_max_tokens", None)
         kwargs.pop("reasoning_enabled", None)
+        kwargs.pop("web_search_engine", None)
         extra_body = {}
         if prompt.options.online:
             extra_body["plugins"] = [{"id": "web"}]
+        if prompt.options.web_search_engine:
+            extra_body["plugins"] = [{"id": "web", "engine": prompt.options.web_search_engine}]
         if prompt.options.provider:
             extra_body["provider"] = prompt.options.provider
         reasoning = {}
diff --git a/tests/test_llm_openrouter.py b/tests/test_llm_openrouter.py
index 9f942ec..bb00f0c 100644
--- a/tests/test_llm_openrouter.py
+++ b/tests/test_llm_openrouter.py
@@ -3,6 +3,7 @@
 from click.testing import CliRunner
 from inline_snapshot import snapshot
 from llm.cli import cli
+from unittest.mock import MagicMock
 
 TINY_PNG = (
     b"\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\xa6\x00\x00\x01\x1a"
@@ -143,3 +144,35 @@ def llm_version() -> str:
         "model": "openai/gpt-4.1-mini",
     }
 )
+
+
+def test_web_search_engine_native():
+    model = llm.get_model("openrouter/openai/gpt-5")
+    prompt = MagicMock()
+    prompt.options = model.Options(web_search_engine="native")
+    kwargs = model.build_kwargs(prompt, stream=False)
+    assert kwargs["extra_body"]["plugins"] == [{"id": "web", "engine": "native"}]
+
+
+def test_web_search_engine_exa():
+    model = llm.get_model("openrouter/openai/gpt-5")
+    prompt = MagicMock()
+    prompt.options = model.Options(web_search_engine="exa")
+    kwargs = model.build_kwargs(prompt, stream=False)
+    assert kwargs["extra_body"]["plugins"] == [{"id": "web", "engine": "exa"}]
+
+
+def test_online():
+    model = llm.get_model("openrouter/openai/gpt-5")
+    prompt = MagicMock()
+    prompt.options = model.Options(online=True)
+    kwargs = model.build_kwargs(prompt, stream=False)
+    assert kwargs["extra_body"]["plugins"] == [{"id": "web"}]
+
+
+def test_online_and_web_search_engine_together():
+    model = llm.get_model("openrouter/openai/gpt-5")
+    prompt = MagicMock()
+    prompt.options = model.Options(online=True, web_search_engine="native")
+    kwargs = model.build_kwargs(prompt, stream=False)
+    assert kwargs["extra_body"]["plugins"] == [{"id": "web", "engine": "native"}]