Skip to content

Commit 4050078

Browse files
authored
refactor: replace hardcoded thinking models with dynamic capability detection (#65)
* refactor: replace hardcoded thinking models with dynamic capability detection. Remove THINKING_MODELS constant and implement dynamic model capability checking using ollama.show() to detect thinking mode support at runtime.
* chore: remove unneeded python version config
* Bump version to 0.17.0
1 parent bb9215c commit 4050078

File tree

7 files changed

+364
-362
lines changed

7 files changed

+364
-362
lines changed

.python-version

Lines changed: 0 additions & 1 deletion
This file was deleted.

cli-package/pyproject.toml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[project]
22
name = "ollmcp"
3-
version = "0.16.0"
3+
version = "0.17.0"
44
description = "CLI for MCP Client for Ollama - An easy-to-use command for interacting with Ollama through MCP"
55
readme = "README.md"
66
requires-python = ">=3.10"
@@ -9,7 +9,7 @@ authors = [
99
{name = "Jonathan Löwenstern"}
1010
]
1111
dependencies = [
12-
"mcp-client-for-ollama==0.16.0"
12+
"mcp-client-for-ollama==0.17.0"
1313
]
1414

1515
[project.scripts]

mcp_client_for_ollama/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
11
"""MCP Client for Ollama package."""
22

3-
__version__ = "0.16.0"
3+
__version__ = "0.17.0"

mcp_client_for_ollama/client.py

Lines changed: 34 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
from . import __version__
1717
from .config.manager import ConfigManager
1818
from .utils.version import check_for_updates
19-
from .utils.constants import DEFAULT_CLAUDE_CONFIG, DEFAULT_MODEL, DEFAULT_OLLAMA_HOST, THINKING_MODELS, DEFAULT_COMPLETION_STYLE
19+
from .utils.constants import DEFAULT_CLAUDE_CONFIG, DEFAULT_MODEL, DEFAULT_OLLAMA_HOST, DEFAULT_COMPLETION_STYLE
2020
from .server.connector import ServerConnector
2121
from .models.manager import ModelManager
2222
from .models.config_manager import ModelConfigManager
@@ -82,16 +82,25 @@ def display_current_model(self):
8282
"""Display the currently selected model"""
8383
self.model_manager.display_current_model()
8484

85-
def supports_thinking_mode(self) -> bool:
86-
"""Check if the current model supports thinking mode
85+
async def supports_thinking_mode(self) -> bool:
86+
"""Check if the current model supports thinking mode by checking its capabilities
8787
8888
Returns:
8989
bool: True if the current model supports thinking mode, False otherwise
9090
"""
91-
current_model = self.model_manager.get_current_model()
92-
# Check if the model name (before the colon) matches any thinking model
93-
model_base_name = current_model.split(":")[0]
94-
return model_base_name in THINKING_MODELS
91+
try:
92+
current_model = self.model_manager.get_current_model()
93+
# Query the model's capabilities using ollama.show()
94+
model_info = await self.ollama.show(current_model)
95+
96+
# Check if the model has 'thinking' capability
97+
if 'capabilities' in model_info and model_info['capabilities']:
98+
return 'thinking' in model_info['capabilities']
99+
100+
return False
101+
except Exception:
102+
# If we can't determine capabilities, assume no thinking support
103+
return False
95104

96105
async def select_model(self):
97106
"""Let the user select an Ollama model from the available ones"""
@@ -250,7 +259,7 @@ async def process_query(self, query: str) -> str:
250259
}
251260

252261
# Add thinking parameter if thinking mode is enabled and model supports it
253-
if self.supports_thinking_mode():
262+
if await self.supports_thinking_mode():
254263
chat_params["think"] = self.thinking_mode
255264

256265
# Initial Ollama API call with the query and available tools
@@ -325,7 +334,7 @@ async def process_query(self, query: str) -> str:
325334
}
326335

327336
# Add thinking parameter if thinking mode is enabled and model supports it
328-
if self.supports_thinking_mode():
337+
if await self.supports_thinking_mode():
329338
chat_params_followup["think"] = self.thinking_mode
330339

331340
stream = await self.ollama.chat(**chat_params_followup)
@@ -362,7 +371,7 @@ async def get_user_input(self, prompt_text: str = None) -> str:
362371
prompt_text = f"{model_name}"
363372

364373
# Add thinking indicator
365-
if self.thinking_mode and self.supports_thinking_mode():
374+
if self.thinking_mode and await self.supports_thinking_mode():
366375
prompt_text += "/show-thinking" if self.show_thinking else "/thinking"
367376

368377
# Add tool count
@@ -434,11 +443,11 @@ async def chat_loop(self):
434443
continue
435444

436445
if query.lower() in ['thinking-mode', 'tm']:
437-
self.toggle_thinking_mode()
446+
await self.toggle_thinking_mode()
438447
continue
439448

440449
if query.lower() in ['show-thinking', 'st']:
441-
self.toggle_show_thinking()
450+
await self.toggle_show_thinking()
442451
continue
443452

444453
if query.lower() in ['show-tool-execution', 'ste']:
@@ -544,7 +553,7 @@ def print_help(self):
544553
"[bold cyan]Model:[/bold cyan]\n"
545554
"• Type [bold]model[/bold] or [bold]m[/bold] to select a model\n"
546555
"• Type [bold]model-config[/bold] or [bold]mc[/bold] to configure system prompt and model parameters\n"
547-
f"• Type [bold]thinking-mode[/bold] or [bold]tm[/bold] to toggle thinking mode [{', '.join(THINKING_MODELS)}]\n"
556+
f"• Type [bold]thinking-mode[/bold] or [bold]tm[/bold] to toggle thinking mode\n"
548557
"• Type [bold]show-thinking[/bold] or [bold]st[/bold] to toggle thinking text visibility\n"
549558
"• Type [bold]show-metrics[/bold] or [bold]sm[/bold] to toggle performance metrics display\n\n"
550559

@@ -579,16 +588,15 @@ def toggle_context_retention(self):
579588
# Display current context stats
580589
self.display_context_stats()
581590

582-
def toggle_thinking_mode(self):
591+
async def toggle_thinking_mode(self):
583592
"""Toggle thinking mode on/off (only for supported models)"""
584-
if not self.supports_thinking_mode():
593+
if not await self.supports_thinking_mode():
585594
current_model = self.model_manager.get_current_model()
586595
model_base_name = current_model.split(":")[0]
587596
self.console.print(Panel(
588597
f"[bold red]Thinking mode is not supported for model '{model_base_name}'[/bold red]\n\n"
589-
f"Thinking mode is only available for these models:\n"
590-
+ "\n".join(f"• {model}" for model in THINKING_MODELS) +
591-
f"\n\nCurrent model: [yellow]{current_model}[/yellow]\n"
598+
f"Thinking mode is only available for models that have the 'thinking' capability.\n"
599+
f"\nCurrent model: [yellow]{current_model}[/yellow]\n"
592600
f"Use [bold cyan]model[/bold cyan] or [bold cyan]m[/bold cyan] to switch to a supported model.",
593601
title="Thinking Mode Not Available", border_style="red", expand=False
594602
))
@@ -603,7 +611,7 @@ def toggle_thinking_mode(self):
603611
else:
604612
self.console.print("[cyan]The model will now provide direct responses.[/cyan]")
605613

606-
def toggle_show_thinking(self):
614+
async def toggle_show_thinking(self):
607615
"""Toggle whether thinking text remains visible after completion"""
608616
if not self.thinking_mode:
609617
self.console.print(Panel(
@@ -614,13 +622,12 @@ def toggle_show_thinking(self):
614622
))
615623
return
616624

617-
if not self.supports_thinking_mode():
625+
if not await self.supports_thinking_mode():
618626
current_model = self.model_manager.get_current_model()
619627
model_base_name = current_model.split(":")[0]
620628
self.console.print(Panel(
621629
f"[bold red]Thinking mode is not supported for model '{model_base_name}'[/bold red]\n\n"
622-
f"This setting only applies to thinking-capable models:\n"
623-
+ "\n".join(f"• {model}" for model in THINKING_MODELS),
630+
f"This setting only applies to models that have the 'thinking' capability.",
624631
title="Show Thinking Not Available", border_style="red", expand=False
625632
))
626633
return
@@ -667,14 +674,13 @@ def display_context_stats(self):
667674
"""Display information about the current context window usage"""
668675
history_count = len(self.chat_history)
669676

670-
# Check if thinking mode is available for current model
677+
# For thinking status, show a simplified message. The user can check model capabilities by trying to enable thinking mode
671678
thinking_status = ""
672-
if self.supports_thinking_mode():
673-
thinking_status = f"Thinking mode: [{'green' if self.thinking_mode else 'red'}]{'Enabled' if self.thinking_mode else 'Disabled'}[/{'green' if self.thinking_mode else 'red'}]\n"
674-
if self.thinking_mode:
675-
thinking_status += f"Show thinking text: [{'green' if self.show_thinking else 'red'}]{'Visible' if self.show_thinking else 'Hidden'}[/{'green' if self.show_thinking else 'red'}]\n"
679+
if self.thinking_mode:
680+
thinking_status = f"Thinking mode: [green]Enabled[/green]\n"
681+
thinking_status += f"Show thinking text: [{'green' if self.show_thinking else 'red'}]{'Visible' if self.show_thinking else 'Hidden'}[/{'green' if self.show_thinking else 'red'}]\n"
676682
else:
677-
thinking_status = f"Thinking mode: [yellow]Not available for current model[/yellow]\n"
683+
thinking_status = f"Thinking mode: [red]Disabled[/red]\n"
678684

679685
self.console.print(Panel(
680686
f"Context retention: [{'green' if self.retain_context else 'red'}]{'Enabled' if self.retain_context else 'Disabled'}[/{'green' if self.retain_context else 'red'}]\n"

mcp_client_for_ollama/utils/constants.py

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -21,9 +21,6 @@
2121
# URL for checking package updates on PyPI
2222
PYPI_PACKAGE_URL = "https://pypi.org/pypi/mcp-client-for-ollama/json"
2323

24-
# Thinking mode models - these models support the thinking parameter
25-
THINKING_MODELS = ["deepseek-r1", "qwen3"]
26-
2724
# Interactive commands and their descriptions for autocomplete
2825
INTERACTIVE_COMMANDS = {
2926
'tools': 'Configure available tools',

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[project]
22
name = "mcp-client-for-ollama"
3-
version = "0.16.0"
3+
version = "0.17.0"
44
description = "MCP Client for Ollama - A client for connecting to Model Context Protocol servers using Ollama"
55
readme = "README.md"
66
requires-python = ">=3.10"

0 commit comments

Comments (0)