- Reorganize project structure and file locations - Add ReasoningController to manage model selection and reasoning mode - Update design and requirements for reasoning mode toggle - Implement model switching between Qwen3-4B-Instruct and Qwen3-4B-Thinking models - Remove deprecated files and consolidate project layout - Add new steering and specification documentation - Clean up and remove unnecessary files and directories - Prepare for enhanced AI sidebar functionality with more flexible model handling
128 lines
4.3 KiB
Python
"""Reasoning mode controller for managing AI reasoning preferences."""
|
|
|
|
from __future__ import annotations
|
|
|
|
import json
|
|
import os
|
|
from dataclasses import dataclass, asdict
|
|
from pathlib import Path
|
|
from typing import Dict, Any
|
|
|
|
|
|
@dataclass
class PreferencesState:
    """User preferences for sidebar behavior.

    Instances are round-tripped to a JSON preferences file by
    ReasoningController; field names map one-to-one onto the JSON keys.
    """

    # Whether the reasoning ("thinking") model variant should be used.
    reasoning_enabled: bool = False
    # Optional preferred model name; persisted but not consulted by
    # ReasoningController in this file — TODO confirm intended usage.
    default_model: str | None = None
    # UI theme identifier for the sidebar.
    theme: str = "default"
|
|
|
|
|
class ReasoningController:
    """Manages reasoning mode state and model selection.

    Persists a small JSON preferences file under
    ``~/.config/aisidebar/preferences.json`` and maps the reasoning
    toggle onto one of two model names (instruct vs. thinking variant).
    """

    # Model names selected by the reasoning toggle.
    INSTRUCT_MODEL = "hf.co/unsloth/Qwen3-4B-Instruct-2507-GGUF:Q8_K_XL"
    THINKING_MODEL = "hf.co/unsloth/Qwen3-4B-Thinking-2507-GGUF:Q8_K_XL"

    def __init__(self) -> None:
        """Initialize the reasoning controller with preference persistence."""
        # Preferences live in the user's XDG-style config directory.
        config_dir = Path.home() / ".config" / "aisidebar"
        config_dir.mkdir(parents=True, exist_ok=True)
        self._preference_file = config_dir / "preferences.json"

        # Load persisted preferences (or defaults) from disk.
        self._preferences = self._load_preferences()

    def is_enabled(self) -> bool:
        """Check if reasoning mode is active.

        Returns:
            True if reasoning mode is enabled, False otherwise.
        """
        return self._preferences.reasoning_enabled

    def toggle(self) -> bool:
        """Toggle reasoning mode and persist the preference.

        Returns:
            New state of reasoning mode (True if enabled, False if disabled).
        """
        self._preferences.reasoning_enabled = not self._preferences.reasoning_enabled
        self._save_preferences()
        return self._preferences.reasoning_enabled

    def get_model_name(self) -> str:
        """Return the appropriate model name based on reasoning mode.

        Returns:
            THINKING_MODEL if reasoning is enabled, INSTRUCT_MODEL otherwise.
        """
        return self.THINKING_MODEL if self._preferences.reasoning_enabled else self.INSTRUCT_MODEL

    def get_model_options(self) -> Dict[str, Any]:
        """Return the optimal sampling parameters for the current model.

        Returns:
            Dictionary of model-specific generation parameters.
        """
        if self._preferences.reasoning_enabled:
            # Thinking model settings: cooler temperature, wider top_p.
            return {
                "temperature": 0.6,
                "top_p": 0.95,
                "top_k": 20,
                "min_p": 0.0,
                "num_predict": 32768,  # generous budget for long reasoning traces
            }
        # Instruct model settings.
        return {
            "temperature": 0.7,
            "top_p": 0.8,
            "top_k": 20,
            "min_p": 0.0,
            "num_predict": 32768,
        }

    def _load_preferences(self) -> PreferencesState:
        """Load preferences from disk or create defaults.

        Returns:
            PreferencesState built from the JSON file, or defaults when the
            file is missing, corrupted, unreadable, or not a JSON object.
        """
        if not self._preference_file.exists():
            return PreferencesState()

        try:
            with self._preference_file.open("r", encoding="utf-8") as f:
                data = json.load(f)
            # A valid-JSON-but-wrong-shape file (e.g. a list) would otherwise
            # raise AttributeError on .get() below — treat it as corrupted.
            if not isinstance(data, dict):
                return PreferencesState()
            return PreferencesState(
                # Coerce to bool so a corrupted value can't leak a truthy
                # non-bool into the rest of the controller.
                reasoning_enabled=bool(data.get("reasoning_enabled", False)),
                default_model=data.get("default_model"),
                theme=data.get("theme", "default"),
            )
        except (json.JSONDecodeError, OSError):
            # If the file is corrupted or unreadable, return defaults.
            return PreferencesState()

    def _save_preferences(self) -> None:
        """Persist preferences to disk atomically (best effort).

        Writes to a sibling ``.tmp`` file, fsyncs, then atomically replaces
        the real file so readers never observe a partial write. Failures are
        swallowed deliberately: the UI keeps working, and preferences simply
        revert to the previous state on the next load.
        """
        # Convert the dataclass to a plain dict for JSON serialization.
        data = asdict(self._preferences)
        temp_file = self._preference_file.with_suffix(".tmp")

        try:
            # Write to a temporary file first.
            with temp_file.open("w", encoding="utf-8") as f:
                json.dump(data, f, indent=2, ensure_ascii=False)
                f.flush()
                os.fsync(f.fileno())

            # Atomic replace.
            os.replace(temp_file, self._preference_file)
        except OSError:
            # Best-effort persistence: don't crash, but don't leave a stale
            # temp file behind either (the original implementation leaked it).
            try:
                temp_file.unlink(missing_ok=True)
            except OSError:
                pass