feat(aisidebar): implement Ollama availability handling and graceful startup
- Add a comprehensive Ollama connection error-handling strategy
- Implement OllamaClient with non-blocking initialization and connection checks
- Create OllamaAvailabilityMonitor for periodic Ollama connection tracking
- Update design and requirements to support graceful Ollama unavailability
- Add a new project structure for the AI sidebar module with an initial implementation
- Enhance error handling to prevent application crashes when Ollama is not running
- Prepare for future improvements in AI sidebar interaction and resilience
This commit is contained in:
192
aisidebar/chat_widget.py
Normal file
192
aisidebar/chat_widget.py
Normal file
@@ -0,0 +1,192 @@
|
||||
import threading
|
||||
from ignis import widgets, app
|
||||
from gi.repository import GLib
|
||||
from .ollama_client import OllamaClient
|
||||
from .conversation_manager import ConversationManager
|
||||
|
||||
|
||||
class ChatWidget(widgets.Box):
    """Chat interface widget with Ollama integration.

    Lays out a header (title plus active model name), a scrollable message
    list, and an entry/button input row. Conversation history is persisted
    through a ConversationManager, and responses are requested from Ollama
    on a background thread so the GTK main loop is never blocked.
    """

    def __init__(self):
        self._conversation_manager = ConversationManager()
        self._ollama_client = OllamaClient()
        # May be None when no local model is installed; guarded in
        # _request_response before any chat call is attempted.
        self._current_model = self._ollama_client.default_model

        # Header with title and model
        header_title = widgets.Label(
            label="AI Sidebar",
            halign="start",
            css_classes=["title-2"],
        )

        model_name = self._current_model or "No local model detected"

        self._model_label = widgets.Label(
            label=f"Model: {model_name}",
            halign="start",
            css_classes=["dim-label"],
        )

        header_box = widgets.Box(
            vertical=True,
            spacing=4,
            child=[header_title, self._model_label],
        )

        # Message list
        self._message_list = widgets.Box(
            vertical=True,
            spacing=8,
            hexpand=True,
            vexpand=True,
            valign="start",
        )

        # Scrolled window for messages
        self._scroller = widgets.Scroll(
            hexpand=True,
            vexpand=True,
            min_content_height=300,
            child=self._message_list,
        )

        # Input entry
        self._entry = widgets.Entry(
            hexpand=True,
            placeholder_text="Ask a question…",
            on_accept=lambda x: self._on_submit(),
        )

        # Send button
        self._send_button = widgets.Button(
            label="Send",
            on_click=lambda x: self._on_submit(),
        )

        # Input box
        input_box = widgets.Box(
            spacing=8,
            hexpand=True,
            child=[self._entry, self._send_button],
        )

        # Main container
        super().__init__(
            vertical=True,
            spacing=12,
            hexpand=True,
            vexpand=True,
            child=[header_box, self._scroller, input_box],
            css_classes=["ai-sidebar-content"],
        )

        # Set margins
        self.set_margin_top(16)
        self.set_margin_bottom(16)
        self.set_margin_start(16)
        self.set_margin_end(16)

        # Load initial messages
        self._populate_initial_messages()

    def _populate_initial_messages(self):
        """Replay persisted conversation history, or show a welcome message.

        Replayed messages use persist=False so they are not appended to the
        store a second time.
        """
        for message in self._conversation_manager.messages:
            self._append_message(message["role"], message["content"], persist=False)

        if not self._conversation_manager.messages:
            self._append_message(
                "assistant",
                "Welcome! Ask a question to start a conversation.",
                persist=True,
            )

    def _append_message(self, role: str, content: str, *, persist: bool = True):
        """Add a message bubble to the chat.

        Args:
            role: "user" renders with a "You" prefix; anything else renders
                as "Assistant".
            content: Message text.
            persist: When True, the message is also recorded in the
                ConversationManager (False during history replay).
        """
        label_prefix = "You" if role == "user" else "Assistant"

        message_label = widgets.Label(
            label=f"{label_prefix}: {content}",
            halign="start",
            xalign=0.0,
            wrap=True,
            wrap_mode="word_char",  # Fixed: use underscore not hyphen
            justify="left",
        )

        self._message_list.append(message_label)
        self._scroll_to_bottom()

        if persist and self._conversation_manager:
            self._conversation_manager.append_message(role, content)

    def _scroll_to_bottom(self):
        """Scroll the message view to the latest message.

        Deferred via GLib.idle_add so the new label has been allocated and
        the adjustment's upper bound is up to date before we scroll.
        """
        def _scroll():
            adjustment = self._scroller.get_vadjustment()
            if adjustment:
                adjustment.set_value(adjustment.get_upper() - adjustment.get_page_size())
            return False  # one-shot idle callback

        GLib.idle_add(_scroll)

    def _set_input_enabled(self, enabled: bool):
        """Enable/disable both input controls while a request is in flight."""
        self._entry.set_sensitive(enabled)
        self._send_button.set_sensitive(enabled)

    def _on_submit(self):
        """Handle message submission from the entry or the Send button."""
        text = self._entry.text.strip()
        if not text:
            return

        self._entry.text = ""
        self._append_message("user", text, persist=True)
        self._request_response()

    def _request_response(self):
        """Request an AI response on a background thread.

        Disables the input controls for the duration of the request; they
        are re-enabled by _handle_response on the main loop.
        """
        model = self._current_model or self._ollama_client.default_model
        if not model:
            self._append_message(
                "assistant",
                "No Ollama models are available. Install a model to continue.",
                persist=True,
            )
            return

        history = self._conversation_manager.chat_messages
        self._set_input_enabled(False)

        def _worker(messages):
            # Run the (potentially slow) chat call off the GTK main thread.
            # Any failure is converted into an assistant-style error payload
            # so _handle_response is ALWAYS scheduled — without this, an
            # exception here would die silently in the daemon thread and the
            # input controls would stay disabled forever.
            try:
                response = self._ollama_client.chat(model=model, messages=list(messages))
            except Exception as exc:  # surface any backend failure to the user
                response = {
                    "role": "assistant",
                    "content": f"Error contacting Ollama: {exc}",
                }
            GLib.idle_add(self._handle_response, response, priority=GLib.PRIORITY_DEFAULT)

        thread = threading.Thread(target=_worker, args=(history,), daemon=True)
        thread.start()

    def _handle_response(self, response):
        """Handle the AI response on the main loop (idle callback).

        Always re-enables the input controls, then appends either the
        response content or a fallback message. Returns False so GLib does
        not re-invoke the idle source.
        """
        self._set_input_enabled(True)

        if not response:
            self._append_message(
                "assistant",
                "The model returned an empty response.",
                persist=True,
            )
            return False

        role = response.get("role", "assistant")
        content = response.get("content") or ""
        if not content:
            content = "[No content received from Ollama]"

        self._append_message(role, content, persist=True)
        return False

    def focus_input(self):
        """Focus the input entry (public hook for the sidebar container)."""
        self._entry.grab_focus()
|
||||
Reference in New Issue
Block a user