chore: bootstrap project scaffold
.gitignore (vendored, new file, 8 lines)
@@ -0,0 +1,8 @@
.venv/
__pycache__/
*.pyc
.pytest_cache/
.ruff_cache/
data/conversations/*.json
data/conversations/index.json
data/conversations/*.tmp
AGENTS.md (new file, 36 lines)
@@ -0,0 +1,36 @@
# Repository Guidelines

## Project Structure & Module Organization
- `main.py` wires the GTK `Application`; UI widgets live in `sidebar_window.py` and `message_widget.py`.
- `ollama_client.py` wraps streaming calls and threading helpers so GTK stays responsive.
- Conversation state persists through `conversation_manager.py` and JSON files under `data/conversations/`; keep writes atomic (see the sketch below).
- Shared settings belong in `config.py` and styles in `styles.css`; prefer adding focused modules over bloating these.
- Tests mirror the source tree under `tests/`, with fixtures in `tests/fixtures/` for reusable transcripts and metadata.
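
A minimal sketch of the atomic-write convention, using only the standard library; the helper name is illustrative, not part of the scaffold. Writing to a `.tmp` file first and renaming is also why `data/conversations/*.tmp` is ignored in `.gitignore`:

```python
import json
import os
from pathlib import Path


def write_json_atomic(path: Path, payload: dict) -> None:
    """Write JSON to a temp file, then atomically swap it into place."""
    tmp_path = path.with_suffix(".tmp")
    with open(tmp_path, "w", encoding="utf-8") as handle:
        json.dump(payload, handle, indent=2)
        handle.flush()
        os.fsync(handle.fileno())  # ensure bytes hit disk before the rename
    os.replace(tmp_path, path)  # atomic on POSIX filesystems
```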

## Build, Test, and Development Commands
- `python -m venv .venv && source .venv/bin/activate` — creates and activates the project’s virtual environment.
- `pip install -r requirements.txt` — installs GTK, Ollama, and tooling dependencies.
- `python main.py` — launches the sidebar in development mode; pass `--mock-ollama` when iterating without a local model.
- `pytest` — runs the full test suite; combine with `pytest -k "conversation"` for targeted checks.

## Coding Style & Naming Conventions
- Use 4-space indentation and format with `black .`; avoid tab characters.
- Lint via `ruff check .` and fix violations instead of adding ignores.
- Files stay snake_case; classes use PascalCase; callbacks adopt verb-based snake_case (e.g., `handle_stream_chunk`).
- Keep GTK signal handlers thin and delegate behavior to helpers or managers, as in the sketch below.
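
For illustration, a thin handler that only reads widget state and delegates; the class and attribute names here are hypothetical, not part of the scaffold:

```python
class ChatPane:
    """Hypothetical widget wrapper; only the handler pattern matters here."""

    def __init__(self, entry, conversation_manager):
        self.entry = entry
        self.conversation = conversation_manager
        self.entry.connect("activate", self.handle_send_activate)

    def handle_send_activate(self, _entry) -> None:
        # Thin GTK handler: read widget state and delegate immediately.
        prompt = self.entry.get_text().strip()
        if prompt:
            self.send_prompt(prompt)

    def send_prompt(self, prompt: str) -> None:
        # Testable helper that holds the actual behavior.
        self.conversation.add_message("user", prompt)
        self.entry.set_text("")
```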

## Testing Guidelines
- Prefer `pytest` parameterized cases for conversation flows; store golden transcripts in `tests/fixtures/responses/`.
- Name tests `test_<module>_<behavior>` (e.g., `test_conversation_manager_persists_history`).
- Cover threading boundaries by mocking Ollama responses and asserting GTK updates via `GLib.idle_add`; see the sketch below.
- Run `pytest --maxfail=1` before commits to catch regressions early.
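
A hedged sketch of such a test, assuming `ollama_client.py` eventually exposes the `stream_chat(messages, on_chunk, on_complete, on_error)` API outlined in `sidebar-plan.md` (it is currently a placeholder); the test doubles below are illustrative:

```python
from unittest import mock

import pytest

import ollama_client


class ImmediateThread:
    """Test double: runs the worker inline instead of spawning a real thread."""

    def __init__(self, target, daemon=False):
        self._target = target

    def start(self):
        self._target()


def test_ollama_client_streams_chunks_through_idle_add(monkeypatch):
    fake_stream = [
        {"message": {"content": "Hel"}},
        {"message": {"content": "lo"}},
    ]
    # Replace the Ollama API, the worker thread, and GLib.idle_add with
    # synchronous stand-ins so the assertions run deterministically.
    monkeypatch.setattr(ollama_client.ollama, "chat", lambda **kwargs: iter(fake_stream))
    monkeypatch.setattr(ollama_client.threading, "Thread", ImmediateThread)
    monkeypatch.setattr(ollama_client, "GLib", mock.Mock(idle_add=lambda fn, *args: fn(*args)))

    chunks: list[str] = []
    on_complete = mock.Mock()
    client = ollama_client.OllamaClient()
    client.stream_chat(
        messages=[{"role": "user", "content": "hi"}],
        on_chunk=chunks.append,
        on_complete=on_complete,
        on_error=lambda message: pytest.fail(message),
    )

    assert chunks == ["Hel", "lo"]
    on_complete.assert_called_once()
```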

## Commit & Pull Request Guidelines
- Follow the Conventional Commit pattern (`feat:`, `fix:`, `refactor:`) to keep the changelog clean.
- Keep commits scoped to a single concern and include tests or fixtures when changing behavior.
- Pull requests should summarize user-facing changes, list manual test steps, and attach screenshots or recordings for UI tweaks.
- Reference related issues with `Closes #ID` and call out follow-up tasks to keep planning clear.

## Agent Workflow Tips
- When prototyping new UI flows, add exploratory scripts under `experiments/` and clean them up before merging.
- Document new configuration toggles in `config.py` docstrings and echo them in the README so users can discover them easily; a possible shape is sketched below.
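
Purely as an illustration of that last tip, a documented toggle in `config.py` might look like this; the attribute names and defaults are hypothetical, with the model name and host mirroring values used in `sidebar-plan.md`:

```python
class Config:
    """Runtime settings for the sidebar.

    Attributes:
        model: Default Ollama model name; can be overridden from the model dropdown.
        ollama_host: Base URL of the local Ollama server.
        mock_ollama: When True, skip the real API and replay canned fixture responses.
    """

    def __init__(
        self,
        model: str = "llama3.2",
        ollama_host: str = "http://localhost:11434",
        mock_ollama: bool = False,
    ) -> None:
        self.model = model
        self.ollama_host = ollama_host
        self.mock_ollama = mock_ollama
```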
config.py (new file, 8 lines)
@@ -0,0 +1,8 @@
"""Configuration defaults and helpers for the AI sidebar."""


class Config:
    """Placeholder configuration container."""

    def __init__(self) -> None:
        raise NotImplementedError("Configuration logic not implemented yet.")
conversation_manager.py (new file, 8 lines)
@@ -0,0 +1,8 @@
"""Conversation state management and persistence helpers."""


class ConversationManager:
    """Placeholder conversation manager awaiting implementation."""

    def __init__(self) -> None:
        raise NotImplementedError("Conversation manager not implemented yet.")
data/conversations/.gitkeep (new file, empty)
main.py (new file, 67 lines)
@@ -0,0 +1,67 @@
"""Entry point for the AI sidebar application."""

from __future__ import annotations

import os
import sys

import gi

gi.require_version("Gtk", "4.0")
from gi.repository import Gtk  # noqa: E402

from sidebar_window import SidebarWindow  # noqa: E402

HEADLESS_ENV_VAR = "AI_SIDEBAR_HEADLESS"
_STARTUP_FAILED = False


def main(argv: list[str] | None = None) -> int:
    """Launch the GTK application and return the exit status."""
    args = argv or sys.argv

    if os.environ.get(HEADLESS_ENV_VAR) == "1":
        print("Headless mode enabled; skipping GTK startup.")
        return 0

    if not Gtk.init_check():
        print(
            "Failed to initialize GTK. Ensure a display server is available.",
            file=sys.stderr,
        )
        return 1

    if not (os.environ.get("WAYLAND_DISPLAY") or os.environ.get("DISPLAY")):
        print(
            "No Wayland or X11 display detected. Launch this app inside a graphical session.",
            file=sys.stderr,
        )
        return 1

    app = Gtk.Application(application_id="ai.sidebar")
    app.connect("activate", _on_activate)
    status = app.run(args)
    return 1 if _STARTUP_FAILED else status


def _on_activate(app: Gtk.Application) -> None:
    """Create and present the main sidebar window when the app activates."""
    try:
        window = SidebarWindow(application=app)
    except RuntimeError as exc:
        if "Gtk couldn't be initialized" in str(exc):
            print(
                "Failed to initialize GTK. Ensure a display server is available.",
                file=sys.stderr,
            )
            global _STARTUP_FAILED
            _STARTUP_FAILED = True
            app.quit()
            return
        raise

    window.present()


if __name__ == "__main__":
    raise SystemExit(main())
message_widget.py (new file, 8 lines)
@@ -0,0 +1,8 @@
"""UI widgets for chat messages."""


class MessageWidget:
    """Placeholder message widget to be filled in with GTK constructs."""

    def __init__(self) -> None:
        raise NotImplementedError("Message widget not implemented yet.")
ollama_client.py (new file, 8 lines)
@@ -0,0 +1,8 @@
"""Client utilities for interacting with the Ollama API."""


class OllamaClient:
    """Placeholder client used until streaming integration is built."""

    def __init__(self) -> None:
        raise NotImplementedError("Ollama client not implemented yet.")
pyproject.toml (new file, 16 lines)
@@ -0,0 +1,16 @@
[tool.black]
line-length = 88
target-version = ["py313"]

[tool.ruff]
line-length = 88
target-version = "py313"

[tool.ruff.lint]
select = ["E", "F", "I"]
ignore = []

[tool.pytest.ini_options]
minversion = "8.0"
addopts = "-ra"
testpaths = ["tests"]
requirements.txt (new file, 5 lines)
@@ -0,0 +1,5 @@
PyGObject
ollama
pytest
ruff
black
sidebar-plan.md (new file, 640 lines)
@@ -0,0 +1,640 @@
# AI Chat Sidebar Development Plan - Direct GTK4 Approach

Based on your comprehensive feasibility assessment and the gathered documentation, here's a production-ready development plan using **direct GTK4 + Python** instead of Ignis.

***

## Documentation Resources

### Core Documentation

1. **gtk4-layer-shell**: https://github.com/wmww/gtk4-layer-shell
    - Python examples in `examples/` directory
    - API documentation for layer positioning
    - Installation: `pacman -S gtk4-layer-shell` (Arch) or build from source
2. **PyGObject Threading Guide**: https://pygobject.gnome.org/guide/threading.html
    - Essential patterns for `GLib.idle_add()` usage
    - Thread-safety guidelines for GTK operations
3. **Ollama Python Library**: https://github.com/ollama/ollama-python
    - Installation: `pip install ollama`
    - Streaming chat API with `stream=True` parameter
4. **Niri Configuration**: https://github.com/YaLTeR/niri/wiki/Configuration:-Introduction
    - KDL syntax guide
    - Layer-rules documentation
    - Live reload capabilities
5. **Alpaca Reference Implementation**: https://github.com/Jeffser/Alpaca
    - Production GTK4 + Ollama patterns
    - Threading implementation examples
    - UI/UX patterns for chat interfaces

***

## Project Structure

```
ai-sidebar/
├── main.py                 # Entry point, Gtk.Application setup
├── sidebar_window.py       # Layer-shell window with GTK4
├── ollama_client.py        # Ollama API wrapper with threading
├── message_widget.py       # Individual message bubble
├── conversation_manager.py # State management, persistence
├── styles.css              # GTK CSS styling
├── config.py               # User settings (model, API endpoint)
└── data/
    └── conversations/      # XDG_DATA_HOME/ai-sidebar/
        ├── index.json      # Session index
        └── {uuid}.json     # Individual conversations
```

***

## Development Phases

### **Phase 1: Minimal Proof-of-Concept (Days 1-2)**

**Objective**: Validate gtk4-layer-shell works with Niri and basic Ollama connectivity

#### Tasks:

1. **Install dependencies**:

```bash
# Arch Linux
sudo pacman -S gtk4 gtk4-layer-shell python-gobject python-pip
pip install ollama

# Clone examples
git clone https://github.com/wmww/gtk4-layer-shell
cd gtk4-layer-shell/examples
# Study Python examples
```

2. **Create minimal sidebar** (`minimal_poc.py`):

```python
#!/usr/bin/env python3
import gi
gi.require_version('Gtk', '4.0')
gi.require_version('Gtk4LayerShell', '1.0')
from gi.repository import Gtk, Gtk4LayerShell, GLib
import ollama
import threading


class MinimalSidebar(Gtk.ApplicationWindow):
    def __init__(self, app):
        super().__init__(application=app, title="AI Sidebar")

        # Initialize layer shell
        Gtk4LayerShell.init_for_window(self)
        Gtk4LayerShell.set_namespace(self, "ai-sidebar")
        Gtk4LayerShell.set_layer(self, Gtk4LayerShell.Layer.TOP)

        # Anchor to left edge, full height
        Gtk4LayerShell.set_anchor(self, Gtk4LayerShell.Edge.LEFT, True)
        Gtk4LayerShell.set_anchor(self, Gtk4LayerShell.Edge.TOP, True)
        Gtk4LayerShell.set_anchor(self, Gtk4LayerShell.Edge.BOTTOM, True)
        Gtk4LayerShell.set_margin(self, Gtk4LayerShell.Edge.LEFT, 0)

        # Request keyboard input
        Gtk4LayerShell.set_keyboard_mode(self, Gtk4LayerShell.KeyboardMode.ON_DEMAND)

        # Set width
        self.set_default_size(350, -1)

        # Build UI
        self.setup_ui()

    def setup_ui(self):
        main_box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL, spacing=0)

        # Chat display area
        self.textview = Gtk.TextView()
        self.textview.set_editable(False)
        self.textview.set_wrap_mode(Gtk.WrapMode.WORD)
        self.textview.set_margin_start(10)
        self.textview.set_margin_end(10)

        scroll = Gtk.ScrolledWindow()
        scroll.set_child(self.textview)
        scroll.set_vexpand(True)
        scroll.set_hexpand(True)

        # Input area
        input_box = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL, spacing=5)
        input_box.set_margin_start(10)
        input_box.set_margin_end(10)
        input_box.set_margin_top(5)
        input_box.set_margin_bottom(10)

        self.entry = Gtk.Entry()
        self.entry.set_hexpand(True)
        self.entry.set_placeholder_text("Ask something...")
        self.entry.connect('activate', self.on_send)

        send_btn = Gtk.Button(label="Send")
        send_btn.connect('clicked', self.on_send)

        input_box.append(self.entry)
        input_box.append(send_btn)

        main_box.append(scroll)
        main_box.append(input_box)

        self.set_child(main_box)

    def append_text(self, text):
        """Append text to the chat view (must be called via GLib.idle_add)."""
        buffer = self.textview.get_buffer()
        end_iter = buffer.get_end_iter()
        buffer.insert(end_iter, text, -1)

        # Auto-scroll to bottom
        mark = buffer.create_mark(None, end_iter, False)
        self.textview.scroll_to_mark(mark, 0.0, True, 0.0, 1.0)

    def on_send(self, widget):
        prompt = self.entry.get_text().strip()
        if not prompt:
            return

        self.entry.set_text("")
        GLib.idle_add(self.append_text, f"\n[You] {prompt}\n")

        def worker():
            try:
                GLib.idle_add(self.append_text, "[AI] ")

                # Stream response from Ollama
                stream = ollama.chat(
                    model='llama3.2',  # Use installed model
                    messages=[{'role': 'user', 'content': prompt}],
                    stream=True
                )

                for chunk in stream:
                    content = chunk['message']['content']
                    GLib.idle_add(self.append_text, content)

                GLib.idle_add(self.append_text, "\n")

            except Exception as e:
                GLib.idle_add(self.append_text, f"\n[Error] {str(e)}\n")

        thread = threading.Thread(target=worker, daemon=True)
        thread.start()


class App(Gtk.Application):
    def do_activate(self):
        window = MinimalSidebar(self)
        window.present()


if __name__ == '__main__':
    app = App()
    app.run(None)
```

3. **Test basic functionality**:

```bash
# Ensure Ollama is running with a model
ollama pull llama3.2

# Run the sidebar
python minimal_poc.py
```

4. **Add Niri configuration** (`~/.config/niri/config.kdl`):

```kdl
layer-rule {
    match namespace="^ai-sidebar$"

    // Optional: Add shadow
    shadow {
        on
        softness 40
        spread 5
        offset x=0 y=5
    }
}

binds {
    // Toggle sidebar with Super+A
    Mod+A { spawn "python" "/path/to/minimal_poc.py"; }
}
```

**AI Coding Tool Prompt for Phase 1**:

```
Create a minimal GTK4 application using gtk4-layer-shell that:
1. Initializes layer-shell with namespace "ai-sidebar"
2. Anchors to left edge, full height, 350px width
3. Uses Gtk4LayerShell.Layer.TOP for z-order
4. Contains a TextView (read-only) and Entry widget
5. Connects Entry's 'activate' signal to send messages
6. Uses threading.Thread + GLib.idle_add for Ollama streaming
7. Calls ollama.chat() with stream=True in background thread
8. Appends each chunk to TextView via GLib.idle_add

Requirements:
- Python 3.11+, GTK4, gtk4-layer-shell, ollama-python
- Thread-safe UI updates only via GLib.idle_add
- Basic error handling for connection failures
- Auto-scroll TextView to bottom after each message

Reference PyGObject threading guide patterns.
```

***

### **Phase 2: Production Features (Days 3-5)**

**Objective**: Add model selection, conversation persistence, better UI

#### Components:

1. **Ollama Client** (`ollama_client.py`):

```python
import ollama
import threading
from typing import Callable, Optional, List, Dict
from gi.repository import GLib


class OllamaClient:
    def __init__(self):
        self.base_url = "http://localhost:11434"
        self.current_model = "llama3.2"
        self._cancel_event = threading.Event()

    def get_models(self) -> List[str]:
        """Get list of installed models"""
        try:
            models = ollama.list()
            return [m['name'] for m in models['models']]
        except Exception as e:
            print(f"Error fetching models: {e}")
            return []

    def stream_chat(
        self,
        messages: List[Dict[str, str]],
        on_chunk: Callable[[str], None],
        on_complete: Callable[[], None],
        on_error: Callable[[str], None]
    ):
        """Stream chat response in background thread"""
        self._cancel_event.clear()

        def worker():
            try:
                stream = ollama.chat(
                    model=self.current_model,
                    messages=messages,
                    stream=True
                )

                for chunk in stream:
                    if self._cancel_event.is_set():
                        break

                    content = chunk['message']['content']
                    GLib.idle_add(on_chunk, content)

                if not self._cancel_event.is_set():
                    GLib.idle_add(on_complete)

            except Exception as e:
                GLib.idle_add(on_error, str(e))

        thread = threading.Thread(target=worker, daemon=True)
        thread.start()

    def cancel(self):
        """Cancel current streaming operation"""
        self._cancel_event.set()
```

2. **Conversation Manager** (`conversation_manager.py`):

```python
import json
import uuid
from pathlib import Path
from typing import List, Dict, Optional
from datetime import datetime


class ConversationManager:
    def __init__(self):
        # XDG data directory
        self.data_dir = Path.home() / ".local/share/ai-sidebar/conversations"
        self.data_dir.mkdir(parents=True, exist_ok=True)

        self.index_file = self.data_dir / "index.json"
        self.current_session_id: Optional[str] = None
        self.messages: List[Dict[str, str]] = []

    def new_session(self) -> str:
        """Create new conversation session"""
        session_id = str(uuid.uuid4())
        self.current_session_id = session_id
        self.messages = []
        self._update_index(session_id)
        return session_id

    def add_message(self, role: str, content: str):
        """Add message to current session"""
        self.messages.append({
            "role": role,
            "content": content,
            "timestamp": datetime.now().isoformat()
        })
        self._save_session()

    def load_session(self, session_id: str) -> List[Dict]:
        """Load conversation from file"""
        session_file = self.data_dir / f"{session_id}.json"
        if session_file.exists():
            with open(session_file, 'r') as f:
                data = json.load(f)
                self.messages = data.get('messages', [])
                self.current_session_id = session_id
                return self.messages
        return []

    def _save_session(self):
        """Save current session to disk"""
        if not self.current_session_id:
            return

        session_file = self.data_dir / f"{self.current_session_id}.json"
        with open(session_file, 'w') as f:
            json.dump({
                "session_id": self.current_session_id,
                "created": datetime.now().isoformat(),
                "messages": self.messages
            }, f, indent=2)

    def _update_index(self, session_id: str):
        """Update session index"""
        index = []
        if self.index_file.exists():
            with open(self.index_file, 'r') as f:
                index = json.load(f)

        index.append({
            "id": session_id,
            "created": datetime.now().isoformat()
        })

        with open(self.index_file, 'w') as f:
            json.dump(index, f, indent=2)
```

3. **Model Selector Widget**:

Add to sidebar window:

```python
def setup_header(self):
    header = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL, spacing=10)
    header.set_margin_start(10)
    header.set_margin_end(10)
    header.set_margin_top(10)

    # Model dropdown
    self.model_combo = Gtk.ComboBoxText()
    self.model_combo.set_hexpand(True)
    self.refresh_models()
    self.model_combo.connect('changed', self.on_model_changed)

    # New chat button
    new_btn = Gtk.Button(label="New")
    new_btn.connect('clicked', self.on_new_chat)

    header.append(self.model_combo)
    header.append(new_btn)

    return header

def refresh_models(self):
    models = self.ollama_client.get_models()
    self.model_combo.remove_all()
    for model in models:
        self.model_combo.append_text(model)
    if models:
        self.model_combo.set_active(0)
```

**AI Coding Tool Prompt for Phase 2**:

```
Extend the minimal sidebar with:

1. OllamaClient class:
- get_models() method calling ollama.list()
- stream_chat() with threading.Event for cancellation
- Callbacks: on_chunk, on_complete, on_error
- Thread-safe via GLib.idle_add

2. ConversationManager class:
- XDG data directory: ~/.local/share/ai-sidebar/conversations/
- new_session() creates UUID, initializes messages list
- add_message(role, content) appends and saves to JSON
- load_session(id) loads from {uuid}.json file
- Auto-save after each message with fsync

3. UI additions:
- Header box with Gtk.ComboBoxText for model selection
- "New Chat" button to clear conversation
- Populate ComboBox from get_models()
- Update ollama_client.current_model on selection change

Maintain thread safety and error handling patterns from Phase 1.
```
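
Beyond the plan text above, here is a rough sketch (not part of the scaffold) of how the sidebar window could glue these Phase 2 pieces together; the attributes `self.ollama_client` and `self.conversation`, plus the `append_text()` helper from the Phase 1 POC, are assumptions:

```python
# Hypothetical glue inside the sidebar window class.
def on_send(self, widget):
    prompt = self.entry.get_text().strip()
    if not prompt:
        return
    self.entry.set_text("")
    self.conversation.add_message("user", prompt)
    self.append_text(f"\n[You] {prompt}\n[AI] ")

    self._reply_parts = []

    def on_chunk(text):
        # Already marshalled onto the GTK main loop by OllamaClient.
        self._reply_parts.append(text)
        self.append_text(text)

    def on_complete():
        self.conversation.add_message("assistant", "".join(self._reply_parts))
        self.append_text("\n")

    def on_error(message):
        self.append_text(f"\n[Error] {message}\n")

    self.ollama_client.stream_chat(
        # Strip timestamps; Ollama only needs role and content.
        messages=[{"role": m["role"], "content": m["content"]} for m in self.conversation.messages],
        on_chunk=on_chunk,
        on_complete=on_complete,
        on_error=on_error,
    )
```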

***

### **Phase 3: Polish & Integration (Days 6-7)**

**Objective**: UI improvements, Niri integration, keyboard shortcuts

#### Tasks:

1. **CSS Styling** (`styles.css`):

```css
window {
    background-color: #1e1e2e;
}

textview {
    background-color: #181825;
    color: #cdd6f4;
    font-family: monospace;
    font-size: 12pt;
}

entry {
    background-color: #313244;
    color: #cdd6f4;
    border-radius: 8px;
    padding: 8px;
}

button {
    background-color: #89b4fa;
    color: #1e1e2e;
    border-radius: 8px;
    padding: 8px 16px;
}
```

Load in application:

```python
# Note: 'styles.css' resolves against the current working directory; prefer an
# absolute path (e.g. relative to __file__) when launching via a Niri keybind.
css_provider = Gtk.CssProvider()
css_provider.load_from_path('styles.css')
Gtk.StyleContext.add_provider_for_display(
    self.get_display(),
    css_provider,
    Gtk.STYLE_PROVIDER_PRIORITY_APPLICATION
)
```

2. **Keyboard Shortcuts**:

```python
# Note: requires Gdk in the imports, e.g. from gi.repository import Gdk
def setup_keybindings(self):
    event_controller = Gtk.EventControllerKey()
    event_controller.connect('key-pressed', self.on_key_pressed)
    self.add_controller(event_controller)

def on_key_pressed(self, controller, keyval, keycode, state):
    # Escape to close
    if keyval == Gdk.KEY_Escape:
        self.close()
        return True

    # Ctrl+N for new chat
    if (state & Gdk.ModifierType.CONTROL_MASK and
            keyval == Gdk.KEY_n):
        self.on_new_chat(None)
        return True

    return False
```

3. **Niri Toggle Script** (`toggle-sidebar.sh`):

```bash
#!/bin/bash
PID=$(pgrep -f "python.*main.py")

if [ -z "$PID" ]; then
    # Start sidebar
    python /path/to/ai-sidebar/main.py &
else
    # Kill sidebar
    kill $PID
fi
```

Update Niri config:

```kdl
binds {
    Mod+A { spawn "bash" "/path/to/toggle-sidebar.sh"; }
}
```
***

## Testing Checklist

- [ ] Sidebar appears at left edge with correct dimensions
- [ ] Layer-shell positioning works (stays on top, doesn't block clicks outside)
- [ ] Keyboard input works in Entry widget
- [ ] Messages stream smoothly from Ollama
- [ ] Model selector populates with installed models
- [ ] Model switching changes active model
- [ ] New chat clears conversation
- [ ] Conversations persist across restarts
- [ ] Threading doesn't freeze UI
- [ ] Cancel works (if implemented)
- [ ] No memory leaks during extended use
- [ ] Compatible with Exo shell (no namespace conflicts)
- [ ] CSS styling applies correctly
- [ ] Escape key closes sidebar
- [ ] Toggle script works from Niri keybind

***

## Timeline Summary

| Phase | Duration | Deliverable |
| :-- | :-- | :-- |
| Phase 1: POC | 2 days | Working sidebar with basic chat |
| Phase 2: Features | 3 days | Model selection, persistence, better UI |
| Phase 3: Polish | 2 days | Styling, keybinds, Niri integration |
| **Total** | **7 days** | **Production-ready sidebar** |

**Realistic estimate**: 10-14 days accounting for debugging and learning curve.

***

## Key Success Factors

1. **Start simple**: Phase 1 POC validates everything works before investing time
2. **Reference Alpaca**: Study their threading patterns and UI implementations
3. **Test incrementally**: Each feature works before moving to the next
4. **Use AI tools effectively**: Break prompts into discrete components
5. **Follow PyGObject patterns**: Threading via `GLib.idle_add()` is critical

This plan avoids Ignis instability while achieving your goal with mature, well-documented technologies.[^1][^2][^3]

[^1]: https://pygobject.gnome.org/guide/threading.html

[^2]: https://github.com/wmww/gtk4-layer-shell

[^3]: https://github.com/Jeffser/Alpaca

[^4]: https://stackoverflow.com/questions/73665239/implementing-threading-in-a-python-gtk-application-pygobject-to-prevent-ui-fre

[^5]: https://gitlab.gnome.org/GNOME/pygobject/-/blob/3.49.0/docs/guide/threading.rst

[^6]: https://discourse.gnome.org/t/gtk-threading-problem-with-glib-idle-add/13597

[^7]: https://gist.github.com/bossjones/e21b53c6dff04e8fdb3d

[^8]: https://dunkelstern.de/articles/2025-01-24/index.html

[^9]: https://www.glukhov.org/post/2025/10/ollama-python-examples/

[^10]: https://www.reddit.com/r/learnpython/comments/fa9612/pygtk_glade_threading/

[^11]: https://github.com/YaLTeR/niri/wiki/Configuration:-Introduction

[^12]: https://github.com/ollama/ollama-python

[^13]: https://pygtk.daa.com.narkive.com/QpH3Y5ky/idle-add-vs-threads-enter-threads-leave

[^14]: https://github.com/YaLTeR/niri/discussions/674

[^15]: https://ollama.com/blog/streaming-tool

[^16]: https://docs.gtk.org/glib/func.idle_add.html

[^17]: https://yalter.github.io/niri/Configuration:-Window-Rules.html

[^18]: https://www.cohorte.co/blog/using-ollama-with-python-step-by-step-guide

[^19]: https://gnulinux.ch/ein-kleines-gtk4-programm-in-python

[^20]: https://yalter.github.io/niri/Getting-Started.html

[^21]: https://www.reddit.com/r/Python/comments/1ael05l/ollama_python_library_chat_method_system_message/

[^22]: https://git.yaroslavps.com/configs/swayrice/tree/dotfiles/.config/niri/config.kdl?id=dd00aee82134d4f1b41463c5371f1ee943a9ec7a
sidebar_window.py (new file, 37 lines)
@@ -0,0 +1,37 @@
"""GTK sidebar window definitions."""

from __future__ import annotations

import gi

gi.require_version("Gtk", "4.0")
from gi.repository import Gtk  # noqa: E402


class SidebarWindow(Gtk.ApplicationWindow):
    """Minimal window placeholder to confirm the GTK application starts."""

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        self.set_default_size(360, 640)
        self.set_title("Niri AI Sidebar")

        layout = Gtk.Box(orientation=Gtk.Orientation.VERTICAL, spacing=12)
        layout.set_margin_top(24)
        layout.set_margin_bottom(24)
        layout.set_margin_start(24)
        layout.set_margin_end(24)

        title = Gtk.Label(label="AI Sidebar")
        title.set_halign(Gtk.Align.START)
        title.get_style_context().add_class("title-1")

        message = Gtk.Label(
            label="GTK app is running. Replace this view with the chat interface."
        )
        message.set_wrap(True)
        message.set_halign(Gtk.Align.START)

        layout.append(title)
        layout.append(message)
        self.set_child(layout)
styles.css (new file, 5 lines)
@@ -0,0 +1,5 @@
/* Global styles placeholder for the AI sidebar. */

window {
    /* Replace with GTK CSS once widgets are implemented. */
}
tests/__init__.py (new file, empty)
tests/test_smoke.py (new file, 25 lines)
@@ -0,0 +1,25 @@
from __future__ import annotations

import os
import subprocess
import sys
from pathlib import Path


def test_main_runs_headless() -> None:
    repo_root = Path(__file__).resolve().parents[1]
    env = os.environ.copy()
    env["AI_SIDEBAR_HEADLESS"] = "1"

    result = subprocess.run(
        [sys.executable, "main.py"],
        cwd=repo_root,
        env=env,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        text=True,
        check=False,
    )

    assert result.returncode == 0
    assert "Headless mode enabled" in result.stdout