From d3fa8d940b25bf2cb9d737917905f2f9c2fd3f46 Mon Sep 17 00:00:00 2001 From: Dave Date: Thu, 25 Dec 2025 13:21:55 +0000 Subject: [PATCH] feat: persist model selection --- .../stories/09_system_prompt_persona.md | 18 ---------- src-tauri/src/commands/fs.rs | 26 ++++++++++++++ src-tauri/src/lib.rs | 2 ++ src/components/Chat.tsx | 34 +++++++++++++++---- 4 files changed, 56 insertions(+), 24 deletions(-) delete mode 100644 .living_spec/stories/09_system_prompt_persona.md diff --git a/.living_spec/stories/09_system_prompt_persona.md b/.living_spec/stories/09_system_prompt_persona.md deleted file mode 100644 index bbee091..0000000 --- a/.living_spec/stories/09_system_prompt_persona.md +++ /dev/null @@ -1,18 +0,0 @@ -# Story: System Prompt & Persona - -## User Story -**As a** User -**I want** the Agent to behave like a Senior Engineer and know exactly how to use its tools -**So that** it writes high-quality code and doesn't hallucinate capabilities or refuse to edit files. - -## Acceptance Criteria -* [ ] Backend: Define a robust System Prompt constant (likely in `src-tauri/src/llm/prompts.rs`). -* [ ] Content: The prompt should define: - * Role: "Senior Software Engineer / Agent". - * Tone: Professional, direct, no fluff. - * Tool usage instructions: "You have access to the local filesystem. Use `read_file` to inspect context before editing." - * Workflow: "When asked to implement a feature, read relevant files first, then write." -* [ ] Backend: Inject this system message at the *start* of every `chat` session sent to the Provider. - -## Out of Scope -* User-editable system prompts (future story). 
diff --git a/src-tauri/src/commands/fs.rs b/src-tauri/src/commands/fs.rs index 3469943..da50a8d 100644 --- a/src-tauri/src/commands/fs.rs +++ b/src-tauri/src/commands/fs.rs @@ -8,6 +8,7 @@ use tauri_plugin_store::StoreExt; const STORE_PATH: &str = "store.json"; const KEY_LAST_PROJECT: &str = "last_project_path"; +const KEY_SELECTED_MODEL: &str = "selected_model"; // ----------------------------------------------------------------------------- // Helper Functions @@ -125,6 +126,31 @@ pub async fn get_current_project( Ok(None) } +#[tauri::command] +pub async fn get_model_preference(app: AppHandle) -> Result<Option<String>, String> { + let store = app + .store(STORE_PATH) + .map_err(|e| format!("Failed to access store: {}", e))?; + + if let Some(val) = store.get(KEY_SELECTED_MODEL) { + if let Some(model) = val.as_str() { + return Ok(Some(model.to_string())); + } + } + Ok(None) +} + +#[tauri::command] +pub async fn set_model_preference(app: AppHandle, model: String) -> Result<(), String> { + let store = app + .store(STORE_PATH) + .map_err(|e| format!("Failed to access store: {}", e))?; + + store.set(KEY_SELECTED_MODEL, json!(model)); + let _ = store.save(); + Ok(()) +} + #[tauri::command] pub async fn read_file(path: String, state: State<'_, SessionState>) -> Result<String, String> { + let full_path = resolve_path(&state, &path)?; diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs index daa7632..ccdfd0a 100644 --- a/src-tauri/src/lib.rs +++ b/src-tauri/src/lib.rs @@ -15,6 +15,8 @@ pub fn run() { commands::fs::open_project, commands::fs::close_project, commands::fs::get_current_project, + commands::fs::get_model_preference, + commands::fs::set_model_preference, commands::fs::read_file, commands::fs::write_file, commands::fs::list_directory, diff --git a/src/components/Chat.tsx b/src/components/Chat.tsx index f2b3f99..03cdf7d 100644 --- a/src/components/Chat.tsx +++ b/src/components/Chat.tsx @@ -20,12 +20,22 @@ export function Chat({ projectPath, onCloseProject }: ChatProps) { useEffect(() => { 
invoke<string[]>("get_ollama_models") - .then((models) => { + .then(async (models) => { if (models.length > 0) { setAvailableModels(models); - // If we have models and current one isn't valid, switch to first - if (!models.includes(model)) { - setModel(models[0]); + + // Check backend store for saved model + try { + const savedModel = await invoke<string | null>( + "get_model_preference", + ); + if (savedModel && models.includes(savedModel)) { + setModel(savedModel); + } else if (!models.includes(model)) { + setModel(models[0]); + } + } catch (e) { + console.error(e); } } }) @@ -161,7 +171,13 @@ export function Chat({ projectPath, onCloseProject }: ChatProps) { {availableModels.length > 0 ? ( <select value={model} - onChange={(e) => setModel(e.target.value)} + onChange={(e) => { + const newModel = e.target.value; + setModel(newModel); + invoke("set_model_preference", { model: newModel }).catch( + console.error, + ); + }} placeholder="Model" style={{ padding: "6px 12px",