feat: persist model selection
This commit is contained in:
@@ -1,18 +0,0 @@
# Story: System Prompt & Persona

## User Story

**As a** User

**I want** the Agent to behave like a Senior Engineer and know exactly how to use its tools

**So that** it writes high-quality code and doesn't hallucinate capabilities or refuse to edit files.

## Acceptance Criteria

* [ ] Backend: Define a robust System Prompt constant (likely in `src-tauri/src/llm/prompts.rs`).
* [ ] Content: The prompt should define:
  * Role: "Senior Software Engineer / Agent".
  * Tone: Professional, direct, no fluff.
  * Tool usage instructions: "You have access to the local filesystem. Use `read_file` to inspect context before editing."
  * Workflow: "When asked to implement a feature, read relevant files first, then write."
* [ ] Backend: Inject this system message at the *start* of every `chat` session sent to the Provider.

## Out of Scope

* User-editable system prompts (future story).
||||
@@ -8,6 +8,7 @@ use tauri_plugin_store::StoreExt;
|
||||
|
||||
const STORE_PATH: &str = "store.json";
|
||||
const KEY_LAST_PROJECT: &str = "last_project_path";
|
||||
const KEY_SELECTED_MODEL: &str = "selected_model";
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// Helper Functions
|
||||
@@ -125,6 +126,31 @@ pub async fn get_current_project(
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_model_preference(app: AppHandle) -> Result<Option<String>, String> {
|
||||
let store = app
|
||||
.store(STORE_PATH)
|
||||
.map_err(|e| format!("Failed to access store: {}", e))?;
|
||||
|
||||
if let Some(val) = store.get(KEY_SELECTED_MODEL) {
|
||||
if let Some(model) = val.as_str() {
|
||||
return Ok(Some(model.to_string()));
|
||||
}
|
||||
}
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn set_model_preference(app: AppHandle, model: String) -> Result<(), String> {
|
||||
let store = app
|
||||
.store(STORE_PATH)
|
||||
.map_err(|e| format!("Failed to access store: {}", e))?;
|
||||
|
||||
store.set(KEY_SELECTED_MODEL, json!(model));
|
||||
let _ = store.save();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn read_file(path: String, state: State<'_, SessionState>) -> Result<String, String> {
|
||||
let full_path = resolve_path(&state, &path)?;
|
||||
|
||||
@@ -15,6 +15,8 @@ pub fn run() {
|
||||
commands::fs::open_project,
|
||||
commands::fs::close_project,
|
||||
commands::fs::get_current_project,
|
||||
commands::fs::get_model_preference,
|
||||
commands::fs::set_model_preference,
|
||||
commands::fs::read_file,
|
||||
commands::fs::write_file,
|
||||
commands::fs::list_directory,
|
||||
|
||||
@@ -20,12 +20,22 @@ export function Chat({ projectPath, onCloseProject }: ChatProps) {
|
||||
|
||||
useEffect(() => {
|
||||
invoke<string[]>("get_ollama_models")
|
||||
.then((models) => {
|
||||
.then(async (models) => {
|
||||
if (models.length > 0) {
|
||||
setAvailableModels(models);
|
||||
// If we have models and current one isn't valid, switch to first
|
||||
if (!models.includes(model)) {
|
||||
setModel(models[0]);
|
||||
|
||||
// Check backend store for saved model
|
||||
try {
|
||||
const savedModel = await invoke<string | null>(
|
||||
"get_model_preference",
|
||||
);
|
||||
if (savedModel && models.includes(savedModel)) {
|
||||
setModel(savedModel);
|
||||
} else if (!models.includes(model)) {
|
||||
setModel(models[0]);
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
}
|
||||
}
|
||||
})
|
||||
@@ -161,7 +171,13 @@ export function Chat({ projectPath, onCloseProject }: ChatProps) {
|
||||
{availableModels.length > 0 ? (
|
||||
<select
|
||||
value={model}
|
||||
onChange={(e) => setModel(e.target.value)}
|
||||
onChange={(e) => {
|
||||
const newModel = e.target.value;
|
||||
setModel(newModel);
|
||||
invoke("set_model_preference", { model: newModel }).catch(
|
||||
console.error,
|
||||
);
|
||||
}}
|
||||
style={{
|
||||
padding: "6px 32px 6px 16px",
|
||||
borderRadius: "99px",
|
||||
@@ -188,7 +204,13 @@ export function Chat({ projectPath, onCloseProject }: ChatProps) {
|
||||
) : (
|
||||
<input
|
||||
value={model}
|
||||
onChange={(e) => setModel(e.target.value)}
|
||||
onChange={(e) => {
|
||||
const newModel = e.target.value;
|
||||
setModel(newModel);
|
||||
invoke("set_model_preference", { model: newModel }).catch(
|
||||
console.error,
|
||||
);
|
||||
}}
|
||||
placeholder="Model"
|
||||
style={{
|
||||
padding: "6px 12px",
|
||||
|
||||
Reference in New Issue
Block a user