feat: auto-detect ollama models

This commit is contained in:
Dave
2025-12-25 12:21:58 +00:00
parent e229f2efa8
commit 6572f45422
5 changed files with 78 additions and 7 deletions

View File

@@ -18,6 +18,12 @@ pub struct ProviderConfig {
// Upper bound on conversation turns per request; presumably caps the agent's
// tool-call loop in `chat` — TODO confirm against the loop that reads it.
const MAX_TURNS: usize = 10;
/// Tauri command: fetch the list of model names from an Ollama server.
///
/// `base_url` — optional server endpoint; falls back to the local default
/// (`http://localhost:11434`) when `None`.
///
/// # Errors
/// Returns whatever error string `OllamaProvider::get_models` produces
/// (e.g. when the server is unreachable).
#[tauri::command]
pub async fn get_ollama_models(base_url: Option<String>) -> Result<Vec<String>, String> {
    let endpoint = match base_url {
        Some(url) => url,
        None => String::from("http://localhost:11434"),
    };
    OllamaProvider::get_models(&endpoint).await
}
#[tauri::command]
pub async fn chat(
messages: Vec<Message>,