feat: auto-detect ollama models
@@ -18,6 +18,12 @@ pub struct ProviderConfig {
 const MAX_TURNS: usize = 10;
 
+#[tauri::command]
+pub async fn get_ollama_models(base_url: Option<String>) -> Result<Vec<String>, String> {
+    let url = base_url.unwrap_or_else(|| "http://localhost:11434".to_string());
+    OllamaProvider::get_models(&url).await
+}
+
 #[tauri::command]
 pub async fn chat(
     messages: Vec<Message>,
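
The new command delegates to OllamaProvider::get_models, which this hunk does not show. A minimal sketch of what that helper could look like, assuming a reqwest/serde based HTTP client and Ollama's GET /api/tags endpoint; the TagsResponse and ModelEntry type names are illustrative, not taken from the diff:

use serde::Deserialize;

// Shape of Ollama's GET /api/tags response; only the fields needed here.
// (Assumed layout: {"models": [{"name": "llama3:latest", ...}, ...]}.)
#[derive(Deserialize)]
struct TagsResponse {
    models: Vec<ModelEntry>,
}

#[derive(Deserialize)]
struct ModelEntry {
    name: String,
}

pub struct OllamaProvider;

impl OllamaProvider {
    // List the model names installed on a running Ollama server.
    pub async fn get_models(base_url: &str) -> Result<Vec<String>, String> {
        let url = format!("{}/api/tags", base_url.trim_end_matches('/'));
        let resp = reqwest::get(&url)
            .await
            .map_err(|e| format!("failed to reach Ollama at {url}: {e}"))?;
        let tags: TagsResponse = resp
            .json()
            .await
            .map_err(|e| format!("unexpected response from Ollama: {e}"))?;
        Ok(tags.models.into_iter().map(|m| m.name).collect())
    }
}

With this shape, get_ollama_models(None) falls back to the stock Ollama address http://localhost:11434, matching the default in the command above.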
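
For the frontend to call get_ollama_models via invoke(), the command also has to be registered with the Tauri builder. A sketch of the usual main.rs wiring, assuming both commands are in scope:

fn main() {
    tauri::Builder::default()
        // Expose both commands to the webview's invoke() calls.
        .invoke_handler(tauri::generate_handler![get_ollama_models, chat])
        .run(tauri::generate_context!())
        .expect("error while running tauri application");
}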