fix: return empty Ollama model list instead of error when Ollama unreachable
When Ollama is not running, /api/ollama/models now returns [] instead of HTTP 400. This prevents the UI from breaking when the Ollama service is unavailable. The frontend already handles an empty list gracefully. Updated the test to assert success with an empty list rather than an error. Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -37,6 +37,7 @@ impl ModelApi {
     /// Fetch available model names from an Ollama server.
     /// Optionally override the base URL via query string.
+    /// Returns an empty list when Ollama is unreachable so the UI stays functional.
     #[oai(path = "/ollama/models", method = "get")]
     async fn get_ollama_models(
         &self,
@@ -44,7 +45,7 @@ impl ModelApi {
     ) -> OpenApiResult<Json<Vec<String>>> {
         let models = chat::get_ollama_models(base_url.0)
             .await
-            .map_err(bad_request)?;
+            .unwrap_or_default();
         Ok(Json(models))
     }
 }
@@ -116,12 +117,13 @@ mod tests {
     }

     #[tokio::test]
-    async fn get_ollama_models_returns_error_for_unreachable_url() {
+    async fn get_ollama_models_returns_empty_list_for_unreachable_url() {
         let dir = TempDir::new().unwrap();
         let api = make_api(&dir);
         // Port 1 is reserved and should immediately refuse the connection.
         let base_url = Query(Some("http://127.0.0.1:1".to_string()));
         let result = api.get_ollama_models(base_url).await;
-        assert!(result.is_err());
+        assert!(result.is_ok());
+        assert_eq!(result.unwrap().0, Vec::<String>::new());
     }
 }
Reference in New Issue
Block a user