// storkit/server/src/http/model.rs
use crate::http::context::{AppContext, OpenApiResult, bad_request};
use crate::io::fs;
use crate::llm::chat;
use poem_openapi::{Object, OpenApi, Tags, param::Query, payload::Json};
use serde::Deserialize;
use std::sync::Arc;
/// OpenAPI tag used to group the model-preference endpoints in the
/// generated API documentation.
#[derive(Tags)]
enum ModelTags {
/// Endpoints that read/write the model preference and list Ollama models.
Model,
}
/// Request body for `POST /model`: the model name to persist as the
/// user's preference.
#[derive(Deserialize, Object)]
struct ModelPayload {
    /// Name of the model to select (e.g. an Ollama model identifier —
    /// TODO confirm the expected format against the store).
    model: String,
}
pub struct ModelApi {
pub ctx: Arc<AppContext>,
2026-02-16 16:24:21 +00:00
}
#[OpenApi(tag = "ModelTags::Model")]
2026-02-16 16:35:25 +00:00
impl ModelApi {
2026-02-16 16:50:50 +00:00
/// Get the currently selected model preference, if any.
2026-02-16 16:35:25 +00:00
#[oai(path = "/model", method = "get")]
async fn get_model_preference(&self) -> OpenApiResult<Json<Option<String>>> {
let result = fs::get_model_preference(self.ctx.store.as_ref()).map_err(bad_request)?;
Ok(Json(result))
}
2026-02-16 16:50:50 +00:00
/// Persist the selected model preference.
2026-02-16 16:35:25 +00:00
#[oai(path = "/model", method = "post")]
async fn set_model_preference(&self, payload: Json<ModelPayload>) -> OpenApiResult<Json<bool>> {
fs::set_model_preference(payload.0.model, self.ctx.store.as_ref()).map_err(bad_request)?;
Ok(Json(true))
}
2026-02-16 16:50:50 +00:00
/// Fetch available model names from an Ollama server.
/// Optionally override the base URL via query string.
2026-02-16 16:35:25 +00:00
#[oai(path = "/ollama/models", method = "get")]
async fn get_ollama_models(
&self,
base_url: Query<Option<String>>,
) -> OpenApiResult<Json<Vec<String>>> {
let models = chat::get_ollama_models(base_url.0)
.await
.map_err(bad_request)?;
Ok(Json(models))
}
2026-02-16 16:24:21 +00:00
}