More smoothing, as they say
This commit is contained in:
@@ -1,27 +1,41 @@
|
||||
use crate::http::context::{AppContext, OpenApiResult, bad_request};
|
||||
use crate::http::payloads::ModelPayload;
|
||||
use crate::io::fs;
|
||||
use crate::llm::chat;
|
||||
use poem_openapi::{param::Query, payload::Json};
|
||||
use poem_openapi::{Object, OpenApi, param::Query, payload::Json};
|
||||
use serde::Deserialize;
|
||||
use std::sync::Arc;
|
||||
|
||||
pub async fn get_model_preference(ctx: &AppContext) -> OpenApiResult<Json<Option<String>>> {
|
||||
let result = fs::get_model_preference(ctx.store.as_ref()).map_err(bad_request)?;
|
||||
Ok(Json(result))
|
||||
/// Request body for the "set model preference" endpoint.
#[derive(Deserialize, Object)]
struct ModelPayload {
    // Name of the model to persist as the user's preference.
    model: String,
}
|
||||
|
||||
pub async fn set_model_preference(
|
||||
payload: Json<ModelPayload>,
|
||||
ctx: &AppContext,
|
||||
) -> OpenApiResult<Json<bool>> {
|
||||
fs::set_model_preference(payload.0.model, ctx.store.as_ref()).map_err(bad_request)?;
|
||||
Ok(Json(true))
|
||||
/// OpenAPI route group for model-preference and Ollama model endpoints.
pub struct ModelApi {
    // Shared application context; handlers read `ctx.store` for persistence.
    pub ctx: Arc<AppContext>,
}
|
||||
|
||||
pub async fn get_ollama_models(
|
||||
base_url: Query<Option<String>>,
|
||||
) -> OpenApiResult<Json<Vec<String>>> {
|
||||
let models = chat::get_ollama_models(base_url.0)
|
||||
.await
|
||||
.map_err(bad_request)?;
|
||||
Ok(Json(models))
|
||||
#[OpenApi]
|
||||
impl ModelApi {
|
||||
#[oai(path = "/model", method = "get")]
|
||||
async fn get_model_preference(&self) -> OpenApiResult<Json<Option<String>>> {
|
||||
let result = fs::get_model_preference(self.ctx.store.as_ref()).map_err(bad_request)?;
|
||||
Ok(Json(result))
|
||||
}
|
||||
|
||||
#[oai(path = "/model", method = "post")]
|
||||
async fn set_model_preference(&self, payload: Json<ModelPayload>) -> OpenApiResult<Json<bool>> {
|
||||
fs::set_model_preference(payload.0.model, self.ctx.store.as_ref()).map_err(bad_request)?;
|
||||
Ok(Json(true))
|
||||
}
|
||||
|
||||
#[oai(path = "/ollama/models", method = "get")]
|
||||
async fn get_ollama_models(
|
||||
&self,
|
||||
base_url: Query<Option<String>>,
|
||||
) -> OpenApiResult<Json<Vec<String>>> {
|
||||
let models = chat::get_ollama_models(base_url.0)
|
||||
.await
|
||||
.map_err(bad_request)?;
|
||||
Ok(Json(models))
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user