More smoothing, as they say

This commit is contained in:
Dave
2026-02-16 16:35:25 +00:00
parent 5923165fcf
commit f76376b203
10 changed files with 256 additions and 265 deletions

View File

@@ -1,18 +1,40 @@
use crate::http::context::{AppContext, OpenApiResult, bad_request}; use crate::http::context::{AppContext, OpenApiResult, bad_request};
use crate::http::payloads::ApiKeyPayload; use crate::llm::chat;
use crate::llm; use poem_openapi::{Object, OpenApi, payload::Json};
use poem_openapi::payload::Json; use serde::Deserialize;
use std::sync::Arc;
pub async fn get_anthropic_api_key_exists(ctx: &AppContext) -> OpenApiResult<Json<bool>> { #[derive(Deserialize, Object)]
struct ApiKeyPayload {
api_key: String,
}
/// Handlers for the Anthropic API-key routes; holds the shared app context.
pub struct AnthropicApi {
    ctx: Arc<AppContext>,
}
impl AnthropicApi {
    /// Creates the handler set over the shared application context.
    pub fn new(ctx: Arc<AppContext>) -> Self {
        Self { ctx }
    }
}
#[OpenApi]
impl AnthropicApi {
#[oai(path = "/anthropic/key/exists", method = "get")]
async fn get_anthropic_api_key_exists(&self) -> OpenApiResult<Json<bool>> {
let exists = let exists =
llm::chat::get_anthropic_api_key_exists(ctx.store.as_ref()).map_err(bad_request)?; chat::get_anthropic_api_key_exists(self.ctx.store.as_ref()).map_err(bad_request)?;
Ok(Json(exists)) Ok(Json(exists))
} }
pub async fn set_anthropic_api_key( #[oai(path = "/anthropic/key", method = "post")]
async fn set_anthropic_api_key(
&self,
payload: Json<ApiKeyPayload>, payload: Json<ApiKeyPayload>,
ctx: &AppContext, ) -> OpenApiResult<Json<bool>> {
) -> OpenApiResult<Json<bool>> { chat::set_anthropic_api_key(self.ctx.store.as_ref(), payload.0.api_key)
llm::chat::set_anthropic_api_key(ctx.store.as_ref(), payload.0.api_key).map_err(bad_request)?; .map_err(bad_request)?;
Ok(Json(true)) Ok(Json(true))
}
} }

View File

@@ -1,8 +1,17 @@
use crate::http::context::{AppContext, OpenApiResult, bad_request}; use crate::http::context::{AppContext, OpenApiResult, bad_request};
use crate::llm::chat; use crate::llm::chat;
use poem_openapi::payload::Json; use poem_openapi::{OpenApi, payload::Json};
use std::sync::Arc;
pub async fn cancel_chat(ctx: &AppContext) -> OpenApiResult<Json<bool>> { pub struct ChatApi {
chat::cancel_chat(&ctx.state).map_err(bad_request)?; pub ctx: Arc<AppContext>,
Ok(Json(true)) }
#[OpenApi]
impl ChatApi {
#[oai(path = "/chat/cancel", method = "post")]
async fn cancel_chat(&self) -> OpenApiResult<Json<bool>> {
chat::cancel_chat(&self.ctx.state).map_err(bad_request)?;
Ok(Json(true))
}
} }

View File

@@ -1,34 +1,50 @@
use crate::http::context::{AppContext, OpenApiResult, bad_request}; use crate::http::context::{AppContext, OpenApiResult, bad_request};
use crate::http::payloads::{FilePathPayload, WriteFilePayload};
use crate::io::fs; use crate::io::fs;
use poem_openapi::payload::Json; use poem_openapi::{Object, OpenApi, payload::Json};
use serde::Deserialize;
use std::sync::Arc;
pub async fn read_file( #[derive(Deserialize, Object)]
payload: Json<FilePathPayload>, struct FilePathPayload {
ctx: &AppContext, pub path: String,
) -> OpenApiResult<Json<String>> { }
let content = fs::read_file(payload.0.path, &ctx.state)
/// Request body for the file-write route: destination path plus content.
#[derive(Deserialize, Object)]
struct WriteFilePayload {
    pub path: String,
    pub content: String,
}
/// Handlers for the filesystem routes; holds the shared app context.
pub struct FsApi {
    pub ctx: Arc<AppContext>,
}
#[OpenApi]
impl FsApi {
#[oai(path = "/fs/read", method = "post")]
async fn read_file(&self, payload: Json<FilePathPayload>) -> OpenApiResult<Json<String>> {
let content = fs::read_file(payload.0.path, &self.ctx.state)
.await .await
.map_err(bad_request)?; .map_err(bad_request)?;
Ok(Json(content)) Ok(Json(content))
} }
pub async fn write_file( #[oai(path = "/fs/write", method = "post")]
payload: Json<WriteFilePayload>, async fn write_file(&self, payload: Json<WriteFilePayload>) -> OpenApiResult<Json<bool>> {
ctx: &AppContext, fs::write_file(payload.0.path, payload.0.content, &self.ctx.state)
) -> OpenApiResult<Json<bool>> {
fs::write_file(payload.0.path, payload.0.content, &ctx.state)
.await .await
.map_err(bad_request)?; .map_err(bad_request)?;
Ok(Json(true)) Ok(Json(true))
} }
pub async fn list_directory( #[oai(path = "/fs/list", method = "post")]
async fn list_directory(
&self,
payload: Json<FilePathPayload>, payload: Json<FilePathPayload>,
ctx: &AppContext, ) -> OpenApiResult<Json<Vec<fs::FileEntry>>> {
) -> OpenApiResult<Json<Vec<fs::FileEntry>>> { let entries = fs::list_directory(payload.0.path, &self.ctx.state)
let entries = fs::list_directory(payload.0.path, &ctx.state)
.await .await
.map_err(bad_request)?; .map_err(bad_request)?;
Ok(Json(entries)) Ok(Json(entries))
}
} }

View File

@@ -5,17 +5,24 @@ pub mod context;
pub mod fs; pub mod fs;
pub mod health; pub mod health;
pub mod model; pub mod model;
pub mod payloads;
pub mod project; pub mod project;
pub mod rest;
pub mod search; pub mod search;
pub mod shell; pub mod shell;
pub mod ws; pub mod ws;
use crate::http::context::AppContext; use anthropic::AnthropicApi;
use crate::http::rest::build_openapi_service; use chat::ChatApi;
use context::AppContext;
use fs::FsApi;
use model::ModelApi;
use poem::EndpointExt; use poem::EndpointExt;
use poem::{Route, get}; use poem::{Route, get};
use poem_openapi::OpenApiService;
use project::ProjectApi;
use search::SearchApi;
use shell::ShellApi;
use std::sync::Arc;
pub fn build_routes(ctx: AppContext) -> impl poem::Endpoint { pub fn build_routes(ctx: AppContext) -> impl poem::Endpoint {
let ctx_arc = std::sync::Arc::new(ctx); let ctx_arc = std::sync::Arc::new(ctx);
@@ -32,3 +39,45 @@ pub fn build_routes(ctx: AppContext) -> impl poem::Endpoint {
.at("/*path", get(assets::embedded_file)) .at("/*path", get(assets::embedded_file))
.data(ctx_arc) .data(ctx_arc)
} }
/// The per-domain API handler structs that together form the HTTP surface.
type ApiTuple = (
    ProjectApi,
    ModelApi,
    AnthropicApi,
    FsApi,
    SearchApi,
    ShellApi,
    ChatApi,
);

/// An OpenAPI service wrapping the combined handler tuple.
type ApiService = OpenApiService<ApiTuple, ()>;

/// Builds one handler tuple, every member sharing the same `AppContext`.
///
/// Single construction path: keeps the two services built below from
/// drifting apart as handlers are added or reordered.
fn make_apis(ctx: Arc<AppContext>) -> ApiTuple {
    (
        ProjectApi { ctx: ctx.clone() },
        ModelApi { ctx: ctx.clone() },
        AnthropicApi::new(ctx.clone()),
        FsApi { ctx: ctx.clone() },
        SearchApi { ctx: ctx.clone() },
        ShellApi { ctx: ctx.clone() },
        // Last member takes ownership of the final Arc handle.
        ChatApi { ctx },
    )
}

/// Wraps a fresh handler tuple in an OpenAPI service pointed at the dev server.
fn make_service(ctx: Arc<AppContext>) -> ApiService {
    OpenApiService::new(make_apis(ctx), "Story Kit API", "1.0")
        .server("http://127.0.0.1:3001/api")
}

/// Returns two identical services over the shared context: one to mount as
/// the live API, one used to generate/serve the documentation.
pub fn build_openapi_service(ctx: Arc<AppContext>) -> (ApiService, ApiService) {
    (make_service(ctx.clone()), make_service(ctx))
}

View File

@@ -1,27 +1,41 @@
use crate::http::context::{AppContext, OpenApiResult, bad_request}; use crate::http::context::{AppContext, OpenApiResult, bad_request};
use crate::http::payloads::ModelPayload;
use crate::io::fs; use crate::io::fs;
use crate::llm::chat; use crate::llm::chat;
use poem_openapi::{param::Query, payload::Json}; use poem_openapi::{Object, OpenApi, param::Query, payload::Json};
use serde::Deserialize;
use std::sync::Arc;
pub async fn get_model_preference(ctx: &AppContext) -> OpenApiResult<Json<Option<String>>> { #[derive(Deserialize, Object)]
let result = fs::get_model_preference(ctx.store.as_ref()).map_err(bad_request)?; struct ModelPayload {
model: String,
}
/// Handlers for the model-preference routes; holds the shared app context.
pub struct ModelApi {
    pub ctx: Arc<AppContext>,
}
#[OpenApi]
impl ModelApi {
#[oai(path = "/model", method = "get")]
async fn get_model_preference(&self) -> OpenApiResult<Json<Option<String>>> {
let result = fs::get_model_preference(self.ctx.store.as_ref()).map_err(bad_request)?;
Ok(Json(result)) Ok(Json(result))
} }
pub async fn set_model_preference( #[oai(path = "/model", method = "post")]
payload: Json<ModelPayload>, async fn set_model_preference(&self, payload: Json<ModelPayload>) -> OpenApiResult<Json<bool>> {
ctx: &AppContext, fs::set_model_preference(payload.0.model, self.ctx.store.as_ref()).map_err(bad_request)?;
) -> OpenApiResult<Json<bool>> {
fs::set_model_preference(payload.0.model, ctx.store.as_ref()).map_err(bad_request)?;
Ok(Json(true)) Ok(Json(true))
} }
pub async fn get_ollama_models( #[oai(path = "/ollama/models", method = "get")]
async fn get_ollama_models(
&self,
base_url: Query<Option<String>>, base_url: Query<Option<String>>,
) -> OpenApiResult<Json<Vec<String>>> { ) -> OpenApiResult<Json<Vec<String>>> {
let models = chat::get_ollama_models(base_url.0) let models = chat::get_ollama_models(base_url.0)
.await .await
.map_err(bad_request)?; .map_err(bad_request)?;
Ok(Json(models)) Ok(Json(models))
}
} }

View File

@@ -1,39 +0,0 @@
use poem_openapi::Object;
use serde::Deserialize;
/// Request body carrying a single filesystem/project path.
#[derive(Deserialize, Object)]
pub struct PathPayload {
    pub path: String,
}
/// Request body carrying a model identifier to store as the preference.
#[derive(Deserialize, Object)]
pub struct ModelPayload {
    pub model: String,
}
/// Request body carrying an API key to persist.
#[derive(Deserialize, Object)]
pub struct ApiKeyPayload {
    pub api_key: String,
}
/// Request body carrying the path of a file to read or a directory to list.
#[derive(Deserialize, Object)]
pub struct FilePathPayload {
    pub path: String,
}
/// Request body for a file write: destination path plus the content to write.
#[derive(Deserialize, Object)]
pub struct WriteFilePayload {
    pub path: String,
    pub content: String,
}
/// Request body carrying a text query for file search.
#[derive(Deserialize, Object)]
pub struct SearchPayload {
    pub query: String,
}
/// Request body for shell execution: the command and its argument list.
#[derive(Deserialize, Object)]
pub struct ExecShellPayload {
    pub command: String,
    pub args: Vec<String>,
}

View File

@@ -1,24 +1,38 @@
use crate::http::context::{AppContext, OpenApiResult, bad_request}; use crate::http::context::{AppContext, OpenApiResult, bad_request};
use crate::http::payloads::PathPayload;
use crate::io::fs; use crate::io::fs;
use poem_openapi::payload::Json; use poem_openapi::{Object, OpenApi, payload::Json};
use serde::Deserialize;
use std::sync::Arc;
pub async fn get_current_project(ctx: &AppContext) -> OpenApiResult<Json<Option<String>>> { #[derive(Deserialize, Object)]
let result = fs::get_current_project(&ctx.state, ctx.store.as_ref()).map_err(bad_request)?; struct PathPayload {
Ok(Json(result)) path: String,
} }
pub async fn open_project( pub struct ProjectApi {
payload: Json<PathPayload>, pub ctx: Arc<AppContext>,
ctx: &AppContext, }
) -> OpenApiResult<Json<String>> {
let confirmed = fs::open_project(payload.0.path, &ctx.state, ctx.store.as_ref()) #[OpenApi]
impl ProjectApi {
#[oai(path = "/project", method = "get")]
async fn get_current_project(&self) -> OpenApiResult<Json<Option<String>>> {
let result = fs::get_current_project(&self.ctx.state, self.ctx.store.as_ref())
.map_err(bad_request)?;
Ok(Json(result))
}
#[oai(path = "/project", method = "post")]
async fn open_project(&self, payload: Json<PathPayload>) -> OpenApiResult<Json<String>> {
let confirmed = fs::open_project(payload.0.path, &self.ctx.state, self.ctx.store.as_ref())
.await .await
.map_err(bad_request)?; .map_err(bad_request)?;
Ok(Json(confirmed)) Ok(Json(confirmed))
} }
pub async fn close_project(ctx: &AppContext) -> OpenApiResult<Json<bool>> { #[oai(path = "/project", method = "delete")]
fs::close_project(&ctx.state, ctx.store.as_ref()).map_err(bad_request)?; async fn close_project(&self) -> OpenApiResult<Json<bool>> {
fs::close_project(&self.ctx.state, self.ctx.store.as_ref()).map_err(bad_request)?;
Ok(Json(true)) Ok(Json(true))
}
} }

View File

@@ -1,125 +0,0 @@
use crate::http::context::{AppContext, OpenApiResult};
use crate::http::payloads::{
ApiKeyPayload, ExecShellPayload, FilePathPayload, ModelPayload, PathPayload, SearchPayload,
WriteFilePayload,
};
use crate::http::{anthropic, chat as chat_http, fs as fs_http, model, project, search, shell};
use poem_openapi::{OpenApi, OpenApiService, param::Query, payload::Json};
use std::sync::Arc;
/// Aggregated API facade; holds the shared application context that every
/// route handler delegates through.
pub struct Api {
    ctx: Arc<AppContext>,
}
/// Aggregated HTTP API: each route delegates to its domain handler module,
/// passing the shared application context by reference.
///
/// Note: the previous version cloned the `Arc<AppContext>` in every handler
/// just to take a reference to it; `&self.ctx` deref-coerces to `&AppContext`
/// directly, so the per-request refcount bump is dropped throughout.
#[OpenApi]
impl Api {
    /// GET /project — path of the currently open project, if any.
    #[oai(path = "/project", method = "get")]
    async fn get_current_project(&self) -> OpenApiResult<Json<Option<String>>> {
        project::get_current_project(&self.ctx).await
    }

    /// POST /project — open the project at the payload path; returns the confirmed path.
    #[oai(path = "/project", method = "post")]
    async fn open_project(&self, payload: Json<PathPayload>) -> OpenApiResult<Json<String>> {
        project::open_project(payload, &self.ctx).await
    }

    /// DELETE /project — close the currently open project.
    #[oai(path = "/project", method = "delete")]
    async fn close_project(&self) -> OpenApiResult<Json<bool>> {
        project::close_project(&self.ctx).await
    }

    /// GET /model — stored model preference, if any.
    #[oai(path = "/model", method = "get")]
    async fn get_model_preference(&self) -> OpenApiResult<Json<Option<String>>> {
        model::get_model_preference(&self.ctx).await
    }

    /// POST /model — persist the model preference from the payload.
    #[oai(path = "/model", method = "post")]
    async fn set_model_preference(&self, payload: Json<ModelPayload>) -> OpenApiResult<Json<bool>> {
        model::set_model_preference(payload, &self.ctx).await
    }

    /// GET /ollama/models — list available models (optional base-URL override).
    #[oai(path = "/ollama/models", method = "get")]
    async fn get_ollama_models(
        &self,
        base_url: Query<Option<String>>,
    ) -> OpenApiResult<Json<Vec<String>>> {
        // No context needed: this handler only forwards the query parameter.
        model::get_ollama_models(base_url).await
    }

    /// GET /anthropic/key/exists — whether an Anthropic API key is stored.
    #[oai(path = "/anthropic/key/exists", method = "get")]
    async fn get_anthropic_api_key_exists(&self) -> OpenApiResult<Json<bool>> {
        anthropic::get_anthropic_api_key_exists(&self.ctx).await
    }

    /// POST /anthropic/key — store the Anthropic API key from the payload.
    #[oai(path = "/anthropic/key", method = "post")]
    async fn set_anthropic_api_key(
        &self,
        payload: Json<ApiKeyPayload>,
    ) -> OpenApiResult<Json<bool>> {
        anthropic::set_anthropic_api_key(payload, &self.ctx).await
    }

    /// POST /fs/read — read the file at the payload path; returns its contents.
    #[oai(path = "/fs/read", method = "post")]
    async fn read_file(&self, payload: Json<FilePathPayload>) -> OpenApiResult<Json<String>> {
        fs_http::read_file(payload, &self.ctx).await
    }

    /// POST /fs/write — write the payload content to the payload path.
    #[oai(path = "/fs/write", method = "post")]
    async fn write_file(&self, payload: Json<WriteFilePayload>) -> OpenApiResult<Json<bool>> {
        fs_http::write_file(payload, &self.ctx).await
    }

    /// POST /fs/list — list the directory at the payload path.
    #[oai(path = "/fs/list", method = "post")]
    async fn list_directory(
        &self,
        payload: Json<FilePathPayload>,
    ) -> OpenApiResult<Json<Vec<crate::io::fs::FileEntry>>> {
        fs_http::list_directory(payload, &self.ctx).await
    }

    /// POST /fs/search — search files for the payload query.
    #[oai(path = "/fs/search", method = "post")]
    async fn search_files(
        &self,
        payload: Json<SearchPayload>,
    ) -> OpenApiResult<Json<Vec<crate::io::search::SearchResult>>> {
        search::search_files(payload, &self.ctx).await
    }

    /// POST /shell/exec — run the payload command with its args; returns output.
    #[oai(path = "/shell/exec", method = "post")]
    async fn exec_shell(
        &self,
        payload: Json<ExecShellPayload>,
    ) -> OpenApiResult<Json<crate::io::shell::CommandOutput>> {
        shell::exec_shell(payload, &self.ctx).await
    }

    /// POST /chat/cancel — cancel the in-flight chat request, if any.
    #[oai(path = "/chat/cancel", method = "post")]
    async fn cancel_chat(&self) -> OpenApiResult<Json<bool>> {
        chat_http::cancel_chat(&self.ctx).await
    }
}
/// Builds the API service twice over the shared context: one instance to
/// mount as the live API, one to generate/serve the documentation.
pub fn build_openapi_service(
    ctx: Arc<AppContext>,
) -> (OpenApiService<Api, ()>, OpenApiService<Api, ()>) {
    // Single construction path so the title, version, and server URL cannot
    // drift between the two services.
    fn make(ctx: Arc<AppContext>) -> OpenApiService<Api, ()> {
        OpenApiService::new(Api { ctx }, "Story Kit API", "1.0")
            .server("http://127.0.0.1:3001/api")
    }
    (make(ctx.clone()), make(ctx))
}

View File

@@ -1,13 +1,28 @@
use crate::http::context::{AppContext, OpenApiResult, bad_request}; use crate::http::context::{AppContext, OpenApiResult, bad_request};
use crate::http::payloads::SearchPayload; use poem_openapi::{Object, OpenApi, payload::Json};
use poem_openapi::payload::Json; use serde::Deserialize;
use std::sync::Arc;
pub async fn search_files( pub struct SearchApi {
pub ctx: Arc<AppContext>,
}
/// Request body for the search route: the text query to look for.
#[derive(Deserialize, Object)]
struct SearchPayload {
    query: String,
}
#[OpenApi]
impl SearchApi {
#[oai(path = "/fs/search", method = "post")]
async fn search_files(
&self,
payload: Json<SearchPayload>, payload: Json<SearchPayload>,
ctx: &AppContext, ) -> OpenApiResult<Json<Vec<crate::io::search::SearchResult>>> {
) -> OpenApiResult<Json<Vec<crate::io::search::SearchResult>>> { let ctx = self.ctx.clone();
let results = crate::io::search::search_files(payload.0.query, &ctx.state) let results = crate::io::search::search_files(payload.0.query, &ctx.state)
.await .await
.map_err(bad_request)?; .map_err(bad_request)?;
Ok(Json(results)) Ok(Json(results))
}
} }

View File

@@ -1,13 +1,29 @@
use crate::http::context::{AppContext, OpenApiResult, bad_request}; use crate::http::context::{AppContext, OpenApiResult, bad_request};
use crate::http::payloads::ExecShellPayload; use poem_openapi::{Object, OpenApi, payload::Json};
use poem_openapi::payload::Json; use serde::Deserialize;
use std::sync::Arc;
pub async fn exec_shell( #[derive(Deserialize, Object)]
struct ExecShellPayload {
pub command: String,
pub args: Vec<String>,
}
/// Handlers for the shell-execution routes; holds the shared app context.
pub struct ShellApi {
    pub ctx: Arc<AppContext>,
}
#[OpenApi]
impl ShellApi {
#[oai(path = "/shell/exec", method = "post")]
async fn exec_shell(
&self,
payload: Json<ExecShellPayload>, payload: Json<ExecShellPayload>,
ctx: &AppContext, ) -> OpenApiResult<Json<crate::io::shell::CommandOutput>> {
) -> OpenApiResult<Json<crate::io::shell::CommandOutput>> { let output =
let output = crate::io::shell::exec_shell(payload.0.command, payload.0.args, &ctx.state) crate::io::shell::exec_shell(payload.0.command, payload.0.args, &self.ctx.state)
.await .await
.map_err(bad_request)?; .map_err(bad_request)?;
Ok(Json(output)) Ok(Json(output))
}
} }