moved from tauri to a server with embedded UI
This commit is contained in:
3022
server/Cargo.lock
generated
Normal file
3022
server/Cargo.lock
generated
Normal file
File diff suppressed because it is too large
Load Diff
25
server/Cargo.toml
Normal file
25
server/Cargo.toml
Normal file
@@ -0,0 +1,25 @@
|
||||
# Standalone server crate: serves the living-spec API and the embedded frontend UI.
[package]
name = "living-spec-standalone-server"
version = "0.1.0"
edition = "2024"
# build.rs registers rerun-if-changed watches on the frontend assets embedded below.
build = "build.rs"

[dependencies]
poem = { version = "3", features = ["websocket"] }            # HTTP server + websockets
poem-openapi = { version = "5", features = ["swagger-ui"] }   # OpenAPI layer + Swagger UI
tokio = { version = "1", features = ["rt-multi-thread", "macros", "sync"] }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
reqwest = { version = "0.13.2", features = ["json", "stream"] }  # outbound LLM API calls
futures = "0.3"
uuid = { version = "1.20.0", features = ["v4", "serde"] }
chrono = { version = "0.4.43", features = ["serde"] }
async-trait = "0.1.89"
ignore = "0.4.25"              # gitignore-aware walking for project search
walkdir = "2.5.0"
eventsource-stream = "0.2.3"   # SSE parsing for streaming LLM responses
rust-embed = "8"               # embeds ../frontend/dist into the binary
mime_guess = "2"

[dev-dependencies]
tempfile = "3"
|
||||
23
server/build.rs
Normal file
23
server/build.rs
Normal file
@@ -0,0 +1,23 @@
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
/// Build script: registers `cargo:rerun-if-changed` watches for every file
/// under the frontend dist directory so the embedded UI is re-bundled when
/// assets change.
///
/// The previous version only walked one directory level deep, so files
/// nested two or more directories down were never watched; it also never
/// watched the directories themselves, so newly added files did not trigger
/// a rebuild. This version recurses fully and watches directories too.
fn main() {
    let dist_dir = Path::new("../frontend/dist");

    println!("cargo:rerun-if-changed=build.rs");
    // Watching the directory itself makes added/removed entries trigger a rebuild.
    println!("cargo:rerun-if-changed={}", dist_dir.display());

    emit_rerun_recursive(dist_dir);
}

/// Recursively emits `cargo:rerun-if-changed` for every file and directory
/// under `dir`. Missing/unreadable directories are silently skipped (the
/// frontend may not have been built yet).
fn emit_rerun_recursive(dir: &Path) {
    if let Ok(entries) = fs::read_dir(dir) {
        for entry in entries.flatten() {
            let path = entry.path();
            println!("cargo:rerun-if-changed={}", path.display());
            if path.is_dir() {
                emit_rerun_recursive(&path);
            }
        }
    }
}
|
||||
378
server/src/commands/chat.rs
Normal file
378
server/src/commands/chat.rs
Normal file
@@ -0,0 +1,378 @@
|
||||
use crate::llm::prompts::SYSTEM_PROMPT;
|
||||
use crate::llm::types::{Message, Role, ToolCall, ToolDefinition, ToolFunctionDefinition};
|
||||
use crate::state::SessionState;
|
||||
use crate::store::StoreOps;
|
||||
use serde::Deserialize;
|
||||
use serde_json::json;
|
||||
|
||||
/// Hard cap on agent-loop iterations, preventing runaway tool-call cycles.
const MAX_TURNS: usize = 30;
/// Store key under which the Anthropic API key is persisted.
const KEY_ANTHROPIC_API_KEY: &str = "anthropic_api_key";
|
||||
|
||||
/// Provider/model selection supplied by the client for a chat request.
#[derive(Deserialize, Clone)]
pub struct ProviderConfig {
    /// Provider identifier; currently only "ollama" is accepted here —
    /// Anthropic is inferred from a "claude-" model prefix (see `chat`).
    pub provider: String,
    /// Model name, e.g. "claude-3-5-sonnet" or "llama3".
    pub model: String,
    /// Base URL for Ollama; defaults to http://localhost:11434 when absent.
    pub base_url: Option<String>,
    /// Whether tool calling is enabled; treated as true when absent.
    pub enable_tools: Option<bool>,
}
|
||||
|
||||
fn get_anthropic_api_key_exists_impl(store: &dyn StoreOps) -> bool {
|
||||
match store.get(KEY_ANTHROPIC_API_KEY) {
|
||||
Some(value) => value.as_str().map(|k| !k.is_empty()).unwrap_or(false),
|
||||
None => false,
|
||||
}
|
||||
}
|
||||
|
||||
fn set_anthropic_api_key_impl(store: &dyn StoreOps, api_key: &str) -> Result<(), String> {
|
||||
store.set(KEY_ANTHROPIC_API_KEY, json!(api_key));
|
||||
store.save()?;
|
||||
|
||||
match store.get(KEY_ANTHROPIC_API_KEY) {
|
||||
Some(value) => {
|
||||
if let Some(retrieved) = value.as_str() {
|
||||
if retrieved != api_key {
|
||||
return Err("Retrieved key does not match saved key".to_string());
|
||||
}
|
||||
} else {
|
||||
return Err("Stored value is not a string".to_string());
|
||||
}
|
||||
}
|
||||
None => {
|
||||
return Err("API key was saved but cannot be retrieved".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_anthropic_api_key_impl(store: &dyn StoreOps) -> Result<String, String> {
|
||||
match store.get(KEY_ANTHROPIC_API_KEY) {
|
||||
Some(value) => {
|
||||
if let Some(key) = value.as_str() {
|
||||
if key.is_empty() {
|
||||
Err("Anthropic API key is empty. Please set your API key.".to_string())
|
||||
} else {
|
||||
Ok(key.to_string())
|
||||
}
|
||||
} else {
|
||||
Err("Stored API key is not a string".to_string())
|
||||
}
|
||||
}
|
||||
None => Err("Anthropic API key not found. Please set your API key.".to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses a tool call's JSON-encoded argument string into a value.
fn parse_tool_arguments(args_str: &str) -> Result<serde_json::Value, String> {
    match serde_json::from_str(args_str) {
        Ok(value) => Ok(value),
        Err(e) => Err(format!("Error parsing arguments: {e}")),
    }
}
|
||||
|
||||
/// Returns the tool schemas advertised to the LLM.
///
/// The tool names and parameter shapes here must stay in sync with the
/// dispatch table in `execute_tool`, which routes each call by name.
pub fn get_tool_definitions() -> Vec<ToolDefinition> {
    vec![
        // read_file: fetch a file's complete contents.
        ToolDefinition {
            kind: "function".to_string(),
            function: ToolFunctionDefinition {
                name: "read_file".to_string(),
                description: "Reads the complete content of a file from the project. Use this to understand existing code before making changes.".to_string(),
                parameters: json!({
                    "type": "object",
                    "properties": {
                        "path": { "type": "string", "description": "Relative path to the file from project root" }
                    },
                    "required": ["path"]
                }),
            },
        },
        // write_file: create or completely overwrite a file.
        ToolDefinition {
            kind: "function".to_string(),
            function: ToolFunctionDefinition {
                name: "write_file".to_string(),
                description: "Creates or completely overwrites a file with new content. YOU MUST USE THIS to implement code changes - do not suggest code to the user. The content parameter must contain the COMPLETE file including all imports, functions, and unchanged code.".to_string(),
                parameters: json!({
                    "type": "object",
                    "properties": {
                        "path": { "type": "string", "description": "Relative path to the file from project root" },
                        "content": { "type": "string", "description": "The complete file content to write (not a diff or partial code)" }
                    },
                    "required": ["path", "content"]
                }),
            },
        },
        // list_directory: enumerate one directory level.
        ToolDefinition {
            kind: "function".to_string(),
            function: ToolFunctionDefinition {
                name: "list_directory".to_string(),
                description: "Lists all files and directories at a given path. Use this to explore the project structure.".to_string(),
                parameters: json!({
                    "type": "object",
                    "properties": {
                        "path": { "type": "string", "description": "Relative path to list (use '.' for project root)" }
                    },
                    "required": ["path"]
                }),
            },
        },
        // search_files: project-wide text search.
        ToolDefinition {
            kind: "function".to_string(),
            function: ToolFunctionDefinition {
                name: "search_files".to_string(),
                description: "Searches for text patterns across all files in the project. Use this to find functions, variables, or code patterns when you don't know which file they're in.".to_string(),
                parameters: json!({
                    "type": "object",
                    "properties": {
                        "query": { "type": "string", "description": "The text pattern to search for across all files" }
                    },
                    "required": ["query"]
                }),
            },
        },
        // exec_shell: run an allowlisted command (see commands::shell).
        ToolDefinition {
            kind: "function".to_string(),
            function: ToolFunctionDefinition {
                name: "exec_shell".to_string(),
                description: "Executes a shell command in the project root directory. Use this to run tests, build commands, git operations, or any command-line tool. Examples: cargo check, npm test, git status.".to_string(),
                parameters: json!({
                    "type": "object",
                    "properties": {
                        "command": {
                            "type": "string",
                            "description": "The command binary to execute (e.g., 'git', 'cargo', 'npm', 'ls')"
                        },
                        "args": {
                            "type": "array",
                            "items": { "type": "string" },
                            "description": "Array of arguments to pass to the command (e.g., ['status'] for git status)"
                        }
                    },
                    "required": ["command", "args"]
                }),
            },
        },
    ]
}
|
||||
|
||||
pub async fn get_ollama_models(base_url: Option<String>) -> Result<Vec<String>, String> {
|
||||
use crate::llm::providers::ollama::OllamaProvider;
|
||||
let url = base_url.unwrap_or_else(|| "http://localhost:11434".to_string());
|
||||
OllamaProvider::get_models(&url).await
|
||||
}
|
||||
|
||||
pub fn get_anthropic_api_key_exists(store: &dyn StoreOps) -> Result<bool, String> {
|
||||
Ok(get_anthropic_api_key_exists_impl(store))
|
||||
}
|
||||
|
||||
pub fn set_anthropic_api_key(store: &dyn StoreOps, api_key: String) -> Result<(), String> {
|
||||
set_anthropic_api_key_impl(store, &api_key)
|
||||
}
|
||||
|
||||
pub async fn chat<F, U>(
|
||||
messages: Vec<Message>,
|
||||
config: ProviderConfig,
|
||||
state: &SessionState,
|
||||
store: &dyn StoreOps,
|
||||
mut on_update: F,
|
||||
mut on_token: U,
|
||||
) -> Result<Vec<Message>, String>
|
||||
where
|
||||
F: FnMut(&[Message]) + Send,
|
||||
U: FnMut(&str) + Send,
|
||||
{
|
||||
use crate::llm::providers::anthropic::AnthropicProvider;
|
||||
use crate::llm::providers::ollama::OllamaProvider;
|
||||
|
||||
let _ = state.cancel_tx.send(false);
|
||||
let mut cancel_rx = state.cancel_rx.clone();
|
||||
cancel_rx.borrow_and_update();
|
||||
|
||||
let base_url = config
|
||||
.base_url
|
||||
.clone()
|
||||
.unwrap_or_else(|| "http://localhost:11434".to_string());
|
||||
|
||||
let is_claude = config.model.starts_with("claude-");
|
||||
|
||||
if !is_claude && config.provider.as_str() != "ollama" {
|
||||
return Err(format!("Unsupported provider: {}", config.provider));
|
||||
}
|
||||
|
||||
let tool_defs = get_tool_definitions();
|
||||
let tools = if config.enable_tools.unwrap_or(true) {
|
||||
tool_defs.as_slice()
|
||||
} else {
|
||||
&[]
|
||||
};
|
||||
|
||||
let mut current_history = messages.clone();
|
||||
|
||||
current_history.insert(
|
||||
0,
|
||||
Message {
|
||||
role: Role::System,
|
||||
content: SYSTEM_PROMPT.to_string(),
|
||||
tool_calls: None,
|
||||
tool_call_id: None,
|
||||
},
|
||||
);
|
||||
|
||||
current_history.insert(
|
||||
1,
|
||||
Message {
|
||||
role: Role::System,
|
||||
content: "REMINDER: Distinguish between showing examples (use code blocks in chat) vs implementing changes (use write_file tool). Keywords like 'show me', 'example', 'how does' = chat response. Keywords like 'create', 'add', 'implement', 'fix' = use tools."
|
||||
.to_string(),
|
||||
tool_calls: None,
|
||||
tool_call_id: None,
|
||||
},
|
||||
);
|
||||
|
||||
let mut new_messages: Vec<Message> = Vec::new();
|
||||
let mut turn_count = 0;
|
||||
|
||||
loop {
|
||||
if *cancel_rx.borrow() {
|
||||
return Err("Chat cancelled by user".to_string());
|
||||
}
|
||||
|
||||
if turn_count >= MAX_TURNS {
|
||||
return Err("Max conversation turns reached.".to_string());
|
||||
}
|
||||
turn_count += 1;
|
||||
|
||||
let response = if is_claude {
|
||||
let api_key = get_anthropic_api_key_impl(store)?;
|
||||
let anthropic_provider = AnthropicProvider::new(api_key);
|
||||
anthropic_provider
|
||||
.chat_stream(
|
||||
&config.model,
|
||||
¤t_history,
|
||||
tools,
|
||||
&mut cancel_rx,
|
||||
|token| on_token(token),
|
||||
)
|
||||
.await
|
||||
.map_err(|e| format!("Anthropic Error: {e}"))?
|
||||
} else {
|
||||
let ollama_provider = OllamaProvider::new(base_url.clone());
|
||||
ollama_provider
|
||||
.chat_stream(
|
||||
&config.model,
|
||||
¤t_history,
|
||||
tools,
|
||||
&mut cancel_rx,
|
||||
|token| on_token(token),
|
||||
)
|
||||
.await
|
||||
.map_err(|e| format!("Ollama Error: {e}"))?
|
||||
};
|
||||
|
||||
if let Some(tool_calls) = response.tool_calls {
|
||||
let assistant_msg = Message {
|
||||
role: Role::Assistant,
|
||||
content: response.content.unwrap_or_default(),
|
||||
tool_calls: Some(tool_calls.clone()),
|
||||
tool_call_id: None,
|
||||
};
|
||||
|
||||
current_history.push(assistant_msg.clone());
|
||||
new_messages.push(assistant_msg);
|
||||
on_update(¤t_history[2..]);
|
||||
|
||||
for call in tool_calls {
|
||||
if *cancel_rx.borrow() {
|
||||
return Err("Chat cancelled before tool execution".to_string());
|
||||
}
|
||||
|
||||
let output = execute_tool(&call, state).await;
|
||||
|
||||
let tool_msg = Message {
|
||||
role: Role::Tool,
|
||||
content: output,
|
||||
tool_calls: None,
|
||||
tool_call_id: call.id,
|
||||
};
|
||||
|
||||
current_history.push(tool_msg.clone());
|
||||
new_messages.push(tool_msg);
|
||||
on_update(¤t_history[2..]);
|
||||
}
|
||||
} else {
|
||||
let assistant_msg = Message {
|
||||
role: Role::Assistant,
|
||||
content: response.content.unwrap_or_default(),
|
||||
tool_calls: None,
|
||||
tool_call_id: None,
|
||||
};
|
||||
|
||||
new_messages.push(assistant_msg.clone());
|
||||
current_history.push(assistant_msg);
|
||||
on_update(¤t_history[2..]);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(new_messages)
|
||||
}
|
||||
|
||||
/// Dispatches a single model-requested tool call to the matching command
/// implementation and returns its output as a string for the Tool message.
///
/// Failures are returned as "Error: ..." strings (not `Err`) so the model
/// can see the failure and react to it instead of aborting the chat loop.
async fn execute_tool(call: &ToolCall, state: &SessionState) -> String {
    use crate::commands::{fs, search, shell};

    let name = call.function.name.as_str();
    // Tool arguments arrive as a JSON-encoded string; a parse failure is
    // reported back to the model verbatim.
    let args: serde_json::Value = match parse_tool_arguments(&call.function.arguments) {
        Ok(v) => v,
        Err(e) => return e,
    };

    // Tool names here must match the schemas in `get_tool_definitions`.
    // Missing/wrong-typed arguments default to "" and will surface as
    // errors from the underlying command.
    match name {
        "read_file" => {
            let path = args["path"].as_str().unwrap_or("").to_string();
            match fs::read_file(path, state).await {
                Ok(content) => content,
                Err(e) => format!("Error: {e}"),
            }
        }
        "write_file" => {
            let path = args["path"].as_str().unwrap_or("").to_string();
            let content = args["content"].as_str().unwrap_or("").to_string();
            match fs::write_file(path, content, state).await {
                Ok(()) => "File written successfully.".to_string(),
                Err(e) => format!("Error: {e}"),
            }
        }
        "list_directory" => {
            let path = args["path"].as_str().unwrap_or("").to_string();
            match fs::list_directory(path, state).await {
                // Structured results are serialized to JSON for the model.
                Ok(entries) => serde_json::to_string(&entries).unwrap_or_default(),
                Err(e) => format!("Error: {e}"),
            }
        }
        "search_files" => {
            let query = args["query"].as_str().unwrap_or("").to_string();
            match search::search_files(query, state).await {
                Ok(results) => serde_json::to_string(&results).unwrap_or_default(),
                Err(e) => format!("Error: {e}"),
            }
        }
        "exec_shell" => {
            let command = args["command"].as_str().unwrap_or("").to_string();
            // Non-string array elements degrade to "" rather than failing.
            let args_vec: Vec<String> = args["args"]
                .as_array()
                .map(|arr| {
                    arr.iter()
                        .map(|v| v.as_str().unwrap_or("").to_string())
                        .collect()
                })
                .unwrap_or_default();

            match shell::exec_shell(command, args_vec, state).await {
                Ok(output) => serde_json::to_string(&output).unwrap_or_default(),
                Err(e) => format!("Error: {e}"),
            }
        }
        _ => format!("Unknown tool: {name}"),
    }
}
|
||||
|
||||
pub fn cancel_chat(state: &SessionState) -> Result<(), String> {
|
||||
state.cancel_tx.send(true).map_err(|e| e.to_string())?;
|
||||
Ok(())
|
||||
}
|
||||
191
server/src/commands/fs.rs
Normal file
191
server/src/commands/fs.rs
Normal file
@@ -0,0 +1,191 @@
|
||||
use crate::state::SessionState;
|
||||
use crate::store::StoreOps;
|
||||
use serde::Serialize;
|
||||
use serde_json::json;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
|
||||
/// Store key for the most recently opened project path (restored at startup).
const KEY_LAST_PROJECT: &str = "last_project_path";
/// Store key for the user's preferred model.
const KEY_SELECTED_MODEL: &str = "selected_model";
|
||||
|
||||
/// Resolves a relative path against the active project root (pure function for testing).
///
/// Rejects absolute paths — `Path::join` *replaces* the base when joined
/// with an absolute path, so the old code let `/etc/passwd` escape the
/// project root entirely — and rejects any `..` path component. Checking
/// components (rather than `contains("..")`) also stops falsely rejecting
/// legal names such as `notes..txt`.
fn resolve_path_impl(root: PathBuf, relative_path: &str) -> Result<PathBuf, String> {
    use std::path::{Component, Path};

    let rel = Path::new(relative_path);
    if rel.is_absolute() {
        return Err("Security Violation: Absolute paths are not allowed.".to_string());
    }
    if rel.components().any(|c| matches!(c, Component::ParentDir)) {
        return Err("Security Violation: Directory traversal ('..') is not allowed.".to_string());
    }

    Ok(root.join(rel))
}
|
||||
|
||||
/// Resolves a relative path against the active project root.
|
||||
/// Returns error if no project is open or if path attempts traversal (..).
|
||||
fn resolve_path(state: &SessionState, relative_path: &str) -> Result<PathBuf, String> {
|
||||
let root = state.get_project_root()?;
|
||||
resolve_path_impl(root, relative_path)
|
||||
}
|
||||
|
||||
/// Validate that a path exists and is a directory (pure function for testing)
|
||||
async fn validate_project_path(path: PathBuf) -> Result<(), String> {
|
||||
tokio::task::spawn_blocking(move || {
|
||||
if !path.exists() {
|
||||
return Err(format!("Path does not exist: {}", path.display()));
|
||||
}
|
||||
if !path.is_dir() {
|
||||
return Err(format!("Path is not a directory: {}", path.display()));
|
||||
}
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
.map_err(|e| format!("Task failed: {}", e))?
|
||||
}
|
||||
|
||||
pub async fn open_project(
|
||||
path: String,
|
||||
state: &SessionState,
|
||||
store: &dyn StoreOps,
|
||||
) -> Result<String, String> {
|
||||
let p = PathBuf::from(&path);
|
||||
|
||||
validate_project_path(p.clone()).await?;
|
||||
|
||||
{
|
||||
let mut root = state.project_root.lock().map_err(|e| e.to_string())?;
|
||||
*root = Some(p);
|
||||
}
|
||||
|
||||
store.set(KEY_LAST_PROJECT, json!(path));
|
||||
store.save()?;
|
||||
|
||||
Ok(path)
|
||||
}
|
||||
|
||||
pub fn close_project(state: &SessionState, store: &dyn StoreOps) -> Result<(), String> {
|
||||
{
|
||||
let mut root = state.project_root.lock().map_err(|e| e.to_string())?;
|
||||
*root = None;
|
||||
}
|
||||
|
||||
store.delete(KEY_LAST_PROJECT);
|
||||
store.save()?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn get_current_project(
|
||||
state: &SessionState,
|
||||
store: &dyn StoreOps,
|
||||
) -> Result<Option<String>, String> {
|
||||
{
|
||||
let root = state.project_root.lock().map_err(|e| e.to_string())?;
|
||||
if let Some(path) = &*root {
|
||||
return Ok(Some(path.to_string_lossy().to_string()));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(path_str) = store
|
||||
.get(KEY_LAST_PROJECT)
|
||||
.as_ref()
|
||||
.and_then(|val| val.as_str())
|
||||
{
|
||||
let p = PathBuf::from(path_str);
|
||||
if p.exists() && p.is_dir() {
|
||||
let mut root = state.project_root.lock().map_err(|e| e.to_string())?;
|
||||
*root = Some(p);
|
||||
return Ok(Some(path_str.to_string()));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
pub fn get_model_preference(store: &dyn StoreOps) -> Result<Option<String>, String> {
|
||||
if let Some(model) = store
|
||||
.get(KEY_SELECTED_MODEL)
|
||||
.as_ref()
|
||||
.and_then(|val| val.as_str())
|
||||
{
|
||||
return Ok(Some(model.to_string()));
|
||||
}
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
pub fn set_model_preference(model: String, store: &dyn StoreOps) -> Result<(), String> {
|
||||
store.set(KEY_SELECTED_MODEL, json!(model));
|
||||
store.save()?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn read_file_impl(full_path: PathBuf) -> Result<String, String> {
|
||||
tokio::task::spawn_blocking(move || {
|
||||
fs::read_to_string(&full_path).map_err(|e| format!("Failed to read file: {}", e))
|
||||
})
|
||||
.await
|
||||
.map_err(|e| format!("Task failed: {}", e))?
|
||||
}
|
||||
|
||||
pub async fn read_file(path: String, state: &SessionState) -> Result<String, String> {
|
||||
let full_path = resolve_path(state, &path)?;
|
||||
read_file_impl(full_path).await
|
||||
}
|
||||
|
||||
async fn write_file_impl(full_path: PathBuf, content: String) -> Result<(), String> {
|
||||
tokio::task::spawn_blocking(move || {
|
||||
if let Some(parent) = full_path.parent() {
|
||||
fs::create_dir_all(parent)
|
||||
.map_err(|e| format!("Failed to create directories: {}", e))?;
|
||||
}
|
||||
|
||||
fs::write(&full_path, content).map_err(|e| format!("Failed to write file: {}", e))
|
||||
})
|
||||
.await
|
||||
.map_err(|e| format!("Task failed: {}", e))?
|
||||
}
|
||||
|
||||
pub async fn write_file(path: String, content: String, state: &SessionState) -> Result<(), String> {
|
||||
let full_path = resolve_path(state, &path)?;
|
||||
write_file_impl(full_path, content).await
|
||||
}
|
||||
|
||||
/// A single directory-listing entry returned to the UI and to the LLM.
#[derive(Serialize, Debug, poem_openapi::Object)]
pub struct FileEntry {
    /// File or directory name (no path component).
    pub name: String,
    /// Either "dir" or "file".
    pub kind: String,
}
|
||||
|
||||
async fn list_directory_impl(full_path: PathBuf) -> Result<Vec<FileEntry>, String> {
|
||||
tokio::task::spawn_blocking(move || {
|
||||
let entries = fs::read_dir(&full_path).map_err(|e| format!("Failed to read dir: {}", e))?;
|
||||
|
||||
let mut result = Vec::new();
|
||||
for entry in entries {
|
||||
let entry = entry.map_err(|e| e.to_string())?;
|
||||
let ft = entry.file_type().map_err(|e| e.to_string())?;
|
||||
let name = entry.file_name().to_string_lossy().to_string();
|
||||
|
||||
result.push(FileEntry {
|
||||
name,
|
||||
kind: if ft.is_dir() {
|
||||
"dir".to_string()
|
||||
} else {
|
||||
"file".to_string()
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
result.sort_by(|a, b| match (a.kind.as_str(), b.kind.as_str()) {
|
||||
("dir", "file") => std::cmp::Ordering::Less,
|
||||
("file", "dir") => std::cmp::Ordering::Greater,
|
||||
_ => a.name.cmp(&b.name),
|
||||
});
|
||||
|
||||
Ok(result)
|
||||
})
|
||||
.await
|
||||
.map_err(|e| format!("Task failed: {}", e))?
|
||||
}
|
||||
|
||||
pub async fn list_directory(path: String, state: &SessionState) -> Result<Vec<FileEntry>, String> {
|
||||
let full_path = resolve_path(state, &path)?;
|
||||
list_directory_impl(full_path).await
|
||||
}
|
||||
4
server/src/commands/mod.rs
Normal file
4
server/src/commands/mod.rs
Normal file
@@ -0,0 +1,4 @@
|
||||
pub mod chat;
|
||||
pub mod fs;
|
||||
pub mod search;
|
||||
pub mod shell;
|
||||
65
server/src/commands/search.rs
Normal file
65
server/src/commands/search.rs
Normal file
@@ -0,0 +1,65 @@
|
||||
use crate::state::SessionState;
|
||||
use ignore::WalkBuilder;
|
||||
use serde::Serialize;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
|
||||
/// One file that matched a project-wide text search.
#[derive(Serialize, Debug, poem_openapi::Object)]
pub struct SearchResult {
    /// Path relative to the project root.
    pub path: String,
    /// Match count reported for this file.
    pub matches: usize,
}
|
||||
|
||||
fn get_project_root(state: &SessionState) -> Result<PathBuf, String> {
|
||||
state.get_project_root()
|
||||
}
|
||||
|
||||
pub async fn search_files(
|
||||
query: String,
|
||||
state: &SessionState,
|
||||
) -> Result<Vec<SearchResult>, String> {
|
||||
let root = get_project_root(state)?;
|
||||
search_files_impl(query, root).await
|
||||
}
|
||||
|
||||
pub async fn search_files_impl(query: String, root: PathBuf) -> Result<Vec<SearchResult>, String> {
|
||||
let root_clone = root.clone();
|
||||
|
||||
let results = tokio::task::spawn_blocking(move || {
|
||||
let mut matches = Vec::new();
|
||||
let walker = WalkBuilder::new(&root_clone).git_ignore(true).build();
|
||||
|
||||
for result in walker {
|
||||
match result {
|
||||
Ok(entry) => {
|
||||
if !entry.file_type().map(|ft| ft.is_file()).unwrap_or(false) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let path = entry.path();
|
||||
if let Ok(content) = fs::read_to_string(path)
|
||||
&& content.contains(&query)
|
||||
{
|
||||
let relative = path
|
||||
.strip_prefix(&root_clone)
|
||||
.unwrap_or(path)
|
||||
.to_string_lossy()
|
||||
.to_string();
|
||||
|
||||
matches.push(SearchResult {
|
||||
path: relative,
|
||||
matches: 1,
|
||||
});
|
||||
}
|
||||
}
|
||||
Err(err) => eprintln!("Error walking dir: {}", err),
|
||||
}
|
||||
}
|
||||
|
||||
matches
|
||||
})
|
||||
.await
|
||||
.map_err(|e| format!("Search task failed: {e}"))?;
|
||||
|
||||
Ok(results)
|
||||
}
|
||||
58
server/src/commands/shell.rs
Normal file
58
server/src/commands/shell.rs
Normal file
@@ -0,0 +1,58 @@
|
||||
use crate::state::SessionState;
|
||||
use serde::Serialize;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
|
||||
/// Helper to get the root path (cloned) without joining
|
||||
fn get_project_root(state: &SessionState) -> Result<PathBuf, String> {
|
||||
state.get_project_root()
|
||||
}
|
||||
|
||||
/// Captured result of a shell command execution.
#[derive(Serialize, Debug, poem_openapi::Object)]
pub struct CommandOutput {
    /// Full stdout, lossily decoded as UTF-8.
    pub stdout: String,
    /// Full stderr, lossily decoded as UTF-8.
    pub stderr: String,
    /// Process exit code; -1 when the process terminated without a code
    /// (e.g. killed by a signal on Unix).
    pub exit_code: i32,
}
|
||||
|
||||
/// Execute shell command logic (pure function for testing)
///
/// Runs `command` with `args` in `root` on the blocking thread pool, gated
/// by a static binary allowlist. Output is captured in full (not streamed),
/// so long-running commands block until completion.
async fn exec_shell_impl(
    command: String,
    args: Vec<String>,
    root: PathBuf,
) -> Result<CommandOutput, String> {
    // Security Allowlist
    // NOTE(review): several allowed binaries can still execute arbitrary
    // code or delete files (`npm`/`yarn` run package scripts, `find -exec`,
    // `rm`, git hooks) — this list limits accidents, not a hostile caller.
    let allowed_commands = [
        "git", "cargo", "npm", "yarn", "pnpm", "node", "bun", "ls", "find", "grep", "mkdir", "rm",
        "mv", "cp", "touch", "rustc", "rustfmt",
    ];

    if !allowed_commands.contains(&command.as_str()) {
        return Err(format!("Command '{}' is not in the allowlist.", command));
    }

    // Arguments go straight to the process (no shell), so there is no
    // shell-metacharacter injection surface here.
    let output = tokio::task::spawn_blocking(move || {
        Command::new(&command)
            .args(&args)
            .current_dir(root)
            .output()
    })
    .await
    .map_err(|e| format!("Task join error: {}", e))?
    .map_err(|e| format!("Failed to execute command: {}", e))?;

    Ok(CommandOutput {
        stdout: String::from_utf8_lossy(&output.stdout).to_string(),
        stderr: String::from_utf8_lossy(&output.stderr).to_string(),
        // -1 signals abnormal termination (no exit code available).
        exit_code: output.status.code().unwrap_or(-1),
    })
}
|
||||
|
||||
pub async fn exec_shell(
|
||||
command: String,
|
||||
args: Vec<String>,
|
||||
state: &SessionState,
|
||||
) -> Result<CommandOutput, String> {
|
||||
let root = get_project_root(state)?;
|
||||
exec_shell_impl(command, args, root).await
|
||||
}
|
||||
3
server/src/llm/mod.rs
Normal file
3
server/src/llm/mod.rs
Normal file
@@ -0,0 +1,3 @@
|
||||
pub mod prompts;
|
||||
pub mod providers;
|
||||
pub mod types;
|
||||
91
server/src/llm/prompts.rs
Normal file
91
server/src/llm/prompts.rs
Normal file
@@ -0,0 +1,91 @@
|
||||
pub const SYSTEM_PROMPT: &str = r#"You are an AI Agent with direct access to the user's filesystem and development environment.
|
||||
|
||||
CRITICAL INSTRUCTIONS:
|
||||
1. **Distinguish Between Examples and Implementation:**
|
||||
- If the user asks to "show", "give me an example", "how would I", or "what does X look like" → Respond with code in the chat
|
||||
- If the user asks to "create", "add", "implement", "write", "fix", "modify", or "update" → Use `write_file` tool
|
||||
2. **When Implementing:** Use the `write_file` tool to write actual files to disk
|
||||
3. **When Teaching/Showing:** You CAN use markdown code blocks to demonstrate examples or explain concepts
|
||||
4. **Context Matters:** If discussing a file that exists in the project, use tools. If showing generic examples, use code blocks.
|
||||
|
||||
YOUR CAPABILITIES:
|
||||
You have the following tools available:
|
||||
- `read_file(path)` - Read the content of any file in the project
|
||||
- `write_file(path, content)` - Write or overwrite a file with new content
|
||||
- `list_directory(path)` - List files and directories
|
||||
- `search_files(query)` - Search for text patterns across all files
|
||||
- `exec_shell(command, args)` - Execute shell commands (git, cargo, npm, etc.)
|
||||
|
||||
YOUR WORKFLOW:
|
||||
When the user requests a feature or change:
|
||||
1. **Understand:** Read `.living_spec/README.md` if you haven't already to understand the development process
|
||||
2. **Explore:** Use `read_file` and `list_directory` to understand the current codebase structure
|
||||
3. **Implement:** Use `write_file` to create or modify files directly
|
||||
4. **Verify:** Use `exec_shell` to run tests, linters, or build commands to verify your changes work
|
||||
5. **Report:** Tell the user what you did (past tense), not what they should do
|
||||
|
||||
CRITICAL RULES:
|
||||
- **Read Before Write:** ALWAYS read files before modifying them. The `write_file` tool OVERWRITES the entire file.
|
||||
- **Complete Files Only:** When using `write_file`, output the COMPLETE file content, including all imports, functions, and unchanged code. Never write partial diffs or use placeholders like "// ... rest of code".
|
||||
- **Be Direct:** Don't announce your actions ("I will now..."). Just execute the tools immediately.
|
||||
- **Take Initiative:** If you need information, use tools to get it. Don't ask the user for things you can discover yourself.
|
||||
|
||||
EXAMPLES OF CORRECT BEHAVIOR:
|
||||
|
||||
Example 1 - User asks for an EXAMPLE (show in chat):
|
||||
User: "Show me a Java hello world"
|
||||
You (correct): "Here's a simple Java hello world program:
|
||||
```java
|
||||
public class HelloWorld {
|
||||
public static void main(String[] args) {
|
||||
System.out.println("Hello, World!");
|
||||
}
|
||||
}
|
||||
```"
|
||||
|
||||
Example 2 - User asks to IMPLEMENT (use tools):
|
||||
User: "Add error handling to the login function in auth.rs"
|
||||
You (correct): [Call read_file("src/auth.rs"), analyze it, then call write_file("src/auth.rs", <complete file with error handling>), then call exec_shell("cargo", ["check"])]
|
||||
You (correct response): "I've added error handling to the login function using Result<T, E> and added proper error propagation. The code compiles successfully."
|
||||
|
||||
Example 3 - User asks to CREATE (use tools):
|
||||
User: "Create a new component called Button.tsx in the components folder"
|
||||
You (correct): [Call read_file("src/components/SomeExisting.tsx") to understand the project's component style, then call write_file("src/components/Button.tsx", <complete component code>)]
|
||||
You (correct response): "I've created Button.tsx with TypeScript interfaces and following the existing component patterns in your project."
|
||||
|
||||
Example 4 - User asks to FIX (use tools):
|
||||
User: "The calculation in utils.js is wrong"
|
||||
You (correct): [Call read_file("src/utils.js"), identify the bug, call write_file("src/utils.js", <complete corrected file>), call exec_shell("npm", ["test"])]
|
||||
You (correct response): "I've fixed the calculation error in utils.js. The formula now correctly handles edge cases and all tests pass."
|
||||
|
||||
EXAMPLES OF INCORRECT BEHAVIOR (DO NOT DO THIS):
|
||||
|
||||
Example 1 - Writing a file when user asks for an example:
|
||||
User: "Show me a React component"
|
||||
You (WRONG): [Calls write_file("Component.tsx", ...)]
|
||||
You (CORRECT): Show the code in a markdown code block in the chat
|
||||
|
||||
Example 2 - Suggesting code when user asks to implement:
|
||||
User: "Add error handling to the login function"
|
||||
You (WRONG): "Here's how you can add error handling: ```rust fn login() -> Result<User, LoginError> { ... } ``` Add this to your auth.rs file."
|
||||
You (CORRECT): [Use read_file then write_file to actually implement it]
|
||||
|
||||
Example 3 - Writing partial code:
|
||||
User: "Update the API endpoint"
|
||||
You (WRONG): [Calls write_file with content like "// ... existing imports\n\nfn new_endpoint() { }\n\n// ... rest of file"]
|
||||
You (CORRECT): Read the file first, then write the COMPLETE file with all content
|
||||
|
||||
Example 4 - Asking for information you can discover:
|
||||
User: "Add a new route to the app"
|
||||
You (WRONG): "What file contains your routes?"
|
||||
You (CORRECT): [Call search_files("route") or list_directory("src") to find the routing file yourself]
|
||||
|
||||
REMEMBER:
|
||||
- **Teaching vs Implementing:** Show examples in chat, implement changes with tools
|
||||
- **Keywords matter:** "show/example" = chat, "create/add/fix" = tools
|
||||
- **Complete files:** Always write the COMPLETE file content when using write_file
|
||||
- **Verify your work:** Use exec_shell to run tests/checks after implementing changes
|
||||
- You have the power to both teach AND implement - use the right mode for the situation
|
||||
|
||||
Remember: You are an autonomous agent that can both explain concepts and take action. Choose appropriately based on the user's request.
|
||||
"#;
|
||||
310
server/src/llm/providers/anthropic.rs
Normal file
310
server/src/llm/providers/anthropic.rs
Normal file
@@ -0,0 +1,310 @@
|
||||
use crate::llm::types::{
|
||||
CompletionResponse, FunctionCall, Message, Role, ToolCall, ToolDefinition,
|
||||
};
|
||||
use futures::StreamExt;
|
||||
use reqwest::header::{CONTENT_TYPE, HeaderMap, HeaderValue};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::json;
|
||||
use tokio::sync::watch::Receiver;
|
||||
|
||||
// Anthropic Messages API endpoint used for all chat completions.
const ANTHROPIC_API_URL: &str = "https://api.anthropic.com/v1/messages";
// Required `anthropic-version` header value.
const ANTHROPIC_VERSION: &str = "2023-06-01";
|
||||
|
||||
/// Client for the Anthropic Messages API (streaming chat with tool use).
pub struct AnthropicProvider {
    api_key: String, // secret sent via the `x-api-key` header
    client: reqwest::Client, // reused across requests for connection pooling
}
|
||||
|
||||
/// One message in Anthropic's wire format.
#[derive(Debug, Serialize, Deserialize)]
struct AnthropicMessage {
    role: String, // "user" or "assistant"
    content: AnthropicContent,
}
|
||||
|
||||
/// Message content: either a plain string or a list of typed blocks.
/// `untagged` lets serde choose the variant from the JSON shape.
#[derive(Debug, Serialize, Deserialize)]
#[serde(untagged)]
enum AnthropicContent {
    Text(String),
    Blocks(Vec<AnthropicContentBlock>),
}
|
||||
|
||||
/// A typed content block, discriminated by the JSON `type` field.
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
enum AnthropicContentBlock {
    /// Plain text content.
    #[serde(rename = "text")]
    Text { text: String },
    /// A tool invocation requested by the model.
    #[serde(rename = "tool_use")]
    ToolUse {
        id: String,
        name: String,
        input: serde_json::Value,
    },
    /// The result of a tool invocation, echoed back to the model.
    #[serde(rename = "tool_result")]
    ToolResult {
        tool_use_id: String,
        content: String,
    },
}
|
||||
|
||||
/// Tool definition in Anthropic's schema (`input_schema` is JSON Schema).
#[derive(Debug, Serialize)]
struct AnthropicTool {
    name: String,
    description: String,
    input_schema: serde_json::Value,
}
|
||||
|
||||
/// One SSE event from the streaming Messages API.
/// `data` captures all remaining fields for per-event-type inspection.
#[derive(Debug, Deserialize)]
struct StreamEvent {
    #[serde(rename = "type")]
    event_type: String,
    #[serde(flatten)]
    data: serde_json::Value,
}
|
||||
|
||||
impl AnthropicProvider {
|
||||
pub fn new(api_key: String) -> Self {
|
||||
Self {
|
||||
api_key,
|
||||
client: reqwest::Client::new(),
|
||||
}
|
||||
}
|
||||
|
||||
fn convert_tools(tools: &[ToolDefinition]) -> Vec<AnthropicTool> {
|
||||
tools
|
||||
.iter()
|
||||
.map(|tool| AnthropicTool {
|
||||
name: tool.function.name.clone(),
|
||||
description: tool.function.description.clone(),
|
||||
input_schema: tool.function.parameters.clone(),
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn convert_messages(messages: &[Message]) -> Vec<AnthropicMessage> {
|
||||
let mut anthropic_messages: Vec<AnthropicMessage> = Vec::new();
|
||||
|
||||
for msg in messages {
|
||||
match msg.role {
|
||||
Role::System => {
|
||||
continue;
|
||||
}
|
||||
Role::User => {
|
||||
anthropic_messages.push(AnthropicMessage {
|
||||
role: "user".to_string(),
|
||||
content: AnthropicContent::Text(msg.content.clone()),
|
||||
});
|
||||
}
|
||||
Role::Assistant => {
|
||||
if let Some(tool_calls) = &msg.tool_calls {
|
||||
let mut blocks = Vec::new();
|
||||
|
||||
if !msg.content.is_empty() {
|
||||
blocks.push(AnthropicContentBlock::Text {
|
||||
text: msg.content.clone(),
|
||||
});
|
||||
}
|
||||
|
||||
for call in tool_calls {
|
||||
let input: serde_json::Value =
|
||||
serde_json::from_str(&call.function.arguments).unwrap_or(json!({}));
|
||||
|
||||
blocks.push(AnthropicContentBlock::ToolUse {
|
||||
id: call
|
||||
.id
|
||||
.clone()
|
||||
.unwrap_or_else(|| uuid::Uuid::new_v4().to_string()),
|
||||
name: call.function.name.clone(),
|
||||
input,
|
||||
});
|
||||
}
|
||||
|
||||
anthropic_messages.push(AnthropicMessage {
|
||||
role: "assistant".to_string(),
|
||||
content: AnthropicContent::Blocks(blocks),
|
||||
});
|
||||
} else {
|
||||
anthropic_messages.push(AnthropicMessage {
|
||||
role: "assistant".to_string(),
|
||||
content: AnthropicContent::Text(msg.content.clone()),
|
||||
});
|
||||
}
|
||||
}
|
||||
Role::Tool => {
|
||||
let tool_use_id = msg.tool_call_id.clone().unwrap_or_default();
|
||||
anthropic_messages.push(AnthropicMessage {
|
||||
role: "user".to_string(),
|
||||
content: AnthropicContent::Blocks(vec![
|
||||
AnthropicContentBlock::ToolResult {
|
||||
tool_use_id,
|
||||
content: msg.content.clone(),
|
||||
},
|
||||
]),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
anthropic_messages
|
||||
}
|
||||
|
||||
fn extract_system_prompt(messages: &[Message]) -> String {
|
||||
messages
|
||||
.iter()
|
||||
.filter(|m| matches!(m.role, Role::System))
|
||||
.map(|m| m.content.as_str())
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n\n")
|
||||
}
|
||||
|
||||
pub async fn chat_stream<F>(
|
||||
&self,
|
||||
model: &str,
|
||||
messages: &[Message],
|
||||
tools: &[ToolDefinition],
|
||||
cancel_rx: &mut Receiver<bool>,
|
||||
mut on_token: F,
|
||||
) -> Result<CompletionResponse, String>
|
||||
where
|
||||
F: FnMut(&str),
|
||||
{
|
||||
let anthropic_messages = Self::convert_messages(messages);
|
||||
let anthropic_tools = Self::convert_tools(tools);
|
||||
let system_prompt = Self::extract_system_prompt(messages);
|
||||
|
||||
let mut request_body = json!({
|
||||
"model": model,
|
||||
"max_tokens": 4096,
|
||||
"messages": anthropic_messages,
|
||||
"stream": true,
|
||||
});
|
||||
|
||||
if !system_prompt.is_empty() {
|
||||
request_body["system"] = json!(system_prompt);
|
||||
}
|
||||
|
||||
if !anthropic_tools.is_empty() {
|
||||
request_body["tools"] = json!(anthropic_tools);
|
||||
}
|
||||
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(CONTENT_TYPE, HeaderValue::from_static("application/json"));
|
||||
headers.insert(
|
||||
"x-api-key",
|
||||
HeaderValue::from_str(&self.api_key).map_err(|e| e.to_string())?,
|
||||
);
|
||||
headers.insert(
|
||||
"anthropic-version",
|
||||
HeaderValue::from_static(ANTHROPIC_VERSION),
|
||||
);
|
||||
|
||||
let response = self
|
||||
.client
|
||||
.post(ANTHROPIC_API_URL)
|
||||
.headers(headers)
|
||||
.json(&request_body)
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to send request to Anthropic: {e}"))?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
let status = response.status();
|
||||
let error_text = response
|
||||
.text()
|
||||
.await
|
||||
.unwrap_or_else(|_| "Unknown error".to_string());
|
||||
return Err(format!("Anthropic API error {status}: {error_text}"));
|
||||
}
|
||||
|
||||
let mut stream = response.bytes_stream();
|
||||
let mut accumulated_text = String::new();
|
||||
let mut tool_calls: Vec<ToolCall> = Vec::new();
|
||||
let mut current_tool_use: Option<(String, String, String)> = None;
|
||||
|
||||
loop {
|
||||
let chunk = tokio::select! {
|
||||
result = stream.next() => {
|
||||
match result {
|
||||
Some(c) => c,
|
||||
None => break,
|
||||
}
|
||||
}
|
||||
_ = cancel_rx.changed() => {
|
||||
if *cancel_rx.borrow() {
|
||||
return Err("Chat cancelled by user".to_string());
|
||||
}
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
let bytes = chunk.map_err(|e| format!("Stream error: {e}"))?;
|
||||
let text = String::from_utf8_lossy(&bytes);
|
||||
|
||||
for line in text.lines() {
|
||||
if let Some(json_str) = line.strip_prefix("data: ") {
|
||||
if json_str == "[DONE]" {
|
||||
break;
|
||||
}
|
||||
|
||||
let event: StreamEvent = match serde_json::from_str(json_str) {
|
||||
Ok(e) => e,
|
||||
Err(_) => continue,
|
||||
};
|
||||
|
||||
match event.event_type.as_str() {
|
||||
"content_block_start" => {
|
||||
if let Some(content_block) = event.data.get("content_block")
|
||||
&& content_block.get("type") == Some(&json!("tool_use"))
|
||||
{
|
||||
let id = content_block["id"].as_str().unwrap_or("").to_string();
|
||||
let name = content_block["name"].as_str().unwrap_or("").to_string();
|
||||
current_tool_use = Some((id, name, String::new()));
|
||||
}
|
||||
}
|
||||
"content_block_delta" => {
|
||||
if let Some(delta) = event.data.get("delta") {
|
||||
if delta.get("type") == Some(&json!("text_delta")) {
|
||||
if let Some(text) = delta.get("text").and_then(|t| t.as_str()) {
|
||||
accumulated_text.push_str(text);
|
||||
on_token(text);
|
||||
}
|
||||
} else if delta.get("type") == Some(&json!("input_json_delta"))
|
||||
&& let Some((_, _, input_json)) = &mut current_tool_use
|
||||
&& let Some(partial) =
|
||||
delta.get("partial_json").and_then(|p| p.as_str())
|
||||
{
|
||||
input_json.push_str(partial);
|
||||
}
|
||||
}
|
||||
}
|
||||
"content_block_stop" => {
|
||||
if let Some((id, name, input_json)) = current_tool_use.take() {
|
||||
tool_calls.push(ToolCall {
|
||||
id: Some(id),
|
||||
kind: "function".to_string(),
|
||||
function: FunctionCall {
|
||||
name,
|
||||
arguments: input_json,
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(CompletionResponse {
|
||||
content: if accumulated_text.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(accumulated_text)
|
||||
},
|
||||
tool_calls: if tool_calls.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(tool_calls)
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
2
server/src/llm/providers/mod.rs
Normal file
2
server/src/llm/providers/mod.rs
Normal file
@@ -0,0 +1,2 @@
|
||||
pub mod anthropic;
|
||||
pub mod ollama;
|
||||
266
server/src/llm/providers/ollama.rs
Normal file
266
server/src/llm/providers/ollama.rs
Normal file
@@ -0,0 +1,266 @@
|
||||
use crate::llm::types::{
|
||||
CompletionResponse, FunctionCall, Message, ModelProvider, Role, ToolCall, ToolDefinition,
|
||||
};
|
||||
use async_trait::async_trait;
|
||||
use futures::StreamExt;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
|
||||
/// Client for a local Ollama server, addressed by its base URL.
pub struct OllamaProvider {
    base_url: String, // e.g. scheme://host:port; trailing '/' is tolerated
}
|
||||
|
||||
impl OllamaProvider {
    /// Create a provider bound to the given Ollama base URL.
    pub fn new(base_url: String) -> Self {
        Self { base_url }
    }

    /// Fetch the names of models available on the server via `/api/tags`.
    pub async fn get_models(base_url: &str) -> Result<Vec<String>, String> {
        let client = reqwest::Client::new();
        let url = format!("{}/api/tags", base_url.trim_end_matches('/'));

        let res = client
            .get(&url)
            .send()
            .await
            .map_err(|e| format!("Request failed: {}", e))?;

        if !res.status().is_success() {
            let status = res.status();
            let text = res.text().await.unwrap_or_default();
            return Err(format!("Ollama API error {}: {}", status, text));
        }

        let body: OllamaTagsResponse = res
            .json()
            .await
            .map_err(|e| format!("Failed to parse response: {}", e))?;

        Ok(body.models.into_iter().map(|m| m.name).collect())
    }

    /// Streaming chat that calls `on_token` for each token chunk.
    ///
    /// Posts to `/api/chat` and consumes the newline-delimited JSON stream,
    /// accumulating text and tool calls until the server sets `done` or the
    /// stream ends. Flipping `cancel_rx` to `true` aborts with an error.
    pub async fn chat_stream<F>(
        &self,
        model: &str,
        messages: &[Message],
        tools: &[ToolDefinition],
        cancel_rx: &mut tokio::sync::watch::Receiver<bool>,
        mut on_token: F,
    ) -> Result<CompletionResponse, String>
    where
        F: FnMut(&str) + Send,
    {
        let client = reqwest::Client::new();
        let url = format!("{}/api/chat", self.base_url.trim_end_matches('/'));

        // Convert the generic history into Ollama's request shape.
        let ollama_messages: Vec<OllamaRequestMessage> = messages
            .iter()
            .map(|m| {
                let tool_calls = m.tool_calls.as_ref().map(|calls| {
                    calls
                        .iter()
                        .map(|tc| {
                            // Arguments travel as a JSON string in `Message`;
                            // parse back to JSON, falling back to a raw string.
                            let args_val: Value = serde_json::from_str(&tc.function.arguments)
                                .unwrap_or(Value::String(tc.function.arguments.clone()));

                            OllamaRequestToolCall {
                                kind: tc.kind.clone(),
                                function: OllamaRequestFunctionCall {
                                    name: tc.function.name.clone(),
                                    arguments: args_val,
                                },
                            }
                        })
                        .collect()
                });

                OllamaRequestMessage {
                    role: m.role.clone(),
                    content: m.content.clone(),
                    tool_calls,
                    tool_call_id: m.tool_call_id.clone(),
                }
            })
            .collect();

        let request_body = OllamaRequest {
            model,
            messages: ollama_messages,
            stream: true,
            tools,
        };

        let res = client
            .post(&url)
            .json(&request_body)
            .send()
            .await
            .map_err(|e| format!("Request failed: {}", e))?;

        if !res.status().is_success() {
            let status = res.status();
            let text = res.text().await.unwrap_or_default();
            return Err(format!("Ollama API error {}: {}", status, text));
        }

        let mut stream = res.bytes_stream();
        // `buffer` holds partial lines that straddle network chunks.
        let mut buffer = String::new();
        let mut accumulated_content = String::new();
        let mut final_tool_calls: Option<Vec<ToolCall>> = None;

        loop {
            // Fast-path cancellation check before waiting on the stream.
            if *cancel_rx.borrow() {
                return Err("Chat cancelled by user".to_string());
            }

            // Race the next chunk against a cancellation notification.
            let chunk_result = tokio::select! {
                chunk = stream.next() => {
                    match chunk {
                        Some(c) => c,
                        None => break,
                    }
                }
                _ = cancel_rx.changed() => {
                    if *cancel_rx.borrow() {
                        return Err("Chat cancelled by user".to_string());
                    } else {
                        continue;
                    }
                }
            };

            let chunk = chunk_result.map_err(|e| format!("Stream error: {}", e))?;
            buffer.push_str(&String::from_utf8_lossy(&chunk));

            // Process every complete line; keep any trailing partial line.
            while let Some(newline_pos) = buffer.find('\n') {
                let line = buffer[..newline_pos].trim().to_string();
                buffer = buffer[newline_pos + 1..].to_string();

                if line.is_empty() {
                    continue;
                }

                let stream_msg: OllamaStreamResponse =
                    serde_json::from_str(&line).map_err(|e| format!("JSON parse error: {}", e))?;

                if !stream_msg.message.content.is_empty() {
                    accumulated_content.push_str(&stream_msg.message.content);
                    on_token(&stream_msg.message.content);
                }

                // Note: a chunk carrying tool calls replaces any previously
                // seen set rather than appending to it.
                if let Some(tool_calls) = stream_msg.message.tool_calls {
                    final_tool_calls = Some(
                        tool_calls
                            .into_iter()
                            .map(|tc| ToolCall {
                                id: None, // Ollama does not assign call ids
                                kind: "function".to_string(),
                                function: FunctionCall {
                                    name: tc.function.name,
                                    arguments: tc.function.arguments.to_string(),
                                },
                            })
                            .collect(),
                    );
                }

                if stream_msg.done {
                    break;
                }
            }
        }

        Ok(CompletionResponse {
            content: if accumulated_content.is_empty() {
                None
            } else {
                Some(accumulated_content)
            },
            tool_calls: final_tool_calls,
        })
    }
}
|
||||
|
||||
/// Response body of Ollama's `/api/tags` endpoint.
#[derive(Deserialize)]
struct OllamaTagsResponse {
    models: Vec<OllamaModelTag>,
}
|
||||
|
||||
/// A single installed-model entry; only the name is used here.
#[derive(Deserialize)]
struct OllamaModelTag {
    name: String,
}
|
||||
|
||||
/// Request body for Ollama's `/api/chat` endpoint.
#[derive(Serialize)]
struct OllamaRequest<'a> {
    model: &'a str,
    messages: Vec<OllamaRequestMessage>,
    stream: bool,
    /// Omitted from the JSON entirely when no tools are defined.
    #[serde(skip_serializing_if = "is_empty_tools")]
    tools: &'a [ToolDefinition],
}
|
||||
|
||||
/// serde `skip_serializing_if` predicate for `OllamaRequest::tools`
/// (serde passes the field by reference, hence the double reference).
fn is_empty_tools(tools: &&[ToolDefinition]) -> bool {
    tools.is_empty()
}
|
||||
|
||||
/// One outgoing chat message in Ollama's request shape.
#[derive(Serialize)]
struct OllamaRequestMessage {
    role: Role,
    content: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    tool_calls: Option<Vec<OllamaRequestToolCall>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    tool_call_id: Option<String>,
}
|
||||
|
||||
/// Outgoing tool-call record; `kind` serializes as the JSON `type` field.
#[derive(Serialize)]
struct OllamaRequestToolCall {
    function: OllamaRequestFunctionCall,
    #[serde(rename = "type")]
    kind: String,
}
|
||||
|
||||
/// Function name plus arguments as structured JSON (not a string).
#[derive(Serialize)]
struct OllamaRequestFunctionCall {
    name: String,
    arguments: Value,
}
|
||||
|
||||
/// One newline-delimited JSON chunk of a streaming `/api/chat` response.
#[derive(Deserialize)]
struct OllamaStreamResponse {
    message: OllamaStreamMessage,
    /// `true` on the final chunk of the stream.
    done: bool,
}
|
||||
|
||||
/// Message fragment inside a stream chunk; fields default when absent.
#[derive(Deserialize)]
struct OllamaStreamMessage {
    #[serde(default)]
    content: String,
    #[serde(default)]
    tool_calls: Option<Vec<OllamaResponseToolCall>>,
}
|
||||
|
||||
/// Incoming tool-call record from a stream chunk.
#[derive(Deserialize)]
struct OllamaResponseToolCall {
    function: OllamaResponseFunctionCall,
}
|
||||
|
||||
/// Function name plus arguments as structured JSON from the server.
#[derive(Deserialize)]
struct OllamaResponseFunctionCall {
    name: String,
    arguments: Value,
}
|
||||
|
||||
#[async_trait]
impl ModelProvider for OllamaProvider {
    /// Non-streaming chat is intentionally unsupported in this server build;
    /// callers must use `chat_stream` instead.
    async fn chat(
        &self,
        _model: &str,
        _messages: &[Message],
        _tools: &[ToolDefinition],
    ) -> Result<CompletionResponse, String> {
        Err("Non-streaming Ollama chat not implemented for server".to_string())
    }
}
|
||||
69
server/src/llm/types.rs
Normal file
69
server/src/llm/types.rs
Normal file
@@ -0,0 +1,69 @@
|
||||
use async_trait::async_trait;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fmt::Debug;
|
||||
|
||||
/// Chat participant role; serialized lowercase ("system", "user", ...).
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum Role {
    System,
    User,
    Assistant,
    Tool,
}
|
||||
|
||||
/// One entry in a chat transcript, shared by all providers.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Message {
    pub role: Role,
    pub content: String,

    /// Tool invocations requested by an assistant message, if any.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_calls: Option<Vec<ToolCall>>,

    /// For tool-result messages: id of the tool call being answered.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_call_id: Option<String>,
}
|
||||
|
||||
/// A tool invocation emitted by the model.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ToolCall {
    /// Provider-assigned call id (Anthropic sets one; Ollama leaves it None).
    pub id: Option<String>,
    pub function: FunctionCall,
    /// Call category — currently always "function"; serialized as `type`.
    #[serde(rename = "type")]
    pub kind: String,
}
|
||||
|
||||
/// A named function plus its arguments kept as a raw JSON string.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct FunctionCall {
    pub name: String,
    /// JSON-encoded argument object (string form for transport).
    pub arguments: String,
}
|
||||
|
||||
/// Provider-agnostic tool definition; `kind` serializes as `type`.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ToolDefinition {
    #[serde(rename = "type")]
    pub kind: String,
    pub function: ToolFunctionDefinition,
}
|
||||
|
||||
/// The callable part of a tool: name, description, and parameter schema.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ToolFunctionDefinition {
    pub name: String,
    pub description: String,
    /// JSON Schema describing the accepted arguments.
    pub parameters: serde_json::Value,
}
|
||||
|
||||
/// Final result of a chat turn: text and/or requested tool calls.
#[derive(Debug, Serialize, Deserialize)]
pub struct CompletionResponse {
    pub content: Option<String>,
    pub tool_calls: Option<Vec<ToolCall>>,
}
|
||||
|
||||
/// Abstraction over chat back-ends (non-streaming completion).
#[async_trait]
#[allow(dead_code)]
pub trait ModelProvider: Send + Sync {
    /// Run a full, non-streaming chat completion for `model`.
    async fn chat(
        &self,
        model: &str,
        messages: &[Message],
        tools: &[ToolDefinition],
    ) -> Result<CompletionResponse, String>;
}
|
||||
396
server/src/main.rs
Normal file
396
server/src/main.rs
Normal file
@@ -0,0 +1,396 @@
|
||||
mod commands;
|
||||
mod llm;
|
||||
mod state;
|
||||
mod store;
|
||||
|
||||
use crate::commands::{chat, fs};
|
||||
use crate::llm::types::Message;
|
||||
use crate::state::SessionState;
|
||||
use crate::store::JsonFileStore;
|
||||
use futures::{SinkExt, StreamExt};
|
||||
use poem::web::websocket::{Message as WsMessage, WebSocket};
|
||||
use poem::{
|
||||
EndpointExt, Response, Route, Server, get, handler,
|
||||
http::{StatusCode, header},
|
||||
listener::TcpListener,
|
||||
web::{Data, Path},
|
||||
};
|
||||
use poem_openapi::{Object, OpenApi, OpenApiService, param::Query, payload::Json};
|
||||
use rust_embed::RustEmbed;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::mpsc;
|
||||
|
||||
/// Shared application context handed to every HTTP and WebSocket handler.
#[derive(Clone)]
struct AppContext {
    state: Arc<SessionState>,
    store: Arc<JsonFileStore>,
}
|
||||
|
||||
/// Frontend bundle compiled into the binary from `../frontend/dist`.
#[derive(RustEmbed)]
#[folder = "../frontend/dist"]
struct EmbeddedAssets;
|
||||
|
||||
// Result alias used by all OpenAPI handlers.
type OpenApiResult<T> = poem::Result<T>;

/// Wrap a domain error message as an HTTP 400 Bad Request.
fn bad_request(message: String) -> poem::Error {
    poem::Error::from_string(message, StatusCode::BAD_REQUEST)
}
|
||||
|
||||
/// Liveness probe endpoint; always answers "ok".
#[handler]
fn health() -> &'static str {
    "ok"
}
|
||||
|
||||
fn serve_embedded(path: &str) -> Response {
|
||||
let normalized = if path.is_empty() {
|
||||
"index.html"
|
||||
} else {
|
||||
path.trim_start_matches('/')
|
||||
};
|
||||
let is_asset_request = normalized.starts_with("assets/");
|
||||
let asset = if is_asset_request {
|
||||
EmbeddedAssets::get(normalized)
|
||||
} else {
|
||||
EmbeddedAssets::get(normalized).or_else(|| {
|
||||
if normalized == "index.html" {
|
||||
None
|
||||
} else {
|
||||
EmbeddedAssets::get("index.html")
|
||||
}
|
||||
})
|
||||
};
|
||||
|
||||
match asset {
|
||||
Some(content) => {
|
||||
let body = content.data.into_owned();
|
||||
let mime = mime_guess::from_path(normalized)
|
||||
.first_or_octet_stream()
|
||||
.to_string();
|
||||
|
||||
Response::builder()
|
||||
.status(StatusCode::OK)
|
||||
.header(header::CONTENT_TYPE, mime)
|
||||
.body(body)
|
||||
}
|
||||
None => Response::builder()
|
||||
.status(StatusCode::NOT_FOUND)
|
||||
.body("Not Found"),
|
||||
}
|
||||
}
|
||||
|
||||
/// Serve `/assets/*` requests from the embedded bundle (no SPA fallback).
#[handler]
fn embedded_asset(Path(path): Path<String>) -> Response {
    let asset_path = format!("assets/{path}");
    serve_embedded(&asset_path)
}
|
||||
|
||||
/// Catch-all route: serve any remaining path from the embedded bundle.
#[handler]
fn embedded_file(Path(path): Path<String>) -> Response {
    serve_embedded(&path)
}
|
||||
|
||||
/// Serve the SPA entry point at `/`.
#[handler]
fn embedded_index() -> Response {
    serve_embedded("index.html")
}
|
||||
|
||||
/// Request body carrying a project root path.
#[derive(Deserialize, Object)]
struct PathPayload {
    path: String,
}

/// Request body selecting the preferred model identifier.
#[derive(Deserialize, Object)]
struct ModelPayload {
    model: String,
}

/// Request body carrying the Anthropic API key to persist.
#[derive(Deserialize, Object)]
struct ApiKeyPayload {
    api_key: String,
}

/// Request body carrying a file path.
// NOTE(review): identical shape to `PathPayload`; consider consolidating.
#[derive(Deserialize, Object)]
struct FilePathPayload {
    path: String,
}

/// Request body for a file write: target path plus the complete new content.
#[derive(Deserialize, Object)]
struct WriteFilePayload {
    path: String,
    content: String,
}

/// Request body carrying a file-search query.
#[derive(Deserialize, Object)]
struct SearchPayload {
    query: String,
}

/// Request body for running a shell command with its arguments.
#[derive(Deserialize, Object)]
struct ExecShellPayload {
    command: String,
    args: Vec<String>,
}
|
||||
/// OpenAPI endpoint collection; holds the shared application context.
struct Api {
    ctx: Arc<AppContext>,
}
|
||||
|
||||
#[OpenApi]
|
||||
impl Api {
|
||||
#[oai(path = "/project", method = "get")]
|
||||
async fn get_current_project(&self) -> OpenApiResult<Json<Option<String>>> {
|
||||
let ctx = self.ctx.clone();
|
||||
let result =
|
||||
fs::get_current_project(&ctx.state, ctx.store.as_ref()).map_err(bad_request)?;
|
||||
Ok(Json(result))
|
||||
}
|
||||
|
||||
#[oai(path = "/project", method = "post")]
|
||||
async fn open_project(&self, payload: Json<PathPayload>) -> OpenApiResult<Json<String>> {
|
||||
let ctx = self.ctx.clone();
|
||||
let confirmed = fs::open_project(payload.0.path, &ctx.state, ctx.store.as_ref())
|
||||
.await
|
||||
.map_err(bad_request)?;
|
||||
Ok(Json(confirmed))
|
||||
}
|
||||
|
||||
#[oai(path = "/project", method = "delete")]
|
||||
async fn close_project(&self) -> OpenApiResult<Json<bool>> {
|
||||
let ctx = self.ctx.clone();
|
||||
fs::close_project(&ctx.state, ctx.store.as_ref()).map_err(bad_request)?;
|
||||
Ok(Json(true))
|
||||
}
|
||||
|
||||
#[oai(path = "/model", method = "get")]
|
||||
async fn get_model_preference(&self) -> OpenApiResult<Json<Option<String>>> {
|
||||
let ctx = self.ctx.clone();
|
||||
let result = fs::get_model_preference(ctx.store.as_ref()).map_err(bad_request)?;
|
||||
Ok(Json(result))
|
||||
}
|
||||
|
||||
#[oai(path = "/model", method = "post")]
|
||||
async fn set_model_preference(&self, payload: Json<ModelPayload>) -> OpenApiResult<Json<bool>> {
|
||||
let ctx = self.ctx.clone();
|
||||
fs::set_model_preference(payload.0.model, ctx.store.as_ref()).map_err(bad_request)?;
|
||||
Ok(Json(true))
|
||||
}
|
||||
|
||||
#[oai(path = "/ollama/models", method = "get")]
|
||||
async fn get_ollama_models(
|
||||
&self,
|
||||
base_url: Query<Option<String>>,
|
||||
) -> OpenApiResult<Json<Vec<String>>> {
|
||||
let models = chat::get_ollama_models(base_url.0)
|
||||
.await
|
||||
.map_err(bad_request)?;
|
||||
Ok(Json(models))
|
||||
}
|
||||
|
||||
#[oai(path = "/anthropic/key/exists", method = "get")]
|
||||
async fn get_anthropic_api_key_exists(&self) -> OpenApiResult<Json<bool>> {
|
||||
let ctx = self.ctx.clone();
|
||||
let exists = chat::get_anthropic_api_key_exists(ctx.store.as_ref()).map_err(bad_request)?;
|
||||
Ok(Json(exists))
|
||||
}
|
||||
|
||||
#[oai(path = "/anthropic/key", method = "post")]
|
||||
async fn set_anthropic_api_key(
|
||||
&self,
|
||||
payload: Json<ApiKeyPayload>,
|
||||
) -> OpenApiResult<Json<bool>> {
|
||||
let ctx = self.ctx.clone();
|
||||
chat::set_anthropic_api_key(ctx.store.as_ref(), payload.0.api_key).map_err(bad_request)?;
|
||||
Ok(Json(true))
|
||||
}
|
||||
|
||||
#[oai(path = "/fs/read", method = "post")]
|
||||
async fn read_file(&self, payload: Json<FilePathPayload>) -> OpenApiResult<Json<String>> {
|
||||
let ctx = self.ctx.clone();
|
||||
let content = fs::read_file(payload.0.path, &ctx.state)
|
||||
.await
|
||||
.map_err(bad_request)?;
|
||||
Ok(Json(content))
|
||||
}
|
||||
|
||||
#[oai(path = "/fs/write", method = "post")]
|
||||
async fn write_file(&self, payload: Json<WriteFilePayload>) -> OpenApiResult<Json<bool>> {
|
||||
let ctx = self.ctx.clone();
|
||||
fs::write_file(payload.0.path, payload.0.content, &ctx.state)
|
||||
.await
|
||||
.map_err(bad_request)?;
|
||||
Ok(Json(true))
|
||||
}
|
||||
|
||||
#[oai(path = "/fs/list", method = "post")]
|
||||
async fn list_directory(
|
||||
&self,
|
||||
payload: Json<FilePathPayload>,
|
||||
) -> OpenApiResult<Json<Vec<fs::FileEntry>>> {
|
||||
let ctx = self.ctx.clone();
|
||||
let entries = fs::list_directory(payload.0.path, &ctx.state)
|
||||
.await
|
||||
.map_err(bad_request)?;
|
||||
Ok(Json(entries))
|
||||
}
|
||||
|
||||
#[oai(path = "/fs/search", method = "post")]
|
||||
async fn search_files(
|
||||
&self,
|
||||
payload: Json<SearchPayload>,
|
||||
) -> OpenApiResult<Json<Vec<crate::commands::search::SearchResult>>> {
|
||||
let ctx = self.ctx.clone();
|
||||
let results = crate::commands::search::search_files(payload.0.query, &ctx.state)
|
||||
.await
|
||||
.map_err(bad_request)?;
|
||||
Ok(Json(results))
|
||||
}
|
||||
|
||||
#[oai(path = "/shell/exec", method = "post")]
|
||||
async fn exec_shell(
|
||||
&self,
|
||||
payload: Json<ExecShellPayload>,
|
||||
) -> OpenApiResult<Json<crate::commands::shell::CommandOutput>> {
|
||||
let ctx = self.ctx.clone();
|
||||
let output =
|
||||
crate::commands::shell::exec_shell(payload.0.command, payload.0.args, &ctx.state)
|
||||
.await
|
||||
.map_err(bad_request)?;
|
||||
Ok(Json(output))
|
||||
}
|
||||
|
||||
#[oai(path = "/chat/cancel", method = "post")]
|
||||
async fn cancel_chat(&self) -> OpenApiResult<Json<bool>> {
|
||||
let ctx = self.ctx.clone();
|
||||
chat::cancel_chat(&ctx.state).map_err(bad_request)?;
|
||||
Ok(Json(true))
|
||||
}
|
||||
}
|
||||
|
||||
/// Client-to-server WebSocket messages, tagged by a `type` field.
#[derive(Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
enum WsRequest {
    /// Start a chat run with the full history and provider settings.
    Chat {
        messages: Vec<Message>,
        config: chat::ProviderConfig,
    },
    /// Cancel the in-flight chat run.
    Cancel,
}
|
||||
|
||||
/// Server-to-client WebSocket messages, tagged by a `type` field.
#[derive(Serialize)]
#[serde(tag = "type", rename_all = "snake_case")]
enum WsResponse {
    /// A streamed token of assistant text.
    Token { content: String },
    /// A full refreshed message history snapshot.
    Update { messages: Vec<Message> },
    /// A terminal error for the current run or an invalid request.
    Error { message: String },
}
|
||||
|
||||
/// WebSocket endpoint driving chat runs.
///
/// Incoming frames are JSON `WsRequest`s; outgoing frames are JSON
/// `WsResponse`s. Responses are funneled through an mpsc channel so the
/// chat callbacks can emit frames without owning the socket sink.
#[handler]
async fn ws_handler(ws: WebSocket, ctx: Data<&AppContext>) -> impl poem::IntoResponse {
    let ctx = ctx.0.clone();
    ws.on_upgrade(move |socket| async move {
        let (mut sink, mut stream) = socket.split();
        let (tx, mut rx) = mpsc::unbounded_channel::<WsResponse>();

        // Forwarder task: serialize queued responses onto the socket until
        // the channel closes or the client disconnects.
        let forward = tokio::spawn(async move {
            while let Some(msg) = rx.recv().await {
                if let Ok(text) = serde_json::to_string(&msg)
                    && sink.send(WsMessage::Text(text)).await.is_err()
                {
                    break;
                }
            }
        });

        // Read loop. Note: a chat run is awaited inline, so a `Cancel` frame
        // on this same socket is only read between runs; cancellation during
        // a run arrives via the REST `/chat/cancel` endpoint.
        while let Some(Ok(msg)) = stream.next().await {
            if let WsMessage::Text(text) = msg {
                let parsed: Result<WsRequest, _> = serde_json::from_str(&text);
                match parsed {
                    Ok(WsRequest::Chat { messages, config }) => {
                        let tx_updates = tx.clone();
                        let tx_tokens = tx.clone();
                        let ctx_clone = ctx.clone();

                        let result = chat::chat(
                            messages,
                            config,
                            &ctx_clone.state,
                            ctx_clone.store.as_ref(),
                            // History-update callback → full snapshot frame.
                            |history| {
                                let _ = tx_updates.send(WsResponse::Update {
                                    messages: history.to_vec(),
                                });
                            },
                            // Token callback → incremental text frame.
                            |token| {
                                let _ = tx_tokens.send(WsResponse::Token {
                                    content: token.to_string(),
                                });
                            },
                        )
                        .await;

                        if let Err(err) = result {
                            let _ = tx.send(WsResponse::Error { message: err });
                        }
                    }
                    Ok(WsRequest::Cancel) => {
                        let _ = chat::cancel_chat(&ctx.state);
                    }
                    Err(err) => {
                        let _ = tx.send(WsResponse::Error {
                            message: format!("Invalid request: {err}"),
                        });
                    }
                }
            }
        }

        // Closing our sender ends the forwarder loop; wait for it to drain.
        drop(tx);
        let _ = forward.await;
    })
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), std::io::Error> {
|
||||
let app_state = Arc::new(SessionState::default());
|
||||
let store = Arc::new(
|
||||
JsonFileStore::from_path(PathBuf::from("store.json")).map_err(std::io::Error::other)?,
|
||||
);
|
||||
|
||||
let ctx = AppContext {
|
||||
state: app_state,
|
||||
store,
|
||||
};
|
||||
let ctx_arc = Arc::new(ctx.clone());
|
||||
|
||||
let api_service = OpenApiService::new(
|
||||
Api {
|
||||
ctx: ctx_arc.clone(),
|
||||
},
|
||||
"Living Spec API",
|
||||
"1.0",
|
||||
)
|
||||
.server("http://127.0.0.1:3001/api");
|
||||
let docs_service = OpenApiService::new(
|
||||
Api {
|
||||
ctx: ctx_arc.clone(),
|
||||
},
|
||||
"Living Spec API",
|
||||
"1.0",
|
||||
)
|
||||
.server("http://127.0.0.1:3001/api");
|
||||
|
||||
let app = Route::new()
|
||||
.nest("/api", api_service)
|
||||
.nest("/docs", docs_service.swagger_ui())
|
||||
.at("/ws", get(ws_handler))
|
||||
.at("/health", get(health))
|
||||
.at("/assets/*path", get(embedded_asset))
|
||||
.at("/", get(embedded_index))
|
||||
.at("/*path", get(embedded_file))
|
||||
.data(ctx);
|
||||
|
||||
Server::new(TcpListener::bind("127.0.0.1:3001"))
|
||||
.run(app)
|
||||
.await
|
||||
}
|
||||
30
server/src/state.rs
Normal file
30
server/src/state.rs
Normal file
@@ -0,0 +1,30 @@
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Mutex;
|
||||
use tokio::sync::watch;
|
||||
|
||||
/// Mutable per-server session state shared across handlers.
pub struct SessionState {
    /// Root of the currently open project, if one is open.
    pub project_root: Mutex<Option<PathBuf>>,
    /// Broadcast flag used to cancel an in-flight chat (`true` = cancel).
    pub cancel_tx: watch::Sender<bool>,
    pub cancel_rx: watch::Receiver<bool>,
}
|
||||
|
||||
impl Default for SessionState {
    /// Start with no project open and the cancellation flag cleared.
    fn default() -> Self {
        let (cancel_tx, cancel_rx) = watch::channel(false);
        Self {
            project_root: Mutex::new(None),
            cancel_tx,
            cancel_rx,
        }
    }
}
|
||||
|
||||
impl SessionState {
|
||||
pub fn get_project_root(&self) -> Result<PathBuf, String> {
|
||||
let root_guard = self.project_root.lock().map_err(|e| e.to_string())?;
|
||||
let root = root_guard
|
||||
.as_ref()
|
||||
.ok_or_else(|| "No project is currently open.".to_string())?;
|
||||
Ok(root.clone())
|
||||
}
|
||||
}
|
||||
82
server/src/store.rs
Normal file
82
server/src/store.rs
Normal file
@@ -0,0 +1,82 @@
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Mutex;
|
||||
|
||||
/// Key/value persistence operations for settings and secrets.
pub trait StoreOps: Send + Sync {
    /// Fetch a clone of the value for `key`, or `None` if absent.
    fn get(&self, key: &str) -> Option<Value>;
    /// Insert or overwrite a value in memory (not persisted until `save`).
    fn set(&self, key: &str, value: Value);
    /// Remove a key in memory (not persisted until `save`).
    fn delete(&self, key: &str);
    /// Flush the in-memory map to durable storage.
    fn save(&self) -> Result<(), String>;
}
|
||||
|
||||
/// `StoreOps` implementation backed by one pretty-printed JSON file.
pub struct JsonFileStore {
    path: PathBuf,
    /// In-memory view of the file; mutated under the lock, written on `save`.
    data: Mutex<HashMap<String, Value>>,
}
|
||||
|
||||
impl JsonFileStore {
|
||||
pub fn new(path: PathBuf) -> Result<Self, String> {
|
||||
let data = if path.exists() {
|
||||
let content =
|
||||
fs::read_to_string(&path).map_err(|e| format!("Failed to read store: {e}"))?;
|
||||
if content.trim().is_empty() {
|
||||
HashMap::new()
|
||||
} else {
|
||||
serde_json::from_str::<HashMap<String, Value>>(&content)
|
||||
.map_err(|e| format!("Failed to parse store: {e}"))?
|
||||
}
|
||||
} else {
|
||||
HashMap::new()
|
||||
};
|
||||
|
||||
Ok(Self {
|
||||
path,
|
||||
data: Mutex::new(data),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn from_path<P: AsRef<Path>>(path: P) -> Result<Self, String> {
|
||||
Self::new(path.as_ref().to_path_buf())
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn path(&self) -> &Path {
|
||||
&self.path
|
||||
}
|
||||
|
||||
fn ensure_parent_dir(&self) -> Result<(), String> {
|
||||
if let Some(parent) = self.path.parent() {
|
||||
fs::create_dir_all(parent)
|
||||
.map_err(|e| format!("Failed to create store directory: {e}"))?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl StoreOps for JsonFileStore {
|
||||
fn get(&self, key: &str) -> Option<Value> {
|
||||
self.data.lock().ok().and_then(|map| map.get(key).cloned())
|
||||
}
|
||||
|
||||
fn set(&self, key: &str, value: Value) {
|
||||
if let Ok(mut map) = self.data.lock() {
|
||||
map.insert(key.to_string(), value);
|
||||
}
|
||||
}
|
||||
|
||||
fn delete(&self, key: &str) {
|
||||
if let Ok(mut map) = self.data.lock() {
|
||||
map.remove(key);
|
||||
}
|
||||
}
|
||||
|
||||
fn save(&self) -> Result<(), String> {
|
||||
self.ensure_parent_dir()?;
|
||||
let map = self.data.lock().map_err(|e| e.to_string())?;
|
||||
let content =
|
||||
serde_json::to_string_pretty(&*map).map_err(|e| format!("Serialize failed: {e}"))?;
|
||||
fs::write(&self.path, content).map_err(|e| format!("Failed to write store: {e}"))
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user