feat: agent brain (ollama) and chat ui
This commit is contained in:
243
src-tauri/src/commands/chat.rs
Normal file
243
src-tauri/src/commands/chat.rs
Normal file
@@ -0,0 +1,243 @@
|
||||
use crate::commands::{fs, search, shell};
|
||||
use crate::llm::ollama::OllamaProvider;
|
||||
use crate::llm::types::{
|
||||
Message, ModelProvider, Role, ToolCall, ToolDefinition, ToolFunctionDefinition,
|
||||
};
|
||||
use crate::state::SessionState;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::json;
|
||||
use tauri::State;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct ProviderConfig {
|
||||
pub provider: String, // "ollama"
|
||||
pub model: String,
|
||||
pub base_url: Option<String>,
|
||||
}
|
||||
|
||||
/// Hard cap on model round-trips per `chat` invocation, so a model that keeps
/// requesting tools can never loop forever.
const MAX_TURNS: usize = 10;
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn chat(
|
||||
messages: Vec<Message>,
|
||||
config: ProviderConfig,
|
||||
state: State<'_, SessionState>,
|
||||
) -> Result<Vec<Message>, String> {
|
||||
// 1. Setup Provider
|
||||
let provider: Box<dyn ModelProvider> = match config.provider.as_str() {
|
||||
"ollama" => Box::new(OllamaProvider::new(
|
||||
config
|
||||
.base_url
|
||||
.unwrap_or_else(|| "http://localhost:11434".to_string()),
|
||||
)),
|
||||
_ => return Err(format!("Unsupported provider: {}", config.provider)),
|
||||
};
|
||||
|
||||
// 2. Define Tools
|
||||
let tools = get_tool_definitions();
|
||||
|
||||
// 3. Agent Loop
|
||||
let mut current_history = messages.clone();
|
||||
let mut new_messages: Vec<Message> = Vec::new();
|
||||
let mut turn_count = 0;
|
||||
|
||||
loop {
|
||||
if turn_count >= MAX_TURNS {
|
||||
return Err("Max conversation turns reached.".to_string());
|
||||
}
|
||||
turn_count += 1;
|
||||
|
||||
// Call LLM
|
||||
let response = provider
|
||||
.chat(&config.model, ¤t_history, &tools)
|
||||
.map_err(|e| format!("LLM Error: {}", e))?;
|
||||
|
||||
// Process Response
|
||||
if let Some(tool_calls) = response.tool_calls {
|
||||
// The Assistant wants to run tools
|
||||
let assistant_msg = Message {
|
||||
role: Role::Assistant,
|
||||
content: response.content.unwrap_or_default(),
|
||||
tool_calls: Some(tool_calls.clone()),
|
||||
tool_call_id: None,
|
||||
};
|
||||
|
||||
current_history.push(assistant_msg.clone());
|
||||
new_messages.push(assistant_msg);
|
||||
|
||||
// Execute Tools
|
||||
for call in tool_calls {
|
||||
let output = execute_tool(&call, &state).await;
|
||||
|
||||
let tool_msg = Message {
|
||||
role: Role::Tool,
|
||||
content: output,
|
||||
tool_calls: None,
|
||||
// For Ollama/Simple flow, we just append.
|
||||
// For OpenAI strict, this needs to match call.id.
|
||||
tool_call_id: call.id,
|
||||
};
|
||||
|
||||
current_history.push(tool_msg.clone());
|
||||
new_messages.push(tool_msg);
|
||||
}
|
||||
} else {
|
||||
// Final text response
|
||||
let assistant_msg = Message {
|
||||
role: Role::Assistant,
|
||||
content: response.content.unwrap_or_default(),
|
||||
tool_calls: None,
|
||||
tool_call_id: None,
|
||||
};
|
||||
|
||||
// We don't push to current_history needed for next loop, because we are done.
|
||||
new_messages.push(assistant_msg);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(new_messages)
|
||||
}
|
||||
|
||||
async fn execute_tool(call: &ToolCall, state: &State<'_, SessionState>) -> String {
|
||||
let name = call.function.name.as_str();
|
||||
// Parse arguments. They come as a JSON string from the LLM abstraction.
|
||||
let args: serde_json::Value = match serde_json::from_str(&call.function.arguments) {
|
||||
Ok(v) => v,
|
||||
Err(e) => return format!("Error parsing arguments: {}", e),
|
||||
};
|
||||
|
||||
match name {
|
||||
"read_file" => {
|
||||
let path = args["path"].as_str().unwrap_or("").to_string();
|
||||
match fs::read_file(path, state.clone()).await {
|
||||
Ok(content) => content,
|
||||
Err(e) => format!("Error: {}", e),
|
||||
}
|
||||
}
|
||||
"write_file" => {
|
||||
let path = args["path"].as_str().unwrap_or("").to_string();
|
||||
let content = args["content"].as_str().unwrap_or("").to_string();
|
||||
match fs::write_file(path, content, state.clone()).await {
|
||||
Ok(_) => "File written successfully.".to_string(),
|
||||
Err(e) => format!("Error: {}", e),
|
||||
}
|
||||
}
|
||||
"list_directory" => {
|
||||
let path = args["path"].as_str().unwrap_or("").to_string();
|
||||
match fs::list_directory(path, state.clone()).await {
|
||||
Ok(entries) => serde_json::to_string(&entries).unwrap_or_default(),
|
||||
Err(e) => format!("Error: {}", e),
|
||||
}
|
||||
}
|
||||
"search_files" => {
|
||||
let query = args["query"].as_str().unwrap_or("").to_string();
|
||||
match search::search_files(query, state.clone()).await {
|
||||
Ok(results) => serde_json::to_string(&results).unwrap_or_default(),
|
||||
Err(e) => format!("Error: {}", e),
|
||||
}
|
||||
}
|
||||
"exec_shell" => {
|
||||
let command = args["command"].as_str().unwrap_or("").to_string();
|
||||
let args_vec: Vec<String> = args["args"]
|
||||
.as_array()
|
||||
.map(|arr| {
|
||||
arr.iter()
|
||||
.map(|v| v.as_str().unwrap_or("").to_string())
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
match shell::exec_shell(command, args_vec, state.clone()).await {
|
||||
Ok(output) => serde_json::to_string(&output).unwrap_or_default(),
|
||||
Err(e) => format!("Error: {}", e),
|
||||
}
|
||||
}
|
||||
_ => format!("Unknown tool: {}", name),
|
||||
}
|
||||
}
|
||||
|
||||
fn get_tool_definitions() -> Vec<ToolDefinition> {
|
||||
vec![
|
||||
ToolDefinition {
|
||||
kind: "function".to_string(),
|
||||
function: ToolFunctionDefinition {
|
||||
name: "read_file".to_string(),
|
||||
description: "Reads the content of a file in the project.".to_string(),
|
||||
parameters: json!({
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"path": { "type": "string", "description": "Relative path to the file" }
|
||||
},
|
||||
"required": ["path"]
|
||||
}),
|
||||
},
|
||||
},
|
||||
ToolDefinition {
|
||||
kind: "function".to_string(),
|
||||
function: ToolFunctionDefinition {
|
||||
name: "write_file".to_string(),
|
||||
description: "Writes content to a file. Overwrites if exists.".to_string(),
|
||||
parameters: json!({
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"path": { "type": "string", "description": "Relative path to the file" },
|
||||
"content": { "type": "string", "description": "The full content to write" }
|
||||
},
|
||||
"required": ["path", "content"]
|
||||
}),
|
||||
},
|
||||
},
|
||||
ToolDefinition {
|
||||
kind: "function".to_string(),
|
||||
function: ToolFunctionDefinition {
|
||||
name: "list_directory".to_string(),
|
||||
description: "Lists files and directories at a path.".to_string(),
|
||||
parameters: json!({
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"path": { "type": "string", "description": "Relative path to list (use '.' for root)" }
|
||||
},
|
||||
"required": ["path"]
|
||||
}),
|
||||
},
|
||||
},
|
||||
ToolDefinition {
|
||||
kind: "function".to_string(),
|
||||
function: ToolFunctionDefinition {
|
||||
name: "search_files".to_string(),
|
||||
description: "Searches for text content across all files in the project."
|
||||
.to_string(),
|
||||
parameters: json!({
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": { "type": "string", "description": "The string to search for" }
|
||||
},
|
||||
"required": ["query"]
|
||||
}),
|
||||
},
|
||||
},
|
||||
ToolDefinition {
|
||||
kind: "function".to_string(),
|
||||
function: ToolFunctionDefinition {
|
||||
name: "exec_shell".to_string(),
|
||||
description: "Executes a shell command in the project root.".to_string(),
|
||||
parameters: json!({
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"command": {
|
||||
"type": "string",
|
||||
"description": "The command to run (e.g., 'git', 'cargo', 'ls')"
|
||||
},
|
||||
"args": {
|
||||
"type": "array",
|
||||
"items": { "type": "string" },
|
||||
"description": "Arguments for the command"
|
||||
}
|
||||
},
|
||||
"required": ["command", "args"]
|
||||
}),
|
||||
},
|
||||
},
|
||||
]
|
||||
}
|
||||
@@ -1,3 +1,4 @@
|
||||
pub mod chat;
|
||||
pub mod fs;
|
||||
pub mod search;
|
||||
pub mod shell;
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
mod commands;
|
||||
mod llm;
|
||||
mod state;
|
||||
|
||||
use state::SessionState;
|
||||
@@ -15,7 +16,8 @@ pub fn run() {
|
||||
commands::fs::write_file,
|
||||
commands::fs::list_directory,
|
||||
commands::search::search_files,
|
||||
commands::shell::exec_shell
|
||||
commands::shell::exec_shell,
|
||||
commands::chat::chat
|
||||
])
|
||||
.run(tauri::generate_context!())
|
||||
.expect("error while running tauri application");
|
||||
|
||||
2
src-tauri/src/llm/mod.rs
Normal file
2
src-tauri/src/llm/mod.rs
Normal file
@@ -0,0 +1,2 @@
|
||||
pub mod ollama;
|
||||
pub mod types;
|
||||
170
src-tauri/src/llm/ollama.rs
Normal file
170
src-tauri/src/llm/ollama.rs
Normal file
@@ -0,0 +1,170 @@
|
||||
use crate::llm::types::{
|
||||
CompletionResponse, FunctionCall, Message, ModelProvider, Role, ToolCall, ToolDefinition,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
|
||||
/// LLM provider backed by an Ollama server speaking the `/api/chat` protocol.
#[derive(Debug, Clone)]
pub struct OllamaProvider {
    /// Server root, e.g. "http://localhost:11434"; a trailing slash is
    /// tolerated (trimmed when the request URL is built).
    base_url: String,
}

impl OllamaProvider {
    /// Creates a provider targeting the given Ollama base URL.
    pub fn new(base_url: String) -> Self {
        Self { base_url }
    }
}
|
||||
|
||||
// --- Request Types ---

/// Body POSTed to Ollama's `/api/chat` endpoint. Borrows the model name and
/// tool list from the caller to avoid copies.
#[derive(Serialize)]
struct OllamaRequest<'a> {
    model: &'a str,
    messages: Vec<OllamaRequestMessage>,
    stream: bool,
    // Omit the "tools" key entirely when no tools are offered.
    #[serde(skip_serializing_if = "is_empty_tools")]
    tools: &'a [ToolDefinition],
}

/// `skip_serializing_if` predicate: serde hands us a reference to the field
/// (itself a slice reference), hence the double reference.
fn is_empty_tools(tools: &&[ToolDefinition]) -> bool {
    tools.is_empty()
}

/// One message in the Ollama wire format. Mirrors the domain `Message` but
/// carries tool-call arguments as a JSON object rather than a string.
#[derive(Serialize)]
struct OllamaRequestMessage {
    role: Role,
    content: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    tool_calls: Option<Vec<OllamaRequestToolCall>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    tool_call_id: Option<String>,
}

/// A tool call echoed back to Ollama as part of conversation history.
#[derive(Serialize)]
struct OllamaRequestToolCall {
    function: OllamaRequestFunctionCall,
    // Serialized as "type" on the wire ("type" is a Rust keyword).
    #[serde(rename = "type")]
    kind: String,
}

/// Function name plus arguments as a structured JSON value.
#[derive(Serialize)]
struct OllamaRequestFunctionCall {
    name: String,
    arguments: Value,
}
|
||||
|
||||
// --- Response Types ---

/// Top-level envelope of a non-streaming `/api/chat` response; only the
/// fields we consume are declared (serde ignores the rest).
#[derive(Deserialize)]
struct OllamaResponse {
    message: OllamaResponseMessage,
}

/// The assistant message returned by Ollama.
#[derive(Deserialize)]
struct OllamaResponseMessage {
    content: String,
    // Present only when the model decided to call tools.
    tool_calls: Option<Vec<OllamaResponseToolCall>>,
}

/// One tool call emitted by the model.
#[derive(Deserialize)]
struct OllamaResponseToolCall {
    function: OllamaResponseFunctionCall,
}

/// Function name plus structured arguments.
#[derive(Deserialize)]
struct OllamaResponseFunctionCall {
    name: String,
    arguments: Value, // Ollama returns Object, we convert to String for internal storage
}
|
||||
|
||||
impl ModelProvider for OllamaProvider {
    /// Performs one synchronous chat completion against Ollama's `/api/chat`.
    ///
    /// Converts domain `Message`s to the Ollama wire format (tool-call
    /// arguments: JSON string -> JSON object), POSTs the request with
    /// `stream: false`, and converts the reply back (arguments: JSON object
    /// -> string). All failures are reported as `Err(String)`.
    ///
    /// NOTE(review): this uses `reqwest::blocking`, but the only caller is an
    /// async tauri command — a blocking client constructed inside an async
    /// runtime can block or panic; confirm and consider `spawn_blocking` or
    /// the async reqwest client.
    fn chat(
        &self,
        model: &str,
        messages: &[Message],
        tools: &[ToolDefinition],
    ) -> Result<CompletionResponse, String> {
        let client = reqwest::blocking::Client::new();
        // trim_end_matches tolerates a user-supplied trailing slash.
        let url = format!("{}/api/chat", self.base_url.trim_end_matches('/'));

        // Convert domain Messages to Ollama Messages (handling String -> Object args mismatch)
        let ollama_messages: Vec<OllamaRequestMessage> = messages
            .iter()
            .map(|m| {
                let tool_calls = m.tool_calls.as_ref().map(|calls| {
                    calls
                        .iter()
                        .map(|tc| {
                            // Try to parse string args as JSON, fallback to string value if fails
                            let args_val: Value = serde_json::from_str(&tc.function.arguments)
                                .unwrap_or(Value::String(tc.function.arguments.clone()));

                            OllamaRequestToolCall {
                                kind: tc.kind.clone(),
                                function: OllamaRequestFunctionCall {
                                    name: tc.function.name.clone(),
                                    arguments: args_val,
                                },
                            }
                        })
                        .collect()
                });

                OllamaRequestMessage {
                    role: m.role.clone(),
                    content: m.content.clone(),
                    tool_calls,
                    tool_call_id: m.tool_call_id.clone(),
                }
            })
            .collect();

        let request_body = OllamaRequest {
            model,
            messages: ollama_messages,
            stream: false,
            tools,
        };

        let res = client
            .post(&url)
            .json(&request_body)
            .send()
            .map_err(|e| format!("Request failed: {}", e))?;

        // Surface non-2xx replies with the body text for debuggability.
        if !res.status().is_success() {
            let status = res.status();
            let text = res.text().unwrap_or_default();
            return Err(format!("Ollama API error {}: {}", status, text));
        }

        let response_body: OllamaResponse = res
            .json()
            .map_err(|e| format!("Failed to parse response: {}", e))?;

        // Convert Response back to Domain types.
        // Empty content is normalized to None (typical for pure tool-call turns).
        let content = if response_body.message.content.is_empty() {
            None
        } else {
            Some(response_body.message.content)
        };

        let tool_calls = response_body.message.tool_calls.map(|calls| {
            calls
                .into_iter()
                .map(|tc| ToolCall {
                    id: None, // Ollama doesn't typically send IDs
                    kind: "function".to_string(),
                    function: FunctionCall {
                        name: tc.function.name,
                        arguments: tc.function.arguments.to_string(), // Convert Object -> String
                    },
                })
                .collect()
        });

        Ok(CompletionResponse {
            content,
            tool_calls,
        })
    }
}
|
||||
72
src-tauri/src/llm/types.rs
Normal file
72
src-tauri/src/llm/types.rs
Normal file
@@ -0,0 +1,72 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fmt::Debug;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum Role {
|
||||
System,
|
||||
User,
|
||||
Assistant,
|
||||
Tool,
|
||||
}
|
||||
|
||||
/// A single chat message in the provider-agnostic domain format, used both
/// for request history and for returned transcripts.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Message {
    pub role: Role,
    pub content: String,

    // For assistant messages that request tool execution
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_calls: Option<Vec<ToolCall>>,

    // For tool output messages, we need to link back to the call ID
    // Note: OpenAI uses 'tool_call_id', Ollama sometimes just relies on sequence.
    // We will include it for compatibility.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_call_id: Option<String>,
}
|
||||
|
||||
/// A tool invocation requested by the assistant.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ToolCall {
    // ID is required by OpenAI, optional/generated for Ollama depending on version
    pub id: Option<String>,
    pub function: FunctionCall,
    // Serialized as "type" on the wire ("type" is a Rust keyword).
    #[serde(rename = "type")]
    pub kind: String, // usually "function"
}

/// The concrete function the model asked to run.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct FunctionCall {
    pub name: String,
    pub arguments: String, // JSON string of arguments
}
|
||||
|
||||
/// Declares a callable tool to the model; wire shape mirrors the
/// OpenAI-style {"type": "function", "function": {...}} envelope.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ToolDefinition {
    #[serde(rename = "type")]
    pub kind: String, // "function"
    pub function: ToolFunctionDefinition,
}

/// Name, human-readable description, and parameter schema of one tool.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ToolFunctionDefinition {
    pub name: String,
    pub description: String,
    pub parameters: serde_json::Value, // JSON Schema object
}
|
||||
|
||||
/// Provider-agnostic result of one model call: free text, tool-call
/// requests, or both.
#[derive(Debug, Serialize, Deserialize)]
pub struct CompletionResponse {
    pub content: Option<String>,
    pub tool_calls: Option<Vec<ToolCall>>,
}

/// The abstraction for different LLM providers (Ollama, Anthropic, etc.)
pub trait ModelProvider: Send + Sync {
    /// Performs one synchronous chat completion against `model`, offering
    /// `tools`; returns text and/or tool calls, or an error string.
    fn chat(
        &self,
        model: &str,
        messages: &[Message],
        tools: &[ToolDefinition],
    ) -> Result<CompletionResponse, String>;
}
|
||||
Reference in New Issue
Block a user