huskies: merge 855

This commit is contained in:
dave
2026-04-29 21:35:55 +00:00
parent a7b1572693
commit 4d24b5b661
17 changed files with 204 additions and 973 deletions
+43
View File
@@ -45,6 +45,48 @@ impl AgentPool {
agent_name: Option<&str>,
resume_context: Option<&str>,
session_id_to_resume: Option<String>,
) -> Result<AgentInfo, String> {
self.start_agent_inner(
project_root,
story_id,
agent_name,
resume_context,
session_id_to_resume,
None,
)
}
/// Start an agent with an `AppContext` for direct MCP tool dispatch.
///
/// API-based runtimes (Gemini, OpenAI) need the `AppContext` to invoke MCP
/// tools without an HTTP round-trip. CLI-based runtimes (Claude Code) do not.
///
/// Thin wrapper over `start_agent_inner`: forwards every argument verbatim
/// and supplies `Some(app_ctx)` as the final parameter.
///
/// # Errors
///
/// Propagates any `Err(String)` from `start_agent_inner` (which, among other
/// things, loads the project config via `ProjectConfig::load`).
pub fn start_agent_with_ctx(
&self,
project_root: &Path,
story_id: &str,
agent_name: Option<&str>,
resume_context: Option<&str>,
session_id_to_resume: Option<String>,
app_ctx: Arc<crate::http::context::AppContext>,
) -> Result<AgentInfo, String> {
self.start_agent_inner(
project_root,
story_id,
agent_name,
resume_context,
session_id_to_resume,
Some(app_ctx),
)
}
fn start_agent_inner(
&self,
project_root: &Path,
story_id: &str,
agent_name: Option<&str>,
resume_context: Option<&str>,
session_id_to_resume: Option<String>,
app_ctx: Option<Arc<crate::http::context::AppContext>>,
) -> Result<AgentInfo, String> {
let config = ProjectConfig::load(project_root)?;
@@ -352,6 +394,7 @@ impl AgentPool {
self.watcher_tx.clone(),
inactivity_timeout_secs,
prior_events,
app_ctx,
));
// Store the task handle while the agent is still Pending.
+5 -3
View File
@@ -13,6 +13,7 @@ use tokio::sync::broadcast;
use crate::agent_log::AgentLogWriter;
use crate::config::ProjectConfig;
use crate::http::context::AppContext;
use crate::io::watcher::WatcherEvent;
use crate::slog_error;
@@ -51,6 +52,7 @@ pub(super) async fn run_agent_spawn(
// happened while it was idle (story 736). `None` when there were no
// buffered events.
buffered_events_block: Option<String>,
app_ctx: Option<Arc<AppContext>>,
) {
// Re-bind to the legacy `_clone` / `_owned` names so the body below remains
// a verbatim copy of the original closure (story 157).
@@ -240,7 +242,7 @@ pub(super) async fn run_agent_spawn(
prompt: effective_prompt,
cwd: wt_path_str,
inactivity_timeout_secs,
mcp_port: port_for_task,
app_ctx: app_ctx.clone(),
session_id_to_resume: session_id_to_resume_owned.clone(),
fresh_prompt: fresh_prompt.clone(),
};
@@ -258,7 +260,7 @@ pub(super) async fn run_agent_spawn(
prompt: effective_prompt,
cwd: wt_path_str,
inactivity_timeout_secs,
mcp_port: port_for_task,
app_ctx: app_ctx.clone(),
session_id_to_resume: session_id_to_resume_owned.clone(),
fresh_prompt: fresh_prompt.clone(),
};
@@ -276,7 +278,7 @@ pub(super) async fn run_agent_spawn(
prompt: effective_prompt,
cwd: wt_path_str,
inactivity_timeout_secs,
mcp_port: port_for_task,
app_ctx: app_ctx.clone(),
session_id_to_resume: session_id_to_resume_owned,
fresh_prompt,
};
+9 -2
View File
@@ -93,6 +93,13 @@ pub(super) fn parse_usage_metadata(response: &Value) -> Option<TokenUsage> {
#[cfg(test)]
mod tests {
use super::*;
use crate::http::context::AppContext;
use std::sync::Arc;
// Build an `AppContext` rooted in a fresh temporary directory for tests.
//
// NOTE(review): the `TempDir` guard is dropped when this function returns,
// which deletes the directory on disk — the returned context then holds a
// path to a removed directory. Confirm `AppContext::new_test` only needs the
// path value (not a live directory), or detach the dir (e.g.
// `tempfile::TempDir::keep()`).
fn test_app_ctx() -> Arc<AppContext> {
let tmp = tempfile::tempdir().unwrap();
Arc::new(AppContext::new_test(tmp.path().to_path_buf()))
}
#[test]
fn build_system_instruction_uses_args() {
@@ -107,7 +114,7 @@ mod tests {
prompt: "Do the thing".to_string(),
cwd: "/tmp/wt".to_string(),
inactivity_timeout_secs: 300,
mcp_port: 3001,
app_ctx: Some(test_app_ctx()),
session_id_to_resume: None,
fresh_prompt: None,
};
@@ -126,7 +133,7 @@ mod tests {
prompt: "Do the thing".to_string(),
cwd: "/tmp/wt".to_string(),
inactivity_timeout_secs: 300,
mcp_port: 3001,
app_ctx: Some(test_app_ctx()),
session_id_to_resume: None,
fresh_prompt: None,
};
+13 -83
View File
@@ -1,45 +1,25 @@
//! MCP tool fetching, schema conversion, and tool invocation for the Gemini runtime.
use reqwest::Client;
//! MCP tool schema conversion for the Gemini runtime.
//!
//! Tool definitions are loaded directly from `list_tools()` and tool
//! invocations go through `dispatch_tool_call()` — no HTTP round-trip.
use serde_json::{Value, json};
use crate::slog;
use crate::http::mcp::tools_list::list_tools;
use super::api::GeminiFunctionDeclaration;
// ── MCP tool fetching ────────────────────────────────────────────────
// ── MCP tool loading ────────────────────────────────────────────────
/// Fetch MCP tool definitions from huskies' MCP server and convert
/// them to Gemini function declaration format.
pub(super) async fn fetch_and_convert_mcp_tools(
client: &Client,
mcp_base: &str,
) -> Result<Vec<GeminiFunctionDeclaration>, String> {
let request = json!({
"jsonrpc": "2.0",
"id": 1,
"method": "tools/list",
"params": {}
});
let response = client
.post(mcp_base)
.json(&request)
.send()
.await
.map_err(|e| format!("Failed to fetch MCP tools: {e}"))?;
let body: Value = response
.json()
.await
.map_err(|e| format!("Failed to parse MCP tools response: {e}"))?;
let tools = body["result"]["tools"]
.as_array()
.ok_or_else(|| "No tools array in MCP response".to_string())?;
/// Load MCP tool definitions directly and convert to Gemini function
/// declaration format.
pub(super) fn convert_mcp_tools_to_gemini() -> Vec<GeminiFunctionDeclaration> {
let tools = list_tools();
let mut declarations = Vec::new();
for tool in tools {
for tool in &tools {
let name = tool["name"].as_str().unwrap_or("").to_string();
let description = tool["description"].as_str().unwrap_or("").to_string();
@@ -47,9 +27,6 @@ pub(super) async fn fetch_and_convert_mcp_tools(
continue;
}
// Convert MCP inputSchema (JSON Schema) to Gemini parameters
// (OpenAPI-subset schema). They are structurally compatible for
// simple object schemas.
let parameters = convert_mcp_schema_to_gemini(tool.get("inputSchema"));
declarations.push(GeminiFunctionDeclaration {
@@ -63,54 +40,7 @@ pub(super) async fn fetch_and_convert_mcp_tools(
"[gemini] Loaded {} MCP tools as function declarations",
declarations.len()
);
Ok(declarations)
}
/// Call an MCP tool via huskies' MCP server.
pub(super) async fn call_mcp_tool(
client: &Client,
mcp_base: &str,
tool_name: &str,
args: &Value,
) -> Result<String, String> {
let request = json!({
"jsonrpc": "2.0",
"id": 1,
"method": "tools/call",
"params": {
"name": tool_name,
"arguments": args
}
});
let response = client
.post(mcp_base)
.json(&request)
.send()
.await
.map_err(|e| format!("MCP tool call failed: {e}"))?;
let body: Value = response
.json()
.await
.map_err(|e| format!("Failed to parse MCP tool response: {e}"))?;
if let Some(error) = body.get("error") {
let msg = error["message"].as_str().unwrap_or("Unknown MCP error");
return Err(format!("MCP tool '{tool_name}' error: {msg}"));
}
// MCP tools/call returns { result: { content: [{ type: "text", text: "..." }] } }
let content = &body["result"]["content"];
if let Some(arr) = content.as_array() {
let texts: Vec<&str> = arr.iter().filter_map(|c| c["text"].as_str()).collect();
if !texts.is_empty() {
return Ok(texts.join("\n"));
}
}
// Fall back to serializing the entire result.
Ok(body["result"].to_string())
declarations
}
// ── Schema conversion ────────────────────────────────────────────────
+16 -7
View File
@@ -7,6 +7,7 @@ use serde_json::json;
use tokio::sync::broadcast;
use crate::agent_log::AgentLogWriter;
use crate::http::mcp::dispatch::dispatch_tool_call;
use crate::slog;
use super::super::{AgentEvent, TokenUsage};
@@ -16,7 +17,7 @@ mod api;
mod mcp;
use api::{build_generate_content_request, build_system_instruction, parse_usage_metadata};
use mcp::{call_mcp_tool, fetch_and_convert_mcp_tools};
use mcp::convert_mcp_tools_to_gemini;
// ── Internal types ───────────────────────────────────────────────────
@@ -79,14 +80,16 @@ impl AgentRuntime for GeminiRuntime {
.unwrap_or_else(|| "gemini-2.5-pro".to_string())
};
let mcp_port = ctx.mcp_port;
let mcp_base = format!("http://localhost:{mcp_port}/mcp");
let app_ctx = ctx
.app_ctx
.clone()
.ok_or_else(|| "Gemini runtime requires app_ctx to be set".to_string())?;
let client = Client::new();
let cancelled = Arc::clone(&self.cancelled);
// Step 1: Fetch MCP tool definitions and convert to Gemini format.
let gemini_tools = fetch_and_convert_mcp_tools(&client, &mcp_base).await?;
// Step 1: Load MCP tool definitions and convert to Gemini format.
let gemini_tools = convert_mcp_tools_to_gemini();
// Step 2: Build the initial conversation contents.
let system_instruction = build_system_instruction(&ctx);
@@ -276,7 +279,7 @@ impl AgentRuntime for GeminiRuntime {
text: format!("\n[Tool call: {}]\n", fc.name),
});
let tool_result = call_mcp_tool(&client, &mcp_base, &fc.name, &fc.args).await;
let tool_result = dispatch_tool_call(&fc.name, fc.args.clone(), &app_ctx).await;
let response_value = match &tool_result {
Ok(result) => {
@@ -348,6 +351,12 @@ impl AgentRuntime for GeminiRuntime {
#[cfg(test)]
mod tests {
use super::*;
use crate::http::context::AppContext;
// Build an `AppContext` rooted in a fresh temporary directory for tests.
//
// NOTE(review): the `TempDir` guard is dropped when this function returns,
// which deletes the directory on disk — the returned context then holds a
// path to a removed directory. Confirm `AppContext::new_test` only needs the
// path value (not a live directory), or detach the dir (e.g.
// `tempfile::TempDir::keep()`).
fn test_app_ctx() -> Arc<AppContext> {
let tmp = tempfile::tempdir().unwrap();
Arc::new(AppContext::new_test(tmp.path().to_path_buf()))
}
#[test]
fn gemini_runtime_stop_sets_cancelled() {
@@ -368,7 +377,7 @@ mod tests {
prompt: "test".to_string(),
cwd: "/tmp".to_string(),
inactivity_timeout_secs: 300,
mcp_port: 3001,
app_ctx: Some(test_app_ctx()),
session_id_to_resume: None,
fresh_prompt: None,
};
+12 -5
View File
@@ -11,6 +11,7 @@ use std::sync::{Arc, Mutex};
use tokio::sync::broadcast;
use crate::agent_log::AgentLogWriter;
use crate::http::context::AppContext;
use super::{AgentEvent, TokenUsage};
@@ -23,9 +24,10 @@ pub struct RuntimeContext {
pub prompt: String,
pub cwd: String,
pub inactivity_timeout_secs: u64,
/// Port of the huskies MCP server, used by API-based runtimes (Gemini, OpenAI)
/// to call back for tool execution.
pub mcp_port: u16,
/// Shared application context, used by API-based runtimes (Gemini, OpenAI)
/// to invoke MCP tool dispatch directly without an HTTP round-trip.
/// `None` in tests or when the pool is created before `AppContext` exists.
pub app_ctx: Option<Arc<AppContext>>,
/// When set, resume a previous Claude Code session instead of starting fresh.
///
/// The CLI is invoked as `claude --resume <session_id> [-p <prompt>]` rather
@@ -95,6 +97,12 @@ pub trait AgentRuntime: Send + Sync {
#[cfg(test)]
mod tests {
use super::*;
use crate::http::context::AppContext;
// Build an `AppContext` rooted in a fresh temporary directory for tests.
//
// NOTE(review): the `TempDir` guard is dropped when this function returns,
// which deletes the directory on disk — the returned context then holds a
// path to a removed directory. Confirm `AppContext::new_test` only needs the
// path value (not a live directory), or detach the dir (e.g.
// `tempfile::TempDir::keep()`).
fn test_app_ctx() -> Arc<AppContext> {
let tmp = tempfile::tempdir().unwrap();
Arc::new(AppContext::new_test(tmp.path().to_path_buf()))
}
#[test]
fn runtime_context_fields() {
@@ -106,7 +114,7 @@ mod tests {
prompt: "Do the thing".to_string(),
cwd: "/tmp/wt".to_string(),
inactivity_timeout_secs: 300,
mcp_port: 3001,
app_ctx: Some(test_app_ctx()),
session_id_to_resume: None,
fresh_prompt: None,
};
@@ -117,7 +125,6 @@ mod tests {
assert_eq!(ctx.prompt, "Do the thing");
assert_eq!(ctx.cwd, "/tmp/wt");
assert_eq!(ctx.inactivity_timeout_secs, 300);
assert_eq!(ctx.mcp_port, 3001);
}
#[test]
+23 -85
View File
@@ -7,6 +7,8 @@ use serde_json::{Value, json};
use tokio::sync::broadcast;
use crate::agent_log::AgentLogWriter;
use crate::http::mcp::dispatch::dispatch_tool_call;
use crate::http::mcp::tools_list::list_tools;
use crate::slog;
use super::super::{AgentEvent, TokenUsage};
@@ -65,14 +67,16 @@ impl AgentRuntime for OpenAiRuntime {
.unwrap_or_else(|| "gpt-4o".to_string())
};
let mcp_port = ctx.mcp_port;
let mcp_base = format!("http://localhost:{mcp_port}/mcp");
let app_ctx = ctx
.app_ctx
.clone()
.ok_or_else(|| "OpenAI runtime requires app_ctx to be set".to_string())?;
let client = Client::new();
let cancelled = Arc::clone(&self.cancelled);
// Step 1: Fetch MCP tool definitions and convert to OpenAI format.
let openai_tools = fetch_and_convert_mcp_tools(&client, &mcp_base).await?;
let openai_tools = convert_mcp_tools_to_openai();
// Step 2: Build the initial conversation messages.
let system_text = build_system_text(&ctx);
@@ -248,7 +252,7 @@ impl AgentRuntime for OpenAiRuntime {
text: format!("\n[Tool call: {tool_name}]\n"),
});
let tool_result = call_mcp_tool(&client, &mcp_base, tool_name, &args).await;
let tool_result = dispatch_tool_call(tool_name, args.clone(), &app_ctx).await;
let result_content = match &tool_result {
Ok(result) => {
@@ -313,38 +317,13 @@ fn build_system_text(ctx: &RuntimeContext) -> String {
})
}
/// Fetch MCP tool definitions from huskies' MCP server and convert
/// them to OpenAI function-calling format.
async fn fetch_and_convert_mcp_tools(
client: &Client,
mcp_base: &str,
) -> Result<Vec<Value>, String> {
let request = json!({
"jsonrpc": "2.0",
"id": 1,
"method": "tools/list",
"params": {}
});
let response = client
.post(mcp_base)
.json(&request)
.send()
.await
.map_err(|e| format!("Failed to fetch MCP tools: {e}"))?;
let body: Value = response
.json()
.await
.map_err(|e| format!("Failed to parse MCP tools response: {e}"))?;
let tools = body["result"]["tools"]
.as_array()
.ok_or_else(|| "No tools array in MCP response".to_string())?;
/// Load MCP tool definitions directly and convert to OpenAI function-calling format.
fn convert_mcp_tools_to_openai() -> Vec<Value> {
let tools = list_tools();
let mut openai_tools = Vec::new();
for tool in tools {
for tool in &tools {
let name = tool["name"].as_str().unwrap_or("").to_string();
let description = tool["description"].as_str().unwrap_or("").to_string();
@@ -370,7 +349,7 @@ async fn fetch_and_convert_mcp_tools(
"[openai] Loaded {} MCP tools as function definitions",
openai_tools.len()
);
Ok(openai_tools)
openai_tools
}
/// Convert an MCP inputSchema (JSON Schema) to OpenAI-compatible
@@ -435,53 +414,6 @@ fn clean_schema_properties(properties: &Value) -> Value {
Value::Object(cleaned)
}
/// Call an MCP tool via huskies' MCP server.
///
/// Sends a JSON-RPC 2.0 `tools/call` request to `mcp_base` over HTTP and
/// returns the tool output as a single string (the joined `text` entries of
/// the result's `content` array, or the serialized `result` as a fallback).
///
/// # Errors
///
/// Returns `Err` when the HTTP request fails, the response body is not valid
/// JSON, or the JSON-RPC reply carries an `error` object (its `message` is
/// included in the returned string).
async fn call_mcp_tool(
client: &Client,
mcp_base: &str,
tool_name: &str,
args: &Value,
) -> Result<String, String> {
// JSON-RPC 2.0 envelope; a constant id is used for every request.
let request = json!({
"jsonrpc": "2.0",
"id": 1,
"method": "tools/call",
"params": {
"name": tool_name,
"arguments": args
}
});
let response = client
.post(mcp_base)
.json(&request)
.send()
.await
.map_err(|e| format!("MCP tool call failed: {e}"))?;
let body: Value = response
.json()
.await
.map_err(|e| format!("Failed to parse MCP tool response: {e}"))?;
// A JSON-RPC-level error takes precedence over any partial result.
if let Some(error) = body.get("error") {
let msg = error["message"].as_str().unwrap_or("Unknown MCP error");
return Err(format!("MCP tool '{tool_name}' error: {msg}"));
}
// MCP tools/call returns { result: { content: [{ type: "text", text: "..." }] } }
let content = &body["result"]["content"];
if let Some(arr) = content.as_array() {
// Entries without a string `text` field are silently skipped.
let texts: Vec<&str> = arr.iter().filter_map(|c| c["text"].as_str()).collect();
if !texts.is_empty() {
return Ok(texts.join("\n"));
}
}
// Fall back to serializing the entire result.
Ok(body["result"].to_string())
}
/// Parse token usage from an OpenAI API response.
fn parse_usage(response: &Value) -> Option<TokenUsage> {
let usage = response.get("usage")?;
@@ -506,6 +438,12 @@ fn parse_usage(response: &Value) -> Option<TokenUsage> {
#[cfg(test)]
mod tests {
use super::*;
use crate::http::context::AppContext;
// Build an `AppContext` rooted in a fresh temporary directory for tests.
//
// NOTE(review): the `TempDir` guard is dropped when this function returns,
// which deletes the directory on disk — the returned context then holds a
// path to a removed directory. Confirm `AppContext::new_test` only needs the
// path value (not a live directory), or detach the dir (e.g.
// `tempfile::TempDir::keep()`).
fn test_app_ctx() -> Arc<AppContext> {
let tmp = tempfile::tempdir().unwrap();
Arc::new(AppContext::new_test(tmp.path().to_path_buf()))
}
#[test]
fn convert_mcp_schema_simple_object() {
@@ -616,7 +554,7 @@ mod tests {
prompt: "Do the thing".to_string(),
cwd: "/tmp/wt".to_string(),
inactivity_timeout_secs: 300,
mcp_port: 3001,
app_ctx: Some(test_app_ctx()),
session_id_to_resume: None,
fresh_prompt: None,
};
@@ -634,7 +572,7 @@ mod tests {
prompt: "Do the thing".to_string(),
cwd: "/tmp/wt".to_string(),
inactivity_timeout_secs: 300,
mcp_port: 3001,
app_ctx: Some(test_app_ctx()),
session_id_to_resume: None,
fresh_prompt: None,
};
@@ -685,7 +623,7 @@ mod tests {
prompt: "test".to_string(),
cwd: "/tmp".to_string(),
inactivity_timeout_secs: 300,
mcp_port: 3001,
app_ctx: Some(test_app_ctx()),
session_id_to_resume: None,
fresh_prompt: None,
};
@@ -702,7 +640,7 @@ mod tests {
prompt: "test".to_string(),
cwd: "/tmp".to_string(),
inactivity_timeout_secs: 300,
mcp_port: 3001,
app_ctx: Some(test_app_ctx()),
session_id_to_resume: None,
fresh_prompt: None,
};